Autopsy  4.16.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
CommandLineIngestManager.java
Go to the documentation of this file.
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2019-2020 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.commandlineingest;
20 
21 import java.beans.PropertyChangeEvent;
22 import java.beans.PropertyChangeListener;
23 import java.io.File;
24 import java.io.FilenameFilter;
25 import java.nio.file.Path;
26 import java.nio.file.Paths;
27 import java.util.ArrayList;
28 import java.util.Arrays;
29 import java.util.List;
30 import java.util.UUID;
31 import java.util.Collection;
32 import java.util.EnumSet;
33 import java.util.Iterator;
34 import java.util.Map;
35 import java.util.Set;
36 import java.util.logging.Level;
37 import org.netbeans.spi.sendopts.OptionProcessor;
38 import org.openide.LifecycleManager;
39 import org.openide.util.Lookup;
68 import org.sleuthkit.datamodel.Content;
69 import org.sleuthkit.datamodel.TskCoreException;
70 
76 public class CommandLineIngestManager extends CommandLineManager{
77 
78  private static final Logger LOGGER = Logger.getLogger(CommandLineIngestManager.class.getName());
80  private Case caseForJob = null;
82  private static final String LOG_DIR_NAME = "Command Output";
83 
85  }
86 
87  public void start() {
88  new Thread(new JobProcessingTask()).start();
89  }
90 
    /**
     * Shuts the command line ingest manager down: closes the current case, if
     * one is open, and then exits the application through the NetBeans
     * lifecycle manager.
     */
    public void stop() {
        try {
            // close current case if there is one open
            // NOTE(review): the statement that actually closes the case is not
            // visible in this view of the file (line elided by the listing
            // generator); the catch below implies a call here that can throw
            // CaseActionException.
        } catch (CaseActionException ex) {
            LOGGER.log(Level.WARNING, "Unable to close the case while shutting down command line ingest manager", ex); //NON-NLS
        }

        // shut down Autopsy
        LifecycleManager.getDefault().exit();
    }
102 
103  private final class JobProcessingTask implements Runnable {
104 
105  private final Object ingestLock;
106 
        /**
         * Constructs the job processing task. Initializes the lock object used
         * to block on asynchronous data source processing / ingest completion
         * and switches the application into headless (no desktop GUI) mode.
         */
        private JobProcessingTask() {
            ingestLock = new Object();
            try {
                // NOTE(review): the call that sets the "running with GUI"
                // runtime property to false, and the catch clause header for
                // its failure, are elided in this view of the file; only the
                // success and failure log statements are visible.
                LOGGER.log(Level.INFO, "Set running with desktop GUI runtime property to false");
                LOGGER.log(Level.SEVERE, "Failed to set running with desktop GUI runtime property to false", ex);
            }
        }
116 
        /**
         * Executes the command line commands collected by the
         * CommandLineOptionProcessor, in the order in which they were
         * specified: CREATE_CASE, ADD_DATA_SOURCE, RUN_INGEST,
         * LIST_ALL_DATA_SOURCES and/or GENERATE_REPORTS. Processing stops at
         * the first failed command; the case is closed and Autopsy is shut
         * down when the task finishes, whether or not it succeeded.
         */
        @Override
        public void run() {
            LOGGER.log(Level.INFO, "Job processing task started");

            try {
                // read command line inputs
                LOGGER.log(Level.INFO, "Autopsy is running from command line"); //NON-NLS
                List<CommandLineCommand> commands = null;

                // first look up all OptionProcessors and get input data from CommandLineOptionProcessor
                Collection<? extends OptionProcessor> optionProcessors = Lookup.getDefault().lookupAll(OptionProcessor.class);
                Iterator<? extends OptionProcessor> optionsIterator = optionProcessors.iterator();
                while (optionsIterator.hasNext()) {
                    // find CommandLineOptionProcessor
                    OptionProcessor processor = optionsIterator.next();
                    if (processor instanceof CommandLineOptionProcessor) {
                        // check if we are running from command line
                        commands = ((CommandLineOptionProcessor) processor).getCommands();
                    }
                }

                if (commands == null || commands.isEmpty()) {
                    LOGGER.log(Level.SEVERE, "No command line commands specified");
                    System.out.println("No command line commands specified");
                    return;
                }

                try {
                    // Commands are already stored in order in which they should be executed
                    for (CommandLineCommand command : commands) {
                        CommandLineCommand.CommandType type = command.getType();
                        switch (type) {
                            case CREATE_CASE:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Create Case' command");
                                    System.out.println("Processing 'Create Case' command");
                                    Map<String, String> inputs = command.getInputs();
                                    String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                    String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                    // NOTE(review): the declaration/default assignment of
                                    // caseType (presumably CaseType.SINGLE_USER_CASE) is
                                    // elided in this view of the file.
                                    String caseTypeString = inputs.get(CommandLineCommand.InputType.CASE_TYPE.name());
                                    if (caseTypeString != null && caseTypeString.equalsIgnoreCase(CommandLineOptionProcessor.CASETYPE_MULTI)) {
                                        caseType = CaseType.MULTI_USER_CASE;
                                    }
                                    openCase(baseCaseName, rootOutputDirectory, caseType);

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
                                } catch (CaseActionException ex) {
                                    String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
                                    LOGGER.log(Level.SEVERE, "Error creating or opening case " + baseCaseName, ex);
                                    System.out.println("Error creating or opening case " + baseCaseName);
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            case ADD_DATA_SOURCE:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Add Data Source' command");
                                    System.out.println("Processing 'Add Data Source' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by CREATE_CASE command
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }

                                    String dataSourcePath = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    dataSource = new AutoIngestDataSource("", Paths.get(dataSourcePath));
                                    runDataSourceProcessor(caseForJob, dataSource);

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.saveAddDataSourceOutput(caseForJob, dataSource, outputDirPath);
                                    // NOTE(review): the catch clause header for this try
                                    // is elided in this view of the file; the statements
                                    // below are its error-handling body.
                                    String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error adding data source " + dataSourcePath, ex);
                                    System.out.println("Error adding data source " + dataSourcePath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            case RUN_INGEST:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Run Ingest' command");
                                    System.out.println("Processing 'Run Ingest' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by CREATE_CASE or ADD_DATA_SOURCE commands
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }

                                    // populate the AutoIngestDataSource structure, if that hasn't been done by ADD_DATA_SOURCE command
                                    if (dataSource == null) {

                                        String dataSourceId = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_ID.name());
                                        Long dataSourceObjId = Long.valueOf(dataSourceId);

                                        // get Content object for the data source
                                        Content content = null;
                                        try {
                                            content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(dataSourceObjId);
                                        } catch (TskCoreException ex) {
                                            LOGGER.log(Level.SEVERE, "Exception while trying to find data source with object ID " + dataSourceId, ex);
                                            System.out.println("Exception while trying to find data source with object ID " + dataSourceId);
                                            // Do not process any other commands
                                            return;
                                        }

                                        if (content == null) {
                                            LOGGER.log(Level.SEVERE, "Unable to find data source with object ID {0}", dataSourceId);
                                            System.out.println("Unable to find data source with object ID " + dataSourceId);
                                            // Do not process any other commands
                                            return;
                                        }

                                        // populate the AutoIngestDataSource structure
                                        dataSource = new AutoIngestDataSource("", Paths.get(content.getName()));
                                        List<Content> contentList = Arrays.asList(new Content[]{content});
                                        List<String> errorList = new ArrayList<>();
                                        dataSource.setDataSourceProcessorOutput(NO_ERRORS, errorList, contentList);
                                    }

                                    // run ingest
                                    String ingestProfile = inputs.get(CommandLineCommand.InputType.INGEST_PROFILE_NAME.name());
                                    analyze(dataSource, ingestProfile);
                                } catch (InterruptedException | CaseActionException ex) {
                                    String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error running ingest on data source " + dataSourcePath, ex);
                                    System.out.println("Error running ingest on data source " + dataSourcePath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;

                            case LIST_ALL_DATA_SOURCES:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'List All Data Sources' command");
                                    System.out.println("Processing 'List All Data Sources' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by previous command
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.listAllDataSources(caseForJob, outputDirPath);
                                } catch (CaseActionException ex) {
                                    String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
                                    System.out.println("Error opening case in case directory: " + caseDirPath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;

                            case GENERATE_REPORTS:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Generate Reports' command");
                                    System.out.println("Processing 'Generate Reports' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't been already opened by previous command
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }
                                    // generate reports
                                    String reportName = inputs.get(CommandLineCommand.InputType.REPORT_PROFILE_NAME.name());
                                    if (reportName == null) {
                                        // NOTE(review): the assignment of a default report
                                        // profile name is elided in this view of the file.
                                    }

                                    // generate reports
                                    // NOTE(review): the declaration of progressIndicator
                                    // is elided in this view of the file.
                                    ReportGenerator generator = new ReportGenerator(reportName, progressIndicator);
                                    generator.generateReports();
                                } catch (CaseActionException ex) {
                                    String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
                                    System.out.println("Error opening case in case directory: " + caseDirPath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            default:
                                break;
                        }
                    }
                } catch (Throwable ex) {
                    /*
                     * Unexpected runtime exceptions firewall. This task is
                     * designed to be able to be run in an executor service
                     * thread pool without calling get() on the task's
                     * Future<Void>, so this ensures that such errors get
                     * logged.
                     */
                    LOGGER.log(Level.SEVERE, "Unexpected error", ex);
                    System.out.println("Unexpected error. Exiting...");

                } finally {
                    try {
                        // NOTE(review): the call that closes the current case
                        // is elided in this view of the file; the catch below
                        // implies a call here that can throw CaseActionException.
                    } catch (CaseActionException ex) {
                        LOGGER.log(Level.WARNING, "Exception while closing case", ex);
                        System.out.println("Exception while closing case");
                    }
                }

            } finally {
                LOGGER.log(Level.INFO, "Job processing task finished");
                System.out.println("Job processing task finished");

                // shut down Autopsy
                stop();
            }
        }
342 
354  private void openCase(String baseCaseName, String rootOutputDirectory, CaseType caseType) throws CaseActionException {
355 
356  LOGGER.log(Level.INFO, "Opening case {0} in directory {1}", new Object[]{baseCaseName, rootOutputDirectory});
357  Path caseDirectoryPath = findCaseDirectory(Paths.get(rootOutputDirectory), baseCaseName);
358  if (null != caseDirectoryPath) {
359  // found an existing case directory for same case name. the input case name must be unique. Exit.
360  LOGGER.log(Level.SEVERE, "Case {0} already exists. Case name must be unique. Exiting", baseCaseName);
361  throw new CaseActionException("Case " + baseCaseName + " already exists. Case name must be unique. Exiting");
362  } else {
363  caseDirectoryPath = createCaseFolderPath(Paths.get(rootOutputDirectory), baseCaseName);
364 
365  // Create the case directory
366  Case.createCaseDirectory(caseDirectoryPath.toString(), Case.CaseType.SINGLE_USER_CASE);
367 
368  CaseDetails caseDetails = new CaseDetails(baseCaseName);
369  Case.createAsCurrentCase(caseType, caseDirectoryPath.toString(), caseDetails);
370  }
371 
372  caseForJob = Case.getCurrentCase();
373  LOGGER.log(Level.INFO, "Opened case {0}", caseForJob.getName());
374  }
375 
        // NOTE(review): this is the body of runDataSourceProcessor(...); the
        // method signature and its javadoc are elided in this view of the
        // file. It adds the given data source to the case by trying each
        // applicable data source processor in decreasing order of confidence.

            LOGGER.log(Level.INFO, "Adding data source {0} ", dataSource.getPath().toString());

            // Get an ordered list of data source processors to try
            List<AutoIngestDataSourceProcessor> validDataSourceProcessors;
            try {
                validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath());
                // NOTE(review): the catch clause header for this try is elided
                // in this view of the file; the statements below are its body.
                LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath());
                // rethrow the exception.
                throw ex;
            }

            // did we find a data source processor that can process the data source
            if (validDataSourceProcessors.isEmpty()) {
                // This should never happen. We should add all unsupported data sources as logical files.
                LOGGER.log(Level.SEVERE, "Unsupported data source {0}", dataSource.getPath()); // NON-NLS
                return;
            }

            // NOTE(review): the declaration of the progress monitor passed to
            // process() below is elided in this view of the file.
            synchronized (ingestLock) {
                // Try each DSP in decreasing order of confidence
                for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) {
                    UUID taskId = UUID.randomUUID();
                    caseForJob.notifyAddingDataSource(taskId);
                    DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock);
                    // NOTE(review): notifyAddingDataSource is invoked twice
                    // with the same task id — confirm whether intentional.
                    caseForJob.notifyAddingDataSource(taskId);
                    LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()});
                    selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack);
                    // Block until the callback signals completion via the ingest lock.
                    ingestLock.wait();

                    // at this point we got the content object(s) from the current DSP.
                    // check whether the data source was processed successfully
                    if ((dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS)
                            || dataSource.getContent().isEmpty()) {
                        // move onto the the next DSP that can process this data source
                        logDataSourceProcessorResult(dataSource);
                        continue;
                    }

                    logDataSourceProcessorResult(dataSource);
                    return;
                }
                // If we get to this point, none of the processors were successful
                LOGGER.log(Level.SEVERE, "All data source processors failed to process {0}", dataSource.getPath());
                // Throw an exception. It will get caught & handled upstream and will result in AIM auto-pause.
                throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Failed to process " + dataSource.getPath() + " with all data source processors");
            }
        }
440 
        // NOTE(review): this is the body of logDataSourceProcessorResult(...);
        // the method signature, its javadoc, and the retrieval of resultCode
        // from the data source are elided in this view of the file. It logs
        // the outcome of a data source processor run at a severity matching
        // the result code.

            if (null != resultCode) {
                switch (resultCode) {
                    case NO_ERRORS:
                        LOGGER.log(Level.INFO, "Added data source to case");
                        // Success with no content is still an error worth flagging.
                        if (dataSource.getContent().isEmpty()) {
                            LOGGER.log(Level.SEVERE, "Data source failed to produce content");
                        }
                        break;

                    case NONCRITICAL_ERRORS:
                        for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                            LOGGER.log(Level.WARNING, "Non-critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
                        }
                        LOGGER.log(Level.INFO, "Added data source to case");
                        if (dataSource.getContent().isEmpty()) {
                            LOGGER.log(Level.SEVERE, "Data source failed to produce content");
                        }
                        break;

                    case CRITICAL_ERRORS:
                        for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                            LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
                        }
                        LOGGER.log(Level.SEVERE, "Failed to add data source to case");
                        break;
                }
            } else {
                // Callback never supplied a result code.
                LOGGER.log(Level.WARNING, "No result code for data source processor for {0}", dataSource.getPath());
            }
        }
480 
        /**
         * Runs the ingest modules on the data source's content, optionally
         * using a named ingest profile and its associated file filter. Blocks
         * until the ingest job completes or is cancelled, then logs a
         * per-data-source summary from the job snapshot.
         *
         * @param dataSource        data source to analyze; its content must
         *                          already have been populated
         * @param ingestProfileName ingest profile name; an empty string means
         *                          use the baseline/default ingest settings
         * @throws AnalysisStartupException if the ingest job cannot be started
         * @throws InterruptedException     if the wait for ingest completion
         *                                  is interrupted
         */
        private void analyze(AutoIngestDataSource dataSource, String ingestProfileName) throws AnalysisStartupException, InterruptedException {

            LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", dataSource.getPath());

            // configure ingest profile and file filter
            IngestProfiles.IngestProfile selectedProfile = null;
            FilesSet selectedFileSet = null;
            if (!ingestProfileName.isEmpty()) {
                selectedProfile = getSelectedProfile(ingestProfileName);
                if (selectedProfile == null) {
                    // unable to find the user specified profile
                    LOGGER.log(Level.SEVERE, "Unable to find ingest profile: {0}. Ingest cancelled!", ingestProfileName);
                    System.out.println("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                    return;
                }

                // get FileSet filter associated with this profile
                selectedFileSet = getSelectedFilter(selectedProfile.getFileIngestFilter());
                if (selectedFileSet == null) {
                    // unable to find the user specified profile
                    LOGGER.log(Level.SEVERE, "Unable to find file filter {0} for ingest profile: {1}. Ingest cancelled!", new Object[]{selectedProfile.getFileIngestFilter(), ingestProfileName});
                    System.out.println("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                    return;
                }
            }

            IngestJobEventListener ingestJobEventListener = new IngestJobEventListener();
            // NOTE(review): the registration of the listener with the ingest
            // manager is elided in this view of the file; the finally block
            // below removes it, implying it is added here.
            try {
                synchronized (ingestLock) {
                    IngestJobSettings ingestJobSettings;
                    if (selectedProfile == null || selectedFileSet == null) {
                        // use baseline configuration
                        // NOTE(review): the construction of the default
                        // IngestJobSettings is elided in this view of the file.
                    } else {
                        // load the custom ingest
                        ingestJobSettings = new IngestJobSettings(selectedProfile.toString());
                        ingestJobSettings.setFileFilter(selectedFileSet);
                    }

                    List<String> settingsWarnings = ingestJobSettings.getWarnings();
                    if (settingsWarnings.isEmpty()) {
                        IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
                        IngestJob ingestJob = ingestJobStartResult.getJob();
                        if (null != ingestJob) {
                            /*
                             * Block until notified by the ingest job event
                             * listener or until interrupted because auto ingest
                             * is shutting down.
                             */
                            ingestLock.wait();
                            LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", dataSource.getPath());
                            IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
                            for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) {
                                if (!snapshot.isCancelled()) {
                                    // Job finished; warn about any individually cancelled modules.
                                    List<String> cancelledModules = snapshot.getCancelledDataSourceIngestModules();
                                    if (!cancelledModules.isEmpty()) {
                                        LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", dataSource.getPath()));
                                        for (String module : snapshot.getCancelledDataSourceIngestModules()) {
                                            LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, dataSource.getPath()));
                                        }
                                    }
                                    LOGGER.log(Level.INFO, "Analysis of data source completed");
                                } else {
                                    // Whole job was cancelled; only user cancellation is tolerated.
                                    LOGGER.log(Level.WARNING, "Analysis of data source cancelled");
                                    IngestJob.CancellationReason cancellationReason = snapshot.getCancellationReason();
                                    if (IngestJob.CancellationReason.NOT_CANCELLED != cancellationReason && IngestJob.CancellationReason.USER_CANCELLED != cancellationReason) {
                                        throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), dataSource.getPath()));
                                    }
                                }
                            }
                        } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) {
                            // One or more ingest modules failed to start.
                            for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) {
                                LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getPath()), error.getThrowable());
                            }
                            LOGGER.log(Level.SEVERE, "Failed to analyze data source due to ingest job startup error");
                            throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", dataSource.getPath()));
                        } else {
                            // The ingest manager itself failed to start the job.
                            LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", dataSource.getPath()), ingestJobStartResult.getStartupException());
                            throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException());
                        }
                    } else {
                        // Refuse to run with broken ingest job settings.
                        for (String warning : settingsWarnings) {
                            LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{dataSource.getPath(), warning});
                        }
                        LOGGER.log(Level.SEVERE, "Failed to analyze data source due to settings errors");
                        throw new AnalysisStartupException("Error(s) in ingest job settings");
                    }
                }
            } finally {
                // Always detach the listener, even on failure, to avoid leaks.
                IngestManager.getInstance().removeIngestJobEventListener(ingestJobEventListener);
            }
        }
591 
        /**
         * Looks up an ingest profile by name (case-insensitive).
         *
         * @param ingestProfileName profile name to find
         * @return the matching profile, or null if no profile has that name
         */
        private IngestProfiles.IngestProfile getSelectedProfile(String ingestProfileName) {

            IngestProfiles.IngestProfile selectedProfile = null;
            // lookup the profile by name
            // NOTE(review): the loop header iterating over the available
            // ingest profiles (which declares "profile") is elided in this
            // view of the file.
            if (profile.toString().equalsIgnoreCase(ingestProfileName)) {
                // found the profile
                selectedProfile = profile;
                break;
            }
            }
            return selectedProfile;
        }
613 
        /**
         * Looks up a file ingest filter (FilesSet) by name.
         *
         * @param filterName name of the filter to find
         * @return the matching FilesSet, or null if it is not found or the
         *         filters cannot be loaded
         */
        private FilesSet getSelectedFilter(String filterName) {
            try {
                // NOTE(review): the tail of this expression (the getter call
                // on FilesSetsManager) and the loop header that declares
                // "fSet" are elided in this view of the file.
                Map<String, FilesSet> fileIngestFilters = FilesSetsManager.getInstance()
                fileIngestFilters.put(fSet.getName(), fSet);
            }
            return fileIngestFilters.get(filterName);
            // NOTE(review): the catch clause header (which declares "ex") is
            // elided in this view of the file.
            LOGGER.log(Level.SEVERE, "Failed to get file ingest filter: " + filterName, ex); //NON-NLS
            return null;
            }
        }
635 
645  private Path createCaseFolderPath(Path caseFoldersPath, String caseName) {
646  String folderName = caseName + "_" + TimeStampUtils.createTimeStamp();
647  return Paths.get(caseFoldersPath.toString(), folderName);
648  }
649 
660  private Path findCaseDirectory(Path folderToSearch, String caseName) {
661  File searchFolder = new File(folderToSearch.toString());
662  if (!searchFolder.isDirectory()) {
663  return null;
664  }
665  Path caseFolderPath = null;
666  String[] candidateFolders = searchFolder.list(new CaseFolderFilter(caseName));
667  long mostRecentModified = 0;
668  for (String candidateFolder : candidateFolders) {
669  File file = new File(candidateFolder);
670  if (file.lastModified() >= mostRecentModified) {
671  mostRecentModified = file.lastModified();
672  caseFolderPath = Paths.get(folderToSearch.toString(), file.getPath());
673  }
674  }
675  return caseFolderPath;
676  }
677 
685  private String getOutputDirPath(Case caseForJob) {
686  return caseForJob.getCaseDirectory() + File.separator + LOG_DIR_NAME;
687  }
688 
698  private class IngestJobEventListener implements PropertyChangeListener {
699 
707  @Override
708  public void propertyChange(PropertyChangeEvent event) {
709  if (AutopsyEvent.SourceType.LOCAL == ((AutopsyEvent) event).getSourceType()) {
710  String eventType = event.getPropertyName();
711  if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
712  synchronized (ingestLock) {
713  ingestLock.notify();
714  }
715  }
716  }
717  }
718  };
719 
        // NOTE(review): the class declaration for this progress monitor is
        // elided in this view of the file; the visible members are no-op
        // progress callbacks, i.e. a "do nothing" progress monitor used when
        // running without a GUI.

            /** No-op: progress mode changes are ignored in command line mode. */
            @Override
            public void setIndeterminate(final boolean indeterminate) {
            }

            /** No-op: numeric progress updates are ignored in command line mode. */
            @Override
            public void setProgress(final int progress) {
            }

            /** No-op: progress text updates are ignored in command line mode. */
            @Override
            public void setProgressText(final String text) {
            }
        }
754 
760  private final class AnalysisStartupException extends Exception {
761 
762  private static final long serialVersionUID = 1L;
763 
764  private AnalysisStartupException(String message) {
765  super(message);
766  }
767 
768  private AnalysisStartupException(String message, Throwable cause) {
769  super(message, cause);
770  }
771  }
772  }
773 
774  private static class CaseFolderFilter implements FilenameFilter {
775 
776  private final String caseName;
777  private final static String CASE_METADATA_EXT = CaseMetadata.getFileExtension();
778 
779  CaseFolderFilter(String caseName) {
780  this.caseName = caseName;
781  }
782 
783  @Override
784  public boolean accept(File folder, String fileName) {
785  File file = new File(folder, fileName);
786  if (fileName.length() > TimeStampUtils.getTimeStampLength() && file.isDirectory()) {
787  if (TimeStampUtils.endsWithTimeStamp(fileName)) {
788  if (null != caseName) {
789  String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
790  if (fileNamePrefix.equals(caseName)) {
791  return hasCaseMetadataFile(file);
792  }
793  } else {
794  return hasCaseMetadataFile(file);
795  }
796  }
797  }
798  return false;
799  }
800 
809  private static boolean hasCaseMetadataFile(File folder) {
810  for (File file : folder.listFiles()) {
811  if (file.getName().toLowerCase().endsWith(CASE_METADATA_EXT) && file.isFile()) {
812  return true;
813  }
814  }
815  return false;
816  }
817  }
818 
819 }
static List< AutoIngestDataSourceProcessor > getOrderedListOfDataSourceProcessors(Path dataSourcePath)
void openCase(String baseCaseName, String rootOutputDirectory, CaseType caseType)
static synchronized IngestManager getInstance()
static void createCaseDirectory(String caseDirPath, CaseType caseType)
Definition: Case.java:950
IngestJobStartResult beginIngestJob(Collection< Content > dataSources, IngestJobSettings settings)
synchronized DataSourceProcessorResult getResultDataSourceProcessorResultCode()
static final Set< IngestManager.IngestJobEvent > INGEST_JOB_EVENTS_OF_INTEREST
static boolean endsWithTimeStamp(String inputString)
void removeIngestJobEventListener(final PropertyChangeListener listener)
void addIngestJobEventListener(final PropertyChangeListener listener)
static synchronized List< IngestProfile > getIngestProfiles()
void analyze(AutoIngestDataSource dataSource, String ingestProfileName)
static synchronized void setRunningWithGUI(boolean runningWithGUI)
synchronized void setDataSourceProcessorOutput(DataSourceProcessorResult result, List< String > errorMessages, List< Content > content)
synchronized static Logger getLogger(String name)
Definition: Logger.java:124
void notifyAddingDataSource(UUID eventId)
Definition: Case.java:1489
static void createAsCurrentCase(String caseDir, String caseDisplayName, String caseNumber, String examiner, CaseType caseType)
Definition: Case.java:581
IngestManager.IngestManagerException getStartupException()

Copyright © 2012-2020 Basis Technology. Generated on: Tue Sep 22 2020
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.