Autopsy  4.13.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
CommandLineIngestManager.java
Go to the documentation of this file.
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2019-2019 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.commandlineingest;
20 
21 import java.beans.PropertyChangeEvent;
22 import java.beans.PropertyChangeListener;
23 import java.io.File;
24 import java.io.FilenameFilter;
25 import java.nio.file.Path;
26 import java.nio.file.Paths;
27 import java.util.ArrayList;
28 import java.util.Arrays;
29 import java.util.List;
30 import java.util.UUID;
31 import java.util.Collection;
32 import java.util.EnumSet;
33 import java.util.Iterator;
34 import java.util.Map;
35 import java.util.Set;
36 import java.util.logging.Level;
37 import org.netbeans.spi.sendopts.OptionProcessor;
38 import org.openide.LifecycleManager;
39 import org.openide.util.Lookup;
68 import org.sleuthkit.datamodel.Content;
69 import org.sleuthkit.datamodel.TskCoreException;
70 
77 
78  private static final Logger LOGGER = Logger.getLogger(CommandLineIngestManager.class.getName());
80  private Case caseForJob = null;
82  private static final String LOG_DIR_NAME = "Command Output";
83 
85  }
86 
87  public void start() {
88  new Thread(new JobProcessingTask()).start();
89  }
90 
/**
 * Shuts down command-line ingest: attempts to close the current case (if
 * any) and then exits the application via the NetBeans lifecycle manager.
 */
public void stop() {
    try {
        // close current case if there is one open
        // NOTE(review): the statement that actually closes the case (presumably
        // Case.closeCurrentCase()) is missing from this view — confirm against
        // the full source; as shown, this try block is empty.
    } catch (CaseActionException ex) {
        LOGGER.log(Level.WARNING, "Unable to close the case while shutting down command line ingest manager", ex); //NON-NLS
    }

    // shut down Autopsy
    LifecycleManager.getDefault().exit();
}
102 
103  private final class JobProcessingTask implements Runnable {
104 
105  private final Object ingestLock;
106 
/**
 * Creates the job processing task and configures the runtime for headless
 * (no desktop GUI) operation.
 */
private JobProcessingTask() {
    // Lock used to block this task while data source processing / ingest
    // runs asynchronously; callbacks notify() on it when done.
    ingestLock = new Object();
    try {
        // NOTE(review): the call that disables the desktop GUI runtime property
        // (likely RuntimeProperties.setRunningWithGUI(false)) and the catch
        // clause header are missing from this view — confirm against the full
        // source.
        LOGGER.log(Level.INFO, "Set running with desktop GUI runtime property to false");
        LOGGER.log(Level.SEVERE, "Failed to set running with desktop GUI runtime property to false", ex);
    }
}
116 
/**
 * Executes the command-line commands collected by CommandLineOptionProcessor
 * in the order they were stored: CREATE_CASE, ADD_DATA_SOURCE, RUN_INGEST,
 * LIST_ALL_DATA_SOURCES, GENERATE_REPORTS. Processing stops at the first
 * failed command; the application is always shut down when this task ends.
 */
@Override
public void run() {
    LOGGER.log(Level.INFO, "Job processing task started");

    try {
        // read command line inputs
        LOGGER.log(Level.INFO, "Autopsy is running from command line"); //NON-NLS
        List<CommandLineCommand> commands = null;

        // first look up all OptionProcessors and get input data from CommandLineOptionProcessor
        Collection<? extends OptionProcessor> optionProcessors = Lookup.getDefault().lookupAll(OptionProcessor.class);
        Iterator<? extends OptionProcessor> optionsIterator = optionProcessors.iterator();
        while (optionsIterator.hasNext()) {
            // find CommandLineOptionProcessor
            OptionProcessor processor = optionsIterator.next();
            if (processor instanceof CommandLineOptionProcessor) {
                // check if we are running from command line
                commands = ((CommandLineOptionProcessor) processor).getCommands();
            }
        }

        // Nothing to do if no commands were supplied on the command line.
        if (commands == null || commands.isEmpty()) {
            LOGGER.log(Level.SEVERE, "No command line commands specified");
            System.err.println("No command line commands specified");
            return;
        }

        try {
            // Commands are already stored in order in which they should be executed
            for (CommandLineCommand command : commands) {
                CommandLineCommand.CommandType type = command.getType();
                switch (type) {
                    case CREATE_CASE:
                        try {
                            LOGGER.log(Level.INFO, "Processing 'Create Case' command");
                            System.out.println("Processing 'Create Case' command");
                            Map<String, String> inputs = command.getInputs();
                            String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                            String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                            // Creates a new single-user case and makes it current.
                            openCase(baseCaseName, rootOutputDirectory);

                            String outputDirPath = getOutputDirPath(caseForJob);
                            OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
                        } catch (CaseActionException ex) {
                            String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
                            LOGGER.log(Level.SEVERE, "Error creating or opening case " + baseCaseName, ex);
                            System.err.println("Error creating or opening case " + baseCaseName);
                            // Do not process any other commands
                            return;
                        }
                        break;
                    case ADD_DATA_SOURCE:
                        try {
                            LOGGER.log(Level.INFO, "Processing 'Add Data Source' command");
                            System.out.println("Processing 'Add Data Source' command");
                            Map<String, String> inputs = command.getInputs();

                            // open the case, if it hasn't been already opened by CREATE_CASE command
                            if (caseForJob == null) {
                                String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                openCase(caseDirPath);
                            }

                            String dataSourcePath = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                            dataSource = new AutoIngestDataSource("", Paths.get(dataSourcePath));
                            runDataSourceProcessor(caseForJob, dataSource);

                            String outputDirPath = getOutputDirPath(caseForJob);
                            OutputGenerator.saveAddDataSourceOutput(caseForJob, dataSource, outputDirPath);
                            // NOTE(review): the catch clause header for this error path is
                            // missing from this view — the lines below are clearly a catch
                            // body; confirm the caught exception types against the full
                            // source.
                            String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                            LOGGER.log(Level.SEVERE, "Error adding data source " + dataSourcePath, ex);
                            System.err.println("Error adding data source " + dataSourcePath);
                            // Do not process any other commands
                            return;
                        }
                        break;
                    case RUN_INGEST:
                        try {
                            LOGGER.log(Level.INFO, "Processing 'Run Ingest' command");
                            System.out.println("Processing 'Run Ingest' command");
                            Map<String, String> inputs = command.getInputs();

                            // open the case, if it hasn't been already opened by CREATE_CASE or ADD_DATA_SOURCE commands
                            if (caseForJob == null) {
                                String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                openCase(caseDirPath);
                            }

                            // populate the AutoIngestDataSource structure, if that hasn't been done by ADD_DATA_SOURCE command
                            if (dataSource == null) {

                                String dataSourceId = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_ID.name());
                                Long dataSourceObjId = Long.valueOf(dataSourceId);

                                // get Content object for the data source
                                Content content = null;
                                try {
                                    content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(dataSourceObjId);
                                } catch (TskCoreException ex) {
                                    LOGGER.log(Level.SEVERE, "Exception while trying to find data source with object ID " + dataSourceId, ex);
                                    System.err.println("Exception while trying to find data source with object ID " + dataSourceId);
                                    // Do not process any other commands
                                    return;
                                }

                                if (content == null) {
                                    LOGGER.log(Level.SEVERE, "Unable to find data source with object ID {0}", dataSourceId);
                                    System.out.println("Unable to find data source with object ID " + dataSourceId);
                                    // Do not process any other commands
                                    return;
                                }

                                // populate the AutoIngestDataSource structure
                                dataSource = new AutoIngestDataSource("", Paths.get(content.getName()));
                                List<Content> contentList = Arrays.asList(new Content[]{content});
                                List<String> errorList = new ArrayList<>();
                                dataSource.setDataSourceProcessorOutput(NO_ERRORS, errorList, contentList);
                            }

                            // run ingest
                            String ingestProfile = inputs.get(CommandLineCommand.InputType.INGEST_PROFILE_NAME.name());
                            analyze(dataSource, ingestProfile);
                        } catch (InterruptedException | CaseActionException ex) {
                            String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                            LOGGER.log(Level.SEVERE, "Error running ingest on data source " + dataSourcePath, ex);
                            System.err.println("Error running ingest on data source " + dataSourcePath);
                            // Do not process any other commands
                            return;
                        }
                        break;

                    case LIST_ALL_DATA_SOURCES:
                        try {
                            LOGGER.log(Level.INFO, "Processing 'List All Data Sources' command");
                            System.out.println("Processing 'List All Data Sources' command");
                            Map<String, String> inputs = command.getInputs();

                            // open the case, if it hasn't been already opened by previous command
                            if (caseForJob == null) {
                                String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                openCase(caseDirPath);
                            }

                            String outputDirPath = getOutputDirPath(caseForJob);
                            OutputGenerator.listAllDataSources(caseForJob, outputDirPath);
                        } catch (CaseActionException ex) {
                            String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                            LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
                            System.err.println("Error opening case in case directory: " + caseDirPath);
                            // Do not process any other commands
                            return;
                        }
                        break;

                    case GENERATE_REPORTS:
                        try {
                            LOGGER.log(Level.INFO, "Processing 'Generate Reports' command");
                            System.out.println("Processing 'Generate Reports' command");
                            Map<String, String> inputs = command.getInputs();

                            // open the case, if it hasn't been already opened by previous command
                            if (caseForJob == null) {
                                String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                openCase(caseDirPath);
                            }

                            // generate reports
                            // NOTE(review): the declaration/initialization of 'generator'
                            // (a report generator instance) is missing from this view —
                            // confirm against the full source.
                            generator.generateReports();
                        } catch (CaseActionException ex) {
                            String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                            LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
                            System.err.println("Error opening case in case directory: " + caseDirPath);
                            // Do not process any other commands
                            return;
                        }
                        break;
                    default:
                        break;
                }
            }
        } catch (Throwable ex) {
            /*
             * Unexpected runtime exceptions firewall. This task is
             * designed to be able to be run in an executor service
             * thread pool without calling get() on the task's
             * Future<Void>, so this ensures that such errors get
             * logged.
             */
            LOGGER.log(Level.SEVERE, "Unexpected error", ex);
            System.err.println("Unexpected error. Exiting...");

        } finally {
            try {
                // NOTE(review): the statement that closes the current case
                // (presumably Case.closeCurrentCase()) is missing from this
                // view — as shown, this try block is empty.
            } catch (CaseActionException ex) {
                LOGGER.log(Level.WARNING, "Exception while closing case", ex);
                System.err.println("Exception while closing case");
            }
        }

    } finally {
        LOGGER.log(Level.INFO, "Job processing task finished");
        System.out.println("Job processing task finished");

        // shut down Autopsy
        stop();
    }
}
332 
343  private void openCase(String baseCaseName, String rootOutputDirectory) throws CaseActionException {
344 
345  LOGGER.log(Level.INFO, "Opening case {0} in directory {1}", new Object[]{baseCaseName, rootOutputDirectory});
346  Path caseDirectoryPath = findCaseDirectory(Paths.get(rootOutputDirectory), baseCaseName);
347  if (null != caseDirectoryPath) {
348  // found an existing case directory for same case name. the input case name must be unique. Exit.
349  LOGGER.log(Level.SEVERE, "Case {0} already exists. Case name must be unique. Exiting", baseCaseName);
350  throw new CaseActionException("Case " + baseCaseName + " already exists. Case name must be unique. Exiting");
351  } else {
352  caseDirectoryPath = createCaseFolderPath(Paths.get(rootOutputDirectory), baseCaseName);
353 
354  // Create the case directory
355  Case.createCaseDirectory(caseDirectoryPath.toString(), Case.CaseType.SINGLE_USER_CASE);
356 
357  CaseDetails caseDetails = new CaseDetails(baseCaseName);
358  Case.createAsCurrentCase(Case.CaseType.SINGLE_USER_CASE, caseDirectoryPath.toString(), caseDetails);
359  }
360 
361  caseForJob = Case.getCurrentCase();
362  LOGGER.log(Level.INFO, "Opened case {0}", caseForJob.getName());
363  }
364 
372  private void openCase(String caseFolderPath) throws CaseActionException {
373 
374  LOGGER.log(Level.INFO, "Opening case in directory {0}", caseFolderPath);
375 
376  String metadataFilePath = findAutFile(caseFolderPath);
377  Case.openAsCurrentCase(metadataFilePath);
378 
379  caseForJob = Case.getCurrentCase();
380  LOGGER.log(Level.INFO, "Opened case {0}", caseForJob.getName());
381  }
382 
392  private String findAutFile(String caseDirectory) throws CaseActionException {
393  File caseFolder = Paths.get(caseDirectory).toFile();
394  if (caseFolder.exists()) {
395  /*
396  * Search for '*.aut' files.
397  */
398  File[] fileArray = caseFolder.listFiles();
399  if (fileArray == null) {
400  throw new CaseActionException("No files found in case directory");
401  }
402  String autFilePath = null;
403  for (File file : fileArray) {
404  String name = file.getName().toLowerCase();
405  if (autFilePath == null && name.endsWith(getFileExtension())) {
406  return file.getAbsolutePath();
407  }
408  }
409  throw new CaseActionException("No .aut files found in case directory");
410  }
411  throw new CaseActionException("Case directory was not found");
412  }
413 
// NOTE(review): the signature of this method — runDataSourceProcessor(Case,
// AutoIngestDataSource), including its throws clause — is missing from this
// view; confirm against the full source. It adds the given data source to the
// case by trying each capable data source processor in decreasing order of
// confidence, blocking on ingestLock until each attempt completes.

LOGGER.log(Level.INFO, "Adding data source {0} ", dataSource.getPath().toString());

// Get an ordered list of data source processors to try
List<AutoIngestDataSourceProcessor> validDataSourceProcessors;
try {
    validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath());
    // NOTE(review): the catch clause header (likely catching
    // AutoIngestDataSourceProcessorException) is missing from this view.
    LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath());
    // rethrow the exception.
    throw ex;
}

// did we find a data source processor that can process the data source
if (validDataSourceProcessors.isEmpty()) {
    // This should never happen. We should add all unsupported data sources as logical files.
    LOGGER.log(Level.SEVERE, "Unsupported data source {0}", dataSource.getPath()); // NON-NLS
    return;
}

synchronized (ingestLock) {
    // Try each DSP in decreasing order of confidence
    for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) {
        UUID taskId = UUID.randomUUID();
        caseForJob.notifyAddingDataSource(taskId);
        DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock);
        // NOTE(review): notifyAddingDataSource(taskId) is invoked twice for the
        // same taskId (here and two lines above) — looks like an accidental
        // duplicate; verify against the full source before removing.
        caseForJob.notifyAddingDataSource(taskId);
        LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()});
        selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack);
        // Block until the callback notifies on ingestLock.
        ingestLock.wait();

        // at this point we got the content object(s) from the current DSP.
        // check whether the data source was processed successfully
        if ((dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS)
                || dataSource.getContent().isEmpty()) {
            // move onto the the next DSP that can process this data source
            logDataSourceProcessorResult(dataSource);
            continue;
        }

        logDataSourceProcessorResult(dataSource);
        return;
    }
    // If we get to this point, none of the processors were successful
    LOGGER.log(Level.SEVERE, "All data source processors failed to process {0}", dataSource.getPath());
    // Throw an exception. It will get caught & handled upstream and will result in AIM auto-pause.
    throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Failed to process " + dataSource.getPath() + " with all data source processors");
}
484 
// NOTE(review): the signature of this method —
// logDataSourceProcessorResult(AutoIngestDataSource) — and the local
// declaration of 'resultCode' (presumably
// dataSource.getResultDataSourceProcessorResultCode()) are missing from this
// view; confirm against the full source. It logs the outcome of a data source
// processor run at a severity matching the result code.

if (null != resultCode) {
    switch (resultCode) {
        case NO_ERRORS:
            // Success, but a data source that produced no content is still
            // logged as severe.
            LOGGER.log(Level.INFO, "Added data source to case");
            if (dataSource.getContent().isEmpty()) {
                LOGGER.log(Level.SEVERE, "Data source failed to produce content");
            }
            break;

        case NONCRITICAL_ERRORS:
            // Log each non-critical error, then treat the add as a success.
            for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                LOGGER.log(Level.WARNING, "Non-critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
            }
            LOGGER.log(Level.INFO, "Added data source to case");
            if (dataSource.getContent().isEmpty()) {
                LOGGER.log(Level.SEVERE, "Data source failed to produce content");
            }
            break;

        case CRITICAL_ERRORS:
            // Critical errors mean the data source was not added.
            for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
            }
            LOGGER.log(Level.SEVERE, "Failed to add data source to case");
            break;
    }
} else {
    LOGGER.log(Level.WARNING, "No result code for data source processor for {0}", dataSource.getPath());
}
524 
/**
 * Runs ingest modules on the given data source, optionally using a named
 * ingest profile (and its associated file filter). Blocks on ingestLock
 * until the ingest job completes or is cancelled, then inspects the job
 * snapshot for cancelled modules or startup errors.
 *
 * @param dataSource        the data source to analyze
 * @param ingestProfileName name of the ingest profile to use; empty string
 *                          selects the baseline configuration
 * @throws AnalysisStartupException if the ingest job cannot be started or
 *                                  was cancelled for a system (non-user)
 *                                  reason
 * @throws InterruptedException     if interrupted while waiting for the job
 */
private void analyze(AutoIngestDataSource dataSource, String ingestProfileName) throws AnalysisStartupException, InterruptedException {

    LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", dataSource.getPath());

    // configure ingest profile and file filter
    IngestProfiles.IngestProfile selectedProfile = null;
    FilesSet selectedFileSet = null;
    if (!ingestProfileName.isEmpty()) {
        selectedProfile = getSelectedProfile(ingestProfileName);
        if (selectedProfile == null) {
            // unable to find the user specified profile
            LOGGER.log(Level.SEVERE, "Unable to find ingest profile: {0}. Ingest cancelled!", ingestProfileName);
            System.err.println("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
            return;
        }

        // get FileSet filter associated with this profile
        selectedFileSet = getSelectedFilter(selectedProfile.getFileIngestFilter());
        if (selectedFileSet == null) {
            // unable to find the user specified profile
            LOGGER.log(Level.SEVERE, "Unable to find file filter {0} for ingest profile: {1}. Ingest cancelled!", new Object[]{selectedProfile.getFileIngestFilter(), ingestProfileName});
            System.err.println("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
            return;
        }
    }

    IngestJobEventListener ingestJobEventListener = new IngestJobEventListener();
    // NOTE(review): the line registering the listener
    // (IngestManager.getInstance().addIngestJobEventListener(...)) is missing
    // from this view — the finally block below removes it, so registration
    // must occur here in the full source.
    try {
        synchronized (ingestLock) {
            IngestJobSettings ingestJobSettings;
            if (selectedProfile == null || selectedFileSet == null) {
                // use baseline configuration
                // NOTE(review): the assignment of ingestJobSettings for the
                // baseline case is missing from this view — confirm against
                // the full source.
            } else {
                // load the custom ingest
                ingestJobSettings = new IngestJobSettings(selectedProfile.toString());
                ingestJobSettings.setFileFilter(selectedFileSet);
            }

            List<String> settingsWarnings = ingestJobSettings.getWarnings();
            if (settingsWarnings.isEmpty()) {
                IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
                IngestJob ingestJob = ingestJobStartResult.getJob();
                if (null != ingestJob) {
                    /*
                     * Block until notified by the ingest job event
                     * listener or until interrupted because auto ingest
                     * is shutting down.
                     */
                    ingestLock.wait();
                    LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", dataSource.getPath());
                    IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
                    for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) {
                        if (!snapshot.isCancelled()) {
                            // Completed: warn about any individually-cancelled modules.
                            List<String> cancelledModules = snapshot.getCancelledDataSourceIngestModules();
                            if (!cancelledModules.isEmpty()) {
                                LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", dataSource.getPath()));
                                for (String module : snapshot.getCancelledDataSourceIngestModules()) {
                                    LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, dataSource.getPath()));
                                }
                            }
                            LOGGER.log(Level.INFO, "Analysis of data source completed");
                        } else {
                            // Cancelled: only user-initiated cancellation is tolerated.
                            LOGGER.log(Level.WARNING, "Analysis of data source cancelled");
                            IngestJob.CancellationReason cancellationReason = snapshot.getCancellationReason();
                            if (IngestJob.CancellationReason.NOT_CANCELLED != cancellationReason && IngestJob.CancellationReason.USER_CANCELLED != cancellationReason) {
                                throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), dataSource.getPath()));
                            }
                        }
                    }
                } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) {
                    // The job never started because one or more modules failed to start.
                    for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) {
                        LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getPath()), error.getThrowable());
                    }
                    LOGGER.log(Level.SEVERE, "Failed to analyze data source due to ingest job startup error");
                    throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", dataSource.getPath()));
                } else {
                    // The ingest manager itself failed to start the job.
                    LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", dataSource.getPath()), ingestJobStartResult.getStartupException());
                    throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException());
                }
            } else {
                // Settings are invalid; log every warning and abort.
                for (String warning : settingsWarnings) {
                    LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{dataSource.getPath(), warning});
                }
                LOGGER.log(Level.SEVERE, "Failed to analyze data source due to settings errors");
                throw new AnalysisStartupException("Error(s) in ingest job settings");
            }
        }
    } finally {
        IngestManager.getInstance().removeIngestJobEventListener(ingestJobEventListener);
    }
}
635 
/**
 * Looks up an ingest profile by name (case-insensitive).
 *
 * @param ingestProfileName the profile name to match
 * @return the matching profile, or null if no profile with that name exists
 */
private IngestProfiles.IngestProfile getSelectedProfile(String ingestProfileName) {

    IngestProfiles.IngestProfile selectedProfile = null;
    // lookup the profile by name
    // NOTE(review): the for-loop header iterating the available profiles
    // (likely over IngestProfiles.getIngestProfiles()) is missing from this
    // view — confirm against the full source.
    if (profile.toString().equalsIgnoreCase(ingestProfileName)) {
        // found the profile
        selectedProfile = profile;
        break;
    }
    }
    return selectedProfile;
}
657 
/**
 * Looks up a file ingest filter (FilesSet) by name.
 *
 * @param filterName name of the filter to find
 * @return the matching FilesSet, or null if not found or if the filters
 *         could not be loaded
 */
private FilesSet getSelectedFilter(String filterName) {
    try {
        // NOTE(review): the continuation of this expression (presumably
        // .getCustomFileIngestFilters() plus a loop over the standard filters)
        // and the catch clause header are missing from this view — confirm
        // against the full source.
        Map<String, FilesSet> fileIngestFilters = FilesSetsManager.getInstance()
        fileIngestFilters.put(fSet.getName(), fSet);
    }
    return fileIngestFilters.get(filterName);
    LOGGER.log(Level.SEVERE, "Failed to get file ingest filter: " + filterName, ex); //NON-NLS
    return null;
    }
}
679 
689  private Path createCaseFolderPath(Path caseFoldersPath, String caseName) {
690  String folderName = caseName + "_" + TimeStampUtils.createTimeStamp();
691  return Paths.get(caseFoldersPath.toString(), folderName);
692  }
693 
704  private Path findCaseDirectory(Path folderToSearch, String caseName) {
705  File searchFolder = new File(folderToSearch.toString());
706  if (!searchFolder.isDirectory()) {
707  return null;
708  }
709  Path caseFolderPath = null;
710  String[] candidateFolders = searchFolder.list(new CaseFolderFilter(caseName));
711  long mostRecentModified = 0;
712  for (String candidateFolder : candidateFolders) {
713  File file = new File(candidateFolder);
714  if (file.lastModified() >= mostRecentModified) {
715  mostRecentModified = file.lastModified();
716  caseFolderPath = Paths.get(folderToSearch.toString(), file.getPath());
717  }
718  }
719  return caseFolderPath;
720  }
721 
729  private String getOutputDirPath(Case caseForJob) {
730  return caseForJob.getCaseDirectory() + File.separator + LOG_DIR_NAME;
731  }
732 
742  private class IngestJobEventListener implements PropertyChangeListener {
743 
751  @Override
752  public void propertyChange(PropertyChangeEvent event) {
753  if (AutopsyEvent.SourceType.LOCAL == ((AutopsyEvent) event).getSourceType()) {
754  String eventType = event.getPropertyName();
755  if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
756  synchronized (ingestLock) {
757  ingestLock.notify();
758  }
759  }
760  }
761  }
762  };
763 
770 
// NOTE(review): the enclosing class declaration (apparently a do-nothing
// progress monitor implementation used for headless operation) is missing
// from this view — confirm against the full source.

/**
 * No-op: progress is not rendered in command-line mode.
 */
@Override
public void setIndeterminate(final boolean indeterminate) {
}

/**
 * No-op: progress is not rendered in command-line mode.
 */
@Override
public void setProgress(final int progress) {
}

/**
 * No-op: progress is not rendered in command-line mode.
 */
@Override
public void setProgressText(final String text) {
}
}
798 
804  private final class AnalysisStartupException extends Exception {
805 
806  private static final long serialVersionUID = 1L;
807 
808  private AnalysisStartupException(String message) {
809  super(message);
810  }
811 
812  private AnalysisStartupException(String message, Throwable cause) {
813  super(message, cause);
814  }
815  }
816  }
817 
818  private static class CaseFolderFilter implements FilenameFilter {
819 
820  private final String caseName;
821  private final static String CASE_METADATA_EXT = CaseMetadata.getFileExtension();
822 
823  CaseFolderFilter(String caseName) {
824  this.caseName = caseName;
825  }
826 
827  @Override
828  public boolean accept(File folder, String fileName) {
829  File file = new File(folder, fileName);
830  if (fileName.length() > TimeStampUtils.getTimeStampLength() && file.isDirectory()) {
831  if (TimeStampUtils.endsWithTimeStamp(fileName)) {
832  if (null != caseName) {
833  String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
834  if (fileNamePrefix.equals(caseName)) {
835  return hasCaseMetadataFile(file);
836  }
837  } else {
838  return hasCaseMetadataFile(file);
839  }
840  }
841  }
842  return false;
843  }
844 
853  private static boolean hasCaseMetadataFile(File folder) {
854  for (File file : folder.listFiles()) {
855  if (file.getName().toLowerCase().endsWith(CASE_METADATA_EXT) && file.isFile()) {
856  return true;
857  }
858  }
859  return false;
860  }
861  }
862 
863 }
static List< AutoIngestDataSourceProcessor > getOrderedListOfDataSourceProcessors(Path dataSourcePath)
static synchronized IngestManager getInstance()
static void createCaseDirectory(String caseDirPath, CaseType caseType)
Definition: Case.java:945
IngestJobStartResult beginIngestJob(Collection< Content > dataSources, IngestJobSettings settings)
synchronized DataSourceProcessorResult getResultDataSourceProcessorResultCode()
static final Set< IngestManager.IngestJobEvent > INGEST_JOB_EVENTS_OF_INTEREST
static boolean endsWithTimeStamp(String inputString)
void removeIngestJobEventListener(final PropertyChangeListener listener)
void addIngestJobEventListener(final PropertyChangeListener listener)
static synchronized List< IngestProfile > getIngestProfiles()
static void openAsCurrentCase(String caseMetadataFilePath)
Definition: Case.java:630
void analyze(AutoIngestDataSource dataSource, String ingestProfileName)
static synchronized void setRunningWithGUI(boolean runningWithGUI)
synchronized void setDataSourceProcessorOutput(DataSourceProcessorResult result, List< String > errorMessages, List< Content > content)
synchronized static Logger getLogger(String name)
Definition: Logger.java:124
void notifyAddingDataSource(UUID eventId)
Definition: Case.java:1499
static void createAsCurrentCase(String caseDir, String caseDisplayName, String caseNumber, String examiner, CaseType caseType)
Definition: Case.java:576
IngestManager.IngestManagerException getStartupException()

Copyright © 2012-2019 Basis Technology. Generated on: Tue Jan 7 2020
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.