Autopsy  4.15.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
CommandLineIngestManager.java
Go to the documentation of this file.
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2019-2019 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.commandlineingest;
20 
21 import java.beans.PropertyChangeEvent;
22 import java.beans.PropertyChangeListener;
23 import java.io.File;
24 import java.io.FilenameFilter;
25 import java.nio.file.Path;
26 import java.nio.file.Paths;
27 import java.util.ArrayList;
28 import java.util.Arrays;
29 import java.util.List;
30 import java.util.UUID;
31 import java.util.Collection;
32 import java.util.EnumSet;
33 import java.util.Iterator;
34 import java.util.Map;
35 import java.util.Set;
36 import java.util.logging.Level;
37 import org.netbeans.spi.sendopts.OptionProcessor;
38 import org.openide.LifecycleManager;
39 import org.openide.util.Lookup;
69 import org.sleuthkit.datamodel.Content;
70 import org.sleuthkit.datamodel.TskCoreException;
71 
78 
79  private static final Logger LOGGER = Logger.getLogger(CommandLineIngestManager.class.getName());
81  private Case caseForJob = null;
83  private static final String LOG_DIR_NAME = "Command Output";
84 
86  }
87 
88  public void start() {
89  new Thread(new JobProcessingTask()).start();
90  }
91 
/**
 * Stops command line ingest: closes the currently open case, if any, and
 * then exits the application via the NetBeans lifecycle manager.
 */
public void stop() {
    try {
        // close current case if there is one open
        // NOTE(review): the statement that actually closes the case appears
        // to be missing from this extract of the file (the catch below
        // implies a call that throws CaseActionException) -- confirm
        // against the upstream source.
    } catch (CaseActionException ex) {
        LOGGER.log(Level.WARNING, "Unable to close the case while shutting down command line ingest manager", ex); //NON-NLS
    }

    // shut down Autopsy
    LifecycleManager.getDefault().exit();
}
103 
104  private final class JobProcessingTask implements Runnable {
105 
106  private final Object ingestLock;
107 
108  private JobProcessingTask() {
109  ingestLock = new Object();
110  try {
112  LOGGER.log(Level.INFO, "Set running with desktop GUI runtime property to false");
114  LOGGER.log(Level.SEVERE, "Failed to set running with desktop GUI runtime property to false", ex);
115  }
116  }
117 
122  @Override
123  public void run() {
124  LOGGER.log(Level.INFO, "Job processing task started");
125 
126  try {
127  // read command line inputs
128  LOGGER.log(Level.INFO, "Autopsy is running from command line"); //NON-NLS
129  List<CommandLineCommand> commands = null;
130 
131  // first look up all OptionProcessors and get input data from CommandLineOptionProcessor
132  Collection<? extends OptionProcessor> optionProcessors = Lookup.getDefault().lookupAll(OptionProcessor.class);
133  Iterator<? extends OptionProcessor> optionsIterator = optionProcessors.iterator();
134  while (optionsIterator.hasNext()) {
135  // find CommandLineOptionProcessor
136  OptionProcessor processor = optionsIterator.next();
137  if (processor instanceof CommandLineOptionProcessor) {
138  // check if we are running from command line
139  commands = ((CommandLineOptionProcessor) processor).getCommands();
140  }
141  }
142 
143  if (commands == null || commands.isEmpty()) {
144  LOGGER.log(Level.SEVERE, "No command line commands specified");
145  System.err.println("No command line commands specified");
146  return;
147  }
148 
149  try {
150  // Commands are already stored in order in which they should be executed
151  for (CommandLineCommand command : commands) {
152  CommandLineCommand.CommandType type = command.getType();
153  switch (type) {
154  case CREATE_CASE:
155  try {
156  LOGGER.log(Level.INFO, "Processing 'Create Case' command");
157  System.out.println("Processing 'Create Case' command");
158  Map<String, String> inputs = command.getInputs();
159  String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
160  String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
162  String caseTypeString = inputs.get(CommandLineCommand.InputType.CASE_TYPE.name());
163  if (caseTypeString != null && caseTypeString.equalsIgnoreCase(CommandLineOptionProcessor.CASETYPE_MULTI)) {
164  caseType = CaseType.MULTI_USER_CASE;
165  }
166  openCase(baseCaseName, rootOutputDirectory, caseType);
167 
168  String outputDirPath = getOutputDirPath(caseForJob);
169  OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
170  } catch (CaseActionException ex) {
171  String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
172  LOGGER.log(Level.SEVERE, "Error creating or opening case " + baseCaseName, ex);
173  System.err.println("Error creating or opening case " + baseCaseName);
174  // Do not process any other commands
175  return;
176  }
177  break;
178  case ADD_DATA_SOURCE:
179  try {
180  LOGGER.log(Level.INFO, "Processing 'Add Data Source' command");
181  System.out.println("Processing 'Add Data Source' command");
182  Map<String, String> inputs = command.getInputs();
183 
184  // open the case, if it hasn't been already opened by CREATE_CASE command
185  if (caseForJob == null) {
186  String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
187  openCase(caseDirPath);
188  }
189 
190  String dataSourcePath = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
191  dataSource = new AutoIngestDataSource("", Paths.get(dataSourcePath));
192  runDataSourceProcessor(caseForJob, dataSource);
193 
194  String outputDirPath = getOutputDirPath(caseForJob);
195  OutputGenerator.saveAddDataSourceOutput(caseForJob, dataSource, outputDirPath);
197  String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
198  LOGGER.log(Level.SEVERE, "Error adding data source " + dataSourcePath, ex);
199  System.err.println("Error adding data source " + dataSourcePath);
200  // Do not process any other commands
201  return;
202  }
203  break;
204  case RUN_INGEST:
205  try {
206  LOGGER.log(Level.INFO, "Processing 'Run Ingest' command");
207  System.out.println("Processing 'Run Ingest' command");
208  Map<String, String> inputs = command.getInputs();
209 
210  // open the case, if it hasn't been already opened by CREATE_CASE or ADD_DATA_SOURCE commands
211  if (caseForJob == null) {
212  String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
213  openCase(caseDirPath);
214  }
215 
216  // populate the AutoIngestDataSource structure, if that hasn't been done by ADD_DATA_SOURCE command
217  if (dataSource == null) {
218 
219  String dataSourceId = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_ID.name());
220  Long dataSourceObjId = Long.valueOf(dataSourceId);
221 
222  // get Content object for the data source
223  Content content = null;
224  try {
225  content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(dataSourceObjId);
226  } catch (TskCoreException ex) {
227  LOGGER.log(Level.SEVERE, "Exception while trying to find data source with object ID " + dataSourceId, ex);
228  System.err.println("Exception while trying to find data source with object ID " + dataSourceId);
229  // Do not process any other commands
230  return;
231  }
232 
233  if (content == null) {
234  LOGGER.log(Level.SEVERE, "Unable to find data source with object ID {0}", dataSourceId);
235  System.out.println("Unable to find data source with object ID " + dataSourceId);
236  // Do not process any other commands
237  return;
238  }
239 
240  // populate the AutoIngestDataSource structure
241  dataSource = new AutoIngestDataSource("", Paths.get(content.getName()));
242  List<Content> contentList = Arrays.asList(new Content[]{content});
243  List<String> errorList = new ArrayList<>();
244  dataSource.setDataSourceProcessorOutput(NO_ERRORS, errorList, contentList);
245  }
246 
247  // run ingest
248  String ingestProfile = inputs.get(CommandLineCommand.InputType.INGEST_PROFILE_NAME.name());
249  analyze(dataSource, ingestProfile);
250  } catch (InterruptedException | CaseActionException ex) {
251  String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
252  LOGGER.log(Level.SEVERE, "Error running ingest on data source " + dataSourcePath, ex);
253  System.err.println("Error running ingest on data source " + dataSourcePath);
254  // Do not process any other commands
255  return;
256  }
257  break;
258 
259  case LIST_ALL_DATA_SOURCES:
260  try {
261  LOGGER.log(Level.INFO, "Processing 'List All Data Sources' command");
262  System.out.println("Processing 'List All Data Sources' command");
263  Map<String, String> inputs = command.getInputs();
264 
265  // open the case, if it hasn't been already opened by previous command
266  if (caseForJob == null) {
267  String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
268  openCase(caseDirPath);
269  }
270 
271  String outputDirPath = getOutputDirPath(caseForJob);
272  OutputGenerator.listAllDataSources(caseForJob, outputDirPath);
273  } catch (CaseActionException ex) {
274  String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
275  LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
276  System.err.println("Error opening case in case directory: " + caseDirPath);
277  // Do not process any other commands
278  return;
279  }
280  break;
281 
282  case GENERATE_REPORTS:
283  try {
284  LOGGER.log(Level.INFO, "Processing 'Generate Reports' command");
285  System.out.println("Processing 'Generate Reports' command");
286  Map<String, String> inputs = command.getInputs();
287 
288  // open the case, if it hasn't been already opened by previous command
289  if (caseForJob == null) {
290  String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
291  openCase(caseDirPath);
292  }
293 
294  // generate reports
297  generator.generateReports();
298  } catch (CaseActionException ex) {
299  String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
300  LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
301  System.err.println("Error opening case in case directory: " + caseDirPath);
302  // Do not process any other commands
303  return;
304  }
305  break;
306  default:
307  break;
308  }
309  }
310  } catch (Throwable ex) {
311  /*
312  * Unexpected runtime exceptions firewall. This task is
313  * designed to be able to be run in an executor service
314  * thread pool without calling get() on the task's
315  * Future<Void>, so this ensures that such errors get
316  * logged.
317  */
318  LOGGER.log(Level.SEVERE, "Unexpected error", ex);
319  System.err.println("Unexpected error. Exiting...");
320 
321  } finally {
322  try {
324  } catch (CaseActionException ex) {
325  LOGGER.log(Level.WARNING, "Exception while closing case", ex);
326  System.err.println("Exception while closing case");
327  }
328  }
329 
330  } finally {
331  LOGGER.log(Level.INFO, "Job processing task finished");
332  System.out.println("Job processing task finished");
333 
334  // shut down Autopsy
335  stop();
336  }
337  }
338 
/**
 * Creates a new case with the given name under the given root output
 * directory and opens it as the current case. The case name must be unique
 * within the root directory; the on-disk folder gets a timestamp suffix.
 *
 * @param baseCaseName        case name supplied on the command line
 * @param rootOutputDirectory directory under which the case folder is created
 * @param caseType            single-user or multi-user case
 * @throws CaseActionException if a case directory for this name already
 *                             exists, or the case cannot be created/opened
 */
private void openCase(String baseCaseName, String rootOutputDirectory, CaseType caseType) throws CaseActionException {

    LOGGER.log(Level.INFO, "Opening case {0} in directory {1}", new Object[]{baseCaseName, rootOutputDirectory});
    Path caseDirectoryPath = findCaseDirectory(Paths.get(rootOutputDirectory), baseCaseName);
    if (null != caseDirectoryPath) {
        // found an existing case directory for same case name. the input case name must be unique. Exit.
        LOGGER.log(Level.SEVERE, "Case {0} already exists. Case name must be unique. Exiting", baseCaseName);
        throw new CaseActionException("Case " + baseCaseName + " already exists. Case name must be unique. Exiting");
    } else {
        caseDirectoryPath = createCaseFolderPath(Paths.get(rootOutputDirectory), baseCaseName);

        // Create the case directory
        // NOTE(review): the directory is always laid out as SINGLE_USER_CASE
        // even when caseType is MULTI_USER_CASE (which IS passed to
        // createAsCurrentCase below) -- confirm this asymmetry is intentional.
        Case.createCaseDirectory(caseDirectoryPath.toString(), Case.CaseType.SINGLE_USER_CASE);

        CaseDetails caseDetails = new CaseDetails(baseCaseName);
        Case.createAsCurrentCase(caseType, caseDirectoryPath.toString(), caseDetails);
    }

    // Remember the newly opened case so subsequent commands can reuse it.
    caseForJob = Case.getCurrentCase();
    LOGGER.log(Level.INFO, "Opened case {0}", caseForJob.getName());
}
371 
379  private void openCase(String caseFolderPath) throws CaseActionException {
380 
381  LOGGER.log(Level.INFO, "Opening case in directory {0}", caseFolderPath);
382 
383  String metadataFilePath = findAutFile(caseFolderPath);
384  Case.openAsCurrentCase(metadataFilePath);
385 
386  caseForJob = Case.getCurrentCase();
387  LOGGER.log(Level.INFO, "Opened case {0}", caseForJob.getName());
388  }
389 
399  private String findAutFile(String caseDirectory) throws CaseActionException {
400  File caseFolder = Paths.get(caseDirectory).toFile();
401  if (caseFolder.exists()) {
402  /*
403  * Search for '*.aut' files.
404  */
405  File[] fileArray = caseFolder.listFiles();
406  if (fileArray == null) {
407  throw new CaseActionException("No files found in case directory");
408  }
409  String autFilePath = null;
410  for (File file : fileArray) {
411  String name = file.getName().toLowerCase();
412  if (autFilePath == null && name.endsWith(getFileExtension())) {
413  return file.getAbsolutePath();
414  }
415  }
416  throw new CaseActionException("No .aut files found in case directory");
417  }
418  throw new CaseActionException("Case directory was not found");
419  }
420 
441 
442  LOGGER.log(Level.INFO, "Adding data source {0} ", dataSource.getPath().toString());
443 
444  // Get an ordered list of data source processors to try
445  List<AutoIngestDataSourceProcessor> validDataSourceProcessors;
446  try {
447  validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath());
449  LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath());
450  // rethrow the exception.
451  throw ex;
452  }
453 
454  // did we find a data source processor that can process the data source
455  if (validDataSourceProcessors.isEmpty()) {
456  // This should never happen. We should add all unsupported data sources as logical files.
457  LOGGER.log(Level.SEVERE, "Unsupported data source {0}", dataSource.getPath()); // NON-NLS
458  return;
459  }
460 
462  synchronized (ingestLock) {
463  // Try each DSP in decreasing order of confidence
464  for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) {
465  UUID taskId = UUID.randomUUID();
466  caseForJob.notifyAddingDataSource(taskId);
467  DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock);
468  caseForJob.notifyAddingDataSource(taskId);
469  LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()});
470  selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack);
471  ingestLock.wait();
472 
473  // at this point we got the content object(s) from the current DSP.
474  // check whether the data source was processed successfully
475  if ((dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS)
476  || dataSource.getContent().isEmpty()) {
477  // move onto the the next DSP that can process this data source
478  logDataSourceProcessorResult(dataSource);
479  continue;
480  }
481 
482  logDataSourceProcessorResult(dataSource);
483  return;
484  }
485  // If we get to this point, none of the processors were successful
486  LOGGER.log(Level.SEVERE, "All data source processors failed to process {0}", dataSource.getPath());
487  // Throw an exception. It will get caught & handled upstream and will result in AIM auto-pause.
488  throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Failed to process " + dataSource.getPath() + " with all data source processors");
489  }
490  }
491 
499 
501  if (null != resultCode) {
502  switch (resultCode) {
503  case NO_ERRORS:
504  LOGGER.log(Level.INFO, "Added data source to case");
505  if (dataSource.getContent().isEmpty()) {
506  LOGGER.log(Level.SEVERE, "Data source failed to produce content");
507  }
508  break;
509 
510  case NONCRITICAL_ERRORS:
511  for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
512  LOGGER.log(Level.WARNING, "Non-critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
513  }
514  LOGGER.log(Level.INFO, "Added data source to case");
515  if (dataSource.getContent().isEmpty()) {
516  LOGGER.log(Level.SEVERE, "Data source failed to produce content");
517  }
518  break;
519 
520  case CRITICAL_ERRORS:
521  for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
522  LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
523  }
524  LOGGER.log(Level.SEVERE, "Failed to add data source to case");
525  break;
526  }
527  } else {
528  LOGGER.log(Level.WARNING, "No result code for data source processor for {0}", dataSource.getPath());
529  }
530  }
531 
549  private void analyze(AutoIngestDataSource dataSource, String ingestProfileName) throws AnalysisStartupException, InterruptedException {
550 
551  LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", dataSource.getPath());
552 
553  // configure ingest profile and file filter
554  IngestProfiles.IngestProfile selectedProfile = null;
555  FilesSet selectedFileSet = null;
556  if (!ingestProfileName.isEmpty()) {
557  selectedProfile = getSelectedProfile(ingestProfileName);
558  if (selectedProfile == null) {
559  // unable to find the user specified profile
560  LOGGER.log(Level.SEVERE, "Unable to find ingest profile: {0}. Ingest cancelled!", ingestProfileName);
561  System.err.println("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
562  return;
563  }
564 
565  // get FileSet filter associated with this profile
566  selectedFileSet = getSelectedFilter(selectedProfile.getFileIngestFilter());
567  if (selectedFileSet == null) {
568  // unable to find the user specified profile
569  LOGGER.log(Level.SEVERE, "Unable to find file filter {0} for ingest profile: {1}. Ingest cancelled!", new Object[]{selectedProfile.getFileIngestFilter(), ingestProfileName});
570  System.err.println("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
571  return;
572  }
573  }
574 
575  IngestJobEventListener ingestJobEventListener = new IngestJobEventListener();
577  try {
578  synchronized (ingestLock) {
579  IngestJobSettings ingestJobSettings;
580  if (selectedProfile == null || selectedFileSet == null) {
581  // use baseline configuration
583  } else {
584  // load the custom ingest
585  ingestJobSettings = new IngestJobSettings(selectedProfile.toString());
586  ingestJobSettings.setFileFilter(selectedFileSet);
587  }
588 
589  List<String> settingsWarnings = ingestJobSettings.getWarnings();
590  if (settingsWarnings.isEmpty()) {
591  IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
592  IngestJob ingestJob = ingestJobStartResult.getJob();
593  if (null != ingestJob) {
594  /*
595  * Block until notified by the ingest job event
596  * listener or until interrupted because auto ingest
597  * is shutting down.
598  */
599  ingestLock.wait();
600  LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", dataSource.getPath());
601  IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
602  for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) {
603  if (!snapshot.isCancelled()) {
604  List<String> cancelledModules = snapshot.getCancelledDataSourceIngestModules();
605  if (!cancelledModules.isEmpty()) {
606  LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", dataSource.getPath()));
607  for (String module : snapshot.getCancelledDataSourceIngestModules()) {
608  LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, dataSource.getPath()));
609  }
610  }
611  LOGGER.log(Level.INFO, "Analysis of data source completed");
612  } else {
613  LOGGER.log(Level.WARNING, "Analysis of data source cancelled");
614  IngestJob.CancellationReason cancellationReason = snapshot.getCancellationReason();
615  if (IngestJob.CancellationReason.NOT_CANCELLED != cancellationReason && IngestJob.CancellationReason.USER_CANCELLED != cancellationReason) {
616  throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), dataSource.getPath()));
617  }
618  }
619  }
620  } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) {
621  for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) {
622  LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getPath()), error.getThrowable());
623  }
624  LOGGER.log(Level.SEVERE, "Failed to analyze data source due to ingest job startup error");
625  throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", dataSource.getPath()));
626  } else {
627  LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", dataSource.getPath()), ingestJobStartResult.getStartupException());
628  throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException());
629  }
630  } else {
631  for (String warning : settingsWarnings) {
632  LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{dataSource.getPath(), warning});
633  }
634  LOGGER.log(Level.SEVERE, "Failed to analyze data source due to settings errors");
635  throw new AnalysisStartupException("Error(s) in ingest job settings");
636  }
637  }
638  } finally {
639  IngestManager.getInstance().removeIngestJobEventListener(ingestJobEventListener);
640  }
641  }
642 
/**
 * Looks up an ingest profile by name (case-insensitive).
 *
 * @param ingestProfileName profile name supplied on the command line
 * @return the matching profile, or null if no profile with that name exists
 */
private IngestProfiles.IngestProfile getSelectedProfile(String ingestProfileName) {

    IngestProfiles.IngestProfile selectedProfile = null;
    // lookup the profile by name
    // NOTE(review): the loop header that iterates the available profiles
    // (presumably over IngestProfiles.getIngestProfiles(), per the file's
    // cross-references) appears to be missing from this extract -- confirm
    // against the upstream source.
    if (profile.toString().equalsIgnoreCase(ingestProfileName)) {
        // found the profile
        selectedProfile = profile;
        break;
    }
    }
    return selectedProfile;
}
664 
/**
 * Looks up the FilesSet file-ingest filter with the given name.
 *
 * @param filterName name of the file filter associated with the selected
 *                   ingest profile
 * @return the matching FilesSet, or null if the lookup fails
 */
private FilesSet getSelectedFilter(String filterName) {
    try {
        Map<String, FilesSet> fileIngestFilters = FilesSetsManager.getInstance()
        // NOTE(review): the continuation of the statement above and the loop
        // header that populates the map appear to be missing from this
        // extract -- confirm against the upstream source.
        fileIngestFilters.put(fSet.getName(), fSet);
        }
        return fileIngestFilters.get(filterName);
        // NOTE(review): the catch clause header is also missing from this
        // extract; the handler below logs the failure and returns null.
        LOGGER.log(Level.SEVERE, "Failed to get file ingest filter: " + filterName, ex); //NON-NLS
        return null;
    }
}
686 
696  private Path createCaseFolderPath(Path caseFoldersPath, String caseName) {
697  String folderName = caseName + "_" + TimeStampUtils.createTimeStamp();
698  return Paths.get(caseFoldersPath.toString(), folderName);
699  }
700 
711  private Path findCaseDirectory(Path folderToSearch, String caseName) {
712  File searchFolder = new File(folderToSearch.toString());
713  if (!searchFolder.isDirectory()) {
714  return null;
715  }
716  Path caseFolderPath = null;
717  String[] candidateFolders = searchFolder.list(new CaseFolderFilter(caseName));
718  long mostRecentModified = 0;
719  for (String candidateFolder : candidateFolders) {
720  File file = new File(candidateFolder);
721  if (file.lastModified() >= mostRecentModified) {
722  mostRecentModified = file.lastModified();
723  caseFolderPath = Paths.get(folderToSearch.toString(), file.getPath());
724  }
725  }
726  return caseFolderPath;
727  }
728 
736  private String getOutputDirPath(Case caseForJob) {
737  return caseForJob.getCaseDirectory() + File.separator + LOG_DIR_NAME;
738  }
739 
/**
 * Listens for local ingest job COMPLETED/CANCELLED events and wakes the
 * job processing thread that blocks on ingestLock while analysis runs.
 */
private class IngestJobEventListener implements PropertyChangeListener {

    @Override
    public void propertyChange(PropertyChangeEvent event) {
        // Only react to events generated locally, not by remote collaborators.
        if (AutopsyEvent.SourceType.LOCAL == ((AutopsyEvent) event).getSourceType()) {
            String eventType = event.getPropertyName();
            if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
                synchronized (ingestLock) {
                    // Wake the thread waiting on ingestLock for job completion.
                    ingestLock.notify();
                }
            }
        }
    }
};
770 
777 
// No-op: the command line run is headless, so indeterminate-state changes
// reported by data source processors are ignored.
@Override
public void setIndeterminate(final boolean indeterminate) {
}
786 
// No-op: the command line run is headless, so numeric progress updates
// reported by data source processors are ignored.
@Override
public void setProgress(final int progress) {
}
795 
// No-op: the command line run is headless, so progress text reported by
// data source processors is ignored.
@Override
public void setProgressText(final String text) {
}
804  }
805 
/**
 * Exception thrown when ingest-module analysis of a data source cannot be
 * started, or is cancelled for a reason other than a user request.
 */
private final class AnalysisStartupException extends Exception {

    private static final long serialVersionUID = 1L;

    private AnalysisStartupException(String message) {
        super(message);
    }

    private AnalysisStartupException(String message, Throwable cause) {
        super(message, cause);
    }
}
823  }
824 
825  private static class CaseFolderFilter implements FilenameFilter {
826 
827  private final String caseName;
828  private final static String CASE_METADATA_EXT = CaseMetadata.getFileExtension();
829 
830  CaseFolderFilter(String caseName) {
831  this.caseName = caseName;
832  }
833 
834  @Override
835  public boolean accept(File folder, String fileName) {
836  File file = new File(folder, fileName);
837  if (fileName.length() > TimeStampUtils.getTimeStampLength() && file.isDirectory()) {
838  if (TimeStampUtils.endsWithTimeStamp(fileName)) {
839  if (null != caseName) {
840  String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
841  if (fileNamePrefix.equals(caseName)) {
842  return hasCaseMetadataFile(file);
843  }
844  } else {
845  return hasCaseMetadataFile(file);
846  }
847  }
848  }
849  return false;
850  }
851 
860  private static boolean hasCaseMetadataFile(File folder) {
861  for (File file : folder.listFiles()) {
862  if (file.getName().toLowerCase().endsWith(CASE_METADATA_EXT) && file.isFile()) {
863  return true;
864  }
865  }
866  return false;
867  }
868  }
869 
870 }
static List< AutoIngestDataSourceProcessor > getOrderedListOfDataSourceProcessors(Path dataSourcePath)
void openCase(String baseCaseName, String rootOutputDirectory, CaseType caseType)
static synchronized IngestManager getInstance()
static void createCaseDirectory(String caseDirPath, CaseType caseType)
Definition: Case.java:950
IngestJobStartResult beginIngestJob(Collection< Content > dataSources, IngestJobSettings settings)
synchronized DataSourceProcessorResult getResultDataSourceProcessorResultCode()
static final Set< IngestManager.IngestJobEvent > INGEST_JOB_EVENTS_OF_INTEREST
static boolean endsWithTimeStamp(String inputString)
void removeIngestJobEventListener(final PropertyChangeListener listener)
void addIngestJobEventListener(final PropertyChangeListener listener)
static synchronized List< IngestProfile > getIngestProfiles()
static void openAsCurrentCase(String caseMetadataFilePath)
Definition: Case.java:635
void analyze(AutoIngestDataSource dataSource, String ingestProfileName)
static synchronized void setRunningWithGUI(boolean runningWithGUI)
synchronized void setDataSourceProcessorOutput(DataSourceProcessorResult result, List< String > errorMessages, List< Content > content)
synchronized static Logger getLogger(String name)
Definition: Logger.java:124
void notifyAddingDataSource(UUID eventId)
Definition: Case.java:1504
static void createAsCurrentCase(String caseDir, String caseDisplayName, String caseNumber, String examiner, CaseType caseType)
Definition: Case.java:581
IngestManager.IngestManagerException getStartupException()

Copyright © 2012-2020 Basis Technology. Generated on: Mon Jul 6 2020
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.