Autopsy  4.19.1
Graphical digital forensics platform for The Sleuth Kit and other tools.
CommandLineIngestManager.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2019-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.commandlineingest;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.io.FilenameFilter;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import org.netbeans.spi.sendopts.OptionProcessor;
import org.openide.LifecycleManager;
import org.openide.util.Lookup;
// Additional Autopsy imports (case, ingest, data source processor, reporting, and
// utility classes, including static imports of the DataSourceProcessorCallback
// result constants NO_ERRORS, NONCRITICAL_ERRORS, and CRITICAL_ERRORS) are
// elided in this listing.
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

public class CommandLineIngestManager extends CommandLineManager {

    private static final Logger LOGGER = Logger.getLogger(CommandLineIngestManager.class.getName());
    private Case caseForJob = null;
    private AutoIngestDataSource dataSource = null;
    private static final String LOG_DIR_NAME = "Command Output";
    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST
            = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);

    public CommandLineIngestManager() {
    }

    public void start() {
        new Thread(new JobProcessingTask()).start();
    }

    public void stop() {
        try {
            // close the current case if there is one open
            Case.closeCurrentCase();
        } catch (CaseActionException ex) {
            LOGGER.log(Level.WARNING, "Unable to close the case while shutting down command line ingest manager", ex); //NON-NLS
        }

        // shut down Autopsy
        LifecycleManager.getDefault().exit();
    }

    private final class JobProcessingTask implements Runnable {

        private final Object ingestLock;

        private JobProcessingTask() {
            ingestLock = new Object();
            try {
                RuntimeProperties.setRunningWithGUI(false);
                LOGGER.log(Level.INFO, "Set running with desktop GUI runtime property to false");
            } catch (RuntimeProperties.RuntimePropertiesException ex) {
                LOGGER.log(Level.SEVERE, "Failed to set running with desktop GUI runtime property to false", ex);
            }
        }

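        /**
         * Executes the command line commands in the order in which they were
         * specified: CREATE_CASE, ADD_DATA_SOURCE, RUN_INGEST,
         * LIST_ALL_DATA_SOURCES, and GENERATE_REPORTS. Processing stops at the
         * first command that fails, and Autopsy is shut down when the task
         * finishes.
         */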
        @Override
        public void run() {
            LOGGER.log(Level.INFO, "Job processing task started");

            try {
                // read command line inputs
                LOGGER.log(Level.INFO, "Autopsy is running from command line"); //NON-NLS
                List<CommandLineCommand> commands = null;

                // first look up all OptionProcessors and get input data from CommandLineOptionProcessor
                Collection<? extends OptionProcessor> optionProcessors = Lookup.getDefault().lookupAll(OptionProcessor.class);
                Iterator<? extends OptionProcessor> optionsIterator = optionProcessors.iterator();
                while (optionsIterator.hasNext()) {
                    // find CommandLineOptionProcessor
                    OptionProcessor processor = optionsIterator.next();
                    if (processor instanceof CommandLineOptionProcessor) {
                        // check if we are running from the command line
                        commands = ((CommandLineOptionProcessor) processor).getCommands();
                    }
                }

                if (commands == null || commands.isEmpty()) {
                    LOGGER.log(Level.SEVERE, "No command line commands specified");
                    System.out.println("No command line commands specified");
                    return;
                }

                try {
                    // Commands are already stored in the order in which they should be executed
                    for (CommandLineCommand command : commands) {
                        CommandLineCommand.CommandType type = command.getType();
                        switch (type) {
                            case CREATE_CASE:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Create Case' command");
                                    System.out.println("Processing 'Create Case' command");
                                    Map<String, String> inputs = command.getInputs();
                                    String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
                                    String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
                                    CaseType caseType = CaseType.SINGLE_USER_CASE;
                                    String caseTypeString = inputs.get(CommandLineCommand.InputType.CASE_TYPE.name());
                                    if (caseTypeString != null && caseTypeString.equalsIgnoreCase(CommandLineOptionProcessor.CASETYPE_MULTI)) {
                                        caseType = CaseType.MULTI_USER_CASE;
                                    }
                                    openCase(baseCaseName, rootOutputDirectory, caseType);

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
                                } catch (CaseActionException ex) {
                                    String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
                                    LOGGER.log(Level.SEVERE, "Error creating or opening case " + baseCaseName, ex);
                                    System.out.println("Error creating or opening case " + baseCaseName);
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            case ADD_DATA_SOURCE:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Add Data Source' command");
                                    System.out.println("Processing 'Add Data Source' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't already been opened by the CREATE_CASE command
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }

                                    String dataSourcePath = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    dataSource = new AutoIngestDataSource(UUID.randomUUID().toString(), Paths.get(dataSourcePath));
                                    runDataSourceProcessor(caseForJob, dataSource);

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.saveAddDataSourceOutput(caseForJob, dataSource, outputDirPath);
                                } catch (InterruptedException | AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException | CaseActionException ex) {
                                    String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error adding data source " + dataSourcePath, ex);
                                    System.out.println("Error adding data source " + dataSourcePath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            case RUN_INGEST:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Run Ingest' command");
                                    System.out.println("Processing 'Run Ingest' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't already been opened by the CREATE_CASE or ADD_DATA_SOURCE commands
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }

                                    // populate the AutoIngestDataSource structure, if that hasn't been done by the ADD_DATA_SOURCE command
                                    if (dataSource == null) {

                                        String dataSourceId = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_ID.name());
                                        Long dataSourceObjId = Long.valueOf(dataSourceId);

                                        // get the Content object for the data source
                                        Content content = null;
                                        try {
                                            content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(dataSourceObjId);
                                        } catch (TskCoreException ex) {
                                            LOGGER.log(Level.SEVERE, "Exception while trying to find data source with object ID " + dataSourceId, ex);
                                            System.out.println("Exception while trying to find data source with object ID " + dataSourceId);
                                            // Do not process any other commands
                                            return;
                                        }

                                        if (content == null) {
                                            LOGGER.log(Level.SEVERE, "Unable to find data source with object ID {0}", dataSourceId);
                                            System.out.println("Unable to find data source with object ID " + dataSourceId);
                                            // Do not process any other commands
                                            return;
                                        }

                                        // populate the AutoIngestDataSource structure
                                        dataSource = new AutoIngestDataSource("", Paths.get(content.getName()));
                                        List<Content> contentList = Arrays.asList(new Content[]{content});
                                        List<String> errorList = new ArrayList<>();
                                        dataSource.setDataSourceProcessorOutput(NO_ERRORS, errorList, contentList);
                                    }

                                    // run ingest
                                    String ingestProfile = inputs.get(CommandLineCommand.InputType.INGEST_PROFILE_NAME.name());
                                    analyze(dataSource, ingestProfile);
                                } catch (InterruptedException | AnalysisStartupException | CaseActionException ex) {
                                    String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error running ingest on data source " + dataSourcePath, ex);
                                    System.out.println("Error running ingest on data source " + dataSourcePath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;

                            case LIST_ALL_DATA_SOURCES:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'List All Data Sources' command");
                                    System.out.println("Processing 'List All Data Sources' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't already been opened by a previous command
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }

                                    String outputDirPath = getOutputDirPath(caseForJob);
                                    OutputGenerator.listAllDataSources(caseForJob, outputDirPath);
                                } catch (CaseActionException ex) {
                                    String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
                                    System.out.println("Error opening case in case directory: " + caseDirPath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;

                            case GENERATE_REPORTS:
                                try {
                                    LOGGER.log(Level.INFO, "Processing 'Generate Reports' command");
                                    System.out.println("Processing 'Generate Reports' command");
                                    Map<String, String> inputs = command.getInputs();

                                    // open the case, if it hasn't already been opened by a previous command
                                    if (caseForJob == null) {
                                        String caseDirPath = inputs.get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                        caseForJob = CommandLineIngestManager.this.openCase(caseDirPath);
                                    }
                                    // get the report profile to run
                                    String reportName = inputs.get(CommandLineCommand.InputType.REPORT_PROFILE_NAME.name());
                                    if (reportName == null) {
                                        // the default report profile name is assigned here (line elided in this listing)
                                    }

                                    // generate reports
                                    // (the progress indicator passed to the ReportGenerator is created here; line elided in this listing)
                                    ReportGenerator generator = new ReportGenerator(reportName, progressIndicator);
                                    generator.generateReports();
                                } catch (CaseActionException ex) {
                                    String caseDirPath = command.getInputs().get(CommandLineCommand.InputType.CASE_FOLDER_PATH.name());
                                    LOGGER.log(Level.SEVERE, "Error opening case in case directory: " + caseDirPath, ex);
                                    System.out.println("Error opening case in case directory: " + caseDirPath);
                                    // Do not process any other commands
                                    return;
                                }
                                break;
                            default:
                                break;
                        }
                    }
                } catch (Throwable ex) {
                    /*
                     * Unexpected runtime exceptions firewall. This task is
                     * designed to be able to be run in an executor service
                     * thread pool without calling get() on the task's
                     * Future<Void>, so this ensures that such errors get
                     * logged.
                     */
                    LOGGER.log(Level.SEVERE, "Unexpected error", ex);
                    System.out.println("Unexpected error. Exiting...");

                } finally {
                    try {
                        Case.closeCurrentCase();
                    } catch (CaseActionException ex) {
                        LOGGER.log(Level.WARNING, "Exception while closing case", ex);
                        System.out.println("Exception while closing case");
                    }
                }

            } finally {
                LOGGER.log(Level.INFO, "Job processing task finished");
                System.out.println("Job processing task finished");

                // shut down Autopsy
                stop();
            }
        }

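        /**
         * Creates a new case with the given name under the given output
         * directory and makes it the current case. Fails if a case with the
         * same name already exists under that directory.
         *
         * @param baseCaseName        The case name.
         * @param rootOutputDirectory The directory that will contain the case
         *                            directory.
         * @param caseType            The case type (single-user or
         *                            multi-user).
         *
         * @throws CaseActionException If there is a problem creating the case.
         */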
        private void openCase(String baseCaseName, String rootOutputDirectory, CaseType caseType) throws CaseActionException {

            LOGGER.log(Level.INFO, "Opening case {0} in directory {1}", new Object[]{baseCaseName, rootOutputDirectory});
            Path caseDirectoryPath = findCaseDirectory(Paths.get(rootOutputDirectory), baseCaseName);
            if (null != caseDirectoryPath) {
                // found an existing case directory for the same case name; the input case name must be unique, so exit
                LOGGER.log(Level.SEVERE, "Case {0} already exists. Case name must be unique. Exiting", baseCaseName);
                throw new CaseActionException("Case " + baseCaseName + " already exists. Case name must be unique. Exiting");
            } else {
                caseDirectoryPath = createCaseFolderPath(Paths.get(rootOutputDirectory), baseCaseName);

                // Create the case directory
                Case.createCaseDirectory(caseDirectoryPath.toString(), Case.CaseType.SINGLE_USER_CASE);

                CaseDetails caseDetails = new CaseDetails(baseCaseName);
                Case.createAsCurrentCase(caseType, caseDirectoryPath.toString(), caseDetails);
            }

            caseForJob = Case.getCurrentCase();
            LOGGER.log(Level.INFO, "Opened case {0}", caseForJob.getName());
        }

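        /**
         * Adds a data source to the case by trying each applicable data source
         * processor, in decreasing order of confidence, until one succeeds.
         *
         * @param caseForJob The case.
         * @param dataSource The data source to add.
         *
         * @throws InterruptedException If the thread is interrupted while
         *                              waiting for a data source processor to
         *                              finish.
         * @throws AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException
         *                              If all data source processors fail to
         *                              process the data source.
         */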
        private void runDataSourceProcessor(Case caseForJob, AutoIngestDataSource dataSource) throws InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException {

            LOGGER.log(Level.INFO, "Adding data source {0} ", dataSource.getPath().toString());

            // Get an ordered list of data source processors to try
            List<AutoIngestDataSourceProcessor> validDataSourceProcessors;
            try {
                validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath());
            } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) {
                LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath());
                // rethrow the exception
                throw ex;
            }

            // check whether we found a data source processor that can process the data source
            if (validDataSourceProcessors.isEmpty()) {
                // This should never happen. We should add all unsupported data sources as logical files.
                LOGGER.log(Level.SEVERE, "Unsupported data source {0}", dataSource.getPath()); // NON-NLS
                return;
            }

            DataSourceProcessorProgressMonitor progressMonitor = new DoNothingDSPProgressMonitor();
            synchronized (ingestLock) {
                // Try each DSP in decreasing order of confidence
                for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) {
                    UUID taskId = UUID.randomUUID();
                    caseForJob.notifyAddingDataSource(taskId);
                    DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock);
                    caseForJob.notifyAddingDataSource(taskId);
                    LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()});
                    selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), progressMonitor, callBack);
                    ingestLock.wait();

                    // at this point we got the content object(s) from the current DSP;
                    // check whether the data source was processed successfully
                    if ((dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS)
                            || dataSource.getContent().isEmpty()) {
                        // move on to the next DSP that can process this data source
                        logDataSourceProcessorResult(dataSource);
                        continue;
                    }

                    logDataSourceProcessorResult(dataSource);
                    return;
                }
                // If we get to this point, none of the processors were successful
                LOGGER.log(Level.SEVERE, "All data source processors failed to process {0}", dataSource.getPath());
                // Throw an exception. It will get caught and handled upstream and will result in AIM auto-pause.
                throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Failed to process " + dataSource.getPath() + " with all data source processors");
            }
        }

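        /**
         * Logs the result code and any error messages reported by the data
         * source processor that handled the data source.
         *
         * @param dataSource The data source.
         */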
        private void logDataSourceProcessorResult(AutoIngestDataSource dataSource) {

            DataSourceProcessorCallback.DataSourceProcessorResult resultCode = dataSource.getResultDataSourceProcessorResultCode();
            if (null != resultCode) {
                switch (resultCode) {
                    case NO_ERRORS:
                        LOGGER.log(Level.INFO, "Added data source to case");
                        if (dataSource.getContent().isEmpty()) {
                            LOGGER.log(Level.SEVERE, "Data source failed to produce content");
                        }
                        break;

                    case NONCRITICAL_ERRORS:
                        for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                            LOGGER.log(Level.WARNING, "Non-critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
                        }
                        LOGGER.log(Level.INFO, "Added data source to case");
                        if (dataSource.getContent().isEmpty()) {
                            LOGGER.log(Level.SEVERE, "Data source failed to produce content");
                        }
                        break;

                    case CRITICAL_ERRORS:
                        for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
                            LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
                        }
                        LOGGER.log(Level.SEVERE, "Failed to add data source to case");
                        break;
                }
            } else {
                LOGGER.log(Level.WARNING, "No result code for data source processor for {0}", dataSource.getPath());
            }
        }

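        /**
         * Runs the ingest modules on the data source, optionally using the
         * ingest profile named on the command line and its associated file
         * filter, and blocks until the ingest job completes or is cancelled.
         *
         * @param dataSource        The data source to analyze.
         * @param ingestProfileName The name of the ingest profile to use; an
         *                          empty string selects the baseline
         *                          configuration.
         *
         * @throws AnalysisStartupException If the ingest job cannot be
         *                                  started.
         * @throws InterruptedException     If the thread is interrupted while
         *                                  waiting for the job to finish.
         */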
        private void analyze(AutoIngestDataSource dataSource, String ingestProfileName) throws AnalysisStartupException, InterruptedException {

            LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", dataSource.getPath());

            // configure the ingest profile and file filter
            IngestProfiles.IngestProfile selectedProfile = null;
            FilesSet selectedFileSet = null;
            if (!ingestProfileName.isEmpty()) {
                selectedProfile = getSelectedProfile(ingestProfileName);
                if (selectedProfile == null) {
                    // unable to find the user-specified profile
                    LOGGER.log(Level.SEVERE, "Unable to find ingest profile: {0}. Ingest cancelled!", ingestProfileName);
                    System.out.println("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                    return;
                }

                // get the FilesSet filter associated with this profile
                selectedFileSet = getSelectedFilter(selectedProfile.getFileIngestFilter());
                if (selectedFileSet == null) {
                    // unable to find the file filter for the user-specified profile
                    LOGGER.log(Level.SEVERE, "Unable to find file filter {0} for ingest profile: {1}. Ingest cancelled!", new Object[]{selectedProfile.getFileIngestFilter(), ingestProfileName});
                    System.out.println("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
                    return;
                }
            }

            IngestJobEventListener ingestJobEventListener = new IngestJobEventListener();
            IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, ingestJobEventListener);
            try {
                synchronized (ingestLock) {
                    IngestJobSettings ingestJobSettings;
                    if (selectedProfile == null || selectedFileSet == null) {
                        // use the baseline configuration
                        // (the execution context string passed to new IngestJobSettings(...) is elided in this listing)
                    } else {
                        // load the custom ingest profile
                        ingestJobSettings = new IngestJobSettings(selectedProfile.toString());
                        ingestJobSettings.setFileFilter(selectedFileSet);
                    }

                    List<String> settingsWarnings = ingestJobSettings.getWarnings();
                    if (settingsWarnings.isEmpty()) {
                        IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
                        IngestJob ingestJob = ingestJobStartResult.getJob();
                        if (null != ingestJob) {
                            /*
                             * Block until notified by the ingest job event
                             * listener or until interrupted because auto ingest
                             * is shutting down.
                             */
                            ingestLock.wait();
                            LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", dataSource.getPath());
                            IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
                            for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) {
                                if (!snapshot.isCancelled()) {
                                    List<String> cancelledModules = snapshot.getCancelledDataSourceIngestModules();
                                    if (!cancelledModules.isEmpty()) {
                                        LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", dataSource.getPath()));
                                        for (String module : snapshot.getCancelledDataSourceIngestModules()) {
                                            LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, dataSource.getPath()));
                                        }
                                    }
                                    LOGGER.log(Level.INFO, "Analysis of data source completed");
                                } else {
                                    LOGGER.log(Level.WARNING, "Analysis of data source cancelled");
                                    IngestJob.CancellationReason cancellationReason = snapshot.getCancellationReason();
                                    if (IngestJob.CancellationReason.NOT_CANCELLED != cancellationReason && IngestJob.CancellationReason.USER_CANCELLED != cancellationReason) {
                                        throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), dataSource.getPath()));
                                    }
                                }
                            }
                        } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) {
                            for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) {
                                LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getPath()), error.getThrowable());
                            }
                            LOGGER.log(Level.SEVERE, "Failed to analyze data source due to ingest job startup error");
                            throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", dataSource.getPath()));
                        } else {
                            LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", dataSource.getPath()), ingestJobStartResult.getStartupException());
                            throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException());
                        }
                    } else {
                        for (String warning : settingsWarnings) {
                            LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{dataSource.getPath(), warning});
                        }
                        LOGGER.log(Level.SEVERE, "Failed to analyze data source due to settings errors");
                        throw new AnalysisStartupException("Error(s) in ingest job settings");
                    }
                }
            } finally {
                IngestManager.getInstance().removeIngestJobEventListener(ingestJobEventListener);
            }
        }

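        /**
         * Looks up an ingest profile by name (case-insensitive).
         *
         * @param ingestProfileName The profile name.
         *
         * @return The matching profile, or null if no profile has that name.
         */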
        private IngestProfiles.IngestProfile getSelectedProfile(String ingestProfileName) {

            IngestProfiles.IngestProfile selectedProfile = null;
            // look up the profile by name
            for (IngestProfiles.IngestProfile profile : IngestProfiles.getIngestProfiles()) {
                if (profile.toString().equalsIgnoreCase(ingestProfileName)) {
                    // found the profile
                    selectedProfile = profile;
                    break;
                }
            }
            return selectedProfile;
        }

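        /**
         * Looks up a file ingest filter (FilesSet) by name.
         *
         * @param filterName The filter name.
         *
         * @return The matching filter, or null if it cannot be found.
         */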
        private FilesSet getSelectedFilter(String filterName) {
            try {
                Map<String, FilesSet> fileIngestFilters = FilesSetsManager.getInstance()
                        .getCustomFileIngestFilters();
                for (FilesSet fSet : FilesSetsManager.getStandardFileIngestFilters()) {
                    fileIngestFilters.put(fSet.getName(), fSet);
                }
                return fileIngestFilters.get(filterName);
            } catch (FilesSetsManager.FilesSetsManagerException ex) {
                LOGGER.log(Level.SEVERE, "Failed to get file ingest filter: " + filterName, ex); //NON-NLS
                return null;
            }
        }

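        /**
         * Builds a case folder path by appending a timestamp to the case name
         * under the given base directory.
         *
         * @param caseFoldersPath The base directory for case folders.
         * @param caseName        The case name.
         *
         * @return The case folder path.
         */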
        private Path createCaseFolderPath(Path caseFoldersPath, String caseName) {
            String folderName = caseName + "_" + TimeStampUtils.createTimeStamp();
            return Paths.get(caseFoldersPath.toString(), folderName);
        }

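        /**
         * Searches a directory for the most recently modified case folder for
         * the given case name.
         *
         * @param folderToSearch The directory to search.
         * @param caseName       The case name.
         *
         * @return The case folder path, or null if no matching folder exists.
         */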
        private Path findCaseDirectory(Path folderToSearch, String caseName) {
            File searchFolder = new File(folderToSearch.toString());
            if (!searchFolder.isDirectory()) {
                return null;
            }
            Path caseFolderPath = null;
            String[] candidateFolders = searchFolder.list(new CaseFolderFilter(caseName));
            long mostRecentModified = 0;
            for (String candidateFolder : candidateFolders) {
                File file = new File(candidateFolder);
                if (file.lastModified() >= mostRecentModified) {
                    mostRecentModified = file.lastModified();
                    caseFolderPath = Paths.get(folderToSearch.toString(), file.getPath());
                }
            }
            return caseFolderPath;
        }

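        /**
         * Returns the path of the "Command Output" directory inside the case
         * directory, where command line output files are written.
         *
         * @param caseForJob The case.
         *
         * @return The output directory path.
         */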
        private String getOutputDirPath(Case caseForJob) {
            return caseForJob.getCaseDirectory() + File.separator + LOG_DIR_NAME;
        }

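        /**
         * A listener for ingest job completion and cancellation events. It
         * notifies the waiting job processing task via the ingest lock so
         * that ingest can be run synchronously.
         */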
        private class IngestJobEventListener implements PropertyChangeListener {

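            /**
             * Wakes up the job processing task when a local ingest job
             * COMPLETED or CANCELLED event is received.
             *
             * @param event The ingest job event.
             */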
            @Override
            public void propertyChange(PropertyChangeEvent event) {
                if (AutopsyEvent.SourceType.LOCAL == ((AutopsyEvent) event).getSourceType()) {
                    String eventType = event.getPropertyName();
                    if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
                        synchronized (ingestLock) {
                            ingestLock.notify();
                        }
                    }
                }
            }
        };

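        /**
         * A no-op data source processor progress monitor; the command line
         * ingest manager does not report data source processor progress.
         */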
        private class DoNothingDSPProgressMonitor implements DataSourceProcessorProgressMonitor {

            @Override
            public void setIndeterminate(final boolean indeterminate) {
            }

            @Override
            public void setProgress(final int progress) {
            }

            @Override
            public void setProgressText(final String text) {
            }
        }

        private final class AnalysisStartupException extends Exception {

            private static final long serialVersionUID = 1L;

            private AnalysisStartupException(String message) {
                super(message);
            }

            private AnalysisStartupException(String message, Throwable cause) {
                super(message, cause);
            }
        }
    }

    private static class CaseFolderFilter implements FilenameFilter {

        private final String caseName;
        private final static String CASE_METADATA_EXT = CaseMetadata.getFileExtension();

        CaseFolderFilter(String caseName) {
            this.caseName = caseName;
        }

        @Override
        public boolean accept(File folder, String fileName) {
            File file = new File(folder, fileName);
            if (fileName.length() > TimeStampUtils.getTimeStampLength() && file.isDirectory()) {
                if (TimeStampUtils.endsWithTimeStamp(fileName)) {
                    if (null != caseName) {
                        String fileNamePrefix = fileName.substring(0, fileName.length() - TimeStampUtils.getTimeStampLength());
                        if (fileNamePrefix.equals(caseName)) {
                            return hasCaseMetadataFile(file);
                        }
                    } else {
                        return hasCaseMetadataFile(file);
                    }
                }
            }
            return false;
        }

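        /**
         * Checks whether a folder contains a case metadata file.
         *
         * @param folder The folder to check.
         *
         * @return True if a case metadata file is found, false otherwise.
         */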
        private static boolean hasCaseMetadataFile(File folder) {
            for (File file : folder.listFiles()) {
                if (file.getName().toLowerCase().endsWith(CASE_METADATA_EXT) && file.isFile()) {
                    return true;
                }
            }
            return false;
        }
    }

}