Autopsy 4.22.1
Graphical digital forensics platform for The Sleuth Kit and other tools.
CommandLineIngestManager.java
Go to the documentation of this file.
1/*
2 * Autopsy Forensic Browser
3 *
4 * Copyright 2019-2022 Basis Technology Corp.
5 * Contact: carrier <at> sleuthkit <dot> org
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 */
19package org.sleuthkit.autopsy.commandlineingest;
20
21import com.google.gson.GsonBuilder;
22import java.beans.PropertyChangeEvent;
23import java.beans.PropertyChangeListener;
24import java.nio.file.Paths;
25import java.util.ArrayList;
26import java.util.Arrays;
27import java.util.List;
28import java.util.UUID;
29import java.util.Collection;
30import java.util.EnumSet;
31import java.util.Iterator;
32import java.util.Map;
33import java.util.Set;
34import java.util.logging.Level;
35import org.netbeans.spi.sendopts.OptionProcessor;
36import org.openide.LifecycleManager;
37import org.openide.util.Lookup;
38import org.sleuthkit.autopsy.casemodule.Case;
39import org.sleuthkit.autopsy.casemodule.Case.CaseType;
40import org.sleuthkit.autopsy.casemodule.CaseActionException;
41import org.sleuthkit.autopsy.core.RuntimeProperties;
42import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
43import static org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS;
44import static org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback.DataSourceProcessorResult.NO_ERRORS;
45import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
46import org.sleuthkit.autopsy.coreutils.Logger;
47import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
48import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSource;
49import org.sleuthkit.autopsy.datasourceprocessors.AddDataSourceCallback;
50import org.sleuthkit.autopsy.datasourceprocessors.DataSourceProcessorUtility;
51import org.sleuthkit.autopsy.events.AutopsyEvent;
52import org.sleuthkit.autopsy.ingest.IngestJob;
53import org.sleuthkit.autopsy.ingest.IngestJobSettings;
54import org.sleuthkit.autopsy.ingest.IngestJobStartResult;
55import org.sleuthkit.autopsy.ingest.IngestManager;
56import org.sleuthkit.autopsy.ingest.IngestModuleError;
57import org.sleuthkit.autopsy.ingest.IngestProfiles;
58import org.sleuthkit.autopsy.ingest.IngestProfiles.IngestProfile;
59import org.sleuthkit.autopsy.ingest.profile.IngestProfilePaths;
60import org.sleuthkit.autopsy.modules.interestingitems.FilesSet;
61import org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager;
62import org.sleuthkit.autopsy.report.infrastructure.ReportGenerator;
63import org.sleuthkit.autopsy.report.infrastructure.ReportProgressIndicator;
64import org.sleuthkit.datamodel.Content;
65import org.sleuthkit.datamodel.TskCoreException;
66
// Runs ingest from the command line: creates/opens cases, adds data sources,
// runs ingest jobs, and generates reports without the desktop GUI.
// NOTE(review): this doxygen-extracted listing elides several original source
// lines; the dataSource field declaration (orig. line 75/77) and the
// constructor signature that the brace on orig. line 84 closes are missing.
72public class CommandLineIngestManager extends CommandLineManager {
73
// Class-scoped logger for all command line ingest activity.
74 private static final Logger LOGGER = Logger.getLogger(CommandLineIngestManager.class.getName());
// Case opened/created by the first command; reused by subsequent commands.
76 private Case caseForJob = null;
78
// Process exit codes: 0 success, negative values distinguish failure modes.
79 static final int CL_SUCCESS = 0;
80 static final int CL_RUN_FAILURE = -1;
81 static final int CL_PROCESS_FAILURE = -2;
82
// NOTE(review): constructor signature (orig. line 83) elided by extraction.
84 }
85
86 public void start() {
87 new Thread(new JobProcessingTask()).start();
88 }
89
90 void stop() {
91 stop(CL_SUCCESS);
92 }
93
94 void stop(int errorCode) {
95 try {
96 // close current case if there is one open
97 Case.closeCurrentCase();
98 } catch (CaseActionException ex) {
99 LOGGER.log(Level.WARNING, "Unable to close the case while shutting down command line ingest manager", ex); //NON-NLS
100 }
101
102 // shut down Autopsy
103 if (errorCode == CL_SUCCESS) {
104 LifecycleManager.getDefault().exit();
105 } else {
106 LifecycleManager.getDefault().exit(errorCode);
107 }
108 }
109
// Worker task that executes the queued command line commands.
// NOTE(review): the constructor signature (orig. line 114), the statement that
// disables the desktop GUI runtime property (orig. line 117, presumably
// RuntimeProperties.setRunningWithGUI(false) — confirm), and the catch clause
// header (orig. line 119) were elided by extraction.
110 private final class JobProcessingTask implements Runnable {
111
// Monitor used to block until data source processing / ingest completes.
112 private final Object ingestLock;
113
115 ingestLock = new Object();
116 try {
118 LOGGER.log(Level.INFO, "Set running with desktop GUI runtime property to false");
120 LOGGER.log(Level.SEVERE, "Failed to set running with desktop GUI runtime property to false", ex);
121 }
122 }
123
// Executes all queued CommandLineCommands in order: CREATE_CASE,
// ADD_DATA_SOURCE, RUN_INGEST, LIST_ALL_DATA_SOURCES, GENERATE_REPORTS,
// LIST_ALL_INGEST_PROFILES. Stops at the first failing command and shuts
// Autopsy down with the accumulated error code.
// NOTE(review): this doxygen listing elides several original lines (e.g.
// 168, 203, 207, 319, 323, 371), so some statements and catch headers below
// appear truncated.
128 @Override
129 public void run() {
130 LOGGER.log(Level.INFO, "Job processing task started");
131 int errorCode = CL_SUCCESS;
132
133 try {
134 // read command line inputs
135 LOGGER.log(Level.INFO, "Autopsy is running from command line"); //NON-NLS
136 List<CommandLineCommand> commands = null;
137
138 // first look up all OptionProcessors and get input data from CommandLineOptionProcessor
139 Collection<? extends OptionProcessor> optionProcessors = Lookup.getDefault().lookupAll(OptionProcessor.class);
140 Iterator<? extends OptionProcessor> optionsIterator = optionProcessors.iterator();
141 while (optionsIterator.hasNext()) {
142 // find CommandLineOptionProcessor
143 OptionProcessor processor = optionsIterator.next();
144 if (processor instanceof CommandLineOptionProcessor) {
145 // check if we are running from command line
146 commands = ((CommandLineOptionProcessor) processor).getCommands();
147 }
148 }
149 try {
150 if (commands == null || commands.isEmpty()) {
151 LOGGER.log(Level.SEVERE, "No command line commands specified");
152 System.out.println("No command line commands specified");
153 errorCode = CL_RUN_FAILURE;
154 return;
155 }
156
157 // Commands are already stored in order in which they should be executed
158 for (CommandLineCommand command : commands) {
159 CommandLineCommand.CommandType type = command.getType();
160 switch (type) {
161 case CREATE_CASE:
162 try {
163 LOGGER.log(Level.INFO, "Processing 'Create Case' command");
164 System.out.println("Processing 'Create Case' command");
165 Map<String, String> inputs = command.getInputs();
166 String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
167 String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
// NOTE(review): the caseType declaration (orig. line 168, presumably
// CaseType caseType = CaseType.SINGLE_USER_CASE — confirm) was elided.
169 String caseTypeString = inputs.get(CommandLineCommand.InputType.CASE_TYPE.name());
170 if (caseTypeString != null && caseTypeString.equalsIgnoreCase(CommandLineOptionProcessor.CASETYPE_MULTI)) {
171 caseType = CaseType.MULTI_USER_CASE;
172 }
173 caseForJob = createCase(baseCaseName, rootOutputDirectory, caseType);
174
175 String outputDirPath = getOutputDirPath(caseForJob);
176 OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
177 } catch (CaseActionException ex) {
178 String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
179 LOGGER.log(Level.SEVERE, "Error creating or opening case " + baseCaseName, ex);
180 System.out.println("Error creating or opening case " + baseCaseName);
181 // Do not process any other commands
182 errorCode = CL_RUN_FAILURE;
183 return;
184 }
185 break;
186 case ADD_DATA_SOURCE:
187 try {
188 LOGGER.log(Level.INFO, "Processing 'Add Data Source' command");
189 System.out.println("Processing 'Add Data Source' command");
190 Map<String, String> inputs = command.getInputs();
191
192 // open the case, if it hasn't been already opened by CREATE_CASE command
193 if (caseForJob == null) {
194 // find case output directory by name and open the case
195 String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
196 String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
197 caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
198 }
199
200 String dataSourcePath = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
201 String password = inputs.get(CommandLineCommand.InputType.BITLOCKER_KEY.name());
202 dataSource = new AutoIngestDataSource(UUID.randomUUID().toString(), Paths.get(dataSourcePath), password);
// NOTE(review): the data source processing call (orig. line 203) was elided.
204
205 String outputDirPath = getOutputDirPath(caseForJob);
206 OutputGenerator.saveAddDataSourceOutput(caseForJob, dataSource, outputDirPath);
// NOTE(review): the catch clause header (orig. line 207) was elided; the
// lines below are the body of that catch block.
208 String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
209 LOGGER.log(Level.SEVERE, "Error adding data source " + dataSourcePath, ex);
210 System.out.println("Error adding data source " + dataSourcePath);
211 // Do not process any other commands
212 errorCode = CL_RUN_FAILURE;
213 return;
214 }
215 break;
216 case RUN_INGEST:
217 try {
218 LOGGER.log(Level.INFO, "Processing 'Run Ingest' command");
219 System.out.println("Processing 'Run Ingest' command");
220 Map<String, String> inputs = command.getInputs();
221
222 // open the case, if it hasn't been already opened by CREATE_CASE command
223 if (caseForJob == null) {
224 // find case output directory by name and open the case
225 String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
226 String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
227 caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
228 }
229
230 // populate the AutoIngestDataSource structure, if that hasn't been done by ADD_DATA_SOURCE command
231 if (dataSource == null) {
232
233 String dataSourceId = inputs.get(CommandLineCommand.InputType.DATA_SOURCE_ID.name());
234 Long dataSourceObjId = Long.valueOf(dataSourceId);
235
236 // get Content object for the data source
237 Content content = null;
238 try {
239 content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(dataSourceObjId);
240 } catch (TskCoreException ex) {
241 LOGGER.log(Level.SEVERE, "Exception while trying to find data source with object ID " + dataSourceId, ex);
242 System.out.println("Exception while trying to find data source with object ID " + dataSourceId);
243 // Do not process any other commands
244 errorCode = CL_RUN_FAILURE;
245 return;
246 }
247
248 if (content == null) {
249 LOGGER.log(Level.SEVERE, "Unable to find data source with object ID {0}", dataSourceId);
250 System.out.println("Unable to find data source with object ID " + dataSourceId);
251 // Do not process any other commands
// NOTE(review): unlike the other failure paths, errorCode is NOT set to
// CL_RUN_FAILURE before this return, so the process exits with CL_SUCCESS
// despite the failure — looks like a bug; confirm and fix upstream.
252 return;
253 }
254
255 // populate the AutoIngestDataSource structure
256 dataSource = new AutoIngestDataSource("", Paths.get(content.getName()));
257 List<Content> contentList = Arrays.asList(new Content[]{content});
258 List<String> errorList = new ArrayList<>();
259 dataSource.setDataSourceProcessorOutput(NO_ERRORS, errorList, contentList);
260 }
261
262 // run ingest
263 String ingestProfile = inputs.get(CommandLineCommand.InputType.INGEST_PROFILE_NAME.name());
264 analyze(dataSource, ingestProfile);
265 } catch (InterruptedException | CaseActionException | AnalysisStartupException ex) {
266 String dataSourcePath = command.getInputs().get(CommandLineCommand.InputType.DATA_SOURCE_PATH.name());
267 LOGGER.log(Level.SEVERE, "Error running ingest on data source " + dataSourcePath, ex);
268 System.out.println("Error running ingest on data source " + dataSourcePath);
269 // Do not process any other commands
270 errorCode = CL_RUN_FAILURE;
271 return;
272 }
273 break;
274
275 case LIST_ALL_DATA_SOURCES:
276 try {
277 LOGGER.log(Level.INFO, "Processing 'List All Data Sources' command");
278 System.out.println("Processing 'List All Data Sources' command");
279 Map<String, String> inputs = command.getInputs();
280
281 // open the case, if it hasn't been already opened by CREATE_CASE command
282 if (caseForJob == null) {
283 // find case output directory by name and open the case
284 String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
285 String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
286 caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
287 }
288
289 String outputDirPath = getOutputDirPath(caseForJob);
290 OutputGenerator.listAllDataSources(caseForJob, outputDirPath);
291 } catch (CaseActionException ex) {
292 String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
293 String rootOutputDirectory = command.getInputs().get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
294 String msg = "Error opening case " + baseCaseName + " in directory: " + rootOutputDirectory;
295 LOGGER.log(Level.SEVERE, msg, ex);
296 System.out.println(msg);
297 errorCode = CL_RUN_FAILURE;
298 // Do not process any other commands
299 return;
300 }
301 break;
302
303 case GENERATE_REPORTS:
304 try {
305 LOGGER.log(Level.INFO, "Processing 'Generate Reports' command");
306 System.out.println("Processing 'Generate Reports' command");
307 Map<String, String> inputs = command.getInputs();
308
309 // open the case, if it hasn't been already opened by CREATE_CASE command
310 if (caseForJob == null) {
311 // find case output directory by name and open the case
312 String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
313 String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
314 caseForJob = openExistingCase(baseCaseName, rootOutputDirectory);
315 }
316 // generate reports
317 String reportName = inputs.get(CommandLineCommand.InputType.REPORT_PROFILE_NAME.name());
318 if (reportName == null) {
// NOTE(review): the default report-name assignment (orig. line 319) was
// elided, as were the progress indicator declaration/creation lines
// (orig. lines 323) referenced by progressIndicator below.
320 }
321
322 // generate reports
324 ReportGenerator generator = new ReportGenerator(reportName, progressIndicator);
325 generator.generateReports();
326 } catch (CaseActionException ex) {
327 String baseCaseName = command.getInputs().get(CommandLineCommand.InputType.CASE_NAME.name());
328 String rootOutputDirectory = command.getInputs().get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
329 String msg = "Error opening case " + baseCaseName + " in directory: " + rootOutputDirectory;
330 LOGGER.log(Level.SEVERE, msg, ex);
331 System.out.println(msg);
332 errorCode = CL_RUN_FAILURE;
333 // Do not process any other commands
334 return;
335 } catch (Exception ex) {
336 String msg = "An exception occurred while generating report: " + ex.getMessage();
337 LOGGER.log(Level.WARNING, msg, ex);
338 System.out.println(msg);
339 errorCode = CL_RUN_FAILURE;
340 // Do not process any other commands
341 return;
342 }
343 break;
344 case LIST_ALL_INGEST_PROFILES:
// Serialize each configured ingest profile to JSON on stdout.
345 List<IngestProfile> profiles = IngestProfiles.getIngestProfiles();
346 GsonBuilder gb = new GsonBuilder();
347 System.out.println("Listing ingest profiles");
348 for (IngestProfile profile : profiles) {
349 String jsonText = gb.create().toJson(profile);
350 System.out.println(jsonText);
351 }
352 System.out.println("Ingest profile list complete");
353 break;
354 default:
355 break;
356 }
357 }
358 } catch (Throwable ex) {
359 /*
360 * Unexpected runtime exceptions firewall. This task is
361 * designed to be able to be run in an executor service
362 * thread pool without calling get() on the task's
363 * Future<Void>, so this ensures that such errors get
364 * logged.
365 */
366 LOGGER.log(Level.SEVERE, "Unexpected error", ex);
367 System.out.println("Unexpected error. Exiting...");
368 errorCode = CL_RUN_FAILURE;
369 } finally {
370 try {
// NOTE(review): the case-closing call (orig. line 371) was elided here.
372 } catch (CaseActionException ex) {
373 LOGGER.log(Level.WARNING, "Exception while closing case", ex);
374 System.out.println("Exception while closing case");
375 }
376 }
377
378 } finally {
379 LOGGER.log(Level.INFO, "Job processing task finished");
380 System.out.println("Job processing task finished");
381
382 // shut down Autopsy
383 stop(errorCode);
384 }
385 }
386
// Adds the data source to the case by trying each applicable data source
// processor (DSP) in decreasing order of confidence until one succeeds.
// NOTE(review): the method signature and local setup (orig. lines 387-403,
// presumably declaring progressMonitor and callBack used below) were elided
// by extraction, as was the catch clause header at orig. line 411.
404
405 LOGGER.log(Level.INFO, "Adding data source {0} ", dataSource.getPath().toString());
406
407 // Get an ordered list of data source processors to try
408 List<AutoIngestDataSourceProcessor> validDataSourceProcessors;
409 try {
410 validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath(), dataSource.getPassword());
412 LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath());
413 // rethrow the exception.
414 throw ex;
415 }
416
417 // did we find a data source processor that can process the data source
418 if (validDataSourceProcessors.isEmpty()) {
419 // This should never happen. We should add all unsupported data sources as logical files.
420 LOGGER.log(Level.SEVERE, "Unsupported data source {0}", dataSource.getPath()); // NON-NLS
421 return;
422 }
423
425 synchronized (ingestLock) {
426 // Try each DSP in decreasing order of confidence
427 for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) {
428 UUID taskId = UUID.randomUUID();
429 caseForJob.notifyAddingDataSource(taskId);
// NOTE(review): orig. line 430 was elided; the repeated notify call below is
// likely an artifact of the missing intervening line(s) — verify against the
// real source before assuming it executes twice.
431 caseForJob.notifyAddingDataSource(taskId);
432 LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()});
433 selectedProcessor.process(dataSource.getDeviceId(), dataSource.getPath(), dataSource.getPassword(), null, progressMonitor, callBack);
// Block until the DSP callback notifies ingestLock with the results.
434 ingestLock.wait();
435
436 // at this point we got the content object(s) from the current DSP.
437 // check whether the data source was processed successfully
438 if ((dataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS)
439 || dataSource.getContent().isEmpty()) {
440 // move onto the next DSP that can process this data source
442 continue;
443 }
444
446 return;
447 }
448 // If we get to this point, none of the processors were successful
449 LOGGER.log(Level.SEVERE, "All data source processors failed to process {0}", dataSource.getPath());
450 // Throw an exception. It will get caught & handled upstream and will result in AIM auto-pause.
451 throw new AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException("Failed to process " + dataSource.getPath() + " with all data source processors");
452 }
453 }
454
// Logs the outcome of data source processing based on the DSP result code.
// NOTE(review): the method signature (orig. lines 454-461) was elided by
// extraction; presumably it takes the AutoIngestDataSource — confirm.
462
463 DataSourceProcessorCallback.DataSourceProcessorResult resultCode = dataSource.getResultDataSourceProcessorResultCode();
464 if (null != resultCode) {
465 switch (resultCode) {
466 case NO_ERRORS:
467 LOGGER.log(Level.INFO, "Added data source to case");
// A success result with no produced content is still treated as an error.
468 if (dataSource.getContent().isEmpty()) {
469 LOGGER.log(Level.SEVERE, "Data source failed to produce content");
470 }
471 break;
472
473 case NONCRITICAL_ERRORS:
474 for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
475 LOGGER.log(Level.WARNING, "Non-critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
476 }
477 LOGGER.log(Level.INFO, "Added data source to case");
478 if (dataSource.getContent().isEmpty()) {
479 LOGGER.log(Level.SEVERE, "Data source failed to produce content");
480 }
481 break;
482
483 case CRITICAL_ERRORS:
484 for (String errorMessage : dataSource.getDataSourceProcessorErrorMessages()) {
485 LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{dataSource.getPath(), errorMessage});
486 }
487 LOGGER.log(Level.SEVERE, "Failed to add data source to case");
488 break;
489 }
490 } else {
// No result code at all means the DSP never reported back.
491 LOGGER.log(Level.WARNING, "No result code for data source processor for {0}", dataSource.getPath());
492 }
493 }
494
// Runs ingest modules on the data source's content, optionally using a named
// ingest profile (and its associated file filter), and blocks until the
// ingest job completes or fails to start.
// NOTE(review): several original lines were elided by extraction: the listener
// registration after orig. line 538 (presumably
// IngestManager.getInstance().addIngestJobEventListener(...)), the baseline
// ingestJobSettings assignment at orig. line 545, the wait-loop condition at
// orig. line 565 (presumably while the ingest job is still running), and the
// listener removal in the finally block at orig. line 607.
512 private void analyze(AutoIngestDataSource dataSource, String ingestProfileName) throws AnalysisStartupException, InterruptedException {
513
514 LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", dataSource.getPath());
515
516 // configure ingest profile and file filter
517 IngestProfiles.IngestProfile selectedProfile = null;
518 FilesSet selectedFileSet = null;
519 if (!ingestProfileName.isEmpty()) {
520 selectedProfile = getSelectedProfile(ingestProfileName);
521 if (selectedProfile == null) {
522 // unable to find the user specified profile
523 LOGGER.log(Level.SEVERE, "Unable to find ingest profile: {0}. Ingest cancelled!", ingestProfileName);
524 System.out.println("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
525 throw new AnalysisStartupException("Unable to find ingest profile: " + ingestProfileName + ". Ingest cancelled!");
526 }
527
528 // get FileSet filter associated with this profile
529 selectedFileSet = getSelectedFilter(selectedProfile.getFileIngestFilter());
530 if (selectedFileSet == null) {
531 // unable to find the filter named by the profile
532 LOGGER.log(Level.SEVERE, "Unable to find file filter {0} for ingest profile: {1}. Ingest cancelled!", new Object[]{selectedProfile.getFileIngestFilter(), ingestProfileName});
533 System.out.println("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
534 throw new AnalysisStartupException("Unable to find file filter " + selectedProfile.getFileIngestFilter() + " for ingest profile: " + ingestProfileName + ". Ingest cancelled!");
535 }
536 }
537
538 IngestJobEventListener ingestJobEventListener = new IngestJobEventListener();
540 try {
541 synchronized (ingestLock) {
542 IngestJobSettings ingestJobSettings;
543 if (selectedProfile == null || selectedFileSet == null) {
544 // use baseline configuration
546 } else {
547 // load the custom ingest
548 ingestJobSettings = new IngestJobSettings(IngestProfilePaths.getInstance().getIngestProfilePrefix() + selectedProfile.toString());
549 ingestJobSettings.setFileFilter(selectedFileSet);
550 }
551
552 List<String> settingsWarnings = ingestJobSettings.getWarnings();
553 if (settingsWarnings.isEmpty()) {
554 IngestJobStartResult ingestJobStartResult = IngestManager.getInstance().beginIngestJob(dataSource.getContent(), ingestJobSettings);
555 IngestJob ingestJob = ingestJobStartResult.getJob();
556 if (null != ingestJob) {
557 /*
558 * Block until notified by the ingest job event
559 * listener or until interrupted because auto ingest
560 * is shutting down. For very small jobs, it is
561 * possible that ingest has completed by the time we
562 * get here, so check periodically in case the event
563 * was missed.
564 */
566 ingestLock.wait(60000); // Check every minute
567 }
568
569 LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", dataSource.getPath());
570 IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
571 IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot = jobSnapshot.getDataSourceProcessingSnapshot();
572 if (!snapshot.isCancelled()) {
573 List<String> cancelledModules = snapshot.getCancelledDataSourceIngestModules();
574 if (!cancelledModules.isEmpty()) {
575 LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", dataSource.getPath()));
576 for (String module : snapshot.getCancelledDataSourceIngestModules()) {
577 LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, dataSource.getPath()));
578 }
579 }
580 LOGGER.log(Level.INFO, "Analysis of data source completed");
581 } else {
582 LOGGER.log(Level.WARNING, "Analysis of data source cancelled");
583 IngestJob.CancellationReason cancellationReason = snapshot.getCancellationReason();
// Only cancellations that were neither "not cancelled" nor user-initiated
// are treated as startup failures.
584 if (IngestJob.CancellationReason.NOT_CANCELLED != cancellationReason && IngestJob.CancellationReason.USER_CANCELLED != cancellationReason) {
585 throw new AnalysisStartupException(String.format("Analysis cancelled due to %s for %s", cancellationReason.getDisplayName(), dataSource.getPath()));
586 }
587 }
588 } else if (!ingestJobStartResult.getModuleErrors().isEmpty()) {
589 for (IngestModuleError error : ingestJobStartResult.getModuleErrors()) {
590 LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getPath()), error.getThrowable());
591 }
592 LOGGER.log(Level.SEVERE, "Failed to analyze data source due to ingest job startup error");
593 throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", dataSource.getPath()));
594 } else {
595 LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", dataSource.getPath()), ingestJobStartResult.getStartupException());
596 throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException());
597 }
598 } else {
599 for (String warning : settingsWarnings) {
600 LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{dataSource.getPath(), warning});
601 }
602 LOGGER.log(Level.SEVERE, "Failed to analyze data source due to settings errors");
603 throw new AnalysisStartupException("Error(s) in ingest job settings");
604 }
605 }
606 } finally {
608 }
609 }
610
619 private IngestProfiles.IngestProfile getSelectedProfile(String ingestProfileName) {
620
621 IngestProfiles.IngestProfile selectedProfile = null;
622 // lookup the profile by name
624 if (profile.toString().equalsIgnoreCase(ingestProfileName)) {
625 // found the profile
626 selectedProfile = profile;
627 break;
628 }
629 }
630 return selectedProfile;
631 }
632
// Resolves a file ingest filter (FilesSet) by name, merging the standard
// filters into the user-defined ones before the lookup.
// NOTE(review): orig. lines 644-645 (completion of the fileIngestFilters
// initialization and the for-loop header producing fSet) and orig. line 649
// (the catch clause header) were elided by extraction.
641 private FilesSet getSelectedFilter(String filterName) {
642 try {
643 Map<String, FilesSet> fileIngestFilters = FilesSetsManager.getInstance()
646 fileIngestFilters.put(fSet.getName(), fSet);
647 }
// Returns null when no filter with the given name exists.
648 return fileIngestFilters.get(filterName);
650 LOGGER.log(Level.SEVERE, "Failed to get file ingest filter: " + filterName, ex); //NON-NLS
651 return null;
652 }
653 }
654
664 private class IngestJobEventListener implements PropertyChangeListener {
665
673 @Override
674 public void propertyChange(PropertyChangeEvent event) {
675 if (AutopsyEvent.SourceType.LOCAL == ((AutopsyEvent) event).getSourceType()) {
676 String eventType = event.getPropertyName();
677 if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
678 synchronized (ingestLock) {
679 ingestLock.notify();
680 }
681 }
682 }
683 }
684 };
685
// No-op progress monitor callbacks: command line mode has no UI to update.
// NOTE(review): the enclosing class declaration (orig. lines 685-697,
// presumably implementing DataSourceProcessorProgressMonitor) was elided by
// extraction; only its three empty callback methods are visible here.
692
698 @Override
699 public void setIndeterminate(final boolean indeterminate) {
700 }
701
707 @Override
708 public void setProgress(final int progress) {
709 }
710
716 @Override
717 public void setProgressText(final String text) {
718 }
719 }
720
726 private final class AnalysisStartupException extends Exception {
727
728 private static final long serialVersionUID = 1L;
729
730 private AnalysisStartupException(String message) {
731 super(message);
732 }
733
734 private AnalysisStartupException(String message, Throwable cause) {
735 super(message, cause);
736 }
737 }
738 }
739}
void analyze(AutoIngestDataSource dataSource, String ingestProfileName)
static final Set< IngestManager.IngestJobEvent > INGEST_JOB_EVENTS_OF_INTEREST
static synchronized void setRunningWithGUI(boolean runningWithGUI)
synchronized static Logger getLogger(String name)
Definition Logger.java:124
static List< AutoIngestDataSourceProcessor > getOrderedListOfDataSourceProcessors(Path dataSourcePath)
IngestManager.IngestManagerException getStartupException()
static synchronized IngestManager getInstance()
IngestJobStartResult beginIngestJob(Collection< Content > dataSources, IngestJobSettings settings)
void removeIngestJobEventListener(final PropertyChangeListener listener)
void addIngestJobEventListener(final PropertyChangeListener listener)
static synchronized List< IngestProfile > getIngestProfiles()

Copyright © 2012-2024 Sleuth Kit Labs. Generated on:
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.