Autopsy  4.14.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
HashDbIngestModule.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011-2018 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.hashdatabase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskException;

@Messages({
    "HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash set.",
    "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn=Notable file search will not be executed.",
    "HashDbIngestModule.noKnownHashDbSetMsg=No known hash set.",
    "HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed."
})
public class HashDbIngestModule implements FileIngestModule {

    private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName());
    private static final int MAX_COMMENT_SIZE = 500;
    private final IngestServices services = IngestServices.getInstance();
    private final SleuthkitCase skCase;
    private final HashDbManager hashDbManager = HashDbManager.getInstance();
    private final HashLookupModuleSettings settings;
    private final List<HashDb> knownBadHashSets = new ArrayList<>();
    private final List<HashDb> knownHashSets = new ArrayList<>();
    private long jobId;
    private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>();
    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
    private Blackboard blackboard;

    private static class IngestJobTotals {

        private final AtomicLong totalKnownBadCount = new AtomicLong(0);
        private final AtomicLong totalCalctime = new AtomicLong(0);
        private final AtomicLong totalLookuptime = new AtomicLong(0);
    }

    private static synchronized IngestJobTotals getTotalsForIngestJobs(long ingestJobId) {
        IngestJobTotals totals = totalsForIngestJobs.get(ingestJobId);
        if (totals == null) {
            totals = new HashDbIngestModule.IngestJobTotals();
            totalsForIngestJobs.put(ingestJobId, totals);
        }
        return totals;
    }

    HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException {
        this.settings = settings;
        skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
    }

    @Override
    public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException {
        jobId = context.getJobId();
        if (!hashDbManager.verifyAllDatabasesLoadedCorrectly()) {
            throw new IngestModuleException("Could not load all hash sets");
        }
        updateEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets);
        updateEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets);

        if (refCounter.incrementAndGet(jobId) == 1) {
            // initialize job totals
            getTotalsForIngestJobs(jobId);

            // if this is the first module instance for the job, post warning messages if needed
            if (knownBadHashSets.isEmpty()) {
                services.postMessage(IngestMessage.createWarningMessage(
                        HashLookupModuleFactory.getModuleName(),
                        Bundle.HashDbIngestModule_noKnownBadHashDbSetMsg(),
                        Bundle.HashDbIngestModule_knownBadFileSearchWillNotExecuteWarn()));
            }

            if (knownHashSets.isEmpty()) {
                services.postMessage(IngestMessage.createWarningMessage(
                        HashLookupModuleFactory.getModuleName(),
                        Bundle.HashDbIngestModule_noKnownHashDbSetMsg(),
                        Bundle.HashDbIngestModule_knownFileSearchWillNotExecuteWarn()));
            }
        }
    }

    private void updateEnabledHashSets(List<HashDb> allHashSets, List<HashDb> enabledHashSets) {
        enabledHashSets.clear();
        for (HashDb db : allHashSets) {
            if (settings.isHashSetEnabled(db)) {
                try {
                    if (db.isValid()) {
                        enabledHashSets.add(db);
                    }
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Error getting index status for " + db.getDisplayName() + " hash set", ex); //NON-NLS
                }
            }
        }
    }

    @Messages({
        "# {0} - File name",
        "HashDbIngestModule.dialogTitle.errorFindingArtifacts=Error Finding Artifacts: {0}",
        "# {0} - File name",
        "HashDbIngestModule.errorMessage.lookingForFileArtifacts=Error encountered while looking for existing artifacts for {0}."
    })
    @Override
    public ProcessResult process(AbstractFile file) {
        try {
            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
        } catch (NoCurrentCaseException ex) {
            logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
            return ProcessResult.ERROR;
        }

        // Skip unallocated space files.
        if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
                || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
            return ProcessResult.OK;
        }

        /*
         * Skip directories. One reason for this is because we won't accurately
         * calculate hashes of NTFS directories that have content that spans
         * the IDX_ROOT and IDX_ALLOC artifacts. So we disable that until a
         * solution for it is developed.
         */
        if (file.isDir()) {
            return ProcessResult.OK;
        }

        // Bail out if there are no hash sets to look in and we are not calculating hashes.
        if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (!settings.shouldCalculateHashes())) {
            return ProcessResult.OK;
        }

        // Safely get a reference to the totalsForIngestJobs object
        IngestJobTotals totals = getTotalsForIngestJobs(jobId);

        // Calculate the hash value if it is not already present.
        String name = file.getName();
        long fileId = file.getId();
        String md5Hash = file.getMd5Hash();
        if (md5Hash == null || md5Hash.isEmpty()) {
            try {
                TimingMetric metric = HealthMonitor.getTimingMetric("Disk Reads: Hash calculation");
                long calcstart = System.currentTimeMillis();
                md5Hash = HashUtility.calculateMd5Hash(file);
                if (file.getSize() > 0) {
                    // Surprisingly, the hash calculation does not seem to be correlated
                    // that strongly with file size until the files get large.
                    // Only normalize if the file size is greater than ~1MB.
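                    // Illustrative arithmetic, not in the original source: with the
                    // 500,000-byte divisor used below, a hypothetical 10 MB file yields
                    // 10,000,000 / 500,000 = 20 as the normalization value handed to
                    // the health monitor along with the elapsed time.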
                    if (file.getSize() < 1000000) {
                        HealthMonitor.submitTimingMetric(metric);
                    } else {
                        // In testing, this normalization gave reasonable results
                        HealthMonitor.submitNormalizedTimingMetric(metric, file.getSize() / 500000);
                    }
                }
                file.setMd5Hash(md5Hash);
                long delta = (System.currentTimeMillis() - calcstart);
                totals.totalCalctime.addAndGet(delta);

            } catch (IOException ex) {
                logger.log(Level.WARNING, String.format("Error calculating hash of file '%s' (id=%d).", name, fileId), ex); //NON-NLS
                services.postMessage(IngestMessage.createErrorMessage(
                        HashLookupModuleFactory.getModuleName(),
                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.fileReadErrorMsg", name),
                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr",
                                file.getParentPath() + file.getName(),
                                file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC) ? "Allocated File" : "Deleted File")));
                return ProcessResult.ERROR;
            }
        }

        // Look up the MD5 hash in the notable (known bad) hash sets first.
        boolean foundBad = false;
        ProcessResult ret = ProcessResult.OK;
        for (HashDb db : knownBadHashSets) {
            try {
                long lookupstart = System.currentTimeMillis();
                HashHitInfo hashInfo = db.lookupMD5(file);
                if (null != hashInfo) {
                    foundBad = true;
                    totals.totalKnownBadCount.incrementAndGet();

                    file.setKnown(TskData.FileKnown.BAD);

                    String hashSetName = db.getDisplayName();

                    String comment = "";
                    ArrayList<String> comments = hashInfo.getComments();
                    int i = 0;
                    for (String c : comments) {
                        if (++i > 1) {
                            comment += " ";
                        }
                        comment += c;
                        if (comment.length() > MAX_COMMENT_SIZE) {
                            comment = comment.substring(0, MAX_COMMENT_SIZE) + "...";
                            break;
                        }
                    }

                    /*
                     * We have a match. Now create an artifact if it is
                     * determined that one hasn't been created yet.
                     */
                    List<BlackboardAttribute> attributesList = new ArrayList<>();
                    attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), hashSetName));
                    try {
                        org.sleuthkit.datamodel.Blackboard tskBlackboard = skCase.getBlackboard();
                        if (tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) {
                            postHashSetHitToBlackboard(file, md5Hash, hashSetName, comment, db.getSendIngestMessages());
                        }
                    } catch (TskCoreException ex) {
                        logger.log(Level.SEVERE, String.format(
                                "A problem occurred while checking for existing artifacts for file '%s' (id=%d).", name, fileId), ex); //NON-NLS
                        services.postMessage(IngestMessage.createErrorMessage(
                                HashLookupModuleFactory.getModuleName(),
                                Bundle.HashDbIngestModule_dialogTitle_errorFindingArtifacts(name),
                                Bundle.HashDbIngestModule_errorMessage_lookingForFileArtifacts(name)));
                        ret = ProcessResult.ERROR;
                    }
                }
                long delta = (System.currentTimeMillis() - lookupstart);
                totals.totalLookuptime.addAndGet(delta);

            } catch (TskException ex) {
                logger.log(Level.WARNING, String.format(
                        "Couldn't lookup notable hash for file '%s' (id=%d) - see sleuthkit log for details", name, fileId), ex); //NON-NLS
                services.postMessage(IngestMessage.createErrorMessage(
                        HashLookupModuleFactory.getModuleName(),
                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.hashLookupErrorMsg", name),
                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.lookingUpKnownBadHashValueErr", name)));
                ret = ProcessResult.ERROR;
            }
        }

        // If the file is not in the notable sets, search for it in the known sets.
        // Any hit is sufficient to classify it as known, and there is no need to create
        // a hit artifact or send a message to the application inbox.
        if (!foundBad) {
            for (HashDb db : knownHashSets) {
                try {
                    long lookupstart = System.currentTimeMillis();
                    if (db.lookupMD5Quick(file)) {
                        file.setKnown(TskData.FileKnown.KNOWN);
                        break;
                    }
                    long delta = (System.currentTimeMillis() - lookupstart);
                    totals.totalLookuptime.addAndGet(delta);

                } catch (TskException ex) {
                    logger.log(Level.WARNING, String.format(
                            "Couldn't lookup known hash for file '%s' (id=%d) - see sleuthkit log for details", name, fileId), ex); //NON-NLS
                    services.postMessage(IngestMessage.createErrorMessage(
                            HashLookupModuleFactory.getModuleName(),
                            NbBundle.getMessage(this.getClass(), "HashDbIngestModule.hashLookupErrorMsg", name),
                            NbBundle.getMessage(this.getClass(), "HashDbIngestModule.lookingUpKnownHashValueErr", name)));
                    ret = ProcessResult.ERROR;
                }
            }
        }

        return ret;
    }

    @Messages({
        "HashDbIngestModule.indexError.message=Failed to index hashset hit artifact for keyword search."
    })
    private void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, String hashSetName, String comment, boolean showInboxMessage) {
        try {
            String moduleName = HashLookupModuleFactory.getModuleName();
            BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT);
            Collection<BlackboardAttribute> attributes = new ArrayList<>();
            //TODO Revisit usage of deprecated constructor as per TSK-583
            //BlackboardAttribute att2 = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), MODULE_NAME, "Known Bad", hashSetName);
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, hashSetName));
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HASH_MD5, moduleName, md5Hash));
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, moduleName, comment));

            badFile.addAttributes(attributes);

            try {
                /*
                 * Post the artifact, which will index it for keyword search
                 * and fire an event to notify the UI of the new artifact.
                 */
                blackboard.postArtifact(badFile, moduleName);
            } catch (Blackboard.BlackboardException ex) {
                logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
                MessageNotifyUtil.Notify.error(
                        Bundle.HashDbIngestModule_indexError_message(), badFile.getDisplayName());
            }

            if (showInboxMessage) {
                StringBuilder detailsSb = new StringBuilder();
                //details
                detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
                //hit
                detailsSb.append("<tr>"); //NON-NLS
                detailsSb.append("<th>") //NON-NLS
                        .append(NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.fileName"))
                        .append("</th>"); //NON-NLS
                detailsSb.append("<td>") //NON-NLS
                        .append(abstractFile.getName())
                        .append("</td>"); //NON-NLS
                detailsSb.append("</tr>"); //NON-NLS

                detailsSb.append("<tr>"); //NON-NLS
                detailsSb.append("<th>") //NON-NLS
                        .append(NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.md5Hash"))
                        .append("</th>"); //NON-NLS
                detailsSb.append("<td>").append(md5Hash).append("</td>"); //NON-NLS
                detailsSb.append("</tr>"); //NON-NLS

                detailsSb.append("<tr>"); //NON-NLS
                detailsSb.append("<th>") //NON-NLS
                        .append(NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.hashsetName"))
                        .append("</th>"); //NON-NLS
                detailsSb.append("<td>").append(hashSetName).append("</td>"); //NON-NLS
                detailsSb.append("</tr>"); //NON-NLS

                detailsSb.append("</table>"); //NON-NLS

                services.postMessage(IngestMessage.createDataMessage(moduleName,
                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.postToBB.knownBadMsg", abstractFile.getName()),
                        detailsSb.toString(),
                        abstractFile.getName() + md5Hash,
                        badFile));
            }
        } catch (TskException ex) {
            logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
        }
    }

    private static synchronized void postSummary(long jobId,
            List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
        IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
        totalsForIngestJobs.remove(jobId);

        if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty())) {
            StringBuilder detailsSb = new StringBuilder();
            //details
            detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS

            detailsSb.append("<tr><td>") //NON-NLS
                    .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.knownBadsFound"))
                    .append("</td>"); //NON-NLS
            detailsSb.append("<td>").append(jobTotals.totalKnownBadCount.get()).append("</td></tr>"); //NON-NLS

            detailsSb.append("<tr><td>") //NON-NLS
                    .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalCalcTime"))
                    .append("</td><td>").append(jobTotals.totalCalctime.get()).append("</td></tr>\n"); //NON-NLS
            detailsSb.append("<tr><td>") //NON-NLS
                    .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.totalLookupTime"))
                    .append("</td><td>").append(jobTotals.totalLookuptime.get()).append("</td></tr>\n"); //NON-NLS
            detailsSb.append("</table>"); //NON-NLS

            detailsSb.append("<p>") //NON-NLS
                    .append(NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.databasesUsed"))
                    .append("</p>\n<ul>"); //NON-NLS
            for (HashDb db : knownBadHashSets) {
                detailsSb.append("<li>").append(db.getHashSetName()).append("</li>\n"); //NON-NLS
            }

            detailsSb.append("</ul>"); //NON-NLS

            IngestServices.getInstance().postMessage(IngestMessage.createMessage(
                    IngestMessage.MessageType.INFO,
                    HashLookupModuleFactory.getModuleName(),
                    NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.complete.hashLookupResults"),
                    detailsSb.toString()));
        }
    }

    @Override
    public void shutDown() {
        if (refCounter.decrementAndGet(jobId) == 0) {
            postSummary(jobId, knownBadHashSets, knownHashSets);
        }
    }
}
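For readers who want to experiment with the comment handling in process() above (the loop that joins the HashHitInfo comments and truncates the result at MAX_COMMENT_SIZE), the following self-contained sketch restates that logic outside of Autopsy. It is illustrative only: the class name, helper method, and sample data are invented for this example and are not part of the Autopsy codebase; the constant simply mirrors the MAX_COMMENT_SIZE value of 500 used by HashDbIngestModule.

import java.util.Arrays;
import java.util.List;

// Standalone illustration of the comment concatenation/truncation performed in
// HashDbIngestModule.process() for notable hash set hits. Hypothetical helper,
// not an Autopsy API.
public class HashHitCommentSketch {

    // Mirrors HashDbIngestModule.MAX_COMMENT_SIZE.
    private static final int MAX_COMMENT_SIZE = 500;

    // Joins comments with single spaces and truncates to MAX_COMMENT_SIZE
    // characters, appending "..." when truncation occurs, matching the loop
    // in process().
    static String buildComment(List<String> comments) {
        StringBuilder comment = new StringBuilder();
        int i = 0;
        for (String c : comments) {
            if (++i > 1) {
                comment.append(" ");
            }
            comment.append(c);
            if (comment.length() > MAX_COMMENT_SIZE) {
                return comment.substring(0, MAX_COMMENT_SIZE) + "...";
            }
        }
        return comment.toString();
    }

    public static void main(String[] args) {
        // Hypothetical comments such as those stored with entries in a notable hash set.
        StringBuilder longComment = new StringBuilder();
        for (int j = 0; j < 600; j++) {
            longComment.append('x');
        }
        List<String> comments = Arrays.asList(
                "Seen in case 2019-042",
                "Flagged by an analyst as a known dropper",
                longComment.toString());

        String result = buildComment(comments);
        // Prints 503, i.e. MAX_COMMENT_SIZE characters plus the "..." suffix.
        System.out.println(result.length());
    }
}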
