Autopsy  4.6.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
RegexQuery.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011-2018 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.keywordsearch;

import com.google.common.base.CharMatcher;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.validator.routines.DomainValidator;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.SortClause;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CursorMarkParams;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.CreditCards;
import static org.sleuthkit.autopsy.keywordsearch.KeywordSearchSettings.MODULE_NAME;
import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.CREDIT_CARD_NUM_PATTERN;
import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.CREDIT_CARD_TRACK2_PATTERN;
import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.KEYWORD_SEARCH_DOCUMENT_ID;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.AccountFileInstance;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

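/**
 * A KeywordSearchQuery implementation that runs a regular expression query
 * against the chunked text stored in Solr and then post-processes the
 * returned documents with java.util.regex to locate the individual hits.
 */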
final class RegexQuery implements KeywordSearchQuery {

    public static final Logger LOGGER = Logger.getLogger(RegexQuery.class.getName());

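    /*
     * Java predefined and POSIX character classes (e.g. \d, \w, \s). The
     * Lucene/Solr regex syntax used for the initial query does not interpret
     * these as character classes, so keywords containing any of them are
     * rejected by validate().
     */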
    private static final CharSequence[] UNSUPPORTED_CHARS = {"\\d", "\\D", "\\w", "\\W", "\\s", "\\S", "\\n",
        "\\t", "\\r", "\\f", "\\a", "\\e", "\\v", "\\V", "\\h", "\\H", "\\p"}; //NON-NLS

    private static final int MAX_RESULTS_PER_CURSOR_MARK = 512;
    private static final int MIN_EMAIL_ADDR_LENGTH = 8;
    private static final String SNIPPET_DELIMITER = String.valueOf(Character.toChars(171));

    private final List<KeywordQueryFilter> filters = new ArrayList<>();
    private final KeywordList keywordList;
    private final Keyword originalKeyword; // The keyword whose regular expression search term is used to perform the search.
    private final String keywordString;
    private final boolean queryStringContainsWildcardPrefix;
    private final boolean queryStringContainsWildcardSuffix;

    private boolean escaped;
    private String escapedQuery;
    private String field = Server.Schema.CONTENT_STR.toString();

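    /**
     * Constructs a RegexQuery object for a regular expression keyword.
     *
     * @param keywordList The keyword list that contains the keyword.
     * @param keyword     The keyword whose search term is the regular
     *                    expression to be searched for.
     */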
    RegexQuery(KeywordList keywordList, Keyword keyword) {
        this.keywordList = keywordList;
        this.originalKeyword = keyword;
        this.keywordString = keyword.getSearchTerm();

        this.queryStringContainsWildcardPrefix = this.keywordString.startsWith(".*");
        this.queryStringContainsWildcardSuffix = this.keywordString.endsWith(".*");
    }

    @Override
    public KeywordList getKeywordList() {
        return keywordList;
    }

    @Override
    public boolean validate() {
        if (keywordString.isEmpty()) {
            return false;
        }
        try {
            // First we perform regular Java regex validation to catch errors.
            Pattern.compile(keywordString, Pattern.UNICODE_CHARACTER_CLASS);

            // Then we check for the set of Java predefined and POSIX character
            // classes. While they are valid Lucene regex characters, they will
            // behave differently than users may expect. E.g. the regex \d\d\d
            // will not find 3 digits but will instead find a sequence of 3 'd's.
            for (CharSequence c : UNSUPPORTED_CHARS) {
                if (keywordString.contains(c)) {
                    return false;
                }
            }
            return true;
        } catch (IllegalArgumentException ex) {
            return false;
        }
    }

    @Override
    public QueryResults performQuery() throws NoOpenCoreException {

        final Server solrServer = KeywordSearch.getServer();
        SolrQuery solrQuery = new SolrQuery();

        /*
         * The provided regular expression may include wildcards at the
         * beginning and/or end. These wildcards are used to indicate that the
         * user wants to find hits for the regex that are embedded within other
         * characters. For example, if we are given .*127.0.0.1.* as a regular
         * expression, this will produce hits for: (a) " 127.0.0.1 " as a
         * standalone token (surrounded by whitespace), and (b) "abc127.0.0.1def",
         * where the IP address is surrounded by other characters.
         *
         * If we are given this type of regex, we do not need to add our own
         * wildcards to anchor the query. Otherwise, we need to add wildcard
         * anchors because Lucene string regex searches default to using ^ and $
         * to match the entire string.
         */
        // We construct the query by surrounding it with slashes (to indicate it is
        // a regular expression search) and .* as anchors (if the query doesn't
        // already have them). We do not add .* if there is a boundary character.
        boolean skipWildcardPrefix = queryStringContainsWildcardPrefix || getQueryString().startsWith("^");
        boolean skipWildcardSuffix = queryStringContainsWildcardSuffix
                || (getQueryString().endsWith("$") && (!getQueryString().endsWith("\\$")));
        solrQuery.setQuery((field == null ? Server.Schema.CONTENT_STR.toString() : field) + ":/"
                + (skipWildcardPrefix ? "" : ".*") + getQueryString()
                + (skipWildcardSuffix ? "" : ".*") + "/");
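        // For example, a keyword of "foo[0-9]+" searched against the default
        // content_str field produces the query content_str:/.*foo[0-9]+.*/
        // The .* anchors are omitted when the keyword already starts or ends
        // with .* or is anchored with ^ or an unescaped $.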

        // Set the fields we want to have returned by the query.
        solrQuery.setFields(Server.Schema.CONTENT_STR.toString(), Server.Schema.ID.toString(), Server.Schema.CHUNK_SIZE.toString());

        filters.stream()
                .map(KeywordQueryFilter::toString)
                .forEach(solrQuery::addFilterQuery);

        solrQuery.setRows(MAX_RESULTS_PER_CURSOR_MARK);
        // Setting the sort order is necessary for cursor based paging to work.
        solrQuery.setSort(SortClause.asc(Server.Schema.ID.toString()));

        String cursorMark = CursorMarkParams.CURSOR_MARK_START;
        SolrDocumentList resultList;
        boolean allResultsProcessed = false;
        QueryResults results = new QueryResults(this);

        while (!allResultsProcessed) {
            try {
                solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
                QueryResponse response = solrServer.query(solrQuery, SolrRequest.METHOD.POST);
                resultList = response.getResults();

                for (SolrDocument resultDoc : resultList) {
                    try {
                        List<KeywordHit> keywordHits = createKeywordHits(resultDoc);
                        for (KeywordHit hit : keywordHits) {
                            Keyword keywordInstance = new Keyword(hit.getHit(), true, true, originalKeyword.getListName(), originalKeyword.getOriginalTerm());
                            List<KeywordHit> hitsForKeyword = results.getResults(keywordInstance);
                            if (hitsForKeyword == null) {
                                hitsForKeyword = new ArrayList<>();
                                results.addResult(keywordInstance, hitsForKeyword);
                            }
                            hitsForKeyword.add(hit);
                        }
                    } catch (TskCoreException ex) {
                        LOGGER.log(Level.SEVERE, "Error creating keyword hits", ex); //NON-NLS
                    }
                }

                String nextCursorMark = response.getNextCursorMark();
                if (cursorMark.equals(nextCursorMark)) {
                    allResultsProcessed = true;
                }
                cursorMark = nextCursorMark;
            } catch (KeywordSearchModuleException ex) {
                LOGGER.log(Level.SEVERE, "Error executing Regex Solr Query: " + keywordString, ex); //NON-NLS
                MessageNotifyUtil.Notify.error(NbBundle.getMessage(Server.class, "Server.query.exception.msg", keywordString), ex.getCause().getMessage());
            }
        }

        return results;
    }

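    /**
     * Creates the keyword hits for a Solr document returned by the regex
     * query. The Java regex is run against each chunk of indexed text in the
     * document and a KeywordHit is created for each unique match.
     *
     * @param solrDoc The Solr document.
     *
     * @return A list of keyword hits.
     *
     * @throws TskCoreException If there is a problem creating the hits.
     */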
    private List<KeywordHit> createKeywordHits(SolrDocument solrDoc) throws TskCoreException {

        final HashMap<String, String> keywordsFoundInThisDocument = new HashMap<>();

        List<KeywordHit> hits = new ArrayList<>();
        final String docId = solrDoc.getFieldValue(Server.Schema.ID.toString()).toString();
        final Integer chunkSize = (Integer) solrDoc.getFieldValue(Server.Schema.CHUNK_SIZE.toString());

        final Collection<Object> content_str = solrDoc.getFieldValues(Server.Schema.CONTENT_STR.toString());

        final Pattern pattern = Pattern.compile(keywordString);
        try {
            for (Object content_obj : content_str) {
                String content = (String) content_obj;
                Matcher hitMatcher = pattern.matcher(content);
                int offset = 0;

                while (hitMatcher.find(offset)) {

                    // If the location of the hit is beyond this chunk (i.e. it
                    // exists in the overlap region), we skip the hit. It will
                    // show up again as a hit in the chunk following this one.
                    if (chunkSize != null && hitMatcher.start() >= chunkSize) {
                        break;
                    }

                    String hit = hitMatcher.group();

                    offset = hitMatcher.end();
                    final ATTRIBUTE_TYPE artifactAttributeType = originalKeyword.getArtifactAttributeType();

                    // We attempt to reduce false positives for phone number and IP address hits
                    // by querying Solr for hits delimited by a set of known boundary characters.
                    // See KeywordSearchList.PHONE_NUMBER_REGEX for an example.
                    // Because of this, the hits may contain an extra character at the beginning or end that
                    // needs to be chopped off, unless the user has supplied their own wildcard suffix
                    // as part of the regex.
                    if (!queryStringContainsWildcardSuffix
                            && (artifactAttributeType == ATTRIBUTE_TYPE.TSK_PHONE_NUMBER
                            || artifactAttributeType == ATTRIBUTE_TYPE.TSK_IP_ADDRESS)) {
                        if (artifactAttributeType == ATTRIBUTE_TYPE.TSK_PHONE_NUMBER) {
                            // For phone numbers, strip a leading non-numeric character (except "(") from the hit.
                            hit = hit.replaceAll("^[^0-9\\(]", "");
                        } else {
                            // Strip a leading non-numeric character from the hit.
                            hit = hit.replaceAll("^[^0-9]", "");
                        }
                        // Strip a trailing non-numeric character from the hit.
                        hit = hit.replaceAll("[^0-9]$", "");
                    }

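                    /*
                     * Intern the hit string so that repeated hit values share
                     * a single String instance across chunks and documents.
                     */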
                    hit = hit.intern();

                    // We will only create one KeywordHit instance per document for
                    // a given hit.
                    if (keywordsFoundInThisDocument.containsKey(hit)) {
                        continue;
                    }
                    keywordsFoundInThisDocument.put(hit, hit);

                    if (artifactAttributeType == null) {
                        hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
                    } else {
                        switch (artifactAttributeType) {
                            case TSK_EMAIL:
                                /*
                                 * Reduce false positives by eliminating email
                                 * address hits that are either too short or are
                                 * not for valid top level domains.
                                 */
                                if (hit.length() >= MIN_EMAIL_ADDR_LENGTH
                                        && DomainValidator.getInstance(true).isValidTld(hit.substring(hit.lastIndexOf('.')))) {
                                    hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
                                }

                                break;
                            case TSK_CARD_NUMBER:
                                /*
                                 * If searching for credit card account numbers,
                                 * do extra validation on the term and discard
                                 * it if it does not pass.
                                 */
                                Matcher ccnMatcher = CREDIT_CARD_NUM_PATTERN.matcher(hit);

                                for (int rLength = hit.length(); rLength >= 12; rLength--) {
                                    ccnMatcher.region(0, rLength);
                                    if (ccnMatcher.find()) {
                                        final String group = ccnMatcher.group("ccn");
                                        if (CreditCardValidator.isValidCCN(group)) {
                                            hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
                                        }
                                    }
                                }

                                break;
                            default:
                                hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
                                break;
                        }
                    }
                }

            }
        } catch (Throwable error) {
            /*
             * NOTE: Matcher.find() is known to throw StackOverflowError in rare
             * cases (see JIRA-2700). StackOverflowError is an error, not an
             * exception, and therefore needs to be caught as a Throwable. When
             * this occurs, we re-throw the error as a TskCoreException so that
             * it is logged by the calling method, which can then move on to the
             * next Solr document.
             */
            throw new TskCoreException("Failed to create keyword hits for Solr document id " + docId + " due to " + error.getMessage());
        }
        return hits;
    }

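    /**
     * Makes a snippet from the given content that includes up to 20
     * characters of context on either side of the hit, with the hit itself
     * surrounded by the snippet delimiter character.
     *
     * @param content    The content string from which to make the snippet.
     * @param hitMatcher The matcher positioned on the hit.
     * @param hit        The hit text.
     *
     * @return The snippet.
     */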
    private String makeSnippet(String content, Matcher hitMatcher, String hit) {
        // Get the snippet from the document.
        int maxIndex = content.length() - 1;
        final int end = hitMatcher.end();
        final int start = hitMatcher.start();

        return content.substring(Integer.max(0, start - 20), Integer.max(0, start))
                + SNIPPET_DELIMITER + hit + SNIPPET_DELIMITER
                + content.substring(Integer.min(maxIndex, end), Integer.min(maxIndex, end + 20));
    }

    @Override
    public void addFilter(KeywordQueryFilter filter) {
        this.filters.add(filter);
    }

    @Override
    public void setField(String field) {
        this.field = field;
    }

    @Override
    public void setSubstringQuery() {
    }

    @Override
    public synchronized void escape() {
        if (!isEscaped()) {
            escapedQuery = KeywordSearchUtil.escapeLuceneQuery(keywordString);
            escaped = true;
        }
    }

    @Override
    public synchronized boolean isEscaped() {
        return escaped;
    }

    @Override
    public boolean isLiteral() {
        return false;
    }

    @Override
    public String getQueryString() {
        return originalKeyword.getSearchTerm();
    }

    @Override
    public synchronized String getEscapedQueryString() {
        if (!isEscaped()) {
            escape();
        }
        return escapedQuery;
    }

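    /**
     * Posts a keyword hit artifact to the blackboard for a given hit or, for
     * credit card number hits, creates a credit card account instead.
     *
     * @param content      The content that the hit was found in.
     * @param foundKeyword The keyword that was found by the search.
     * @param hit          The keyword hit.
     * @param snippet      A snippet of context for the hit.
     * @param listName     The name of the keyword list containing the keyword.
     *
     * @return The newly created artifact, or null if no artifact was created.
     */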
    @Override
    public BlackboardArtifact postKeywordHitToBlackboard(Content content, Keyword foundKeyword, KeywordHit hit, String snippet, String listName) {
        final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();

        if (content == null) {
            LOGGER.log(Level.WARNING, "Error adding artifact for keyword hit to blackboard"); //NON-NLS
            return null;
        }

        /*
         * Credit card number hits are handled differently.
         */
        if (originalKeyword.getArtifactAttributeType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
            createCCNAccount(content, foundKeyword, hit, snippet, listName);
            return null;
        }

        /*
         * Create a "plain vanilla" keyword hit artifact with keyword and
         * regex attributes.
         */
        BlackboardArtifact newArtifact;
        Collection<BlackboardAttribute> attributes = new ArrayList<>();

        attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, foundKeyword.getSearchTerm()));
        attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP, MODULE_NAME, getQueryString()));

        try {
            newArtifact = content.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
        } catch (TskCoreException ex) {
            LOGGER.log(Level.SEVERE, "Error adding artifact for keyword hit to blackboard", ex); //NON-NLS
            return null;
        }

        if (StringUtils.isNotBlank(listName)) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
        }
        if (snippet != null) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
        }

        hit.getArtifactID().ifPresent(artifactID
                -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, artifactID))
        );

        attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE, MODULE_NAME, KeywordSearch.QueryType.REGEX.ordinal()));

        try {
            newArtifact.addAttributes(attributes);
            return newArtifact;
        } catch (TskCoreException e) {
            LOGGER.log(Level.SEVERE, "Error adding bb attributes for terms search artifact", e); //NON-NLS
            return null;
        }
    }

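    /**
     * Creates a credit card account for a TSK_CARD_NUMBER keyword hit. The
     * account attributes are parsed from the snippet for the hit and looked
     * up from the bank identification number (BIN).
     *
     * @param content      The content that the hit was found in.
     * @param foundKeyword The keyword that was found by the search.
     * @param hit          The keyword hit.
     * @param snippet      A snippet of context for the hit.
     * @param listName     The name of the keyword list containing the keyword.
     */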
    private void createCCNAccount(Content content, Keyword foundKeyword, KeywordHit hit, String snippet, String listName) {

        final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();

        if (originalKeyword.getArtifactAttributeType() != ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
            LOGGER.log(Level.SEVERE, "Keyword hit is not a credit card number"); //NON-NLS
            return;
        }
        /*
         * Create a credit card account with attributes parsed from the
         * snippet for the hit and looked up based on the parsed bank
         * identification number (BIN).
         */
        Collection<BlackboardAttribute> attributes = new ArrayList<>();

        Map<BlackboardAttribute.Type, BlackboardAttribute> parsedTrackAttributeMap = new HashMap<>();
        Matcher matcher = TermsComponentQuery.CREDIT_CARD_TRACK1_PATTERN.matcher(hit.getSnippet());
        if (matcher.find()) {
            parseTrack1Data(parsedTrackAttributeMap, matcher);
        }
        matcher = CREDIT_CARD_TRACK2_PATTERN.matcher(hit.getSnippet());
        if (matcher.find()) {
            parseTrack2Data(parsedTrackAttributeMap, matcher);
        }
        final BlackboardAttribute ccnAttribute = parsedTrackAttributeMap.get(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
        if (ccnAttribute == null || StringUtils.isBlank(ccnAttribute.getValueString())) {

            if (hit.isArtifactHit()) {
                LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getArtifactID().get())); //NON-NLS
            } else {
                try {
                    LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getContentID())); //NON-NLS
                } catch (TskCoreException ex) {
                    LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s'", foundKeyword.getSearchTerm(), hit.getSnippet())); //NON-NLS
                    LOGGER.log(Level.SEVERE, "There was an error getting the content ID for a keyword hit.", ex); //NON-NLS
                }
            }
            return;
        }
        attributes.addAll(parsedTrackAttributeMap.values());

        /*
         * Look up the bank name, scheme, etc. attributes for the bank
         * identification number (BIN).
         */
        final int bin = Integer.parseInt(ccnAttribute.getValueString().substring(0, 8));
        CreditCards.BankIdentificationNumber binInfo = CreditCards.getBINInfo(bin);
        if (binInfo != null) {
            binInfo.getScheme().ifPresent(scheme
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_SCHEME, MODULE_NAME, scheme)));
            binInfo.getCardType().ifPresent(cardType
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_TYPE, MODULE_NAME, cardType)));
            binInfo.getBrand().ifPresent(brand
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BRAND_NAME, MODULE_NAME, brand)));
            binInfo.getBankName().ifPresent(bankName
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BANK_NAME, MODULE_NAME, bankName)));
            binInfo.getBankPhoneNumber().ifPresent(phoneNumber
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber)));
            binInfo.getBankURL().ifPresent(url
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, MODULE_NAME, url)));
            binInfo.getCountry().ifPresent(country
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNTRY, MODULE_NAME, country)));
            binInfo.getBankCity().ifPresent(city
                    -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CITY, MODULE_NAME, city)));
        }

        /*
         * If the hit is from unused or unallocated space, record the Solr
         * document id to support showing just the chunk that contained the
         * hit.
         */
        if (content instanceof AbstractFile) {
            AbstractFile file = (AbstractFile) content;
            if (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS
                    || file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
                attributes.add(new BlackboardAttribute(KEYWORD_SEARCH_DOCUMENT_ID, MODULE_NAME, hit.getSolrDocumentId()));
            }
        }

        if (StringUtils.isNotBlank(listName)) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
        }
        if (snippet != null) {
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
        }

        hit.getArtifactID().ifPresent(artifactID
                -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, artifactID))
        );

        attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE, MODULE_NAME, KeywordSearch.QueryType.REGEX.ordinal()));

        /*
         * Create an account instance.
         */
        try {
            AccountFileInstance ccAccountInstance = Case.getOpenCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.CREDIT_CARD, ccnAttribute.getValueString(), MODULE_NAME, content);

            ccAccountInstance.addAttributes(attributes);

        } catch (TskCoreException | NoCurrentCaseException ex) {
            LOGGER.log(Level.SEVERE, "Error creating CCN account instance", ex); //NON-NLS
        }

    }
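    /**
     * Parses track 2 data from the snippet for a credit card account number
     * hit and adds the parsed fields (account number, expiration, service
     * code, discretionary data, LRC) to the given attribute map.
     *
     * @param attributesMap A map of blackboard attribute types to attributes,
     *                      used to avoid duplicate attributes.
     * @param matcher       A matcher for the snippet, already matched against
     *                      a track 2 (or track 1) pattern.
     */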
    private static void parseTrack2Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributesMap, Matcher matcher) {
        addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_NUMBER, "accountNumber", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_EXPIRATION, "expiration", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_SERVICE_CODE, "serviceCode", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_DISCRETIONARY, "discretionary", matcher);
        addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_LRC, "LRC", matcher);
    }

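    /**
     * Parses track 1 data from the snippet for a credit card account number
     * hit. Track 1 data is a superset of track 2 data, so the track 2 parser
     * is called first and then the cardholder name is added.
     *
     * @param attributeMap A map of blackboard attribute types to attributes,
     *                     used to avoid duplicate attributes.
     * @param matcher      A matcher for the snippet, already matched against
     *                     a track 1 pattern.
     */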
    private static void parseTrack1Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, Matcher matcher) {
        parseTrack2Data(attributeMap, matcher);
        addAttributeIfNotAlreadyCaptured(attributeMap, ATTRIBUTE_TYPE.TSK_NAME_PERSON, "name", matcher);
    }

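    /**
     * Adds an attribute of the given type to the attribute map, but only if
     * the map does not already contain an attribute of that type and the
     * named capture group produced a non-blank value. For card numbers, the
     * raw value is also recorded as a TSK_KEYWORD attribute and spaces and
     * dashes are removed from the stored card number.
     *
     * @param attributeMap A map of blackboard attribute types to attributes.
     * @param attrType     The type of attribute to add.
     * @param groupName    The name of the matcher group that captured the
     *                     attribute value.
     * @param matcher      The matcher for the snippet.
     */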
    private static void addAttributeIfNotAlreadyCaptured(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, ATTRIBUTE_TYPE attrType, String groupName, Matcher matcher) {
        BlackboardAttribute.Type type = new BlackboardAttribute.Type(attrType);

        if (!attributeMap.containsKey(type)) {
            String value = matcher.group(groupName);
            if (attrType.equals(ATTRIBUTE_TYPE.TSK_CARD_NUMBER)) {
                attributeMap.put(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_KEYWORD),
                        new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, value));
                value = CharMatcher.anyOf(" -").removeFrom(value);
            }

            if (StringUtils.isNotBlank(value)) {
                attributeMap.put(type, new BlackboardAttribute(attrType, MODULE_NAME, value));
            }
        }
    }
}
