Autopsy  4.5.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
RegexQuery.java
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2011-2017 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.keywordsearch;
20 
21 import com.google.common.base.CharMatcher;
22 import java.util.ArrayList;
23 import java.util.Collection;
24 import java.util.HashMap;
25 import java.util.List;
26 import java.util.Map;
27 import java.util.logging.Level;
28 import java.util.regex.Matcher;
29 import java.util.regex.Pattern;
30 import org.apache.commons.lang3.StringUtils;
31 import org.apache.commons.validator.routines.DomainValidator;
32 import org.apache.solr.client.solrj.SolrQuery;
33 import org.apache.solr.client.solrj.SolrQuery.SortClause;
34 import org.apache.solr.client.solrj.SolrRequest;
35 import org.apache.solr.client.solrj.response.QueryResponse;
36 import org.apache.solr.common.SolrDocument;
37 import org.apache.solr.common.SolrDocumentList;
38 import org.apache.solr.common.params.CursorMarkParams;
39 import org.openide.util.Exceptions;
40 import org.openide.util.NbBundle;
41 import org.sleuthkit.autopsy.casemodule.Case;
42 import org.sleuthkit.autopsy.coreutils.Logger;
43 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
44 import org.sleuthkit.autopsy.datamodel.CreditCards;
45 import static org.sleuthkit.autopsy.keywordsearch.KeywordSearchSettings.MODULE_NAME;
46 import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.CREDIT_CARD_NUM_PATTERN;
47 import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.CREDIT_CARD_TRACK2_PATTERN;
48 import static org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.KEYWORD_SEARCH_DOCUMENT_ID;
49 import org.sleuthkit.datamodel.AbstractFile;
50 import org.sleuthkit.datamodel.Account;
51 import org.sleuthkit.datamodel.AccountFileInstance;
52 import org.sleuthkit.datamodel.BlackboardArtifact;
53 import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
54 import org.sleuthkit.datamodel.BlackboardAttribute;
55 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
56 import org.sleuthkit.datamodel.Content;
57 import org.sleuthkit.datamodel.TskCoreException;
58 import org.sleuthkit.datamodel.TskData;
59 
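 /**
  * A KeywordSearchQuery implementation that runs a keyword search term as a
  * Lucene regular expression against a string field of the Solr index
  * (content_str by default). Matching Solr documents are then re-examined
  * with java.util.regex to locate the individual hits, trim and validate
  * them, and build snippets. Search terms that rely on Java-only constructs
  * such as \d are rejected by validate() because Lucene does not interpret
  * them as character classes.
  */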
74 final class RegexQuery implements KeywordSearchQuery {
75 
76  public static final Logger LOGGER = Logger.getLogger(RegexQuery.class.getName());
77 
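 /*
  * Java predefined and POSIX character classes that Lucene regular
  * expressions do not interpret as character classes. validate() rejects
  * search terms containing any of these escapes (see the comment in
  * validate() for an example of the surprising behavior they would produce).
  */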
88  private static final CharSequence[] UNSUPPORTED_CHARS = {"\\d", "\\D", "\\w", "\\W", "\\s", "\\S", "\\n",
89  "\\t", "\\r", "\\f", "\\a", "\\e", "\\v", "\\V", "\\h", "\\H", "\\p"}; //NON-NLS
90 
91  private static final int MAX_RESULTS_PER_CURSOR_MARK = 512;
92  private static final int MIN_EMAIL_ADDR_LENGTH = 8;
93  private static final String SNIPPET_DELIMITER = String.valueOf(Character.toChars(171));
94 
95  private final List<KeywordQueryFilter> filters = new ArrayList<>();
96  private final KeywordList keywordList;
97  private final Keyword originalKeyword; // The keyword containing the regular expression used to perform the search.
98  private final String keywordString;
99  private final boolean queryStringContainsWildcardPrefix;
100  private final boolean queryStringContainsWildcardSuffix;
101 
102  private boolean escaped;
103  private String escapedQuery;
104  private String field = Server.Schema.CONTENT_STR.toString();
105 
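 /**
  * Constructs a regular expression search query object.
  *
  * @param keywordList The keyword list that contains the keyword.
  * @param keyword     The keyword whose search term is the regular
  *                    expression to search for.
  */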
112  RegexQuery(KeywordList keywordList, Keyword keyword) {
113  this.keywordList = keywordList;
114  this.originalKeyword = keyword;
115  this.keywordString = keyword.getSearchTerm();
116 
117  this.queryStringContainsWildcardPrefix = this.keywordString.startsWith(".*");
118  this.queryStringContainsWildcardSuffix = this.keywordString.endsWith(".*");
119  }
120 
121  @Override
122  public KeywordList getKeywordList() {
123  return keywordList;
124  }
125 
126  @Override
127  public boolean validate() {
128  if (keywordString.isEmpty()) {
129  return false;
130  }
131  try {
132  // First we perform regular Java regex validation to catch errors.
133  Pattern.compile(keywordString, Pattern.UNICODE_CHARACTER_CLASS);
134 
135  // Then we check for the set of Java predefined and POSIX character
136  // classes. While they are valid Lucene regex characters, they will
137  // behave differently than users may expect. E.g. the regex \d\d\d
138  // will not find 3 digits but will instead find a sequence of 3 'd's.
139  for (CharSequence c : UNSUPPORTED_CHARS) {
140  if (keywordString.contains(c)) {
141  return false;
142  }
143  }
144  return true;
145  } catch (IllegalArgumentException ex) {
146  return false;
147  }
148  }
149 
150  @Override
151  public QueryResults performQuery() throws NoOpenCoreException {
152 
153  final Server solrServer = KeywordSearch.getServer();
154  SolrQuery solrQuery = new SolrQuery();
155 
156  /*
157  * The provided regular expression may include wildcards at the
158  * beginning and/or end. These wildcards are used to indicate that the
159  * user wants to find hits for the regex that are embedded within other
160  * characters. For example, if we are given .*127.0.0.1.* as a regular
161  * expression, this will produce hits for: (a) " 127.0.0.1 " as a
162  * standalone token (surrounded by whitespace). (b) "abc127.0.0.1def"
163  * where the IP address is surrounded by other characters.
164  *
165  * If we are given this type of regex, we do not need to add our own
166  * wildcards to anchor the query. Otherwise, we need to add wildcard
167  * anchors because Lucene string regex searches default to using ^ and $
168  * to match the entire string.
169  */
170  // We construct the query by surrounding it with slashes (to indicate it is
171  // a regular expression search) and .* as anchors (if the query doesn't
172  // already have them).
173  solrQuery.setQuery((field == null ? Server.Schema.CONTENT_STR.toString() : field) + ":/"
174  + (queryStringContainsWildcardPrefix ? "" : ".*") + getQueryString()
175  + (queryStringContainsWildcardSuffix ? "" : ".*") + "/");
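 // For example, a search term of 192\.168\.1\.1 with the default field
 // (assuming Server.Schema.CONTENT_STR renders as content_str) produces the
 // Solr query string: content_str:/.*192\.168\.1\.1.*/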
176 
177  // Set the fields we want to have returned by the query.
178  solrQuery.setFields(Server.Schema.CONTENT_STR.toString(), Server.Schema.ID.toString(), Server.Schema.CHUNK_SIZE.toString());
179 
180  filters.stream()
181  .map(KeywordQueryFilter::toString)
182  .forEach(solrQuery::addFilterQuery);
183 
184  solrQuery.setRows(MAX_RESULTS_PER_CURSOR_MARK);
185  // Setting the sort order is necessary for cursor based paging to work.
186  solrQuery.setSort(SortClause.asc(Server.Schema.ID.toString()));
187 
188  String cursorMark = CursorMarkParams.CURSOR_MARK_START;
189  SolrDocumentList resultList;
190  boolean allResultsProcessed = false;
191  QueryResults results = new QueryResults(this);
192 
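 /*
  * Fetch the results using Solr cursor-mark (deep) paging. Each request
  * returns at most MAX_RESULTS_PER_CURSOR_MARK documents; Solr signals that
  * all results have been consumed by returning the same cursor mark that was
  * sent with the request.
  */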
193  while (!allResultsProcessed) {
194  try {
195  solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
196  QueryResponse response = solrServer.query(solrQuery, SolrRequest.METHOD.POST);
197  resultList = response.getResults();
198 
199  for (SolrDocument resultDoc : resultList) {
200  try {
201  List<KeywordHit> keywordHits = createKeywordHits(resultDoc);
202  for (KeywordHit hit : keywordHits) {
203  Keyword keywordInstance = new Keyword(hit.getHit(), true, true, originalKeyword.getListName(), originalKeyword.getOriginalTerm());
204  List<KeywordHit> hitsForKeyword = results.getResults(keywordInstance);
205  if (hitsForKeyword == null) {
206  hitsForKeyword = new ArrayList<>();
207  results.addResult(keywordInstance, hitsForKeyword);
208  }
209  hitsForKeyword.add(hit);
210  }
211  } catch (TskCoreException ex) {
212  LOGGER.log(Level.SEVERE, "Error creating keyword hits", ex); //NON-NLS
213  }
214  }
215 
216  String nextCursorMark = response.getNextCursorMark();
217  if (cursorMark.equals(nextCursorMark)) {
218  allResultsProcessed = true;
219  }
220  cursorMark = nextCursorMark;
221  } catch (KeywordSearchModuleException ex) {
222  LOGGER.log(Level.SEVERE, "Error executing Regex Solr Query: " + keywordString, ex); //NON-NLS
223  MessageNotifyUtil.Notify.error(NbBundle.getMessage(Server.class, "Server.query.exception.msg", keywordString), ex.getCause().getMessage());
224  }
225  }
226 
227  return results;
228  }
229 
230  private List<KeywordHit> createKeywordHits(SolrDocument solrDoc) throws TskCoreException {
231 
232  final HashMap<String, String> keywordsFoundInThisDocument = new HashMap<>();
233 
234  List<KeywordHit> hits = new ArrayList<>();
235  final String docId = solrDoc.getFieldValue(Server.Schema.ID.toString()).toString();
236  final Integer chunkSize = (Integer) solrDoc.getFieldValue(Server.Schema.CHUNK_SIZE.toString());
237 
238  final Collection<Object> content_str = solrDoc.getFieldValues(Server.Schema.CONTENT_STR.toString());
239 
240  final Pattern pattern = Pattern.compile(keywordString);
241  try {
242  for (Object content_obj : content_str) {
243  String content = (String) content_obj;
244  Matcher hitMatcher = pattern.matcher(content);
245  int offset = 0;
246 
247  while (hitMatcher.find(offset)) {
248 
249  // If the location of the hit is beyond this chunk (i.e. it
250  // exists in the overlap region), we skip the hit. It will
251  // show up again as a hit in the chunk following this one.
252  if (chunkSize != null && hitMatcher.start() >= chunkSize) {
253  break;
254  }
255 
256  String hit = hitMatcher.group();
257 
258  offset = hitMatcher.end();
259  final ATTRIBUTE_TYPE artifactAttributeType = originalKeyword.getArtifactAttributeType();
260 
261  // We attempt to reduce false positives for phone numbers and IP address hits
262  // by querying Solr for hits delimited by a set of known boundary characters.
263  // See KeywordSearchList.PHONE_NUMBER_REGEX for an example.
264  // Because of this the hits may contain an extra character at the beginning or end that
265  // needs to be chopped off, unless the user has supplied their own wildcard suffix
266  // as part of the regex.
267  if (!queryStringContainsWildcardSuffix
268  && (artifactAttributeType == ATTRIBUTE_TYPE.TSK_PHONE_NUMBER
269  || artifactAttributeType == ATTRIBUTE_TYPE.TSK_IP_ADDRESS)) {
270  if (artifactAttributeType == ATTRIBUTE_TYPE.TSK_PHONE_NUMBER) {
271  // For phone numbers, replace all non-numeric characters (except "(") at the start of the hit.
272  hit = hit.replaceAll("^[^0-9\\(]", "");
273  } else {
274  // Replace all non-numeric characters at the start of the hit.
275  hit = hit.replaceAll("^[^0-9]", "");
276  }
277  // Replace all non-numeric characters at the end of the hit.
278  hit = hit.replaceAll("[^0-9]$", "");
279  }
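 // For example, a raw phone number hit of " (555) 555-1234;" is trimmed by
 // the two replacements above to "(555) 555-1234".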
280 
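 // Intern the hit string so that identical hit values share a single String
 // instance, which reduces heap usage when the same value is found many times.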
289  hit = hit.intern();
290 
291  // We will only create one KeywordHit instance per document for
292  // a given hit.
293  if (keywordsFoundInThisDocument.containsKey(hit)) {
294  continue;
295  }
296  keywordsFoundInThisDocument.put(hit, hit);
297 
298  if (artifactAttributeType == null) {
299  hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
300  } else {
301  switch (artifactAttributeType) {
302  case TSK_EMAIL:
303  /*
304  * Reduce false positives by eliminating email
305  * address hits that are either too short or are
306  * not for valid top level domains.
307  */
308  if (hit.length() >= MIN_EMAIL_ADDR_LENGTH
309  && DomainValidator.getInstance(true).isValidTld(hit.substring(hit.lastIndexOf('.')))) {
310  hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
311  }
312 
313  break;
314  case TSK_CARD_NUMBER:
315  /*
316  * If searching for credit card account numbers,
317  * do extra validation on the term and discard
318  * it if it does not pass.
319  */
320  Matcher ccnMatcher = CREDIT_CARD_NUM_PATTERN.matcher(hit);
321 
322  for (int rLength = hit.length(); rLength >= 12; rLength--) {
323  ccnMatcher.region(0, rLength);
324  if (ccnMatcher.find()) {
325  final String group = ccnMatcher.group("ccn");
326  if (CreditCardValidator.isValidCCN(group)) {
327  hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
328  }
329  }
330  }
331 
332  break;
333  default:
334  hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit));
335  break;
336  }
337  }
338  }
339 
340  }
341  } catch (Throwable error) {
342  /*
343  * NOTE: Matcher.find() is known to throw StackOverflowError in rare
344  * cases (see JIRA-2700). StackOverflowError is an error, not an
345  * exception, and therefore needs to be caught as a Throwable. When
346  * this occurs we should re-throw the error as TskCoreException so
347  * that it is logged by the calling method and move on to the next
348  * Solr document.
349  */
350  throw new TskCoreException("Failed to create keyword hits for Solr document id " + docId + " due to " + error.getMessage());
351  }
352  return hits;
353  }
354 
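 /**
  * Creates a snippet for a hit: up to twenty characters of context on each
  * side of the hit, with the hit itself wrapped in SNIPPET_DELIMITER
  * characters ('«').
  *
  * @param content    The text of the chunk in which the hit was found.
  * @param hitMatcher The matcher positioned at the hit.
  * @param hit        The hit text.
  *
  * @return The snippet.
  */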
368  private String makeSnippet(String content, Matcher hitMatcher, String hit) {
369  // Get the snippet from the document.
370  int maxIndex = content.length() - 1;
371  final int end = hitMatcher.end();
372  final int start = hitMatcher.start();
373 
374  return content.substring(Integer.max(0, start - 20), Integer.max(0, start))
375  + SNIPPET_DELIMITER + hit + SNIPPET_DELIMITER
376  + content.substring(Integer.min(maxIndex, end), Integer.min(maxIndex, end + 20));
377  }
378 
379  @Override
380  public void addFilter(KeywordQueryFilter filter) {
381  this.filters.add(filter);
382  }
383 
384  @Override
385  public void setField(String field) {
386  this.field = field;
387  }
388 
389  @Override
390  public void setSubstringQuery() {
391  }
392 
393  @Override
394  synchronized public void escape() {
395  if (isEscaped() == false) {
396  escapedQuery = KeywordSearchUtil.escapeLuceneQuery(keywordString);
397  escaped = true;
398  }
399  }
400 
401  @Override
402  synchronized public boolean isEscaped() {
403  return escaped;
404  }
405 
406  @Override
407  public boolean isLiteral() {
408  return false;
409  }
410 
411  @Override
412  public String getQueryString() {
413  return originalKeyword.getSearchTerm();
414  }
415 
416  @Override
417  synchronized public String getEscapedQueryString() {
418  if (false == isEscaped()) {
419  escape();
420  }
421  return escapedQuery;
422  }
423 
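 /**
  * Creates a TSK_KEYWORD_HIT blackboard artifact for a hit, with TSK_KEYWORD,
  * TSK_KEYWORD_REGEXP, TSK_KEYWORD_SEARCH_TYPE and, where available,
  * TSK_SET_NAME, TSK_KEYWORD_PREVIEW and TSK_ASSOCIATED_ARTIFACT attributes.
  * Credit card number hits are instead routed to createCCNAccount() and no
  * artifact is returned.
  *
  * @return The new artifact, or null if the hit was a credit card number hit
  *         or an error occurred.
  */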
440  @Override
441  public BlackboardArtifact postKeywordHitToBlackboard(Content content, Keyword foundKeyword, KeywordHit hit, String snippet, String listName) {
442  final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
443 
444  if (content == null) {
445  LOGGER.log(Level.WARNING, "Error adding artifact for keyword hit to blackboard"); //NON-NLS
446  return null;
447  }
448 
449  /*
450  * Credit Card number hits are handled differently
451  */
452  if (originalKeyword.getArtifactAttributeType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
453  createCCNAccount(content, foundKeyword, hit, snippet, listName);
454  return null;
455  }
456 
457  /*
458  * Create a "plain vanilla" keyword hit artifact with keyword and
459  * regex attributes
460  */
461  BlackboardArtifact newArtifact;
462  Collection<BlackboardAttribute> attributes = new ArrayList<>();
463 
464  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, foundKeyword.getSearchTerm()));
465  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP, MODULE_NAME, getQueryString()));
466 
467  try {
468  newArtifact = content.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
469  } catch (TskCoreException ex) {
470  LOGGER.log(Level.SEVERE, "Error adding artifact for keyword hit to blackboard", ex); //NON-NLS
471  return null;
472  }
473 
474  if (StringUtils.isNotBlank(listName)) {
475  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
476  }
477  if (snippet != null) {
478  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
479  }
480 
481  hit.getArtifactID().ifPresent(artifactID
482  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, artifactID))
483  );
484 
485  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE, MODULE_NAME, KeywordSearch.QueryType.REGEX.ordinal()));
486 
487  try {
488  newArtifact.addAttributes(attributes);
489  return newArtifact;
490  } catch (TskCoreException e) {
491  LOGGER.log(Level.SEVERE, "Error adding bb attributes for terms search artifact", e); //NON-NLS
492  return null;
493  }
494  }
495 
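 /**
  * Creates a credit card account file instance for a credit card number hit.
  * Account attributes are parsed from the track 1/track 2 data in the
  * snippet and augmented with issuer information looked up from the bank
  * identification number (BIN).
  */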
496  private void createCCNAccount(Content content, Keyword foundKeyword, KeywordHit hit, String snippet, String listName) {
497 
498  final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
499 
500  if (originalKeyword.getArtifactAttributeType() != ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
501  LOGGER.log(Level.SEVERE, "Keyword hit is not a credit card number"); //NON-NLS
502  return;
503  }
504  /*
505  * Create a credit card account with attributes
506  * parsed from the snippet for the hit and looked up based on the
507  * parsed bank identification number.
508  */
509  Collection<BlackboardAttribute> attributes = new ArrayList<>();
510 
511  Map<BlackboardAttribute.Type, BlackboardAttribute> parsedTrackAttributeMap = new HashMap<>();
512  Matcher matcher = TermsComponentQuery.CREDIT_CARD_TRACK1_PATTERN.matcher(hit.getSnippet());
513  if (matcher.find()) {
514  parseTrack1Data(parsedTrackAttributeMap, matcher);
515  }
516  matcher = CREDIT_CARD_TRACK2_PATTERN.matcher(hit.getSnippet());
517  if (matcher.find()) {
518  parseTrack2Data(parsedTrackAttributeMap, matcher);
519  }
520  final BlackboardAttribute ccnAttribute = parsedTrackAttributeMap.get(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_CARD_NUMBER));
521  if (ccnAttribute == null || StringUtils.isBlank(ccnAttribute.getValueString())) {
522 
523  if (hit.isArtifactHit()) {
524  LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getArtifactID().get())); //NON-NLS
525  } else {
526  try {
527  LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getContentID())); //NON-NLS
528  } catch (TskCoreException ex) {
529  LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s' ", foundKeyword.getSearchTerm(), hit.getSnippet())); //NON-NLS
530  LOGGER.log(Level.SEVERE, "There was an error getting contentID for keyword hit.", ex); //NON-NLS
531  }
532  }
533  return;
534  }
535  attributes.addAll(parsedTrackAttributeMap.values());
536 
537  /*
538  * Look up the bank name, scheme, etc. attributes for the bank
539  * identification number (BIN).
540  */
541  final int bin = Integer.parseInt(ccnAttribute.getValueString().substring(0, 8));
542  CreditCards.BankIdentificationNumber binInfo = CreditCards.getBINInfo(bin);
543  if (binInfo != null) {
544  binInfo.getScheme().ifPresent(scheme
545  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_SCHEME, MODULE_NAME, scheme)));
546  binInfo.getCardType().ifPresent(cardType
547  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CARD_TYPE, MODULE_NAME, cardType)));
548  binInfo.getBrand().ifPresent(brand
549  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BRAND_NAME, MODULE_NAME, brand)));
550  binInfo.getBankName().ifPresent(bankName
551  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_BANK_NAME, MODULE_NAME, bankName)));
552  binInfo.getBankPhoneNumber().ifPresent(phoneNumber
553  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber)));
554  binInfo.getBankURL().ifPresent(url
555  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, MODULE_NAME, url)));
556  binInfo.getCountry().ifPresent(country
557  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNTRY, MODULE_NAME, country)));
558  binInfo.getBankCity().ifPresent(city
559  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_CITY, MODULE_NAME, city)));
560  }
561 
562  /*
563  * If the hit is from unused or unallocated space, record the Solr
564  * document id to support showing just the chunk that contained the
565  * hit.
566  */
567  if (content instanceof AbstractFile) {
568  AbstractFile file = (AbstractFile) content;
569  if (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS
570  || file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
571  attributes.add(new BlackboardAttribute(KEYWORD_SEARCH_DOCUMENT_ID, MODULE_NAME, hit.getSolrDocumentId()));
572  }
573  }
574 
575  if (StringUtils.isNotBlank(listName)) {
576  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
577  }
578  if (snippet != null) {
579  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
580  }
581 
582  hit.getArtifactID().ifPresent(artifactID
583  -> attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, artifactID))
584  );
585 
586  attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE, MODULE_NAME, KeywordSearch.QueryType.REGEX.ordinal()));
587 
588 
589  /*
590  * Create an account instance.
591  */
592  try {
593  AccountFileInstance ccAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.CREDIT_CARD, ccnAttribute.getValueString(), MODULE_NAME, content);
594 
595  ccAccountInstance.addAttributes(attributes);
596 
597  } catch (TskCoreException ex) {
598  LOGGER.log(Level.SEVERE, "Error creating CCN account instance", ex); //NON-NLS
599 
600  }
601 
602  }
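
 /**
  * Parses track 2 data from a matcher that has matched a snippet against
  * CREDIT_CARD_TRACK2_PATTERN, adding the account number, expiration date,
  * service code, discretionary data, and LRC to the attribute map if they
  * have not already been captured.
  */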
611  static private void parseTrack2Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributesMap, Matcher matcher) {
612  addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_NUMBER, "accountNumber", matcher);
613  addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_EXPIRATION, "expiration", matcher);
614  addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_SERVICE_CODE, "serviceCode", matcher);
615  addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_DISCRETIONARY, "discretionary", matcher);
616  addAttributeIfNotAlreadyCaptured(attributesMap, ATTRIBUTE_TYPE.TSK_CARD_LRC, "LRC", matcher);
617  }
618 
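 /**
  * Parses track 1 data from a matcher that has matched a snippet against
  * TermsComponentQuery.CREDIT_CARD_TRACK1_PATTERN. Track 1 carries the same
  * fields as track 2 plus the card holder's name.
  */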
628  static private void parseTrack1Data(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, Matcher matcher) {
629  parseTrack2Data(attributeMap, matcher);
630  addAttributeIfNotAlreadyCaptured(attributeMap, ATTRIBUTE_TYPE.TSK_NAME_PERSON, "name", matcher);
631  }
632 
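 /**
  * Adds an attribute of the given type to the attribute map, unless the map
  * already contains one, taking the value from the named capture group of
  * the matcher. For TSK_CARD_NUMBER the raw value is also recorded as a
  * TSK_KEYWORD attribute before spaces and dashes are stripped from the
  * account number.
  */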
645  static private void addAttributeIfNotAlreadyCaptured(Map<BlackboardAttribute.Type, BlackboardAttribute> attributeMap, ATTRIBUTE_TYPE attrType, String groupName, Matcher matcher) {
646  BlackboardAttribute.Type type = new BlackboardAttribute.Type(attrType);
647  attributeMap.computeIfAbsent(type, t -> {
648  String value = matcher.group(groupName);
649  if (attrType.equals(ATTRIBUTE_TYPE.TSK_CARD_NUMBER)) {
650  attributeMap.put(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_KEYWORD),
651  new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, value));
652  value = CharMatcher.anyOf(" -").removeFrom(value);
653  }
654  if (StringUtils.isNotBlank(value)) {
655  return new BlackboardAttribute(attrType, MODULE_NAME, value);
656  } else {
657  return null;
658  }
659  });
660  }
661 }
