Autopsy 4.5.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
SearchRunner.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2014 - 2017 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.keywordsearch;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import org.netbeans.api.progress.aggregate.AggregateProgressFactory;
import org.netbeans.api.progress.aggregate.AggregateProgressHandle;
import org.netbeans.api.progress.aggregate.ProgressContributor;
import org.openide.util.Cancellable;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;

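/**
 * Singleton that performs the keyword searches for keyword search ingest
 * jobs. Periodic searches are run on a scheduler thread while a job is
 * active, and a final search is run when the last module instance of a job
 * calls endJob(). Results are written to the blackboard and posted to the
 * ingest inbox.
 */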
final class SearchRunner {

    private static final Logger logger = Logger.getLogger(SearchRunner.class.getName());
    private static SearchRunner instance = null;
    private IngestServices services = IngestServices.getInstance();
    private Ingester ingester = null;
    private long currentUpdateIntervalMs;
    private volatile boolean periodicSearchTaskRunning = false;
    private Future<?> jobProcessingTaskFuture;
    private final ScheduledThreadPoolExecutor jobProcessingExecutor;
    private static final int NUM_SEARCH_SCHEDULING_THREADS = 1;
    private static final String SEARCH_SCHEDULER_THREAD_NAME = "periodic-search-scheduler-%d";

    // maps a jobID to the search
    private Map<Long, SearchJobInfo> jobs = new ConcurrentHashMap<>();

    SearchRunner() {
        currentUpdateIntervalMs = ((long) KeywordSearchSettings.getUpdateFrequency().getTime()) * 60 * 1000;
        ingester = Ingester.getDefault();
        jobProcessingExecutor = new ScheduledThreadPoolExecutor(NUM_SEARCH_SCHEDULING_THREADS, new ThreadFactoryBuilder().setNameFormat(SEARCH_SCHEDULER_THREAD_NAME).build());
    }

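    /**
     * Gets the singleton instance of this class, creating it if necessary.
     *
     * @return the SearchRunner singleton
     */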
    public static synchronized SearchRunner getInstance() {
        if (instance == null) {
            instance = new SearchRunner();
        }
        return instance;
    }

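    /**
     * Registers a keyword search ingest job and, if it is not already
     * running, schedules the periodic search task.
     *
     * @param jobContext       the context of the ingest job
     * @param keywordListNames the names of the keyword lists to search
     */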
    public synchronized void startJob(IngestJobContext jobContext, List<String> keywordListNames) {
        long jobId = jobContext.getJobId();
        if (jobs.containsKey(jobId) == false) {
            logger.log(Level.INFO, "Adding job {0}", jobId); //NON-NLS
            SearchJobInfo jobData = new SearchJobInfo(jobContext, keywordListNames);
            jobs.put(jobId, jobData);
        }

        // keep track of how many threads / module instances from this job have asked for this
        jobs.get(jobId).incrementModuleReferenceCount();

        // start the timer, if needed
        if ((jobs.size() > 0) && (periodicSearchTaskRunning == false)) {
            // reset the default periodic search frequency to the user setting
            logger.log(Level.INFO, "Resetting periodic search time out to default value"); //NON-NLS
            currentUpdateIntervalMs = ((long) KeywordSearchSettings.getUpdateFrequency().getTime()) * 60 * 1000;
            jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS);
            periodicSearchTaskRunning = true;
        }
    }

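    /**
     * Ends a keyword search ingest job. When the last module instance of the
     * job calls this method, a final search is run and, if no jobs remain,
     * the periodic search task is stopped.
     *
     * @param jobId the id of the job to end
     */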
    public synchronized void endJob(long jobId) {
        SearchJobInfo job;
        boolean readyForFinalSearch = false;
        job = jobs.get(jobId);
        if (job == null) {
            return;
        }

        // Only do final search if this is the last module/thread in this job to call endJob()
        if (job.decrementModuleReferenceCount() == 0) {
            jobs.remove(jobId);
            readyForFinalSearch = true;
        }

        if (readyForFinalSearch) {
            logger.log(Level.INFO, "Committing search index before final search for search job {0}", job.getJobId()); //NON-NLS
            commit();
            doFinalSearch(job); //this will block until it's done

            // new jobs could have been added while we were doing final search
            if (jobs.isEmpty()) {
                // no more jobs left. stop the PeriodicSearchTask.
                // A new one will be created for future jobs.
                logger.log(Level.INFO, "No more search jobs. Stopping periodic search task"); //NON-NLS
                periodicSearchTaskRunning = false;
                jobProcessingTaskFuture.cancel(true);
            }
        }
    }

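    /**
     * Stops the searches for a cancelled ingest job, cancelling any searcher
     * that is currently running for it.
     *
     * @param jobId the id of the job to stop
     */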
    public synchronized void stopJob(long jobId) {
        logger.log(Level.INFO, "Stopping search job {0}", jobId); //NON-NLS
        commit();

        SearchJobInfo job;
        job = jobs.get(jobId);
        if (job == null) {
            return;
        }

        //stop currentSearcher
        SearchRunner.Searcher currentSearcher = job.getCurrentSearcher();
        if ((currentSearcher != null) && (!currentSearcher.isDone())) {
            logger.log(Level.INFO, "Cancelling search job {0}", jobId); //NON-NLS
            currentSearcher.cancel(true);
        }

        jobs.remove(jobId);

        if (jobs.isEmpty()) {
            // no more jobs left. stop the PeriodicSearchTask.
            // A new one will be created for future jobs.
            logger.log(Level.INFO, "No more search jobs. Stopping periodic search task"); //NON-NLS
            periodicSearchTaskRunning = false;
            jobProcessingTaskFuture.cancel(true);
        }
    }

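    /**
     * Adds the given keyword lists to every active search job.
     *
     * @param keywordListNames the names of the keyword lists to add
     */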
    public synchronized void addKeywordListsToAllJobs(List<String> keywordListNames) {
        for (String listName : keywordListNames) {
            logger.log(Level.INFO, "Adding keyword list {0} to all jobs", listName); //NON-NLS
            for (SearchJobInfo j : jobs.values()) {
                j.addKeywordListName(listName);
            }
        }
    }

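    /**
     * Commits the search index and notifies listeners of the new number of
     * indexed files.
     */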
    private void commit() {
        ingester.commit();

        // Signal a potential change in number of text_ingested files
        try {
            final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
            KeywordSearch.fireNumIndexedFilesChange(null, numIndexedFiles);
        } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
            logger.log(Level.SEVERE, "Error executing Solr query to check number of indexed files", ex); //NON-NLS
        }
    }

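    /**
     * Runs one final search for a job. Waits for any searcher already running
     * for the job to finish, then blocks until the final search completes.
     *
     * @param job the search job to run the final search for
     */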
    private void doFinalSearch(SearchJobInfo job) {
        // Run one last search as there are probably some new files committed
        logger.log(Level.INFO, "Starting final search for search job {0}", job.getJobId()); //NON-NLS
        if (!job.getKeywordListNames().isEmpty()) {
            try {
                // In case this job still has a worker running, wait for it to finish
                logger.log(Level.INFO, "Checking for previous search for search job {0} before executing final search", job.getJobId()); //NON-NLS
                job.waitForCurrentWorker();

                SearchRunner.Searcher finalSearcher = new SearchRunner.Searcher(job, true);
                job.setCurrentSearcher(finalSearcher); //save the ref
                logger.log(Level.INFO, "Kicking off final search for search job {0}", job.getJobId()); //NON-NLS
                finalSearcher.execute(); //start thread

                // block until the search is complete
                logger.log(Level.INFO, "Waiting for final search for search job {0}", job.getJobId()); //NON-NLS
                finalSearcher.get();
                logger.log(Level.INFO, "Final search for search job {0} completed", job.getJobId()); //NON-NLS

            } catch (InterruptedException | CancellationException ex) {
                logger.log(Level.INFO, "Final search for search job {0} interrupted or cancelled", job.getJobId()); //NON-NLS
            } catch (ExecutionException ex) {
                logger.log(Level.SEVERE, String.format("Final search for search job %d failed", job.getJobId()), ex); //NON-NLS
            }
        }
    }

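    /**
     * Task that runs on the scheduler thread, performs a periodic search for
     * every active job, and then reschedules itself using the current search
     * interval.
     */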
    private final class PeriodicSearchTask implements Runnable {

        private final Logger logger = Logger.getLogger(SearchRunner.PeriodicSearchTask.class.getName());

        @Override
        public void run() {
            // If no jobs then cancel the task. If more job(s) come along, a new task will start up.
            if (jobs.isEmpty() || jobProcessingTaskFuture.isCancelled()) {
                logger.log(Level.INFO, "Exiting periodic search task"); //NON-NLS
                periodicSearchTaskRunning = false;
                return;
            }

            commit();

            logger.log(Level.INFO, "Starting periodic searches");
            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            // NOTE: contents of "jobs" ConcurrentHashMap can be modified in stopJob() and endJob() while we are inside this loop
            for (Iterator<Entry<Long, SearchJobInfo>> iterator = jobs.entrySet().iterator(); iterator.hasNext();) {
                SearchJobInfo job = iterator.next().getValue();

                if (jobProcessingTaskFuture.isCancelled()) {
                    logger.log(Level.INFO, "Search has been cancelled. Exiting periodic search task."); //NON-NLS
                    periodicSearchTaskRunning = false;
                    return;
                }

                // If no lists or the worker is already running then skip it
                if (!job.getKeywordListNames().isEmpty() && !job.isWorkerRunning()) {
                    // Spawn a search thread for each job
                    logger.log(Level.INFO, "Executing periodic search for search job {0}", job.getJobId());
                    Searcher searcher = new Searcher(job); // SwingWorker
                    job.setCurrentSearcher(searcher); //save the ref
                    searcher.execute(); //start thread
                    job.setWorkerRunning(true);

                    try {
                        // wait for the searcher to finish
                        searcher.get();
                    } catch (InterruptedException | ExecutionException ex) {
                        logger.log(Level.SEVERE, "Error performing keyword search: {0}", ex.getMessage()); //NON-NLS
                        services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(),
                                NbBundle.getMessage(this.getClass(), "SearchRunner.Searcher.done.err.msg"), ex.getMessage()));
                    } catch (java.util.concurrent.CancellationException ex) {
                        // catch and ignore if we were cancelled
                    }
                }
            }
            stopWatch.stop();
            logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS

            // calculate "hold off" time
            recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs()); // ELDEBUG

            // schedule next PeriodicSearchTask
            jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS);

            // exit this thread
            return;
        }

        private void recalculateUpdateIntervalTime(long lastSearchTimeSec) {
            // If the periodic search took more than 1/4 of the current periodic search interval, then double the search interval
            if (lastSearchTimeSec * 1000 < currentUpdateIntervalMs / 4) {
                return;
            }
            // double the search interval
            currentUpdateIntervalMs = currentUpdateIntervalMs * 2;
            logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSearchTimeSec, currentUpdateIntervalMs / 1000});
            return;
        }
    }

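    /**
     * Data for a single keyword search ingest job: the keyword lists to
     * search, the results seen so far, the currently running searcher, and a
     * reference count of the module instances belonging to the job.
     */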
    private class SearchJobInfo {

        private final IngestJobContext jobContext;
        private final long jobId;
        private final long dataSourceId;
        // mutable state:
        private volatile boolean workerRunning;
        private List<String> keywordListNames; //guarded by SearchJobInfo.this

        // Map of keyword to the object ids that contain a hit
        private Map<Keyword, Set<Long>> currentResults; //guarded by SearchJobInfo.this
        private SearchRunner.Searcher currentSearcher;
        private AtomicLong moduleReferenceCount = new AtomicLong(0);
        private final Object finalSearchLock = new Object(); //used for a condition wait

        private SearchJobInfo(IngestJobContext jobContext, List<String> keywordListNames) {
            this.jobContext = jobContext;
            this.jobId = jobContext.getJobId();
            this.dataSourceId = jobContext.getDataSource().getId();
            this.keywordListNames = new ArrayList<>(keywordListNames);
            currentResults = new HashMap<>();
            workerRunning = false;
            currentSearcher = null;
        }

        private IngestJobContext getJobContext() {
            return jobContext;
        }

        private long getJobId() {
            return jobId;
        }

        private long getDataSourceId() {
            return dataSourceId;
        }

        private synchronized List<String> getKeywordListNames() {
            return new ArrayList<>(keywordListNames);
        }

        private synchronized void addKeywordListName(String keywordListName) {
            if (!keywordListNames.contains(keywordListName)) {
                keywordListNames.add(keywordListName);
            }
        }

        private synchronized Set<Long> currentKeywordResults(Keyword k) {
            return currentResults.get(k);
        }

        private synchronized void addKeywordResults(Keyword k, Set<Long> resultsIDs) {
            currentResults.put(k, resultsIDs);
        }

        private boolean isWorkerRunning() {
            return workerRunning;
        }

        private void setWorkerRunning(boolean flag) {
            workerRunning = flag;
        }

        private synchronized SearchRunner.Searcher getCurrentSearcher() {
            return currentSearcher;
        }

        private synchronized void setCurrentSearcher(SearchRunner.Searcher searchRunner) {
            currentSearcher = searchRunner;
        }

        private void incrementModuleReferenceCount() {
            moduleReferenceCount.incrementAndGet();
        }

        private long decrementModuleReferenceCount() {
            return moduleReferenceCount.decrementAndGet();
        }

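        /**
         * Waits for the currently running searcher, if any, to signal that it
         * has finished.
         *
         * @throws InterruptedException if the wait is interrupted
         */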
        private void waitForCurrentWorker() throws InterruptedException {
            synchronized (finalSearchLock) {
                while (workerRunning) {
                    logger.log(Level.INFO, "Waiting for previous worker to finish"); //NON-NLS
                    finalSearchLock.wait(); //wait() releases the lock
                    logger.log(Level.INFO, "Notified previous worker finished"); //NON-NLS
                }
            }
        }

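        /**
         * Marks the worker as no longer running and wakes up any thread
         * waiting in waitForCurrentWorker().
         */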
        private void searchNotify() {
            synchronized (finalSearchLock) {
                logger.log(Level.INFO, "Notifying after finishing search"); //NON-NLS
                workerRunning = false;
                finalSearchLock.notify();
            }
        }
    }

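    /**
     * SwingWorker that searches the index for the job's keywords, writes new
     * hits to the blackboard, and posts inbox messages. Only hits that have
     * not been seen by earlier searches for the same job are reported.
     */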
    private final class Searcher extends SwingWorker<Object, Void> {

        //the searcher works on its own snapshot of the job's keyword lists
        private SearchJobInfo job;
        private List<Keyword> keywords; //keywords to search
        private List<String> keywordListNames; // lists currently being searched
        private List<KeywordList> keywordLists;
        private Map<Keyword, KeywordList> keywordToList; //keyword to list name mapping
        private AggregateProgressHandle progressGroup;
        private final Logger logger = Logger.getLogger(SearchRunner.Searcher.class.getName());
        private boolean finalRun = false;

        Searcher(SearchJobInfo job) {
            this.job = job;
            keywordListNames = job.getKeywordListNames();
            keywords = new ArrayList<>();
            keywordToList = new HashMap<>();
            keywordLists = new ArrayList<>();
            //keywords are populated as searcher runs
        }

        Searcher(SearchJobInfo job, boolean finalRun) {
            this(job);
            this.finalRun = finalRun;
        }

        @Override
        @Messages("SearchRunner.query.exception.msg=Error performing query:")
        protected Object doInBackground() throws Exception {
            final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName")
                    + (finalRun ? (" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : "");
            final String pgDisplayName = displayName + (" (" + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") + ")");
            progressGroup = AggregateProgressFactory.createSystemHandle(pgDisplayName, null, new Cancellable() {
                @Override
                public boolean cancel() {
                    logger.log(Level.INFO, "Cancelling the searcher by user."); //NON-NLS
                    if (progressGroup != null) {
                        progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg"));
                    }
                    return SearchRunner.Searcher.this.cancel(true);
                }
            }, null);

            updateKeywords();

            ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()];
            int i = 0;
            for (Keyword keywordQuery : keywords) {
                subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm());
                progressGroup.addContributor(subProgresses[i]);
                i++;
            }

            progressGroup.start();

            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            try {
                progressGroup.setDisplayName(displayName);

                int keywordsSearched = 0;

                for (Keyword keyword : keywords) {
                    if (this.isCancelled() || this.job.getJobContext().fileIngestIsCancelled()) {
                        logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS
                        return null;
                    }

                    final KeywordList keywordList = keywordToList.get(keyword);

                    //new subProgress will be active after the initial query
                    //when we know number of hits to start() with
                    if (keywordsSearched > 0) {
                        subProgresses[keywordsSearched - 1].finish();
                    }

                    KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList);

                    // Filtering
                    //limit search to currently ingested data sources
                    //set up a filter with 1 or more image ids OR'ed
                    final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId());
                    keywordSearchQuery.addFilter(dataSourceFilter);

                    QueryResults queryResults;

                    // Do the actual search
                    try {
                        queryResults = keywordSearchQuery.performQuery();
                    } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
                        logger.log(Level.SEVERE, "Error performing query: " + keyword.getSearchTerm(), ex); //NON-NLS
                        MessageNotifyUtil.Notify.error(Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(), ex.getCause().getMessage());
                        //no reason to continue with next query if recovery failed
                        //or wait for recovery to kick in and run again later
                        //likely case has closed and threads are being interrupted
                        return null;
                    } catch (CancellationException e) {
                        logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS
                        return null;
                    }

                    // Reduce the results of the query to only those hits we
                    // have not already seen.
                    QueryResults newResults = filterResults(queryResults);

                    if (!newResults.getKeywords().isEmpty()) {

                        // Write results to BB
                        //scale the progress bar to be more granular, with per-result sub-progress within each keyword
                        int totalUnits = newResults.getKeywords().size();
                        subProgresses[keywordsSearched].start(totalUnits);
                        int unitProgress = 0;
                        String queryDisplayStr = keyword.getSearchTerm();
                        if (queryDisplayStr.length() > 50) {
                            queryDisplayStr = queryDisplayStr.substring(0, 49) + "...";
                        }
                        subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress);

                        // Create blackboard artifacts
                        newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages());

                    } //if has results

                    //reset the status text before it goes away
                    subProgresses[keywordsSearched].progress("");

                    ++keywordsSearched;

                } //for each keyword

            } //end try block
            catch (Exception ex) {
                logger.log(Level.WARNING, "searcher exception occurred", ex); //NON-NLS
            } finally {
                try {
                    finalizeSearcher();
                    stopWatch.stop();
                    logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS
                } finally {
                    // In case a thread is waiting on this worker to be done
                    job.searchNotify();
                }
            }

            return null;
        }

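        /**
         * Reloads the keywords, keyword lists, and keyword-to-list mapping
         * from the currently configured keyword lists.
         */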
        private void updateKeywords() {
            XmlKeywordSearchList loader = XmlKeywordSearchList.getCurrent();

            keywords.clear();
            keywordToList.clear();
            keywordLists.clear();

            for (String name : keywordListNames) {
                KeywordList list = loader.getList(name);
                keywordLists.add(list);
                for (Keyword k : list.getKeywords()) {
                    keywords.add(k);
                    keywordToList.put(k, list);
                }
            }
        }

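        /**
         * Finishes the aggregate progress handle on the EDT when the searcher
         * is done.
         */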
        private void finalizeSearcher() {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    progressGroup.finish();
                }
            });
        }

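        /**
         * Reduces the results of a search to only those hits that have not
         * been seen by earlier searches for this job, so that only new hits
         * are written to the blackboard.
         *
         * @param queryResult the results of the most recent search
         *
         * @return the hits that have not been seen before
         */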
        private QueryResults filterResults(QueryResults queryResult) {

            // Create a new (empty) QueryResults object to hold the most recently
            // found hits.
            QueryResults newResults = new QueryResults(queryResult.getQuery());

            // For each keyword represented in the results.
            for (Keyword keyword : queryResult.getKeywords()) {
                // These are all of the hits across all objects for the most recent search.
                // This may well include duplicates of hits we've seen in earlier periodic searches.
                List<KeywordHit> queryTermResults = queryResult.getResults(keyword);

                // Sort the hits for this keyword so that we are always
                // guaranteed to return the hit for the lowest chunk.
                Collections.sort(queryTermResults);

                // This will be used to build up the hits we haven't seen before
                // for this keyword.
                List<KeywordHit> newUniqueHits = new ArrayList<>();

                // Get the set of object ids seen in the past by this searcher
                // for the given keyword.
                Set<Long> curTermResults = job.currentKeywordResults(keyword);
                if (curTermResults == null) {
                    // We create a new empty set if we haven't seen results for
                    // this keyword before.
                    curTermResults = new HashSet<>();
                }

                // For each hit for this keyword.
                for (KeywordHit hit : queryTermResults) {
                    if (curTermResults.contains(hit.getSolrObjectId())) {
                        // Skip the hit if we've already seen a hit for
                        // this keyword in the object.
                        continue;
                    }

                    // We haven't seen the hit before so add it to list of new
                    // unique hits.
                    newUniqueHits.add(hit);

                    // Add the object id to the results we've seen for this
                    // keyword.
                    curTermResults.add(hit.getSolrObjectId());
                }

                // Update the job with the list of objects for which we have
                // seen hits for the current keyword.
                job.addKeywordResults(keyword, curTermResults);

                // Add the new hits for the current keyword into the results
                // to be returned.
                newResults.addResult(keyword, newUniqueHits);
            }

            return newResults;
        }
    }
}
