Autopsy 4.19.1
Graphical digital forensics platform for The Sleuth Kit and other tools.
IngestSearchRunner.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2014 - 2017 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.keywordsearch;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import org.netbeans.api.progress.aggregate.AggregateProgressFactory;
import org.netbeans.api.progress.aggregate.AggregateProgressHandle;
import org.netbeans.api.progress.aggregate.ProgressContributor;
import org.openide.util.Cancellable;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;

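/**
 * Singleton keyword search manager: launches a Searcher for each ingest job on
 * a periodic schedule, commits the Solr index, and runs a final search when a
 * job completes.
 */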
final class IngestSearchRunner {

    private static final Logger logger = Logger.getLogger(IngestSearchRunner.class.getName());
    private static IngestSearchRunner instance = null;
    private IngestServices services = IngestServices.getInstance();
    private Ingester ingester = null;
    private long currentUpdateIntervalMs;
    private volatile boolean periodicSearchTaskRunning = false;
    private Future<?> jobProcessingTaskFuture;
    private final ScheduledThreadPoolExecutor jobProcessingExecutor;
    private static final int NUM_SEARCH_SCHEDULING_THREADS = 1;
    private static final String SEARCH_SCHEDULER_THREAD_NAME = "periodic-search-scheduler-%d";

    // maps a jobID to the search
    private Map<Long, SearchJobInfo> jobs = new ConcurrentHashMap<>();

    IngestSearchRunner() {
        currentUpdateIntervalMs = ((long) KeywordSearchSettings.getUpdateFrequency().getTime()) * 60 * 1000;
        ingester = Ingester.getDefault();
        jobProcessingExecutor = new ScheduledThreadPoolExecutor(NUM_SEARCH_SCHEDULING_THREADS, new ThreadFactoryBuilder().setNameFormat(SEARCH_SCHEDULER_THREAD_NAME).build());
    }

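    /**
     * Returns the singleton instance, creating it on first use.
     */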
    public static synchronized IngestSearchRunner getInstance() {
        if (instance == null) {
            instance = new IngestSearchRunner();
        }
        return instance;
    }

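    /**
     * Registers an ingest job for keyword searching and schedules the periodic
     * search task if it is not already running.
     *
     * @param jobContext       the ingest job context
     * @param keywordListNames names of the keyword lists to search
     */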
    public synchronized void startJob(IngestJobContext jobContext, List<String> keywordListNames) {
        long jobId = jobContext.getJobId();
        if (jobs.containsKey(jobId) == false) {
            logger.log(Level.INFO, "Adding job {0}", jobId); //NON-NLS
            SearchJobInfo jobData = new SearchJobInfo(jobContext, keywordListNames);
            jobs.put(jobId, jobData);
        }

        // keep track of how many threads / module instances from this job have asked for this
        jobs.get(jobId).incrementModuleReferenceCount();

        // start the timer, if needed
        if ((jobs.size() > 0) && (periodicSearchTaskRunning == false)) {
            // reset the default periodic search frequency to the user setting
            logger.log(Level.INFO, "Resetting periodic search time out to default value"); //NON-NLS
            currentUpdateIntervalMs = ((long) KeywordSearchSettings.getUpdateFrequency().getTime()) * 60 * 1000;
            jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS);
            periodicSearchTaskRunning = true;
        }
    }

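    /**
     * Performs the final search for an ingest job and removes the job once the
     * last module instance that registered it calls this method.
     *
     * @param jobId the ingest job ID
     */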
    public synchronized void endJob(long jobId) {
        SearchJobInfo job;
        boolean readyForFinalSearch = false;
        job = jobs.get(jobId);
        if (job == null) {
            return;
        }

        // Only do final search if this is the last module/thread in this job to call endJob()
        if (job.decrementModuleReferenceCount() == 0) {
            jobs.remove(jobId);
            readyForFinalSearch = true;
        }

        if (readyForFinalSearch) {
            logger.log(Level.INFO, "Committing search index before final search for search job {0}", job.getJobId()); //NON-NLS
            commit();
            doFinalSearch(job); //this will block until it's done

            // new jobs could have been added while we were doing final search
            if (jobs.isEmpty()) {
                // no more jobs left. stop the PeriodicSearchTask.
                // A new one will be created for future jobs.
                logger.log(Level.INFO, "No more search jobs. Stopping periodic search task"); //NON-NLS
                periodicSearchTaskRunning = false;
                jobProcessingTaskFuture.cancel(true);
            }
        }
    }

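    /**
     * Stops the search job for an ingest job immediately, cancelling any
     * in-progress searcher, and removes the job.
     *
     * @param jobId the ingest job ID
     */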
    public synchronized void stopJob(long jobId) {
        logger.log(Level.INFO, "Stopping search job {0}", jobId); //NON-NLS
        commit();

        SearchJobInfo job;
        job = jobs.get(jobId);
        if (job == null) {
            return;
        }

        //stop currentSearcher
        IngestSearchRunner.Searcher currentSearcher = job.getCurrentSearcher();
        if ((currentSearcher != null) && (!currentSearcher.isDone())) {
            logger.log(Level.INFO, "Cancelling search job {0}", jobId); //NON-NLS
            currentSearcher.cancel(true);
        }

        jobs.remove(jobId);

        if (jobs.isEmpty()) {
            // no more jobs left. stop the PeriodicSearchTask.
            // A new one will be created for future jobs.
            logger.log(Level.INFO, "No more search jobs. Stopping periodic search task"); //NON-NLS
            periodicSearchTaskRunning = false;
            jobProcessingTaskFuture.cancel(true);
        }
    }

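    /**
     * Adds the given keyword lists to all currently running search jobs.
     *
     * @param keywordListNames names of the keyword lists to add
     */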
    public synchronized void addKeywordListsToAllJobs(List<String> keywordListNames) {
        for (String listName : keywordListNames) {
            logger.log(Level.INFO, "Adding keyword list {0} to all jobs", listName); //NON-NLS
            for (SearchJobInfo j : jobs.values()) {
                j.addKeywordListName(listName);
            }
        }
    }

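    /**
     * Commits the Solr index and fires an event reporting the current number
     * of indexed files.
     */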
    private void commit() {
        ingester.commit();

        // Signal a potential change in number of text_ingested files
        try {
            final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
            KeywordSearch.fireNumIndexedFilesChange(null, numIndexedFiles);
        } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
            logger.log(Level.SEVERE, "Error executing Solr query to check number of indexed files", ex); //NON-NLS
        }
    }

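    /**
     * Runs one final search for a job after ingest has completed, blocking
     * until the search finishes.
     *
     * @param job the search job info
     */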
    private void doFinalSearch(SearchJobInfo job) {
        // Run one last search as there are probably some new files committed
        logger.log(Level.INFO, "Starting final search for search job {0}", job.getJobId()); //NON-NLS
        if (!job.getKeywordListNames().isEmpty()) {
            try {
                // In case this job still has a worker running, wait for it to finish
                logger.log(Level.INFO, "Checking for previous search for search job {0} before executing final search", job.getJobId()); //NON-NLS
                job.waitForCurrentWorker();

                IngestSearchRunner.Searcher finalSearcher = new IngestSearchRunner.Searcher(job, true);
                job.setCurrentSearcher(finalSearcher); //save the ref
                logger.log(Level.INFO, "Kicking off final search for search job {0}", job.getJobId()); //NON-NLS
                finalSearcher.execute(); //start thread

                // block until the search is complete
                logger.log(Level.INFO, "Waiting for final search for search job {0}", job.getJobId()); //NON-NLS
                finalSearcher.get();
                logger.log(Level.INFO, "Final search for search job {0} completed", job.getJobId()); //NON-NLS

            } catch (InterruptedException | CancellationException ex) {
                logger.log(Level.INFO, "Final search for search job {0} interrupted or cancelled", job.getJobId()); //NON-NLS
            } catch (ExecutionException ex) {
                logger.log(Level.SEVERE, String.format("Final search for search job %d failed", job.getJobId()), ex); //NON-NLS
            }
        }
    }

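    /**
     * Task that runs in the scheduler thread: commits the index, runs a
     * searcher for every job that has keyword lists, then reschedules itself.
     */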
    private final class PeriodicSearchTask implements Runnable {

        private final Logger logger = Logger.getLogger(IngestSearchRunner.PeriodicSearchTask.class.getName());

        @Override
        public void run() {
            // If no jobs then cancel the task. If more job(s) come along, a new task will start up.
            if (jobs.isEmpty() || jobProcessingTaskFuture.isCancelled()) {
                logger.log(Level.INFO, "Exiting periodic search task"); //NON-NLS
                periodicSearchTaskRunning = false;
                return;
            }

            commit();

            logger.log(Level.INFO, "Starting periodic searches");
            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            // NOTE: contents of "jobs" ConcurrentHashMap can be modified in stopJob() and endJob() while we are inside this loop
            for (Iterator<Entry<Long, SearchJobInfo>> iterator = jobs.entrySet().iterator(); iterator.hasNext();) {
                SearchJobInfo job = iterator.next().getValue();

                if (jobProcessingTaskFuture.isCancelled()) {
                    logger.log(Level.INFO, "Search has been cancelled. Exiting periodic search task."); //NON-NLS
                    periodicSearchTaskRunning = false;
                    return;
                }

                // If no lists or the worker is already running then skip it
                if (!job.getKeywordListNames().isEmpty() && !job.isWorkerRunning()) {
                    // Spawn a search thread for each job
                    logger.log(Level.INFO, "Executing periodic search for search job {0}", job.getJobId());
                    Searcher searcher = new Searcher(job); // SwingWorker
                    job.setCurrentSearcher(searcher); //save the ref
                    searcher.execute(); //start thread
                    job.setWorkerRunning(true);

                    try {
                        // wait for the searcher to finish
                        searcher.get();
                    } catch (InterruptedException | ExecutionException ex) {
                        logger.log(Level.SEVERE, "Error performing keyword search: {0}", ex.getMessage()); //NON-NLS
                        services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(),
                                NbBundle.getMessage(this.getClass(),
                                        "SearchRunner.Searcher.done.err.msg"), ex.getMessage()));
                    } // catch and ignore if we were cancelled
                    catch (java.util.concurrent.CancellationException ex) {
                    }
                }
            }
            stopWatch.stop();
            logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS

            // calculate "hold off" time
            recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs());

            // schedule next PeriodicSearchTask
            jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS);

            // exit this thread
            return;
        }

        private void recalculateUpdateIntervalTime(long lastSearchTimeSec) {
            // If the periodic search took more than 1/4 of the current search interval, double the interval
            if (lastSearchTimeSec * 1000 < currentUpdateIntervalMs / 4) {
                return;
            }
            // double the search interval
            currentUpdateIntervalMs = currentUpdateIntervalMs * 2;
            logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSearchTimeSec, currentUpdateIntervalMs / 1000});
            return;
        }
    }

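    /**
     * Keeps track of the state of a keyword search job for a single ingest
     * job: keyword lists, results seen so far, and the current searcher.
     */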
    private class SearchJobInfo {

        private final IngestJobContext jobContext;
        private final long jobId;
        private final long dataSourceId;
        // mutable state:
        private volatile boolean workerRunning;
        private List<String> keywordListNames; //guarded by SearchJobInfo.this

        // Map of keyword to the object ids that contain a hit
        private Map<Keyword, Set<Long>> currentResults; //guarded by SearchJobInfo.this
        private IngestSearchRunner.Searcher currentSearcher;
        private AtomicLong moduleReferenceCount = new AtomicLong(0);
        private final Object finalSearchLock = new Object(); //used for a condition wait

        private SearchJobInfo(IngestJobContext jobContext, List<String> keywordListNames) {
            this.jobContext = jobContext;
            this.jobId = jobContext.getJobId();
            this.dataSourceId = jobContext.getDataSource().getId();
            this.keywordListNames = new ArrayList<>(keywordListNames);
            currentResults = new HashMap<>();
            workerRunning = false;
            currentSearcher = null;
        }

        private IngestJobContext getJobContext() {
            return jobContext;
        }

        private long getJobId() {
            return jobId;
        }

        private long getDataSourceId() {
            return dataSourceId;
        }

        private synchronized List<String> getKeywordListNames() {
            return new ArrayList<>(keywordListNames);
        }

        private synchronized void addKeywordListName(String keywordListName) {
            if (!keywordListNames.contains(keywordListName)) {
                keywordListNames.add(keywordListName);
            }
        }

        private synchronized Set<Long> currentKeywordResults(Keyword k) {
            return currentResults.get(k);
        }

        private synchronized void addKeywordResults(Keyword k, Set<Long> resultsIDs) {
            currentResults.put(k, resultsIDs);
        }

        private boolean isWorkerRunning() {
            return workerRunning;
        }

        private void setWorkerRunning(boolean flag) {
            workerRunning = flag;
        }

        private synchronized IngestSearchRunner.Searcher getCurrentSearcher() {
            return currentSearcher;
        }

        private synchronized void setCurrentSearcher(IngestSearchRunner.Searcher searchRunner) {
            currentSearcher = searchRunner;
        }

        private void incrementModuleReferenceCount() {
            moduleReferenceCount.incrementAndGet();
        }

        private long decrementModuleReferenceCount() {
            return moduleReferenceCount.decrementAndGet();
        }

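        /**
         * Waits for the current searcher (if any) to finish, so that a final
         * search does not overlap a periodic search for the same job.
         *
         * @throws InterruptedException if the wait is interrupted
         */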
        private void waitForCurrentWorker() throws InterruptedException {
            synchronized (finalSearchLock) {
                while (workerRunning) {
                    logger.log(Level.INFO, "Waiting for previous worker to finish"); //NON-NLS
                    finalSearchLock.wait(); //wait() releases the lock
                    logger.log(Level.INFO, "Notified previous worker finished"); //NON-NLS
                }
            }
        }

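        /**
         * Marks the worker as finished and wakes up any thread blocked in
         * waitForCurrentWorker().
         */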
        private void searchNotify() {
            synchronized (finalSearchLock) {
                logger.log(Level.INFO, "Notifying after finishing search"); //NON-NLS
                workerRunning = false;
                finalSearchLock.notify();
            }
        }
    }

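    /**
     * SwingWorker that performs the keyword searches for one job. One searcher
     * runs per job per periodic search, plus one more for the final search.
     */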
    private final class Searcher extends SwingWorker<Object, Void> {

        private SearchJobInfo job;
        private List<Keyword> keywords; //keywords to search
        private List<String> keywordListNames; // lists currently being searched
        private List<KeywordList> keywordLists;
        private Map<Keyword, KeywordList> keywordToList; //keyword to list name mapping
        private AggregateProgressHandle progressGroup;
        private final Logger logger = Logger.getLogger(IngestSearchRunner.Searcher.class.getName());
        private boolean finalRun = false;

        Searcher(SearchJobInfo job) {
            this.job = job;
            keywordListNames = job.getKeywordListNames();
            keywords = new ArrayList<>();
            keywordToList = new HashMap<>();
            keywordLists = new ArrayList<>();
            //keywords are populated as searcher runs
        }

        Searcher(SearchJobInfo job, boolean finalRun) {
            this(job);
            this.finalRun = finalRun;
        }

        @Override
        @Messages("SearchRunner.query.exception.msg=Error performing query:")
        protected Object doInBackground() throws Exception {
            final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName")
                    + (finalRun ? (" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : "");
            final String pgDisplayName = displayName + (" (" + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") + ")");
            progressGroup = AggregateProgressFactory.createSystemHandle(pgDisplayName, null, new Cancellable() {
                @Override
                public boolean cancel() {
                    logger.log(Level.INFO, "Cancelling the searcher by user."); //NON-NLS
                    if (progressGroup != null) {
                        progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg"));
                    }
                    progressGroup.finish();
                    return IngestSearchRunner.Searcher.this.cancel(true);
                }
            }, null);

            updateKeywords();

            ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()];
            int i = 0;
            for (Keyword keywordQuery : keywords) {
                subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm());
                progressGroup.addContributor(subProgresses[i]);
                i++;
            }

            progressGroup.start();

            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            try {
                progressGroup.setDisplayName(displayName);

                int keywordsSearched = 0;

                for (Keyword keyword : keywords) {
                    if (this.isCancelled() || this.job.getJobContext().fileIngestIsCancelled()) {
                        logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS
                        return null;
                    }

                    final KeywordList keywordList = keywordToList.get(keyword);

                    //new subProgress will be active after the initial query
                    //when we know number of hits to start() with
                    if (keywordsSearched > 0) {
                        subProgresses[keywordsSearched - 1].finish();
                    }

                    KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList);

                    // Filtering
                    //limit search to currently ingested data sources
                    //set up a filter with 1 or more image ids OR'ed
                    final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId());
                    keywordSearchQuery.addFilter(dataSourceFilter);

                    QueryResults queryResults;

                    // Do the actual search
                    try {
                        queryResults = keywordSearchQuery.performQuery();
                    } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
                        logger.log(Level.SEVERE, "Error performing query: " + keyword.getSearchTerm(), ex); //NON-NLS
                        MessageNotifyUtil.Notify.error(Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(), ex.getCause().getMessage());
                        //no reason to continue with next query if recovery failed
                        //or wait for recovery to kick in and run again later
                        //likely case has closed and threads are being interrupted
                        return null;
                    } catch (CancellationException e) {
                        logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS
                        return null;
                    }

                    // Reduce the results of the query to only those hits we
                    // have not already seen.
                    QueryResults newResults = filterResults(queryResults);

                    if (!newResults.getKeywords().isEmpty()) {

                        // Write results to BB
                        // Make the progress bar more granular: per-result sub-progress within each keyword
                        int totalUnits = newResults.getKeywords().size();
                        subProgresses[keywordsSearched].start(totalUnits);
                        int unitProgress = 0;
                        String queryDisplayStr = keyword.getSearchTerm();
                        if (queryDisplayStr.length() > 50) {
                            queryDisplayStr = queryDisplayStr.substring(0, 49) + "...";
                        }
                        subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress);

                        // Create blackboard artifacts
                        newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true);

                    } //if has results

                    //reset the status text before it goes away
                    subProgresses[keywordsSearched].progress("");

                    ++keywordsSearched;

                } //for each keyword

            } //end try block
            catch (Exception ex) {
                logger.log(Level.WARNING, "searcher exception occurred", ex); //NON-NLS
            } finally {
                try {
                    finalizeSearcher();
                    stopWatch.stop();
                    logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS
                } finally {
                    // In case a thread is waiting on this worker to be done
                    job.searchNotify();
                }
            }

            return null;
        }

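        /**
         * Reloads the keywords, keyword lists, and keyword-to-list mapping
         * from the keyword lists currently selected for this job.
         */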
        private void updateKeywords() {
            XmlKeywordSearchList loader = XmlKeywordSearchList.getCurrent();

            keywords.clear();
            keywordToList.clear();
            keywordLists.clear();

            for (String name : keywordListNames) {
                KeywordList list = loader.getList(name);
                keywordLists.add(list);
                for (Keyword k : list.getKeywords()) {
                    keywords.add(k);
                    keywordToList.put(k, list);
                }
            }
        }

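        /**
         * Performs cleanup when the search finishes: closes the aggregate
         * progress handle on the EDT.
         */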
        private void finalizeSearcher() {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    progressGroup.finish();
                }
            });
        }

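        /**
         * Filters the query results down to hits not seen in earlier periodic
         * searches for this job, keeping at most one hit per object per keyword.
         *
         * @param queryResult the results of the most recent search
         *
         * @return the new, not-yet-seen hits
         */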
        private QueryResults filterResults(QueryResults queryResult) {

            // Create a new (empty) QueryResults object to hold the most recently
            // found hits.
            QueryResults newResults = new QueryResults(queryResult.getQuery());

            // For each keyword represented in the results.
            for (Keyword keyword : queryResult.getKeywords()) {
                // These are all of the hits across all objects for the most recent search.
                // This may well include duplicates of hits we've seen in earlier periodic searches.
                List<KeywordHit> queryTermResults = queryResult.getResults(keyword);

                // Sort the hits for this keyword so that we are always
                // guaranteed to return the hit for the lowest chunk.
                Collections.sort(queryTermResults);

                // This will be used to build up the hits we haven't seen before
                // for this keyword.
                List<KeywordHit> newUniqueHits = new ArrayList<>();

                // Get the set of object ids seen in the past by this searcher
                // for the given keyword.
                Set<Long> curTermResults = job.currentKeywordResults(keyword);
                if (curTermResults == null) {
                    // We create a new empty set if we haven't seen results for
                    // this keyword before.
                    curTermResults = new HashSet<>();
                }

                // For each hit for this keyword.
                for (KeywordHit hit : queryTermResults) {
                    if (curTermResults.contains(hit.getSolrObjectId())) {
                        // Skip the hit if we've already seen a hit for
                        // this keyword in the object.
                        continue;
                    }

                    // We haven't seen the hit before so add it to list of new
                    // unique hits.
                    newUniqueHits.add(hit);

                    // Add the object id to the results we've seen for this
                    // keyword.
                    curTermResults.add(hit.getSolrObjectId());
                }

                // Update the job with the list of objects for which we have
                // seen hits for the current keyword.
                job.addKeywordResults(keyword, curTermResults);

                // Add the new hits for the current keyword into the results
                // to be returned.
                newResults.addResult(keyword, newUniqueHits);
            }

            return newResults;
        }
    }
}

Copyright © 2012-2021 Basis Technology. Generated on: Thu Sep 30 2021
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.