19 package org.sleuthkit.autopsy.timeline.db;
 
   21 import com.google.common.cache.CacheBuilder;
 
   22 import com.google.common.cache.CacheLoader;
 
   23 import com.google.common.cache.LoadingCache;
 
   24 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
   25 import java.util.ArrayList;
 
   26 import java.util.Collection;
 
   27 import java.util.Collections;
 
   28 import java.util.EnumMap;
 
   29 import java.util.List;
 
   31 import static java.util.Objects.isNull;
 
   33 import java.util.concurrent.CancellationException;
 
   34 import java.util.concurrent.ExecutionException;
 
   35 import java.util.concurrent.Executor;
 
   36 import java.util.concurrent.Executors;
 
   37 import java.util.concurrent.TimeUnit;
 
   38 import java.util.function.Consumer;
 
   39 import java.util.logging.Level;
 
   40 import java.util.stream.Collectors;
 
   41 import javafx.application.Platform;
 
   42 import javafx.beans.property.ReadOnlyBooleanProperty;
 
   43 import javafx.beans.property.ReadOnlyBooleanWrapper;
 
   44 import javafx.beans.property.ReadOnlyObjectProperty;
 
   45 import javafx.collections.FXCollections;
 
   46 import javafx.collections.ObservableList;
 
   47 import javafx.collections.ObservableMap;
 
   48 import javafx.concurrent.Worker;
 
   49 import javax.swing.JOptionPane;
 
   50 import org.apache.commons.lang3.StringUtils;
 
   51 import org.joda.time.Interval;
 
   52 import org.netbeans.api.progress.ProgressHandle;
 
   53 import org.openide.util.NbBundle;
 
   54 import org.openide.windows.WindowManager;
 
  101     private final Executor 
workerExecutor = Executors.newSingleThreadExecutor(
new ThreadFactoryBuilder().setNameFormat(
"eventrepository-worker-%d").build()); 
 
  113     private final ObservableMap<Long, String> 
datasourcesMap = FXCollections.observableHashMap();
 
  114     private final ObservableMap<Long, String> 
hashSetMap = FXCollections.observableHashMap();
 
  115     private final ObservableList<TagName> 
tagNames = FXCollections.observableArrayList();
 
  134         return eventDB.getBoundingEventsInterval(timeRange, filter);
 
  150         idToEventCache = CacheBuilder.newBuilder()
 
  152                 .expireAfterAccess(10, TimeUnit.MINUTES)
 
  153                 .build(CacheLoader.from(eventDB::getEventById));
 
  154         eventCountsCache = CacheBuilder.newBuilder()
 
  156                 .expireAfterAccess(10, TimeUnit.MINUTES)
 
  157                 .build(CacheLoader.from(eventDB::countEventsByType));
 
  158         eventStripeCache = CacheBuilder.newBuilder()
 
  160                 .expireAfterAccess(10, TimeUnit.MINUTES
 
  161                 ).build(CacheLoader.from(eventDB::getEventStripes));
 
  162         maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
 
  163         minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
 
  171         return maxCache.getUnchecked(
"max"); 
 
  179         return minCache.getUnchecked(
"min"); 
 
  184         return idToEventCache.getUnchecked(eventID);
 
  187     synchronized public Set<SingleEvent> 
getEventsById(Collection<Long> eventIDs) {
 
  188         return eventIDs.stream()
 
  189                 .map(idToEventCache::getUnchecked)
 
  190                 .collect(Collectors.toSet());
 
  196             return eventStripeCache.get(params);
 
  197         } 
catch (ExecutionException ex) {
 
  198             logger.log(Level.SEVERE, 
"Failed to load Event Stripes from cache for " + params.
toString(), ex); 
 
  199             return Collections.emptyList();
 
  204         return eventCountsCache.getUnchecked(params);
 
  208         return eventDB.countAllEvents();
 
  227         return eventDB.getEventIDsForFile(file, includeDerivedArtifacts);
 
  240         return eventDB.getEventIDsForArtifact(artifact);
 
  244         minCache.invalidateAll();
 
  245         maxCache.invalidateAll();
 
  246         eventCountsCache.invalidateAll();
 
  247         eventStripeCache.invalidateAll();
 
  248         idToEventCache.invalidateAll();
 
  252         return eventDB.getEventIDs(timeRange, filter);
 
  267         return eventDB.getCombinedEvents(timeRange, filter);
 
  275         return eventDB.hasNewColumns();
 
  287         return eventDB.getTagCountsByTagName(eventIDsWithTags);
 
  298         for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
 
  299             hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
 
  302         for (Long 
id : eventDB.getDataSourceIDs()) {
 
  304                 datasourcesMap.putIfAbsent(
id, skCase.getContentById(
id).getDataSource().getName());
 
  305             } 
catch (TskCoreException ex) {
 
  306                 logger.log(Level.SEVERE, 
"Failed to get datasource by ID.", ex); 
 
  312             tagNames.setAll(skCase.getTagNamesInUse());
 
  313         } 
catch (TskCoreException ex) {
 
  314             logger.log(Level.SEVERE, 
"Failed to get tag names in use.", ex); 
 
  319         Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag, trans);
 
  320         if (!updatedEventIDs.isEmpty()) {
 
  323         return updatedEventIDs;
 
  326     synchronized public Set<Long> 
deleteTag(
long objID, Long artifactID, 
long tagID, 
boolean tagged) {
 
  327         Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tagID, tagged);
 
  328         if (!updatedEventIDs.isEmpty()) {
 
  331         return updatedEventIDs;
 
  335         eventCountsCache.invalidateAll();
 
  336         eventStripeCache.invalidateAll();
 
  337         idToEventCache.invalidateAll(updatedEventIDs);
 
  340         } 
catch (TskCoreException ex) {
 
  341             logger.log(Level.SEVERE, 
"Failed to get tag names in use.", ex); 
 
  354         for (TagName t : tagNames) {
 
  358             t.setDisabled(tagNames.contains(t.getTagName()) == 
false);
 
  363         return SQLHelper.getSQLWhere(f1).equals(SQLHelper.getSQLWhere(f2));
 
  411         logger.log(Level.INFO, 
"(re)starting {0} db population task", mode); 
 
  412         if (dbWorker != null) {
 
  416         workerExecutor.execute(dbWorker);
 
  432         private final ReadOnlyBooleanWrapper 
cancellable = 
new ReadOnlyBooleanWrapper(
true);
 
  442             return cancellable.getReadOnlyProperty();
 
  447             Platform.runLater(() -> cancellable.set(
false));
 
  448             return super.requestCancel();
 
  453             super.updateTitle(title);
 
  454             progressHandle.setDisplayName(title);
 
  459             super.updateMessage(message);
 
  460             progressHandle.progress(message);
 
  465             super.updateProgress(workDone, max);
 
  467                 progressHandle.progress((
int) workDone);
 
  473             super.updateProgress(workDone, max);
 
  474             super.updateProgress(workDone, max);
 
  476                 progressHandle.progress((
int) workDone);
 
  483             this.dbPopulationMode = mode;
 
  484             this.stateProperty().addListener(stateObservable -> onStateChange.accept(getState()));
 
  487         void restartProgressHandle(String title, String message, Double workDone, 
double total, Boolean cancellable) {
 
  488             if (progressHandle != null) {
 
  489                 progressHandle.finish();
 
  491             progressHandle = cancellable
 
  492                     ? ProgressHandle.createHandle(title, this::requestCancel)
 
  493                     : ProgressHandle.createHandle(title);
 
  496                 progressHandle.start();
 
  498                 progressHandle.start((
int) total);
 
  505         @SuppressWarnings(
"deprecation") 
 
  507         @NbBundle.Messages({
"progressWindow.msg.refreshingFileTags=Refreshing file tags",
 
  508             "progressWindow.msg.refreshingResultTags=Refreshing result tags",
 
  509             "progressWindow.msg.gatheringData=Gathering event data",
 
  510             "progressWindow.msg.commitingDb=Committing events database"})
 
  511         protected Void 
call() 
throws Exception {
 
  516                 logger.log(Level.INFO, 
"Beginning population of timeline db."); 
 
  517                 restartProgressHandle(Bundle.progressWindow_msg_gatheringData(), 
"", -1D, 1, 
true);
 
  519                 eventDB.reInitializeDB();
 
  521                 List<Long> fileIDs = skCase.findAllFileIdsWhere(
"name != '.' AND name != '..'" + 
 
  522                         " AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.ordinal()); 
 
  523                 final int numFiles = fileIDs.size();
 
  525                 trans = eventDB.beginTransaction();
 
  532                 trans = eventDB.beginTransaction();
 
  533                 logger.log(Level.INFO, 
"dropping old tags"); 
 
  534                 eventDB.reInitializeTags();
 
  537             logger.log(Level.INFO, 
"updating content tags"); 
 
  539             int currentWorkTotal = contentTags.size();
 
  540             restartProgressHandle(Bundle.progressWindow_msg_refreshingFileTags(), 
"", 0D, currentWorkTotal, 
true);
 
  543             logger.log(Level.INFO, 
"updating artifact tags"); 
 
  545             currentWorkTotal = artifactTags.size();
 
  546             restartProgressHandle(Bundle.progressWindow_msg_refreshingResultTags(), 
"", 0D, currentWorkTotal, 
true);
 
  549             logger.log(Level.INFO, 
"committing db"); 
 
  550             Platform.runLater(() -> cancellable.set(
false));
 
  551             restartProgressHandle(Bundle.progressWindow_msg_commitingDb(), 
"", -1D, 1, 
false);
 
  552             eventDB.commitTransaction(trans);
 
  558             progressHandle.finish();
 
  566             for (
int i = 0; i < currentWorkTotal; i++) {
 
  571                 BlackboardArtifactTag artifactTag = artifactTags.get(i);
 
  572                 eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag, trans);
 
  577             for (
int i = 0; i < currentWorkTotal; i++) {
 
  582                 ContentTag contentTag = contentTags.get(i);
 
  583                 eventDB.addTag(contentTag.getContent().getId(), null, contentTag, trans);
 
  601         @NbBundle.Messages(
"progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files")
 
  603             restartProgressHandle(Bundle.progressWindow_msg_populateMacEventsFiles(), 
"", 0D, numFiles, 
true);
 
  604             for (
int i = 0; i < numFiles; i++) {
 
  608                 long fID = fileIDs.get(i);
 
  610                     AbstractFile f = skCase.getAbstractFileById(fID);
 
  613                         logger.log(Level.WARNING, 
"Failed to get data for file : {0}", fID); 
 
  619                 } 
catch (TskCoreException tskCoreException) {
 
  620                     logger.log(Level.SEVERE, 
"Failed to insert MAC time events for file : " + fID, tskCoreException); 
 
  627             EnumMap<FileSystemTypes, Long> timeMap = 
new EnumMap<>(
FileSystemTypes.class);
 
  639             if (Collections.max(timeMap.values()) > 0) {
 
  640                 final String uniquePath = f.getUniquePath();
 
  641                 final String parentPath = f.getParentPath();
 
  642                 long datasourceID = f.getDataSource().getId();
 
  643                 String datasourceName = StringUtils.substringBeforeLast(uniquePath, parentPath);
 
  645                 String rootFolder = StringUtils.substringBefore(StringUtils.substringAfter(parentPath, 
"/"), 
"/");
 
  646                 String shortDesc = datasourceName + 
"/" + StringUtils.defaultString(rootFolder);
 
  647                 shortDesc = shortDesc.endsWith(
"/") ? shortDesc : shortDesc + 
"/";
 
  648                 String medDesc = datasourceName + parentPath;
 
  650                 final TskData.FileKnown known = f.getKnown();
 
  651                 Set<String> hashSets = f.getHashSetNames();
 
  654                 for (Map.Entry<
FileSystemTypes, Long> timeEntry : timeMap.entrySet()) {
 
  655                     if (timeEntry.getValue() > 0) {
 
  657                         eventDB.insertEvent(timeEntry.getValue(), timeEntry.getKey(),
 
  658                                 datasourceID, f.getId(), null, uniquePath, medDesc,
 
  659                                 shortDesc, known, hashSets, tags, trans);
 
  666         @NbBundle.Messages(
"msgdlg.problem.text=There was a problem populating the timeline." 
  667                 + 
"  Not all events may be present or accurate.")
 
  672             } 
catch (CancellationException ex) {
 
  673                 logger.log(Level.WARNING, 
"Timeline database population was cancelled by the user. "  
  674                         + 
" Not all events may be present or accurate."); 
 
  675             } 
catch (Exception ex) {
 
  676                 logger.log(Level.WARNING, 
"Unexpected exception while populating database.", ex); 
 
  677                 JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), Bundle.msgdlg_problem_text());
 
  687         @NbBundle.Messages({
"# {0} - event type ", 
"progressWindow.populatingXevents=Populating {0} events"})
 
  691                 final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.
getArtifactTypeID());
 
  692                 final int numArtifacts = blackboardArtifacts.size();
 
  693                 restartProgressHandle(Bundle.progressWindow_populatingXevents(type.
getDisplayName()), 
"", 0D, numArtifacts, 
true);
 
  694                 for (
int i = 0; i < numArtifacts; i++) {
 
  699                     } 
catch (TskCoreException ex) {
 
  700                         logger.log(Level.SEVERE, 
"There was a problem inserting event for artifact: " + blackboardArtifacts.get(i).getArtifactID(), ex); 
 
  703             } 
catch (TskCoreException ex) {
 
  704                 logger.log(Level.SEVERE, 
"There was a problem getting events with sub type " + type.toString() + 
".", ex); 
 
  712             if (eventDescription != null && eventDescription.getTime() > 0) {
 
  713                 long objectID = bbart.getObjectID();
 
  714                 AbstractFile f = skCase.getAbstractFileById(objectID);
 
  715                 long datasourceID = f.getDataSource().getId();
 
  716                 long artifactID = bbart.getArtifactID();
 
  717                 Set<String> hashSets = f.getHashSetNames();
 
  719                 String fullDescription = eventDescription.getFullDescription();
 
  720                 String medDescription = eventDescription.getMedDescription();
 
  721                 String shortDescription = eventDescription.getShortDescription();
 
  722                 eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, objectID, artifactID, fullDescription, medDescription, shortDescription, null, hashSets, tags, trans);
 
void insertArtifactDerivedEvents(EventDB.EventTransaction trans)
static EventDB getEventDB(Case autoCase)
List< Long > getEventIDsForArtifact(BlackboardArtifact artifact)
final TagsManager tagsManager
List< Long > getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts)
final FilteredEventsModel modelInstance
boolean areFiltersEquivalent(RootFilter f1, RootFilter f2)
Interval getSpanningInterval(Collection< Long > eventIDs)
FilteredEventsModel getEventsModel()
void addSubFilter(SubFilterType subfilter)
void insertMACTimeEvents(final int numFiles, List< Long > fileIDs, EventDB.EventTransaction trans)
void updateProgress(double workDone, double max)
Map< String, Long > getTagCountsByTagName(Set< Long > eventIDsWithTags)
synchronized ObservableMap< Long, String > getHashSetMap()
synchronized Map< EventType, Long > countEvents(ZoomParams params)
synchronized void populateFilterData(SleuthkitCase skCase)
synchronized List< EventStripe > getEventStripes(ZoomParams params)
synchronized ObservableMap< Long, String > getDatasourcesMap()
Interval getSpanningInterval(Collection< Long > eventIDs)
void insertEventForArtifact(final ArtifactEventType type, BlackboardArtifact bbart, EventDB.EventTransaction trans)
synchronized Set< Long > deleteTag(long objID, Long artifactID, long tagID, boolean tagged)
void updateMessage(String message)
ReadOnlyBooleanProperty cancellableProperty()
synchronized Set< Long > addTag(long objID, Long artifactID, Tag tag, EventDB.EventTransaction trans)
CancellationProgressTask< Void > rebuildRepository(Consumer< Worker.State > onStateChange)
final ReadOnlyBooleanWrapper cancellable
ObservableList< TagName > getTagNames()
void updateTitle(String title)
static final List< ? extends EventType > allTypes
synchronized int countAllEvents()
TagsManager getTagsManager()
void syncTagsFilter(TagsFilter tagsFilter)
final ObservableMap< Long, String > hashSetMap
SingleEvent getEventById(Long eventID)
EventsRepository(Case autoCase, ReadOnlyObjectProperty< ZoomParams > currentStateProperty)
void insertArtifactTags(int currentWorkTotal, List< BlackboardArtifactTag > artifactTags, EventDB.EventTransaction trans)
synchronized Set< SingleEvent > getEventsById(Collection< Long > eventIDs)
List< CombinedEvent > getCombinedEvents(Interval timeRange, RootFilter filter)
static final Logger logger
final LoadingCache< ZoomParams, Map< EventType, Long > > eventCountsCache
void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans)
void updateProgress(long workDone, long max)
final LoadingCache< Long, SingleEvent > idToEventCache
default int getArtifactTypeID()
SleuthkitCase getSleuthkitCase()
void insertEventsForFile(AbstractFile f, EventDB.EventTransaction trans)
synchronized void invalidateCaches(Set< Long > updatedEventIDs)
final Executor workerExecutor
synchronized boolean isCancelRequested()
final ObservableList< TagName > tagNames
synchronized static Logger getLogger(String name)
final ObservableMap< Long, String > datasourcesMap
CancellationProgressTask< Void > rebuildTags(Consumer< Worker.State > onStateChange)
Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter)
final SleuthkitCase skCase
final LoadingCache< ZoomParams, List< EventStripe > > eventStripeCache
final LoadingCache< Object, Long > minCache
ProgressHandle progressHandle
final DBPopulationMode dbPopulationMode
static AttributeEventDescription buildEventDescription(ArtifactEventType type, BlackboardArtifact artf)
void insertContentTags(int currentWorkTotal, List< ContentTag > contentTags, EventDB.EventTransaction trans)
List< Long > getEventIDs(Interval timeRange, RootFilter filter)
DBPopulationWorker dbWorker
final LoadingCache< Object, Long > maxCache