Autopsy  4.12.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
FilteredEventsModel.java
Go to the documentation of this file.
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2011-2019 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.timeline;
20 
21 import com.google.common.cache.CacheBuilder;
22 import com.google.common.cache.LoadingCache;
23 import com.google.common.collect.ImmutableList;
24 import com.google.common.eventbus.EventBus;
25 import java.util.Collection;
26 import java.util.Collections;
27 import java.util.HashSet;
28 import java.util.List;
29 import java.util.Map;
30 import java.util.Set;
31 import java.util.concurrent.ExecutionException;
32 import java.util.concurrent.TimeUnit;
33 import java.util.logging.Level;
34 import javafx.beans.InvalidationListener;
35 import javafx.beans.property.ReadOnlyObjectProperty;
36 import javafx.beans.property.ReadOnlyObjectWrapper;
37 import javafx.collections.FXCollections;
38 import javafx.collections.ObservableList;
39 import javafx.collections.ObservableMap;
40 import javafx.collections.ObservableSet;
41 import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
42 import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
43 import org.joda.time.DateTimeZone;
44 import org.joda.time.Interval;
45 import org.openide.util.NbBundle;
65 import org.sleuthkit.datamodel.AbstractFile;
66 import org.sleuthkit.datamodel.BlackboardArtifact;
67 import org.sleuthkit.datamodel.BlackboardArtifactTag;
68 import org.sleuthkit.datamodel.Content;
69 import org.sleuthkit.datamodel.ContentTag;
70 import org.sleuthkit.datamodel.DataSource;
71 import org.sleuthkit.datamodel.SleuthkitCase;
72 import org.sleuthkit.datamodel.Tag;
73 import org.sleuthkit.datamodel.TagName;
74 import org.sleuthkit.datamodel.TimelineManager;
75 import org.sleuthkit.datamodel.TskCoreException;
76 import org.sleuthkit.datamodel.TimelineEvent;
77 import org.sleuthkit.datamodel.TimelineEventType;
78 import org.sleuthkit.datamodel.TimelineFilter;
79 import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter;
80 import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
81 import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
82 import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
83 import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
84 import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
85 import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
86 import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
87 import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
88 import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
89 import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
90 
108 public final class FilteredEventsModel {
109 
110  private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName());
111 
 // Delegate that runs the actual timeline queries against the case database.
112  private final TimelineManager eventManager;
113 
 // The case this model belongs to, and the bus used to publish model events to subscribers.
114  private final Case autoCase;
115  private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
116 
117  //Filter and zoom state: the currently requested view, exposed read-only via the *Property() methods.
118  private final ReadOnlyObjectWrapper<RootFilterState> requestedFilter = new ReadOnlyObjectWrapper<>();
119  private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
120  private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
121  private final ReadOnlyObjectWrapper< TimelineEventType.TypeLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(TimelineEventType.TypeLevel.BASE_TYPE);
122  private final ReadOnlyObjectWrapper< TimelineEvent.DescriptionLevel> requestedLOD = new ReadOnlyObjectWrapper<>(TimelineEvent.DescriptionLevel.SHORT);
123  // end Filter and zoom state
124 
125  //caches
 // Guava caches for min/max event time, id->event lookups, and per-ZoomState event counts.
126  private final LoadingCache<Object, Long> maxCache;
127  private final LoadingCache<Object, Long> minCache;
128  private final LoadingCache<Long, TimelineEvent> idToEventCache;
129  private final LoadingCache<ZoomState, Map<TimelineEventType, Long>> eventCountsCache;
 // NOTE(review): original line 130 is not visible in this view — another member may be declared there.
 // Observable collections backing the filter options; listeners on these keep the root filter in sync.
131  private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
132  private final ObservableSet< String> hashSets = FXCollections.observableSet();
133  private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
134  // end caches
135 
143  private static DataSourceFilter newDataSourceFromMapEntry(Map.Entry<Long, String> dataSourceEntry) {
144  return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
145  }
146 
 /**
  * Main constructor: wires the caches, the filter-sync listeners, and the
  * binding to the externally owned zoom state property.
  *
  * @param autoCase             the case this model belongs to
  * @param currentStateProperty the externally managed ZoomState this model follows
  *
  * @throws TskCoreException if there is a problem accessing the case database
  */
147  public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
148  this.autoCase = autoCase;
149  this.eventManager = autoCase.getSleuthkitCase().getTimelineManager();
 // NOTE(review): original line 150 is not visible in this view.
151 
152  //caches
 // Bounded, time-expiring caches for event lookups and per-zoom counts.
153  idToEventCache = CacheBuilder.newBuilder()
154  .maximumSize(5000L)
155  .expireAfterAccess(10, TimeUnit.MINUTES)
156  .build(new CacheLoaderImpl<>(eventManager::getEventById));
157  eventCountsCache = CacheBuilder.newBuilder()
158  .maximumSize(1000L)
159  .expireAfterAccess(10, TimeUnit.MINUTES)
160  .build(new CacheLoaderImpl<>(this::countEventsByType));
161 
 // Single-entry caches for the min/max event times; invalidated explicitly in invalidateCaches().
162  maxCache = CacheBuilder.newBuilder()
163  .build(new CacheLoaderImpl<>(ignored -> eventManager.getMaxTime()));
164  minCache = CacheBuilder.newBuilder()
165  .build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
166 
 // Whenever the known tags/hash sets/data sources change, rebuild the
 // requested filter so it offers the new sub-filters.
167  InvalidationListener filterSyncListener = observable -> {
168  RootFilterState rootFilter = filterProperty().get();
169  syncFilters(rootFilter);
170  requestedFilter.set(rootFilter.copyOf());
171  };
172 
173  datasourcesMap.addListener(filterSyncListener);
174  hashSets.addListener(filterSyncListener);
175  tagNames.addListener(filterSyncListener);
176 
177  requestedFilter.set(getDefaultFilter());
178 
 // Propagate every externally pushed ZoomState into the individual
 // requested* properties, atomically with respect to this model's lock.
179  requestedZoomState.addListener(observable -> {
180  final ZoomState zoomState = requestedZoomState.get();
181 
182  if (zoomState != null) {
183  synchronized (FilteredEventsModel.this) {
184  requestedTypeZoom.set(zoomState.getTypeZoomLevel());
185  requestedFilter.set(zoomState.getFilterState());
186  requestedTimeRange.set(zoomState.getTimeRange());
187  requestedLOD.set(zoomState.getDescriptionLOD());
188  }
189  }
190  });
191 
 // Bind last so the listener above sees every subsequent change.
192  requestedZoomState.bind(currentStateProperty);
193  }
194 
206  private Map<TimelineEventType, Long> countEventsByType(ZoomState zoomState) throws TskCoreException {
207  if (zoomState.getTimeRange() == null) {
208  return Collections.emptyMap();
209  } else {
210  return eventManager.countEventsByType(zoomState.getTimeRange().getStartMillis() / 1000,
211  zoomState.getTimeRange().getEndMillis() / 1000,
212  zoomState.getFilterState().getActiveFilter(), zoomState.getTypeZoomLevel());
213  }
214  }
215 
216  public TimelineManager getEventManager() {
217  return eventManager;
218  }
219 
220  public SleuthkitCase getSleuthkitCase() {
221  return autoCase.getSleuthkitCase();
222  }
223 
224  public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
225  return eventManager.getSpanningInterval(timeRange, filter, timeZone);
226  }
227 
233  synchronized public ReadOnlyObjectProperty<ZoomState> zoomStateProperty() {
234  return requestedZoomState.getReadOnlyProperty();
235  }
236 
242  synchronized public ZoomState getZoomState() {
243  return requestedZoomState.get();
244  }
245 
249  synchronized private void populateFilterData() throws TskCoreException {
250  SleuthkitCase skCase = autoCase.getSleuthkitCase();
251  hashSets.addAll(eventManager.getHashSetNames());
252 
253  //because there is no way to remove a datasource we only add to this map.
254  for (DataSource ds : skCase.getDataSources()) {
255  datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
256  }
257 
258  //should this only be tags applied to files or event bearing artifacts?
259  tagNames.setAll(skCase.getTagNamesInUse());
260  }
261 
271  public void syncFilters(RootFilterState rootFilterState) {
272  TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
273  for (TagName tagName : tagNames) {
274  tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
275  }
276  for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
277  // disable states for tag names that don't exist in case.
278  tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
279  }
280 
281  DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
282  datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
283 
284  HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
285  for (String hashSet : hashSets) {
286  hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
287  }
288  }
289 
295  @NbBundle.Messages({
296  "FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
297  "FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
298  synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
299  if (requestedTimeRange.get() == null) {
300  try {
301  requestedTimeRange.set(getSpanningInterval());
302  } catch (TskCoreException timelineCacheException) {
303  MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
304  Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
305  logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
306  }
307  }
308  return requestedTimeRange.getReadOnlyProperty();
309  }
310 
311  synchronized public ReadOnlyObjectProperty<TimelineEvent.DescriptionLevel> descriptionLODProperty() {
312  return requestedLOD.getReadOnlyProperty();
313  }
314 
315  synchronized public ReadOnlyObjectProperty<RootFilterState> filterProperty() {
316  return requestedFilter.getReadOnlyProperty();
317  }
318 
319  synchronized public ReadOnlyObjectProperty<TimelineEventType.TypeLevel> eventTypeZoomProperty() {
320  return requestedTypeZoom.getReadOnlyProperty();
321  }
322 
328  synchronized public Interval getTimeRange() {
329  return getZoomState().getTimeRange();
330  }
331 
332  synchronized public TimelineEvent.DescriptionLevel getDescriptionLOD() {
333  return getZoomState().getDescriptionLOD();
334  }
335 
336  synchronized public RootFilterState getFilterState() {
337  return getZoomState().getFilterState();
338  }
339 
340  synchronized public TimelineEventType.TypeLevel getEventTypeZoom() {
341  return getZoomState().getTypeZoomLevel();
342  }
343 
348  public synchronized RootFilterState getDefaultFilter() {
349  DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
350  datasourcesMap.entrySet().forEach(dataSourceEntry
351  -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));
352 
353  HashHitsFilter hashHitsFilter = new HashHitsFilter();
354  hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);
355 
356  TagsFilter tagsFilter = new TagsFilter();
357  tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);
358 
359  FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();
360 
361  return new RootFilterState(new RootFilter(new HideKnownFilter(),
362  tagsFilter,
363  hashHitsFilter,
364  new TextFilter(),
365  new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE),
366  dataSourcesFilter,
367  fileTypesFilter,
368  Collections.emptySet()));
369  }
370 
371  public Interval getBoundingEventsInterval(DateTimeZone timeZone) throws TskCoreException {
372  return eventManager.getSpanningInterval(zoomStateProperty().get().getTimeRange(), getFilterState().getActiveFilter(), timeZone);
373  }
374 
375  public TimelineEvent getEventById(Long eventID) throws TskCoreException {
376  try {
377  return idToEventCache.get(eventID);
378  } catch (ExecutionException ex) {
379  throw new TskCoreException("Error getting cached event from ID", ex);
380  }
381  }
382 
383  public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
384  Set<TimelineEvent> events = new HashSet<>();
385  for (Long id : eventIDs) {
386  events.add(getEventById(id));
387  }
388  return events;
389  }
390 
401  public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
402  return eventManager.getTagCountsByTagName(eventIDsWithTags);
403  }
404 
405  public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filter) throws TskCoreException {
406 
407  final Interval overlap;
408  RootFilter intersection;
409  synchronized (this) {
410  overlap = getSpanningInterval().overlap(timeRange);
411  intersection = getFilterState().intersect(filter).getActiveFilter();
412  }
413 
414  return eventManager.getEventIDs(overlap, intersection);
415  }
416 
429  public Map<TimelineEventType, Long> getEventCounts(Interval timeRange) throws TskCoreException {
430 
431  final RootFilterState filter;
432  final TimelineEventType.TypeLevel typeZoom;
433  synchronized (this) {
434  filter = getFilterState();
435  typeZoom = getEventTypeZoom();
436  }
437  try {
438  return eventCountsCache.get(new ZoomState(timeRange, typeZoom, filter, null));
439  } catch (ExecutionException executionException) {
440  throw new TskCoreException("Error getting cached event counts.`1", executionException);
441  }
442  }
443 
450  public Interval getSpanningInterval() throws TskCoreException {
451  return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
452  }
453 
463  public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
464  return eventManager.getSpanningInterval(eventIDs);
465  }
466 
474  public Long getMinTime() throws TskCoreException {
475  try {
476  return minCache.get("min"); // NON-NLS
477  } catch (ExecutionException ex) {
478  throw new TskCoreException("Error getting cached min time.", ex);
479  }
480  }
481 
489  public Long getMaxTime() throws TskCoreException {
490  try {
491  return maxCache.get("max"); // NON-NLS
492  } catch (ExecutionException ex) {
493  throw new TskCoreException("Error getting cached max time.", ex);
494  }
495  }
496 
497  synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException {
498  ContentTag contentTag = evt.getAddedTag();
499  Content content = contentTag.getContent();
500  Set<Long> updatedEventIDs = addTag(content.getId(), null, contentTag);
501  return postTagsAdded(updatedEventIDs);
502  }
503 
504  synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException {
505  BlackboardArtifactTag artifactTag = evt.getAddedTag();
506  BlackboardArtifact artifact = artifactTag.getArtifact();
507  Set<Long> updatedEventIDs = addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);
508  return postTagsAdded(updatedEventIDs);
509  }
510 
511  synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
512  DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
513 
514  Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
515  boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
516  Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
517  return postTagsDeleted(updatedEventIDs);
518  }
519 
520  synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
521  DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
522 
523  BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
524  boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
525  Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
526  return postTagsDeleted(updatedEventIDs);
527  }
528 
546  public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
547  return eventManager.getEventIDsForFile(file, includeDerivedArtifacts);
548  }
549 
561  public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
562  return eventManager.getEventIDsForArtifact(artifact);
563  }
564 
574  private boolean postTagsAdded(Set<Long> updatedEventIDs) {
575  boolean tagsUpdated = !updatedEventIDs.isEmpty();
576  if (tagsUpdated) {
577  eventbus.post(new TagsAddedEvent(updatedEventIDs));
578  }
579  return tagsUpdated;
580  }
581 
591  private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
592  boolean tagsUpdated = !updatedEventIDs.isEmpty();
593  if (tagsUpdated) {
594  eventbus.post(new TagsDeletedEvent(updatedEventIDs));
595  }
596  return tagsUpdated;
597  }
598 
605  synchronized public void registerForEvents(Object subscriber) {
606  eventbus.register(subscriber);
607  }
608 
614  synchronized public void unRegisterForEvents(Object subscriber) {
615  eventbus.unregister(subscriber);
616  }
617 
621  public void postRefreshRequest() {
622  eventbus.post(new RefreshRequestedEvent());
623  }
624 
632  eventbus.post(event);
633  }
634 
635  public ImmutableList<TimelineEventType> getEventTypes() {
636  return eventManager.getEventTypes();
637  }
638 
639  synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) throws TskCoreException {
640  Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, true);
641  if (isNotEmpty(updatedEventIDs)) {
642  invalidateCaches(updatedEventIDs);
643  }
644  return updatedEventIDs;
645  }
646 
647  synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) throws TskCoreException {
648  Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, tagged);
649  if (isNotEmpty(updatedEventIDs)) {
650  invalidateCaches(updatedEventIDs);
651  }
652  return updatedEventIDs;
653  }
654 
655  synchronized public Set<Long> setHashHit(Collection<BlackboardArtifact> artifacts, boolean hasHashHit) throws TskCoreException {
656  Set<Long> updatedEventIDs = new HashSet<>();
657  for (BlackboardArtifact artifact : artifacts) {
658  updatedEventIDs.addAll(eventManager.setEventsHashed(artifact.getObjectID(), hasHashHit));
659  }
660  if (isNotEmpty(updatedEventIDs)) {
661  invalidateCaches(updatedEventIDs);
662  }
663  return updatedEventIDs;
664  }
665 
 /**
  * Drop the min/max time and event-count caches entirely, drop the cached
  * events for the given IDs (all events if null), and tell subscribers the
  * caches were invalidated.
  *
  * @param updatedEventIDs event IDs to evict from the event cache; may be null
  *
  * @throws TskCoreException declared for callers; NOTE(review): original line
  *                          684 is not visible in this view, so there may be
  *                          additional work done here.
  */
678  public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
679  minCache.invalidateAll();
680  maxCache.invalidateAll();
 // emptyIfNull: a null argument evicts nothing from the per-event cache.
681  idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
682  eventCountsCache.invalidateAll();
683 
685 
686  eventbus.post(new CacheInvalidatedEvent());
687  }
688 
694  public static class CacheInvalidatedEvent {
695 
697  }
698  }
699 }
final ReadOnlyObjectWrapper< TimelineEvent.DescriptionLevel > requestedLOD
final LoadingCache< Long, TimelineEvent > idToEventCache
Map< TimelineEventType, Long > getEventCounts(Interval timeRange)
synchronized boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt)
CompoundFilterState< HashSetFilter, HashHitsFilter > getHashHitsFilterState()
static DataSourceFilter newDataSourceFromMapEntry(Map.Entry< Long, String > dataSourceEntry)
synchronized Set< Long > deleteTag(long objID, Long artifactID, long tagID, boolean tagged)
Map< String, Long > getTagCountsByTagName(Set< Long > eventIDsWithTags)
Set< Long > getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts)
Set< TimelineEvent > getEventsById(Collection< Long > eventIDs)
void syncFilters(RootFilterState rootFilterState)
synchronized ReadOnlyObjectProperty< TimelineEventType.TypeLevel > eventTypeZoomProperty()
final ReadOnlyObjectWrapper< ZoomState > requestedZoomState
synchronized boolean handleContentTagAdded(ContentTagAddedEvent evt)
synchronized TimelineEventType.TypeLevel getEventTypeZoom()
TimelineEventType.TypeLevel getTypeZoomLevel()
Definition: ZoomState.java:45
List< ContentTag > getContentTagsByContent(Content content)
final ReadOnlyObjectWrapper< TimelineEventType.TypeLevel > requestedTypeZoom
List< Long > getEventIDs(Interval timeRange, FilterState<?extends TimelineFilter > filter)
FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty< ZoomState > currentStateProperty)
final LoadingCache< ZoomState, Map< TimelineEventType, Long > > eventCountsCache
synchronized TimelineEvent.DescriptionLevel getDescriptionLOD()
synchronized ReadOnlyObjectProperty< TimelineEvent.DescriptionLevel > descriptionLODProperty()
boolean postTagsAdded(Set< Long > updatedEventIDs)
ImmutableList< TimelineEventType > getEventTypes()
Map< TimelineEventType, Long > countEventsByType(ZoomState zoomState)
Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone)
final ReadOnlyObjectWrapper< RootFilterState > requestedFilter
List< Long > getEventIDsForArtifact(BlackboardArtifact artifact)
TimelineEvent.DescriptionLevel getDescriptionLOD()
Definition: ZoomState.java:53
synchronized boolean handleContentTagDeleted(ContentTagDeletedEvent evt)
synchronized Set< Long > addTag(long objID, Long artifactID, Tag tag)
final ReadOnlyObjectWrapper< Interval > requestedTimeRange
Interval getSpanningInterval(Collection< Long > eventIDs)
synchronized void invalidateCaches(Collection< Long > updatedEventIDs)
synchronized ReadOnlyObjectProperty< Interval > timeRangeProperty()
static FileTypesFilter createDefaultFileTypesFilter()
static void error(String title, String message)
synchronized static Logger getLogger(String name)
Definition: Logger.java:124
ObservableList< FilterState< ?extends SubFilterType > > getSubFilterStates()
synchronized void unRegisterForEvents(Object subscriber)
CompoundFilterState< DataSourceFilter, DataSourcesFilter > getDataSourcesFilterState()
synchronized ReadOnlyObjectProperty< RootFilterState > filterProperty()
synchronized boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt)
RootFilterState intersect(FilterState< ?extends TimelineFilter > otherFilter)
boolean postTagsDeleted(Set< Long > updatedEventIDs)
final ObservableMap< Long, String > datasourcesMap
synchronized Set< Long > setHashHit(Collection< BlackboardArtifact > artifacts, boolean hasHashHit)
synchronized ReadOnlyObjectProperty< ZoomState > zoomStateProperty()
synchronized void registerForEvents(Object subscriber)
List< BlackboardArtifactTag > getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact)

Copyright © 2012-2018 Basis Technology. Generated on: Wed Sep 18 2019
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.