DetailsViewModel.java
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2018-2019 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import com.google.common.eventbus.Subscribe;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.EventsModelParams;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.UIFilter;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.RangeDivision;
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineEvent;
import org.sleuthkit.datamodel.TimelineFilter;
import org.sleuthkit.datamodel.TimelineLevelOfDetail;

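/**
 * Model for the timeline Details View: wraps an EventsModel and supplies
 * EventStripes (events clustered by type and description) for display,
 * caching the underlying TimelineEvents per zoom state.
 */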
final public class DetailsViewModel {

    private final static Logger logger = Logger.getLogger(DetailsViewModel.class.getName());

    private final EventsModel eventsModel;
    private final LoadingCache<EventsModelParams, List<TimelineEvent>> eventCache;
    private final TimelineManager eventManager;
    private final SleuthkitCase sleuthkitCase;

    public DetailsViewModel(EventsModel eventsModel) {
        this.eventsModel = eventsModel;
        this.eventManager = eventsModel.getEventManager();
        this.sleuthkitCase = eventsModel.getSleuthkitCase();
        eventCache = CacheBuilder.newBuilder()
                .maximumSize(1000L)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build(new CacheLoaderImpl<>(params
                        -> getEvents(params, TimeLineController.getJodaTimeZone())));
        eventsModel.registerForEvents(this);
    }

    @Subscribe
    void handleCacheInvalidation(EventsModel.CacheInvalidatedEvent event) {
        eventCache.invalidateAll();
    }

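    /**
     * Get a list of EventStripes clustered according to the given zoom
     * parameters, using the default all-pass UI filter.
     *
     * @param zoom The zoom params that determine the time range, filtering,
     *             and description level of detail to use.
     *
     * @return a list of EventStripes, one per (type, description) pair.
     *
     * @throws TskCoreException if there is a problem loading the events.
     */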
    public List<EventStripe> getEventStripes(EventsModelParams zoom) throws TskCoreException {
        return getEventStripes(UIFilter.getAllPassFilter(), zoom);
    }

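    /**
     * Get a list of EventStripes clustered according to the given zoom
     * parameters and filtered by the given UIFilter.
     *
     * @param uiFilter A secondary, in-memory filter applied to the cached
     *                 events before clustering.
     * @param zoom     The zoom params that determine the time range,
     *                 filtering, and description level of detail to use.
     *
     * @return a list of EventStripes, sorted by start time.
     *
     * @throws TskCoreException if there is a problem loading the events from
     *                          the cache or the case database.
     */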
    public List<EventStripe> getEventStripes(UIFilter uiFilter, EventsModelParams zoom) throws TskCoreException {
        DateTimeZone timeZone = TimeLineController.getJodaTimeZone();
        //unpack params
        Interval timeRange = zoom.getTimeRange();
        TimelineLevelOfDetail descriptionLOD = zoom.getTimelineLOD();

        //intermediate results
        Map<TimelineEventType, SetMultimap<String, EventCluster>> eventClusters = new HashMap<>();
        try {
            eventCache.get(zoom).stream()
                    .filter(uiFilter)
                    .forEach(new Consumer<TimelineEvent>() {
                        @Override
                        public void accept(TimelineEvent event) {
                            TimelineEventType clusterType = event.getEventType().getCategory();
                            eventClusters.computeIfAbsent(clusterType, eventType -> HashMultimap.create())
                                    .put(event.getDescription(descriptionLOD), new EventCluster(event, clusterType, descriptionLOD));
                        }
                    });
            //get some info about the time range requested
            TimeUnits periodSize = RangeDivision.getRangeDivision(timeRange, timeZone).getPeriodSize();
            return mergeClustersToStripes(periodSize.toUnitPeriod(), eventClusters);

        } catch (ExecutionException ex) {
            throw new TskCoreException("Failed to load Event Stripes from cache for " + zoom.toString(), ex); //NON-NLS
        }
    }

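    /**
     * Get the TimelineEvents from the case database that fall within the
     * requested time range and pass the active filter of the given zoom
     * params.
     *
     * @param zoom     The zoom params that supply the time range and the
     *                 active filter.
     * @param timeZone The time zone to use when interpreting the time range.
     *
     * @return a list of TimelineEvents.
     *
     * @throws TskCoreException if there is a problem reading the events from
     *                          the case database.
     */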
    private List<TimelineEvent> getEvents(EventsModelParams zoom, DateTimeZone timeZone) throws TskCoreException {
        //unpack params
        Interval timeRange = zoom.getTimeRange();
        TimelineFilter.RootFilter activeFilter = zoom.getEventFilterState().getActiveFilter();
        return eventManager.getEvents(timeRange, activeFilter);
    }

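    /**
     * Merge adjacent EventClusters that share a (type, description) key into
     * larger clusters, then combine the merged clusters into EventStripes,
     * one per (type, description) pair.
     *
     * @param timeUnitLength The period used as the yardstick for deciding
     *                       whether two clusters are close enough to merge.
     * @param eventClusters  The clusters to merge, keyed by event type and
     *                       description.
     *
     * @return a list of EventStripes, sorted by start time.
     */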
    static private List<EventStripe> mergeClustersToStripes(Period timeUnitLength, Map<TimelineEventType, SetMultimap<String, EventCluster>> eventClusters) {

        //result list to return
        ArrayList<EventCluster> mergedClusters = new ArrayList<>();

        //for each (type, description) key, merge adjacent aggregate events
        for (Map.Entry<TimelineEventType, SetMultimap<String, EventCluster>> typeMapEntry : eventClusters.entrySet()) {
            TimelineEventType type = typeMapEntry.getKey();
            SetMultimap<String, EventCluster> descrMap = typeMapEntry.getValue();
            //for each description ...
            for (String descr : descrMap.keySet()) {
                Set<EventCluster> events = descrMap.get(descr);
                //run through the sorted events, merging together adjacent events
                Iterator<EventCluster> iterator = events.stream()
                        .sorted(new DetailViewEvent.StartComparator())
                        .iterator();
                EventCluster current = iterator.next();

                //JM Todo: maybe we can collect all clusters to merge in one go, rather than piece by piece, for performance.
                while (iterator.hasNext()) {
                    EventCluster next = iterator.next();
                    Interval gap = current.getSpan().gap(next.getSpan());

                    //merge if they overlap or the gap is less than one quarter of timeUnitLength
                    //TODO: 1/4 factor is arbitrary. review! -jm
                    if (gap == null || gap.toDuration().getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) {
                        //merge them
                        current = EventCluster.merge(current, next);
                    } else {
                        //done merging into current, set next as new current
                        mergedClusters.add(current);
                        current = next;
                    }
                }
                mergedClusters.add(current);
            }
        }

        //merge clusters to stripes
        Map<ImmutablePair<TimelineEventType, String>, EventStripe> stripeDescMap = new HashMap<>();

        for (EventCluster eventCluster : mergedClusters) {
            stripeDescMap.merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()),
                    new EventStripe(eventCluster), EventStripe::merge);
        }

        return stripeDescMap.values().stream()
                .sorted(new DetailViewEvent.StartComparator())
                .collect(Collectors.toList());
    }

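    /**
     * Copy the given collection into a new SortedSet ordered by the given
     * comparator.
     *
     * @param <X>        The type of the elements.
     * @param setA       The collection to copy.
     * @param comparator The comparator that orders the returned set.
     *
     * @return a new SortedSet containing the elements of setA, ordered by the
     *         given comparator.
     */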
    static <X> SortedSet<X> copyAsSortedSet(Collection<X> setA, Comparator<X> comparator) {
        TreeSet<X> treeSet = new TreeSet<>(comparator);
        treeSet.addAll(setA);
        return treeSet;
    }
}
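
For reference, the gap test used in mergeClustersToStripes() can be exercised in isolation. The sketch below is not part of DetailsViewModel.java; it reproduces the one-quarter-of-a-period rule using only Joda-Time types, and the class name and interval values are invented for illustration.

import org.joda.time.Interval;
import org.joda.time.Period;

public class GapThresholdSketch {

    // Same test as the merge loop above: merge when the spans overlap or abut
    // (gap == null), or when the gap is at most one quarter of the time-unit period.
    static boolean shouldMerge(Interval a, Interval b, Period timeUnitLength) {
        Interval gap = a.gap(b);
        return gap == null
                || gap.toDuration().getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4;
    }

    public static void main(String[] args) {
        Interval first = new Interval(0, 60_000);          // events spanning the first minute
        Interval second = new Interval(70_000, 120_000);   // next span starts 10 seconds later
        // With an hour-long time unit the threshold is 15 minutes, so a 10-second gap merges.
        System.out.println(shouldMerge(first, second, Period.hours(1)));    // true
        // With a 20-second time unit the threshold is 5 seconds, so it does not merge.
        System.out.println(shouldMerge(first, second, Period.seconds(20))); // false
    }
}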