Autopsy 4.0
Graphical digital forensics platform for The Sleuth Kit and other tools.
EventDB.java
1 /*
2  * Autopsy Forensic Browser
3  *
4  * Copyright 2013-15 Basis Technology Corp.
5  * Contact: carrier <at> sleuthkit <dot> org
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  * http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  */
19 package org.sleuthkit.autopsy.timeline.db;
20 
21 import com.google.common.collect.HashMultimap;
22 import com.google.common.collect.SetMultimap;
23 import java.nio.file.Paths;
24 import java.sql.Connection;
25 import java.sql.DriverManager;
26 import java.sql.PreparedStatement;
27 import java.sql.ResultSet;
28 import java.sql.SQLException;
29 import java.sql.Statement;
30 import java.sql.Types;
31 import java.util.ArrayList;
32 import java.util.Arrays;
33 import java.util.Collection;
34 import java.util.Collections;
35 import java.util.Comparator;
36 import java.util.HashMap;
37 import java.util.HashSet;
38 import java.util.Iterator;
39 import java.util.List;
40 import java.util.Map;
41 import java.util.Objects;
42 import java.util.Set;
43 import java.util.TimeZone;
44 import java.util.concurrent.locks.Lock;
45 import java.util.concurrent.locks.ReentrantReadWriteLock;
46 import java.util.logging.Level;
47 import java.util.stream.Collectors;
48 import javax.annotation.Nonnull;
49 import javax.annotation.Nullable;
50 import org.apache.commons.lang3.StringUtils;
51 import org.apache.commons.lang3.tuple.ImmutablePair;
52 import org.joda.time.DateTimeZone;
53 import org.joda.time.Interval;
54 import org.joda.time.Period;
65 import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useHashHitTablesHelper;
66 import static org.sleuthkit.autopsy.timeline.db.SQLHelper.useTagTablesHelper;
73 import org.sleuthkit.datamodel.SleuthkitCase;
74 import org.sleuthkit.datamodel.Tag;
75 import org.sleuthkit.datamodel.TskData;
76 import org.sqlite.SQLiteJDBCLoader;
77 
85 public class EventDB {
86 
87  private static final org.sleuthkit.autopsy.coreutils.Logger LOGGER = Logger.getLogger(EventDB.class.getName());
88 
89  static {
90  //make sure sqlite driver is loaded, possibly redundant
91  try {
92  Class.forName("org.sqlite.JDBC"); // NON-NLS
93  } catch (ClassNotFoundException ex) {
94  LOGGER.log(Level.SEVERE, "Failed to load sqlite JDBC driver", ex); // NON-NLS
95  }
96  }
97 
107  public static EventDB getEventDB(Case autoCase) {
108  try {
109  return new EventDB(autoCase);
110  } catch (SQLException ex) {
111  LOGGER.log(Level.SEVERE, "sql error creating database connection", ex); // NON-NLS
112  return null;
113  } catch (Exception ex) {
114  LOGGER.log(Level.SEVERE, "error creating database connection", ex); // NON-NLS
115  return null;
116  }
117  }
118 
119  private volatile Connection con;
120 
121  private final String dbPath;
122 
123  private PreparedStatement getEventByIDStmt;
124  private PreparedStatement getMaxTimeStmt;
125  private PreparedStatement getMinTimeStmt;
126  private PreparedStatement getDataSourceIDsStmt;
127  private PreparedStatement getHashSetNamesStmt;
128  private PreparedStatement insertRowStmt;
129  private PreparedStatement insertHashSetStmt;
130  private PreparedStatement insertHashHitStmt;
131  private PreparedStatement insertTagStmt;
132  private PreparedStatement deleteTagStmt;
133  private PreparedStatement selectHashSetStmt;
134  private PreparedStatement countAllEventsStmt;
135  private PreparedStatement dropEventsTableStmt;
136  private PreparedStatement dropHashSetHitsTableStmt;
137  private PreparedStatement dropHashSetsTableStmt;
138  private PreparedStatement dropTagsTableStmt;
139  private PreparedStatement dropDBInfoTableStmt;
140  private PreparedStatement selectNonArtifactEventIDsByObjectIDStmt;
141  private PreparedStatement selectEventIDsBYObjectAndArtifactIDStmt;
142 
143  private final Set<PreparedStatement> preparedStatements = new HashSet<>();
144 
145  private final Lock DBLock = new ReentrantReadWriteLock(true).writeLock(); //using exclusive lock for all db ops for now
146 
147  private EventDB(Case autoCase) throws SQLException, Exception {
148  //should this go into module output (or even cache, we should be able to rebuild it)?
149  this.dbPath = Paths.get(autoCase.getCaseDirectory(), "events.db").toString(); //NON-NLS
150  initializeDB();
151  }
152 
153  @Override
154  public void finalize() throws Throwable {
155  try {
156  closeDBCon();
157  } finally {
158  super.finalize();
159  }
160  }
161 
162  void closeDBCon() {
163  if (con != null) {
164  try {
165  closeStatements();
166  con.close();
167  } catch (SQLException ex) {
168  LOGGER.log(Level.WARNING, "Failed to close connection to events.db", ex); // NON-NLS
169  }
170  }
171  con = null;
172  }
173 
174  public Interval getSpanningInterval(Collection<Long> eventIDs) {
175  DBLock.lock();
176  try (Statement stmt = con.createStatement();
177  ResultSet rs = stmt.executeQuery("SELECT Min(time), Max(time) FROM events WHERE event_id IN (" + StringUtils.join(eventIDs, ", ") + ")");) { // NON-NLS
178  while (rs.next()) {
179  return new Interval(rs.getLong("Min(time)") * 1000, (rs.getLong("Max(time)") + 1) * 1000, DateTimeZone.UTC); // NON-NLS
180  }
181  } catch (SQLException ex) {
182  LOGGER.log(Level.SEVERE, "Error executing get spanning interval query.", ex); // NON-NLS
183  } finally {
184  DBLock.unlock();
185  }
186  return null;
187  }
188 
189  EventTransaction beginTransaction() {
190  return new EventTransaction();
191  }
192 
193  void commitTransaction(EventTransaction tr) {
194  if (tr.isClosed()) {
195  throw new IllegalArgumentException("can't commit an already closed transaction"); // NON-NLS
196  }
197  tr.commit();
198  }
199 
204  int countAllEvents() {
205  DBLock.lock();
206  try (ResultSet rs = countAllEventsStmt.executeQuery()) { // NON-NLS
207  while (rs.next()) {
208  return rs.getInt("count"); // NON-NLS
209  }
210  } catch (SQLException ex) {
211  LOGGER.log(Level.SEVERE, "Error counting all events", ex); //NON-NLS
212  } finally {
213  DBLock.unlock();
214  }
215  return -1;
216  }
217 
227  Map<EventType, Long> countEventsByType(ZoomParams params) {
228  if (params.getTimeRange() != null) {
229  return countEventsByType(params.getTimeRange().getStartMillis() / 1000,
230  params.getTimeRange().getEndMillis() / 1000,
231  params.getFilter(), params.getTypeZoomLevel());
232  } else {
233  return Collections.emptyMap();
234  }
235  }
236 
245  Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
246  HashMap<String, Long> counts = new HashMap<>();
247  DBLock.lock();
248  try (Statement createStatement = con.createStatement();
249  ResultSet rs = createStatement.executeQuery("SELECT tag_name_display_name, COUNT(DISTINCT tag_id) AS count FROM tags" //NON-NLS
250  + " WHERE event_id IN (" + StringUtils.join(eventIDsWithTags, ", ") + ")" //NON-NLS
251  + " GROUP BY tag_name_id" //NON-NLS
252  + " ORDER BY tag_name_display_name");) { //NON-NLS
253  while (rs.next()) {
254  counts.put(rs.getString("tag_name_display_name"), rs.getLong("count")); //NON-NLS
255  }
256  } catch (SQLException ex) {
257  LOGGER.log(Level.SEVERE, "Failed to get tag counts by tag name.", ex); //NON-NLS
258  } finally {
259  DBLock.unlock();
260  }
261  return counts;
262  }
263 
268  void reInitializeDB() {
269  DBLock.lock();
270  try {
271  dropEventsTableStmt.executeUpdate();
272  dropHashSetHitsTableStmt.executeUpdate();
273  dropHashSetsTableStmt.executeUpdate();
274  dropTagsTableStmt.executeUpdate();
275  dropDBInfoTableStmt.executeUpdate();
276  initializeDB();
277  } catch (SQLException ex) {
278  LOGGER.log(Level.SEVERE, "could not drop old tables", ex); // NON-NLS
279  } finally {
280  DBLock.unlock();
281  }
282  }
283 
288  void reInitializeTags() {
289  DBLock.lock();
290  try {
291  dropTagsTableStmt.executeUpdate();
292  initializeTagsTable();
293  } catch (SQLException ex) {
294  LOGGER.log(Level.SEVERE, "could not drop old tags table", ex); // NON-NLS
295  } finally {
296  DBLock.unlock();
297  }
298  }
299 
300  Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
301  long start = timeRange.getStartMillis() / 1000;
302  long end = timeRange.getEndMillis() / 1000;
303  final String sqlWhere = SQLHelper.getSQLWhere(filter);
304  DBLock.lock();
305  try (Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause
306  ResultSet rs = stmt.executeQuery(" SELECT (SELECT Max(time) FROM events " + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time <=" + start + " AND " + sqlWhere + ") AS start," //NON-NLS
307  + "(SELECT Min(time) FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + end + " AND " + sqlWhere + ") AS end")) { // NON-NLS
308  while (rs.next()) {
309 
310  long start2 = rs.getLong("start"); // NON-NLS
311  long end2 = rs.getLong("end"); // NON-NLS
312 
313  if (end2 == 0) {
314  end2 = getMaxTime();
315  }
316  return new Interval(start2 * 1000, (end2 + 1) * 1000, TimeLineController.getJodaTimeZone());
317  }
318  } catch (SQLException ex) {
319  LOGGER.log(Level.SEVERE, "Failed to get bounding events interval.", ex); // NON-NLS
320  } finally {
321  DBLock.unlock();
322  }
323  return null;
324  }
325 
326  TimeLineEvent getEventById(Long eventID) {
327  TimeLineEvent result = null;
328  DBLock.lock();
329  try {
330  getEventByIDStmt.clearParameters();
331  getEventByIDStmt.setLong(1, eventID);
332  try (ResultSet rs = getEventByIDStmt.executeQuery()) {
333  while (rs.next()) {
334  result = constructTimeLineEvent(rs);
335  break;
336  }
337  }
338  } catch (SQLException sqlEx) {
339  LOGGER.log(Level.SEVERE, "exception while querying for event with id = " + eventID, sqlEx); // NON-NLS
340  } finally {
341  DBLock.unlock();
342  }
343  return result;
344  }
345 
346  Set<Long> getEventIDs(Interval timeRange, RootFilter filter) {
347  return getEventIDs(timeRange.getStartMillis() / 1000, timeRange.getEndMillis() / 1000, filter);
348  }
349 
350  Set<Long> getEventIDs(Long startTime, Long endTime, RootFilter filter) {
351  if (Objects.equals(startTime, endTime)) {
352  endTime++;
353  }
354  Set<Long> resultIDs = new HashSet<>();
355 
356  DBLock.lock();
357  final String query = "SELECT events.event_id AS event_id FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time <" + endTime + " AND " + SQLHelper.getSQLWhere(filter); // NON-NLS
358  try (Statement stmt = con.createStatement();
359  ResultSet rs = stmt.executeQuery(query)) {
360  while (rs.next()) {
361  resultIDs.add(rs.getLong("event_id")); //NON-NLS
362  }
363 
364  } catch (SQLException sqlEx) {
365  LOGGER.log(Level.SEVERE, "failed to execute query for event ids in range", sqlEx); // NON-NLS
366  } finally {
367  DBLock.unlock();
368  }
369 
370  return resultIDs;
371  }
372 
377  boolean hasNewColumns() {
378  return hasHashHitColumn() && hasDataSourceIDColumn() && hasTaggedColumn()
379  && (getDataSourceIDs().isEmpty() == false);
380  }
381 
382  Set<Long> getDataSourceIDs() {
383  HashSet<Long> hashSet = new HashSet<>();
384  DBLock.lock();
385  try (ResultSet rs = getDataSourceIDsStmt.executeQuery()) {
386  while (rs.next()) {
387  long datasourceID = rs.getLong("datasource_id"); //NON-NLS
388  hashSet.add(datasourceID);
389  }
390  } catch (SQLException ex) {
391  LOGGER.log(Level.SEVERE, "Failed to get data source ids.", ex); // NON-NLS
392  } finally {
393  DBLock.unlock();
394  }
395  return hashSet;
396  }
397 
398  Map<Long, String> getHashSetNames() {
399  Map<Long, String> hashSets = new HashMap<>();
400  DBLock.lock();
401  try (ResultSet rs = getHashSetNamesStmt.executeQuery();) {
402  while (rs.next()) {
403  long hashSetID = rs.getLong("hash_set_id"); //NON-NLS
404  String hashSetName = rs.getString("hash_set_name"); //NON-NLS
405  hashSets.put(hashSetID, hashSetName);
406  }
407  } catch (SQLException ex) {
408  LOGGER.log(Level.SEVERE, "Failed to get hash sets.", ex); // NON-NLS
409  } finally {
410  DBLock.unlock();
411  }
412  return Collections.unmodifiableMap(hashSets);
413  }
414 
415  void analyze() {
416  DBLock.lock();
417  try (Statement createStatement = con.createStatement()) {
418  boolean b = createStatement.execute("analyze; analyze sqlite_master;"); //NON-NLS
419  } catch (SQLException ex) {
420  LOGGER.log(Level.SEVERE, "Failed to analyze events db.", ex); // NON-NLS
421  } finally {
422  DBLock.unlock();
423  }
424  }
425 
429  Long getMaxTime() {
430  DBLock.lock();
431  try (ResultSet rs = getMaxTimeStmt.executeQuery()) {
432  while (rs.next()) {
433  return rs.getLong("max"); // NON-NLS
434  }
435  } catch (SQLException ex) {
436  LOGGER.log(Level.SEVERE, "Failed to get MAX time.", ex); // NON-NLS
437  } finally {
438  DBLock.unlock();
439  }
440  return -1l;
441  }
442 
446  Long getMinTime() {
447  DBLock.lock();
448  try (ResultSet rs = getMinTimeStmt.executeQuery()) {
449  while (rs.next()) {
450  return rs.getLong("min"); // NON-NLS
451  }
452  } catch (SQLException ex) {
453  LOGGER.log(Level.SEVERE, "Failed to get MIN time.", ex); // NON-NLS
454  } finally {
455  DBLock.unlock();
456  }
457  return -1l;
458  }
459 
466  final synchronized void initializeDB() {
467 
468  try {
469  if (con == null || con.isClosed()) {
470  con = DriverManager.getConnection("jdbc:sqlite:" + dbPath); // NON-NLS
471  }
472  } catch (SQLException ex) {
473  LOGGER.log(Level.SEVERE, "Failed to open connection to events.db", ex); // NON-NLS
474  return;
475  }
476  try {
477  configureDB();
478  } catch (SQLException ex) {
479  LOGGER.log(Level.SEVERE, "problem accessing database", ex); // NON-NLS
480  return;
481  }
482 
483  DBLock.lock();
484  try {
485  try (Statement stmt = con.createStatement()) {
486  String sql = "CREATE TABLE if not exists db_info " // NON-NLS
487  + " ( key TEXT, " // NON-NLS
488  + " value INTEGER, " // NON-NLS
489  + "PRIMARY KEY (key))"; // NON-NLS
490  stmt.execute(sql);
491  } catch (SQLException ex) {
492  LOGGER.log(Level.SEVERE, "problem creating db_info table", ex); // NON-NLS
493  }
494 
495  try (Statement stmt = con.createStatement()) {
496  String sql = "CREATE TABLE if not exists events " // NON-NLS
497  + " (event_id INTEGER PRIMARY KEY, " // NON-NLS
498  + " datasource_id INTEGER, " // NON-NLS
499  + " file_id INTEGER, " // NON-NLS
500  + " artifact_id INTEGER, " // NON-NLS
501  + " time INTEGER, " // NON-NLS
502  + " sub_type INTEGER, " // NON-NLS
503  + " base_type INTEGER, " // NON-NLS
504  + " full_description TEXT, " // NON-NLS
505  + " med_description TEXT, " // NON-NLS
506  + " short_description TEXT, " // NON-NLS
507  + " known_state INTEGER," //boolean // NON-NLS
508  + " hash_hit INTEGER," //boolean // NON-NLS
509  + " tagged INTEGER)"; //boolean // NON-NLS
510  stmt.execute(sql);
511  } catch (SQLException ex) {
512  LOGGER.log(Level.SEVERE, "problem creating database table", ex); // NON-NLS
513  }
514 
515  if (hasDataSourceIDColumn() == false) {
516  try (Statement stmt = con.createStatement()) {
517  String sql = "ALTER TABLE events ADD COLUMN datasource_id INTEGER"; // NON-NLS
518  stmt.execute(sql);
519  } catch (SQLException ex) {
520  LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
521  }
522  }
523  if (hasTaggedColumn() == false) {
524  try (Statement stmt = con.createStatement()) {
525  String sql = "ALTER TABLE events ADD COLUMN tagged INTEGER"; // NON-NLS
526  stmt.execute(sql);
527  } catch (SQLException ex) {
528  LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
529  }
530  }
531 
532  if (hasHashHitColumn() == false) {
533  try (Statement stmt = con.createStatement()) {
534  String sql = "ALTER TABLE events ADD COLUMN hash_hit INTEGER"; // NON-NLS
535  stmt.execute(sql);
536  } catch (SQLException ex) {
537  LOGGER.log(Level.SEVERE, "problem upgrading events table", ex); // NON-NLS
538  }
539  }
540 
541  try (Statement stmt = con.createStatement()) {
542  String sql = "CREATE TABLE if not exists hash_sets " //NON-NLS
543  + "( hash_set_id INTEGER primary key," //NON-NLS
544  + " hash_set_name VARCHAR(255) UNIQUE NOT NULL)"; //NON-NLS
545  stmt.execute(sql);
546  } catch (SQLException ex) {
547  LOGGER.log(Level.SEVERE, "problem creating hash_sets table", ex); //NON-NLS
548  }
549 
550  try (Statement stmt = con.createStatement()) {
551  String sql = "CREATE TABLE if not exists hash_set_hits " //NON-NLS
552  + "(hash_set_id INTEGER REFERENCES hash_sets(hash_set_id) not null, " //NON-NLS
553  + " event_id INTEGER REFERENCES events(event_id) not null, " //NON-NLS
554  + " PRIMARY KEY (hash_set_id, event_id))"; //NON-NLS
555  stmt.execute(sql);
556  } catch (SQLException ex) {
557  LOGGER.log(Level.SEVERE, "problem creating hash_set_hits table", ex); //NON-NLS
558  }
559 
559 
560  initializeTagsTable();
561 
562  createIndex("events", Arrays.asList("datasource_id")); //NON-NLS
563  createIndex("events", Arrays.asList("event_id", "hash_hit")); //NON-NLS
564  createIndex("events", Arrays.asList("event_id", "tagged")); //NON-NLS
565  createIndex("events", Arrays.asList("file_id")); //NON-NLS
566  createIndex("events", Arrays.asList("artifact_id")); //NON-NLS
567  createIndex("events", Arrays.asList("sub_type", "short_description", "time")); //NON-NLS
568  createIndex("events", Arrays.asList("base_type", "short_description", "time")); //NON-NLS
569  createIndex("events", Arrays.asList("time")); //NON-NLS
570  createIndex("events", Arrays.asList("known_state")); //NON-NLS
571 
572  try {
573  insertRowStmt = prepareStatement(
574  "INSERT INTO events (datasource_id,file_id ,artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) " // NON-NLS
575  + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?)"); // NON-NLS
576  getHashSetNamesStmt = prepareStatement("SELECT hash_set_id, hash_set_name FROM hash_sets"); // NON-NLS
577  getDataSourceIDsStmt = prepareStatement("SELECT DISTINCT datasource_id FROM events WHERE datasource_id != 0"); // NON-NLS
578  getMaxTimeStmt = prepareStatement("SELECT Max(time) AS max FROM events"); // NON-NLS
579  getMinTimeStmt = prepareStatement("SELECT Min(time) AS min FROM events"); // NON-NLS
580  getEventByIDStmt = prepareStatement("SELECT * FROM events WHERE event_id = ?"); // NON-NLS
581  insertHashSetStmt = prepareStatement("INSERT OR IGNORE INTO hash_sets (hash_set_name) values (?)"); //NON-NLS
582  selectHashSetStmt = prepareStatement("SELECT hash_set_id FROM hash_sets WHERE hash_set_name = ?"); //NON-NLS
583  insertHashHitStmt = prepareStatement("INSERT OR IGNORE INTO hash_set_hits (hash_set_id, event_id) values (?,?)"); //NON-NLS
584  insertTagStmt = prepareStatement("INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)"); //NON-NLS
585  deleteTagStmt = prepareStatement("DELETE FROM tags WHERE tag_id = ?"); //NON-NLS
586  countAllEventsStmt = prepareStatement("SELECT count(*) AS count FROM events"); //NON-NLS
587  dropEventsTableStmt = prepareStatement("DROP TABLE IF EXISTS events"); //NON-NLS
588  dropHashSetHitsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_set_hits"); //NON-NLS
589  dropHashSetsTableStmt = prepareStatement("DROP TABLE IF EXISTS hash_sets"); //NON-NLS
590  dropTagsTableStmt = prepareStatement("DROP TABLE IF EXISTS tags"); //NON-NLS
591  dropDBInfoTableStmt = prepareStatement("DROP TABLE IF EXISTS db_info"); //NON-NLS
592  selectNonArtifactEventIDsByObjectIDStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS NULL"); //NON-NLS
593  selectEventIDsBYObjectAndArtifactIDStmt = prepareStatement("SELECT event_id FROM events WHERE file_id == ? AND artifact_id = ?"); //NON-NLS
594  } catch (SQLException sQLException) {
595  LOGGER.log(Level.SEVERE, "failed to prepareStatement", sQLException); // NON-NLS
596  }
597  } finally {
598  DBLock.unlock();
599  }
600  }
601 
606  private void initializeTagsTable() {
607  try (Statement stmt = con.createStatement()) {
608  String sql = "CREATE TABLE IF NOT EXISTS tags " //NON-NLS
609  + "(tag_id INTEGER NOT NULL," //NON-NLS
610  + " tag_name_id INTEGER NOT NULL, " //NON-NLS
611  + " tag_name_display_name TEXT NOT NULL, " //NON-NLS
612  + " event_id INTEGER REFERENCES events(event_id) NOT NULL, " //NON-NLS
613  + " PRIMARY KEY (event_id, tag_name_id))"; //NON-NLS
614  stmt.execute(sql);
615  } catch (SQLException ex) {
616  LOGGER.log(Level.SEVERE, "problem creating tags table", ex); //NON-NLS
617  }
618  }
619 
625  private void createIndex(final String tableName, final List<String> columnList) {
626  String indexColumns = columnList.stream().collect(Collectors.joining(",", "(", ")"));
627  String indexName = tableName + "_" + StringUtils.join(columnList, "_") + "_idx"; //NON-NLS
628  try (Statement stmt = con.createStatement()) {
629 
630  String sql = "CREATE INDEX IF NOT EXISTS " + indexName + " ON " + tableName + indexColumns; // NON-NLS
631  stmt.execute(sql);
632  } catch (SQLException ex) {
633  LOGGER.log(Level.SEVERE, "problem creating index " + indexName, ex); // NON-NLS
634  }
635  }
636 
642  private boolean hasDBColumn(@Nonnull final String dbColumn) {
643  try (Statement stmt = con.createStatement()) {
644 
645  ResultSet executeQuery = stmt.executeQuery("PRAGMA table_info(events)"); //NON-NLS
646  while (executeQuery.next()) {
647  if (dbColumn.equals(executeQuery.getString("name"))) {
648  return true;
649  }
650  }
651  } catch (SQLException ex) {
652  LOGGER.log(Level.SEVERE, "problem executing pragma", ex); // NON-NLS
653  }
654  return false;
655  }
656 
657  private boolean hasDataSourceIDColumn() {
658  return hasDBColumn("datasource_id"); //NON-NLS
659  }
660 
661  private boolean hasTaggedColumn() {
662  return hasDBColumn("tagged"); //NON-NLS
663  }
664 
665  private boolean hasHashHitColumn() {
666  return hasDBColumn("hash_hit"); //NON-NLS
667  }
668 
669  void insertEvent(long time, EventType type, long datasourceID, long objID,
670  Long artifactID, String fullDescription, String medDescription,
671  String shortDescription, TskData.FileKnown known, Set<String> hashSets, List<? extends Tag> tags) {
672 
673  EventTransaction transaction = beginTransaction();
674  insertEvent(time, type, datasourceID, objID, artifactID, fullDescription, medDescription, shortDescription, known, hashSets, tags, transaction);
675  commitTransaction(transaction);
676  }
677 
684  void insertEvent(long time, EventType type, long datasourceID, long objID,
685  Long artifactID, String fullDescription, String medDescription,
686  String shortDescription, TskData.FileKnown known, Set<String> hashSetNames,
687  List<? extends Tag> tags, EventTransaction transaction) {
688 
689  if (transaction.isClosed()) {
690  throw new IllegalArgumentException("can't update database with closed transaction"); // NON-NLS
691  }
692  int typeNum = RootEventType.allTypes.indexOf(type);
693  int superTypeNum = type.getSuperType().ordinal();
694 
695  DBLock.lock();
696  try {
697 
698  //"INSERT INTO events (datasource_id, file_id, artifact_id, time, sub_type, base_type, full_description, med_description, short_description, known_state, hash_hit, tagged) "
699  insertRowStmt.clearParameters();
700  insertRowStmt.setLong(1, datasourceID);
701  insertRowStmt.setLong(2, objID);
702  if (artifactID != null) {
703  insertRowStmt.setLong(3, artifactID);
704  } else {
705  insertRowStmt.setNull(3, Types.NULL);
706  }
707  insertRowStmt.setLong(4, time);
708 
709  if (typeNum != -1) {
710  insertRowStmt.setInt(5, typeNum);
711  } else {
712  insertRowStmt.setNull(5, Types.INTEGER);
713  }
714 
715  insertRowStmt.setInt(6, superTypeNum);
716  insertRowStmt.setString(7, fullDescription);
717  insertRowStmt.setString(8, medDescription);
718  insertRowStmt.setString(9, shortDescription);
719 
720  insertRowStmt.setByte(10, known == null ? TskData.FileKnown.UNKNOWN.getFileKnownValue() : known.getFileKnownValue());
721 
722  insertRowStmt.setInt(11, hashSetNames.isEmpty() ? 0 : 1);
723  insertRowStmt.setInt(12, tags.isEmpty() ? 0 : 1);
724 
725  insertRowStmt.executeUpdate();
726 
727  try (ResultSet generatedKeys = insertRowStmt.getGeneratedKeys()) {
728  while (generatedKeys.next()) {
729  long eventID = generatedKeys.getLong("last_insert_rowid()"); //NON-NLS
730  for (String name : hashSetNames) {
731 
732  // "insert or ignore into hash_sets (hash_set_name) values (?)"
733  insertHashSetStmt.setString(1, name);
734  insertHashSetStmt.executeUpdate();
735 
736  //TODO: use nested select to get hash_set_id rather than separate statement/query ?
737  //"select hash_set_id from hash_sets where hash_set_name = ?"
738  selectHashSetStmt.setString(1, name);
739  try (ResultSet rs = selectHashSetStmt.executeQuery()) {
740  while (rs.next()) {
741  int hashsetID = rs.getInt("hash_set_id"); //NON-NLS
742  //"insert or ignore into hash_set_hits (hash_set_id, event_id) values (?,?)";
743  insertHashHitStmt.setInt(1, hashsetID);
744  insertHashHitStmt.setLong(2, eventID);
745  insertHashHitStmt.executeUpdate();
746  break;
747  }
748  }
749  }
750  for (Tag tag : tags) {
751  //could this be one insert? is there a performance win?
752  insertTag(tag, eventID);
753  }
754  break;
755  }
756  }
757 
758  } catch (SQLException ex) {
759  LOGGER.log(Level.SEVERE, "failed to insert event", ex); // NON-NLS
760  } finally {
761  DBLock.unlock();
762  }
763  }
764 
778  Set<Long> addTag(long objectID, @Nullable Long artifactID, Tag tag, EventTransaction transaction) {
779  if (transaction != null && transaction.isClosed()) {
780  throw new IllegalArgumentException("can't update database with closed transaction"); // NON-NLS
781  }
782  DBLock.lock();
783  try {
784  Set<Long> eventIDs = markEventsTagged(objectID, artifactID, true);
785  for (Long eventID : eventIDs) {
786  insertTag(tag, eventID);
787  }
788  return eventIDs;
789  } catch (SQLException ex) {
790  LOGGER.log(Level.SEVERE, "failed to add tag to event", ex); // NON-NLS
791  } finally {
792  DBLock.unlock();
793  }
794  return Collections.emptySet();
795  }
796 
808  private void insertTag(Tag tag, long eventID) throws SQLException {
809 
810  //"INSERT OR IGNORE INTO tags (tag_id, tag_name_id,tag_name_display_name, event_id) values (?,?,?,?)"
811  insertTagStmt.clearParameters();
812  insertTagStmt.setLong(1, tag.getId());
813  insertTagStmt.setLong(2, tag.getName().getId());
814  insertTagStmt.setString(3, tag.getName().getDisplayName());
815  insertTagStmt.setLong(4, eventID);
816  insertTagStmt.executeUpdate();
817  }
818 
834  Set<Long> deleteTag(long objectID, @Nullable Long artifactID, long tagID, boolean stillTagged) {
835  DBLock.lock();
836  try {
837  //"DELETE FROM tags WHERE tag_id = ?
838  deleteTagStmt.clearParameters();
839  deleteTagStmt.setLong(1, tagID);
840  deleteTagStmt.executeUpdate();
841 
842  return markEventsTagged(objectID, artifactID, stillTagged);
843  } catch (SQLException ex) {
844  LOGGER.log(Level.SEVERE, "failed to delete tag from event", ex); // NON-NLS
845  } finally {
846  DBLock.unlock();
847  }
848  return Collections.emptySet();
849  }
850 
871  private Set<Long> markEventsTagged(long objectID, @Nullable Long artifactID, boolean tagged) throws SQLException {
872 
873  PreparedStatement selectStmt;
874  if (Objects.isNull(artifactID)) {
875  //"SELECT event_id FROM events WHERE file_id == ? AND artifact_id IS NULL"
876  selectNonArtifactEventIDsByObjectIDStmt.clearParameters();
877  selectNonArtifactEventIDsByObjectIDStmt.setLong(1, objectID);
878  selectStmt = selectNonArtifactEventIDsByObjectIDStmt;
879  } else {
880  //"SELECT event_id FROM events WHERE file_id == ? AND artifact_id = ?"
881  selectEventIDsBYObjectAndArtifactIDStmt.clearParameters();
882  selectEventIDsBYObjectAndArtifactIDStmt.setLong(1, objectID);
883  selectEventIDsBYObjectAndArtifactIDStmt.setLong(2, artifactID);
884  selectStmt = selectEventIDsBYObjectAndArtifactIDStmt;
885  }
886 
887  HashSet<Long> eventIDs = new HashSet<>();
888  try (ResultSet executeQuery = selectStmt.executeQuery();) {
889  while (executeQuery.next()) {
890  eventIDs.add(executeQuery.getLong("event_id")); //NON-NLS
891  }
892  }
893 
894  //update the tagged state for all events with the selected ids
895  try (Statement updateStatement = con.createStatement();) {
896  updateStatement.executeUpdate("UPDATE events SET tagged = " + (tagged ? 1 : 0) //NON-NLS
897  + " WHERE event_id IN (" + StringUtils.join(eventIDs, ",") + ")"); //NON-NLS
898  }
899 
900  return eventIDs;
901  }
902 
903  void rollBackTransaction(EventTransaction trans) {
904  trans.rollback();
905  }
906 
907  private void closeStatements() throws SQLException {
908  for (PreparedStatement pStmt : preparedStatements) {
909  pStmt.close();
910  }
911  }
912 
913  private void configureDB() throws SQLException {
914  DBLock.lock();
915  //this should match Sleuthkit db setup
916  try (Statement statement = con.createStatement()) {
917  //reduce i/o operations, we have no OS crash recovery anyway
918  statement.execute("PRAGMA synchronous = OFF;"); // NON-NLS
919  //we don't use this feature, so turn it off for minimal speed up on queries
920  //this is deprecated and not recommended
921  statement.execute("PRAGMA count_changes = OFF;"); // NON-NLS
922  //this made a big difference to query speed
923  statement.execute("PRAGMA temp_store = MEMORY"); // NON-NLS
924  //this made a modest improvement in query speeds
925  statement.execute("PRAGMA cache_size = 50000"); // NON-NLS
926  //we never delete anything so...
927  statement.execute("PRAGMA auto_vacuum = 0"); // NON-NLS
928  //allow queries while in a transaction - no need for read locks
929  statement.execute("PRAGMA read_uncommitted = True;"); // NON-NLS
930  } finally {
931  DBLock.unlock();
932  }
933 
934  try {
935  LOGGER.log(Level.INFO, String.format("sqlite-jdbc version %s loaded in %s mode", // NON-NLS
936  SQLiteJDBCLoader.getVersion(), SQLiteJDBCLoader.isNativeMode() ? "native" : "pure-java")); // NON-NLS
937  } catch (Exception exception) {
938  LOGGER.log(Level.SEVERE, "Failed to determine if sqlite-jdbc is loaded in native or pure-java mode.", exception); //NON-NLS
939  }
940  }
941 
942  private TimeLineEvent constructTimeLineEvent(ResultSet rs) throws SQLException {
943  return new TimeLineEvent(rs.getLong("event_id"), //NON-NLS
944  rs.getLong("datasource_id"), //NON-NLS
945  rs.getLong("file_id"), //NON-NLS
946  rs.getLong("artifact_id"), //NON-NLS
947  rs.getLong("time"), RootEventType.allTypes.get(rs.getInt("sub_type")), //NON-NLS
948  rs.getString("full_description"), //NON-NLS
949  rs.getString("med_description"), //NON-NLS
950  rs.getString("short_description"), //NON-NLS
951  TskData.FileKnown.valueOf(rs.getByte("known_state")), //NON-NLS
952  rs.getInt("hash_hit") != 0, //NON-NLS
953  rs.getInt("tagged") != 0); //NON-NLS
954  }
955 
972  private Map<EventType, Long> countEventsByType(Long startTime, Long endTime, RootFilter filter, EventTypeZoomLevel zoomLevel) {
973  if (Objects.equals(startTime, endTime)) {
974  endTime++;
975  }
976 
977  Map<EventType, Long> typeMap = new HashMap<>();
978 
979  //do we want the root or subtype column of the database
980  final boolean useSubTypes = (zoomLevel == EventTypeZoomLevel.SUB_TYPE);
981 
982  //get some info about the range of dates requested
983  final String queryString = "SELECT count(DISTINCT events.event_id) AS count, " + typeColumnHelper(useSubTypes) //NON-NLS
984  + " FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
985  + " GROUP BY " + typeColumnHelper(useSubTypes); // NON-NLS
986 
987  DBLock.lock();
988  try (Statement stmt = con.createStatement();
989  ResultSet rs = stmt.executeQuery(queryString);) {
990  while (rs.next()) {
991  EventType type = useSubTypes
992  ? RootEventType.allTypes.get(rs.getInt("sub_type")) //NON-NLS
993  : BaseTypes.values()[rs.getInt("base_type")]; //NON-NLS
994 
995  typeMap.put(type, rs.getLong("count")); // NON-NLS
996  }
997 
998  } catch (Exception ex) {
999  LOGGER.log(Level.SEVERE, "Error getting count of events from db.", ex); // NON-NLS
1000  } finally {
1001  DBLock.unlock();
1002  }
1003  return typeMap;
1004  }
1005 
1017  List<EventStripe> getEventStripes(ZoomParams params) {
1018  //unpack params
1019  Interval timeRange = params.getTimeRange();
1020  RootFilter filter = params.getFilter();
1021  DescriptionLoD descriptionLOD = params.getDescriptionLOD();
1022  EventTypeZoomLevel typeZoomLevel = params.getTypeZoomLevel();
1023 
1024  long start = timeRange.getStartMillis() / 1000;
1025  long end = timeRange.getEndMillis() / 1000;
1026 
1027  //ensure length of queried interval is not 0
1028  end = Math.max(end, start + 1);
1029 
1030  //get some info about the time range requested
1031  RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange);
1032 
1033  //build dynamic parts of query
1034  String strfTimeFormat = SQLHelper.getStrfTimeFormat(rangeInfo.getPeriodSize());
1035  String descriptionColumn = SQLHelper.getDescriptionColumn(descriptionLOD);
1036  final boolean useSubTypes = typeZoomLevel.equals(EventTypeZoomLevel.SUB_TYPE);
1037  String timeZone = TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : ""; // NON-NLS
1038  String typeColumn = typeColumnHelper(useSubTypes);
1039 
1040  //compose query string, the new-lines are only for nicer formatting if printing the entire query
1041  String query = "SELECT strftime('" + strfTimeFormat + "',time , 'unixepoch'" + timeZone + ") AS interval," // NON-NLS
1042  + "\n group_concat(events.event_id) as event_ids," //NON-NLS
1043  + "\n group_concat(CASE WHEN hash_hit = 1 THEN events.event_id ELSE NULL END) as hash_hits," //NON-NLS
1044  + "\n group_concat(CASE WHEN tagged = 1 THEN events.event_id ELSE NULL END) as taggeds," //NON-NLS
1045  + "\n min(time), max(time), " + typeColumn + ", " + descriptionColumn // NON-NLS
1046  + "\n FROM events" + useHashHitTablesHelper(filter) + useTagTablesHelper(filter) // NON-NLS
1047  + "\n WHERE time >= " + start + " AND time < " + end + " AND " + SQLHelper.getSQLWhere(filter) // NON-NLS
1048  + "\n GROUP BY interval, " + typeColumn + " , " + descriptionColumn // NON-NLS
1049  + "\n ORDER BY min(time)"; // NON-NLS
1050 
1051  switch (Version.getBuildType()) {
1052  case DEVELOPMENT:
1053  LOGGER.log(Level.INFO, "executing timeline query: {0}", query); //NON-NLS
1054  break;
1055  case RELEASE:
1056  default:
1057  }
1058 
1059  // perform query and map results to AggregateEvent objects
1060  List<EventCluster> events = new ArrayList<>();
1061 
1062  DBLock.lock();
1063  try (Statement createStatement = con.createStatement();
1064  ResultSet rs = createStatement.executeQuery(query)) {
1065  while (rs.next()) {
1066  events.add(eventClusterHelper(rs, useSubTypes, descriptionLOD, filter.getTagsFilter()));
1067  }
1068  } catch (SQLException ex) {
1069  LOGGER.log(Level.SEVERE, "Failed to get events with query: " + query, ex); // NON-NLS
1070  } finally {
1071  DBLock.unlock();
1072  }
1073 
1074  return mergeClustersToStripes(rangeInfo.getPeriodSize().getPeriod(), events);
1075  }
1076 
1090  private EventCluster eventClusterHelper(ResultSet rs, boolean useSubTypes, DescriptionLoD descriptionLOD, TagsFilter filter) throws SQLException {
1091  Interval interval = new Interval(rs.getLong("min(time)") * 1000, rs.getLong("max(time)") * 1000, TimeLineController.getJodaTimeZone());// NON-NLS
1092  String eventIDsString = rs.getString("event_ids");// NON-NLS
1093  Set<Long> eventIDs = SQLHelper.unGroupConcat(eventIDsString, Long::valueOf);
1094  String description = rs.getString(SQLHelper.getDescriptionColumn(descriptionLOD));
1095  EventType type = useSubTypes ? RootEventType.allTypes.get(rs.getInt("sub_type")) : BaseTypes.values()[rs.getInt("base_type")];// NON-NLS
1096 
1097  Set<Long> hashHits = SQLHelper.unGroupConcat(rs.getString("hash_hits"), Long::valueOf); //NON-NLS
1098  Set<Long> tagged = SQLHelper.unGroupConcat(rs.getString("taggeds"), Long::valueOf); //NON-NLS
1099 
1100  return new EventCluster(interval, type, eventIDs, hashHits, tagged,
1101  description, descriptionLOD);
1102  }
1103 
1117  static private List<EventStripe> mergeClustersToStripes(Period timeUnitLength, List<EventCluster> preMergedEvents) {
1118 
1119  //effectively map from type to (map from description to events)
1120  Map<EventType, SetMultimap< String, EventCluster>> typeMap = new HashMap<>();
1121 
1122  for (EventCluster aggregateEvent : preMergedEvents) {
1123  typeMap.computeIfAbsent(aggregateEvent.getEventType(), eventType -> HashMultimap.create())
1124  .put(aggregateEvent.getDescription(), aggregateEvent);
1125  }
1126  //result list to return
1127  ArrayList<EventCluster> aggEvents = new ArrayList<>();
1128 
1129  //For each (type, description) key, merge agg events
1130  for (SetMultimap<String, EventCluster> descrMap : typeMap.values()) {
1131  //for each description ...
1132  for (String descr : descrMap.keySet()) {
1133  //run through the sorted events, merging together adjacent events
1134  Iterator<EventCluster> iterator = descrMap.get(descr).stream()
1135  .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis()))
1136  .iterator();
1137  EventCluster current = iterator.next();
1138  while (iterator.hasNext()) {
1139  EventCluster next = iterator.next();
1140  Interval gap = current.getSpan().gap(next.getSpan());
1141 
1142  //if they overlap or the gap is less than one quarter of timeUnitLength
1143  //TODO: 1/4 factor is arbitrary. review! -jm
1144  if (gap == null || gap.toDuration().getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) {
1145  //merge them
1146  current = EventCluster.merge(current, next);
1147  } else {
1148  //done merging into current, set next as new current
1149  aggEvents.add(current);
1150  current = next;
1151  }
1152  }
1153  aggEvents.add(current);
1154  }
1155  }
1156 
1157  //merge clusters to stripes
1158  Map<ImmutablePair<EventType, String>, EventStripe> stripeDescMap = new HashMap<>();
1159 
1160  for (EventCluster eventCluster : aggEvents) {
1161  stripeDescMap.merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()),
1162  new EventStripe(eventCluster, null), EventStripe::merge);
1163  }
1164 
1165  return stripeDescMap.values().stream().sorted(Comparator.comparing(EventStripe::getStartMillis)).collect(Collectors.toList());
1166  }
1167 
1168  private static String typeColumnHelper(final boolean useSubTypes) {
1169  return useSubTypes ? "sub_type" : "base_type"; //NON-NLS
1170  }
1171 
1172 
1173  private PreparedStatement prepareStatement(String queryString) throws SQLException {
1174  PreparedStatement prepareStatement = con.prepareStatement(queryString);
1175  preparedStatements.add(prepareStatement);
1176  return prepareStatement;
1177  }
1178 
1182  public class EventTransaction {
1183 
1184  private boolean closed = false;
1185 
1195  private EventTransaction() {
1196 
1197  //get the write lock, released in close()
1198  DBLock.lock();
1199  try {
1200  con.setAutoCommit(false);
1201  } catch (SQLException ex) {
1202  LOGGER.log(Level.SEVERE, "failed to set auto-commit to false", ex); // NON-NLS
1203  }
1204 
1205  }
1206 
1207  private void rollback() {
1208  if (!closed) {
1209  try {
1210  con.rollback();
1211 
1212  } catch (SQLException ex1) {
1213  LOGGER.log(Level.SEVERE, "Exception while attempting to rollback!!", ex1); // NON-NLS
1214  } finally {
1215  close();
1216  }
1217  }
1218  }
1219 
1220  private void commit() {
1221  if (!closed) {
1222  try {
1223  con.commit();
1224  // make sure we close before we update, because they'll need locks
1225  close();
1226 
1227  } catch (SQLException ex) {
1228  LOGGER.log(Level.SEVERE, "Error committing events.db.", ex); // NON-NLS
1229  rollback();
1230  }
1231  }
1232  }
1233 
1234  private void close() {
1235  if (!closed) {
1236  try {
1237  con.setAutoCommit(true);
1238  } catch (SQLException ex) {
1239  LOGGER.log(Level.SEVERE, "Error setting auto-commit to true.", ex); // NON-NLS
1240  } finally {
1241  closed = true;
1242 
1243  DBLock.unlock();
1244  }
1245  }
1246  }
1247 
1248  public Boolean isClosed() {
1249  return closed;
1250  }
1251  }
1252 }
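
Typical use of this class follows an open / query / insert-in-a-transaction / close lifecycle. The snippet below is a minimal usage sketch, not code from the file above: it assumes a hypothetical caller in the same package (most methods are package-private) and an already-open Case instance named currentCase.

    // Hypothetical caller; "currentCase" is an assumed, already-open Case.
    EventDB eventDB = EventDB.getEventDB(currentCase); // returns null on failure
    if (eventDB != null) {
        int totalEvents = eventDB.countAllEvents();     // -1 if the query fails
        Long earliest = eventDB.getMinTime();           // seconds since the Unix epoch
        Long latest = eventDB.getMaxTime();

        // batched inserts are wrapped in an EventTransaction
        EventDB.EventTransaction trans = eventDB.beginTransaction();
        // ... repeated insertEvent(..., trans) calls go here ...
        eventDB.commitTransaction(trans);               // or rollBackTransaction(trans) on error

        eventDB.closeDBCon();
    }

Note that beginTransaction() acquires the single write lock and disables auto-commit, so a transaction must be committed or rolled back before other threads can use the database.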
