Autopsy 4.22.1
Graphical digital forensics platform for The Sleuth Kit and other tools.
Server.java
Go to the documentation of this file.
1/*
2 * Autopsy Forensic Browser
3 *
4 * Copyright 2011-2021 Basis Technology Corp.
5 * Contact: carrier <at> sleuthkit <dot> org
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 */
19package org.sleuthkit.autopsy.keywordsearch;
20
21import com.google.common.util.concurrent.ThreadFactoryBuilder;
22import java.awt.event.ActionEvent;
23import java.beans.PropertyChangeListener;
24import java.io.BufferedReader;
25import java.io.BufferedWriter;
26import java.io.File;
27import java.io.FileOutputStream;
28import java.io.IOException;
29import java.io.InputStream;
30import java.io.InputStreamReader;
31import java.io.OutputStream;
32import java.io.OutputStreamWriter;
33import java.net.ConnectException;
34import java.net.DatagramSocket;
35import java.net.ServerSocket;
36import java.net.SocketException;
37import java.nio.charset.Charset;
38import java.nio.file.Files;
39import java.nio.file.OpenOption;
40import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
41import java.nio.file.Path;
42import java.nio.file.Paths;
43import java.util.ArrayList;
44import java.util.Arrays;
45import java.util.Collections;
46import java.util.Iterator;
47import java.util.List;
48import java.util.Random;
49import java.util.concurrent.ScheduledThreadPoolExecutor;
50import java.util.concurrent.TimeUnit;
51import java.util.concurrent.locks.ReentrantReadWriteLock;
52import java.util.logging.Level;
53import javax.swing.AbstractAction;
54import org.apache.commons.io.FileUtils;
55import java.util.concurrent.TimeoutException;
56import java.util.concurrent.atomic.AtomicBoolean;
57import java.util.concurrent.atomic.AtomicInteger;
58import java.util.stream.Collectors;
59import static java.util.stream.Collectors.toList;
60import javax.swing.JOptionPane;
61import org.apache.solr.client.solrj.SolrQuery;
62import org.apache.solr.client.solrj.SolrRequest;
63import org.apache.solr.client.solrj.SolrServerException;
64import org.apache.solr.client.solrj.SolrClient;
65import org.apache.solr.client.solrj.impl.HttpSolrClient;
66import org.apache.solr.client.solrj.impl.CloudSolrClient;
67import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
68import org.apache.solr.client.solrj.request.CollectionAdminRequest;
69import org.apache.solr.client.solrj.response.CollectionAdminResponse;
70import org.apache.solr.client.solrj.request.CoreAdminRequest;
71import org.apache.solr.client.solrj.response.CoreAdminResponse;
72import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
73import org.apache.solr.client.solrj.request.QueryRequest;
74import org.apache.solr.client.solrj.response.QueryResponse;
75import org.apache.solr.client.solrj.response.TermsResponse;
76import org.apache.solr.client.solrj.response.TermsResponse.Term;
77import org.apache.solr.common.SolrDocument;
78import org.apache.solr.common.SolrDocumentList;
79import org.apache.solr.common.SolrException;
80import org.apache.solr.common.SolrInputDocument;
81import org.apache.solr.common.util.NamedList;
82import org.openide.modules.InstalledFileLocator;
83import org.openide.modules.Places;
84import org.openide.util.NbBundle;
85import org.openide.util.NbBundle.Messages;
86import org.openide.windows.WindowManager;
87import org.sleuthkit.autopsy.casemodule.Case;
88import org.sleuthkit.autopsy.casemodule.Case.CaseType;
89import org.sleuthkit.autopsy.casemodule.CaseMetadata;
90import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
91import org.sleuthkit.autopsy.core.RuntimeProperties;
92import org.sleuthkit.autopsy.core.UserPreferences;
93import org.sleuthkit.autopsy.coreutils.FileUtil;
94import org.sleuthkit.autopsy.coreutils.Logger;
95import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
96import org.sleuthkit.autopsy.coreutils.ModuleSettings;
97import org.sleuthkit.autopsy.coreutils.PlatformUtil;
98import org.sleuthkit.autopsy.coreutils.ThreadUtils;
99import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
100import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
101import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
102import org.sleuthkit.autopsy.report.ReportProgressPanel;
103import org.sleuthkit.datamodel.Content;
104
109public class Server {
110
// Names of the Solr index fields used by the keyword search module; each
// constant's toString() yields the literal Solr field name.
// NOTE(review): the enum constant identifiers (e.g. the names preceding each
// anonymous body) are missing from this extract -- only the toString() bodies
// remain. Confirm against the original source before editing.
114 public static enum Schema {
115
117 @Override
118 public String toString() {
119 return "id"; //NON-NLS
120 }
121 },
123 @Override
124 public String toString() {
125 return "image_id"; //NON-NLS
126 }
127 },
128 // This is not stored or indexed. it is copied to text by the schema
130 @Override
131 public String toString() {
132 return "content"; //NON-NLS
133 }
134 },
135 // String representation for regular expression searching
137 @Override
138 public String toString() {
139 return "content_str"; //NON-NLS
140 }
141 },
142 // default search field. Populated by schema
144 @Override
145 public String toString() {
146 return "text"; //NON-NLS
147 }
148 },
149 // no longer populated. Was used for regular expression searching.
150 // Should not be used.
152 @Override
153 public String toString() {
154 return "content_ws"; //NON-NLS
155 }
156 },
158 @Override
159 public String toString() {
160 return "content_ja"; //NON-NLS
161 }
162 },
164 @Override
165 public String toString() {
166 return "language"; //NON-NLS
167 }
168 },
170 @Override
171 public String toString() {
172 return "file_name"; //NON-NLS
173 }
174 },
175 // note that we no longer store or index this field
177 @Override
178 public String toString() {
179 return "ctime"; //NON-NLS
180 }
181 },
182 // note that we no longer store or index this field
184 @Override
185 public String toString() {
186 return "atime"; //NON-NLS
187 }
188 },
189 // note that we no longer store or index this field
191 @Override
192 public String toString() {
193 return "mtime"; //NON-NLS
194 }
195 },
196 // note that we no longer store or index this field
198 @Override
199 public String toString() {
200 return "crtime"; //NON-NLS
201 }
202 },
204 @Override
205 public String toString() {
206 return "num_chunks"; //NON-NLS
207 }
208 },
210 @Override
211 public String toString() {
212 return "chunk_size"; //NON-NLS
213 }
214 },
215
221 @Override
222 public String toString() {
223 return "termfreq"; //NON-NLS
224 }
225 }
226 };
227
228 public static final String HL_ANALYZE_CHARS_UNLIMITED = "500000"; //max 1MB in a chunk. use -1 for unlimited, but -1 option may not be supported (not documented)
229 //max content size we can send to Solr
230 public static final long MAX_CONTENT_SIZE = 1L * 31 * 1024 * 1024;
231 private static final Logger logger = Logger.getLogger(Server.class.getName());
232 public static final String CORE_EVT = "CORE_EVT"; //NON-NLS
233 @Deprecated
234 public static final char ID_CHUNK_SEP = '_';
235 public static final String CHUNK_ID_SEPARATOR = "_";
// Path to the java executable used to launch Solr; defaults to "java" on the PATH.
236 private String javaPath = "java";
237 public static final Charset DEFAULT_INDEXED_TEXT_CHARSET = Charset.forName("UTF-8");
// Handle to the most recently launched local Solr process, if any.
238 private Process curSolrProcess = null;
239 static final String PROPERTIES_FILE = KeywordSearchSettings.MODULE_NAME;
240 static final String PROPERTIES_CURRENT_SERVER_PORT = "IndexingServerPort"; //NON-NLS
241 static final String PROPERTIES_CURRENT_STOP_PORT = "IndexingServerStopPort"; //NON-NLS
// Shared STOP key passed to the Solr/Jetty process (STOP_KEY / -DSTOP.KEY).
242 private static final String KEY = "jjk#09s"; //NON-NLS
243 static final String DEFAULT_SOLR_SERVER_HOST = "localhost"; //NON-NLS
244 static final int DEFAULT_SOLR_SERVER_PORT = 23232;
245 static final int DEFAULT_SOLR_STOP_PORT = 34343;
246 private int localSolrServerPort = 0;
247 private int localSolrStopPort = 0;
248 private File localSolrFolder;
249 private static final String SOLR = "solr";
250 private static final String CORE_PROPERTIES = "core.properties";
251 private static final boolean DEBUG = false;//(Version.getBuildType() == Version.Type.DEVELOPMENT);
252 private static final int NUM_COLLECTION_CREATION_RETRIES = 5;
253 private static final int NUM_EMBEDDED_SERVER_RETRIES = 12; // attempt to connect to embedded Solr server for 1 minute
254 private static final int EMBEDDED_SERVER_RETRY_WAIT_SEC = 5;
255
// NOTE(review): the constants of this enum are missing from this extract.
256 public enum CORE_EVT_STATES {
257
259 };
260
// NOTE(review): the constants of this enum are missing from this extract,
// though SOLR8 is referenced elsewhere in this file (e.g. in start()).
261 private enum SOLR_VERSION {
262
264 };
265
266 // A reference to the locally running Solr instance.
267 private HttpSolrClient localSolrServer = null;
268 private SOLR_VERSION localServerVersion = SOLR_VERSION.SOLR8; // start local Solr 8 by default
269
270 // A reference to the remote/network running Solr instance.
271 private HttpSolrClient remoteSolrServer;
272
// The currently open collection (guarded by currentCoreLock), or null.
273 private Collection currentCollection;
274 private final ReentrantReadWriteLock currentCoreLock;
275
276 private final ServerAction serverAction;
278
// Constructor: loads port settings, creates the local Solr client, and
// stages the Solr 8 and Solr 4 home folders (solr.xml, zoo.cfg, configsets)
// under the user directory, overwriting them so stale configuration is not used.
283 Server() {
284 initSettings();
285
286 localSolrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr");
287
288 serverAction = new ServerAction();
289 File solr8Folder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
290 File solr4Folder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS
291
292 // Figure out where Java is located. The Java home location
293 // will be passed as the SOLR_JAVA_HOME environment
294 // variable to the Solr script but it can be overridden by the user in
295 // either autopsy-solr.cmd or autopsy-solr-in.cmd.
// NOTE(review): the statement assigning javaPath appears to be missing here
// in this extract (javaPath otherwise keeps its "java" default) -- confirm
// against the original source.
297
298 Path solr8Home = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "solr"); //NON-NLS
299 try {
300 // Always copy the config files, as they may have changed. Otherwise potentially stale Solr configuration is being used.
301 if (!solr8Home.toFile().exists()) {
302 Files.createDirectory(solr8Home);
303 } else {
304 // delete the configsets directory as the Autopsy configset could have changed
305 FileUtil.deleteDir(solr8Home.resolve("configsets").toFile());
306 }
307 Files.copy(Paths.get(solr8Folder.getAbsolutePath(), "server", "solr", "solr.xml"), solr8Home.resolve("solr.xml"), REPLACE_EXISTING); //NON-NLS
308 Files.copy(Paths.get(solr8Folder.getAbsolutePath(), "server", "solr", "zoo.cfg"), solr8Home.resolve("zoo.cfg"), REPLACE_EXISTING); //NON-NLS
309 FileUtils.copyDirectory(Paths.get(solr8Folder.getAbsolutePath(), "server", "solr", "configsets").toFile(), solr8Home.resolve("configsets").toFile()); //NON-NLS
310 } catch (IOException ex) {
311 logger.log(Level.SEVERE, "Failed to create Solr 8 home folder:", ex); //NON-NLS
312 }
313
314 Path solr4Home = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "solr4"); //NON-NLS
315 try {
316 // Always copy the config files, as they may have changed. Otherwise potentially stale Solr configuration is being used.
317 if (!solr4Home.toFile().exists()) {
318 Files.createDirectory(solr4Home);
319 }
320 Files.copy(Paths.get(solr4Folder.getAbsolutePath(), "solr", "solr.xml"), solr4Home.resolve("solr.xml"), REPLACE_EXISTING); //NON-NLS
321 Files.copy(Paths.get(solr4Folder.getAbsolutePath(), "solr", "zoo.cfg"), solr4Home.resolve("zoo.cfg"), REPLACE_EXISTING); //NON-NLS
322 } catch (IOException ex) {
323 logger.log(Level.SEVERE, "Failed to create Solr 4 home folder:", ex); //NON-NLS
324 }
325
326 currentCoreLock = new ReentrantReadWriteLock(true);
327
328 logger.log(Level.INFO, "Created Server instance using Java at {0}", javaPath); //NON-NLS
329 }
330
331 private void initSettings() {
332
333 if (ModuleSettings.settingExists(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT)) {
334 try {
335 localSolrServerPort = Integer.decode(ModuleSettings.getConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT));
336 } catch (NumberFormatException nfe) {
337 logger.log(Level.WARNING, "Could not decode indexing server port, value was not a valid port number, using the default. ", nfe); //NON-NLS
338 localSolrServerPort = DEFAULT_SOLR_SERVER_PORT;
339 }
340 } else {
341 localSolrServerPort = DEFAULT_SOLR_SERVER_PORT;
342 ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT, String.valueOf(localSolrServerPort));
343 }
344
345 if (ModuleSettings.settingExists(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT)) {
346 try {
347 localSolrStopPort = Integer.decode(ModuleSettings.getConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT));
348 } catch (NumberFormatException nfe) {
349 logger.log(Level.WARNING, "Could not decode indexing server stop port, value was not a valid port number, using default", nfe); //NON-NLS
350 localSolrStopPort = DEFAULT_SOLR_STOP_PORT;
351 }
352 } else {
353 localSolrStopPort = DEFAULT_SOLR_STOP_PORT;
354 ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT, String.valueOf(localSolrStopPort));
355 }
356 }
357
358 private HttpSolrClient getSolrClient(String solrUrl) {
359 int connectionTimeoutMs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getConnectionTimeout();
360 return new HttpSolrClient.Builder(solrUrl)
361 .withSocketTimeout(connectionTimeoutMs)
362 .withConnectionTimeout(connectionTimeoutMs)
363 .build();
364 }
365
366 private ConcurrentUpdateSolrClient getConcurrentClient(String solrUrl) {
367 int numThreads = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getNumThreads();
368 int numDocs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getDocumentsQueueSize();
369 int connectionTimeoutMs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getConnectionTimeout();
370 logger.log(Level.INFO, "Creating new ConcurrentUpdateSolrClient: {0}", solrUrl); //NON-NLS
371 logger.log(Level.INFO, "Queue size = {0}, Number of threads = {1}, Connection Timeout (ms) = {2}", new Object[]{numDocs, numThreads, connectionTimeoutMs}); //NON-NLS
372 ConcurrentUpdateSolrClient client = new ConcurrentUpdateSolrClient.Builder(solrUrl)
373 .withQueueSize(numDocs)
374 .withThreadCount(numThreads)
375 .withSocketTimeout(connectionTimeoutMs)
376 .withConnectionTimeout(connectionTimeoutMs)
377 .build();
378
379 return client;
380 }
381
382 private CloudSolrClient getCloudSolrClient(String host, String port, String defaultCollectionName) throws KeywordSearchModuleException {
383 List<String> solrServerList = getSolrServerList(host, port);
384 List<String> solrUrls = new ArrayList<>();
385 for (String server : solrServerList) {
386 solrUrls.add("http://" + server + "/solr");
387 logger.log(Level.INFO, "Using Solr server: {0}", server);
388 }
389
390 logger.log(Level.INFO, "Creating new CloudSolrClient"); //NON-NLS
391 int connectionTimeoutMs = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getConnectionTimeout();
392 CloudSolrClient client = new CloudSolrClient.Builder(solrUrls)
393 .withConnectionTimeout(connectionTimeoutMs)
394 .withSocketTimeout(connectionTimeoutMs)
395 .build();
396 if (!defaultCollectionName.isEmpty()) {
397 client.setDefaultCollection(defaultCollectionName);
398 }
399 client.connect();
400 return client;
401 }
402
// Stops the local Solr server (and closes any open core via stop()) when
// this Server is garbage collected.
// NOTE(review): Object.finalize() is deprecated since Java 9; consider an
// explicit close()/Cleaner-based shutdown instead.
403 @Override
404 public void finalize() throws java.lang.Throwable {
405 stop();
406 super.finalize();
407 }
408
// Registers a listener for property changes fired by the server action
// (e.g. core open/close events published under CORE_EVT).
409 public void addServerActionListener(PropertyChangeListener l) {
410 serverAction.addPropertyChangeListener(l);
411 }
412
// Returns the port the local Solr server listens on.
413 int getLocalSolrServerPort() {
414 return localSolrServerPort;
415 }
416
// Returns the port used to tell the local Solr (Jetty) server to stop.
417 int getLocalSolrStopPort() {
418 return localSolrStopPort;
419 }
420
424 private static class InputStreamPrinterThread extends Thread {
425
426 InputStream stream;
427 OutputStream out;
428 volatile boolean doRun = true;
429
430 InputStreamPrinterThread(InputStream stream, String type) {
431 this.stream = stream;
432 try {
433 final String log = Places.getUserDirectory().getAbsolutePath()
434 + File.separator + "var" + File.separator + "log" //NON-NLS
435 + File.separator + "solr.log." + type; //NON-NLS
436 File outputFile = new File(log.concat(".0"));
437 File first = new File(log.concat(".1"));
438 File second = new File(log.concat(".2"));
439 if (second.exists()) {
440 second.delete();
441 }
442 if (first.exists()) {
443 first.renameTo(second);
444 }
445 if (outputFile.exists()) {
446 outputFile.renameTo(first);
447 } else {
448 outputFile.createNewFile();
449 }
450 out = new FileOutputStream(outputFile);
451
452 } catch (Exception ex) {
453 logger.log(Level.WARNING, "Failed to create solr log file", ex); //NON-NLS
454 }
455 }
456
457 void stopRun() {
458 doRun = false;
459 }
460
461 @Override
462 public void run() {
463
464 try (InputStreamReader isr = new InputStreamReader(stream);
465 BufferedReader br = new BufferedReader(isr);
466 OutputStreamWriter osw = new OutputStreamWriter(out, PlatformUtil.getDefaultPlatformCharset());
467 BufferedWriter bw = new BufferedWriter(osw);) {
468
469 String line = null;
470 while (doRun && (line = br.readLine()) != null) {
471 bw.write(line);
472 bw.newLine();
473 if (DEBUG) {
474 //flush buffers if dev version for debugging
475 bw.flush();
476 }
477 }
478 bw.flush();
479 } catch (IOException ex) {
480 logger.log(Level.SEVERE, "Error redirecting Solr output stream", ex); //NON-NLS
481 }
482 }
483 }
484
// Runs the customized Solr 8 control script (autopsy-solr[.cmd]) with the
// given arguments (e.g. "start -p <port>" or "stop ..."), setting
// SOLR_JAVA_HOME, SOLR_HOME, STOP_KEY and SOLR_JAVA_MEM in the environment
// and redirecting the script output to log files so it cannot block.
494 private Process runLocalSolr8ControlCommand(List<String> solrArguments) throws IOException {
495 final String MAX_SOLR_MEM_MB_PAR = "-Xmx" + UserPreferences.getMaxSolrVMSize() + "m"; //NON-NLS
496
497 // This is our customized version of the Solr batch script to start/stop Solr.
498 File solr8Folder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
499 Path solr8CmdPath;
// NOTE(review): an if-condition (presumably an OS check selecting the .cmd
// script on Windows) appears to be missing here in this extract -- the
// matching "} else {" below has no visible "if". Confirm against the
// original source.
501 solr8CmdPath = Paths.get(solr8Folder.getAbsolutePath(), "bin", "autopsy-solr.cmd"); //NON-NLS
502 } else {
503 solr8CmdPath = Paths.get(solr8Folder.getAbsolutePath(), "bin", "autopsy-solr"); //NON-NLS
504 }
505 Path solr8Home = Paths.get(PlatformUtil.getUserDirectory().getAbsolutePath(), "solr"); //NON-NLS
506
507 List<String> commandLine = new ArrayList<>();
508 commandLine.add(solr8CmdPath.toString());
509 commandLine.addAll(solrArguments);
510
511 ProcessBuilder solrProcessBuilder = new ProcessBuilder(commandLine);
512 solrProcessBuilder.directory(solr8Folder);
513
514 // Redirect stdout and stderr to files to prevent blocking.
515 Path solrStdoutPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stdout"); //NON-NLS
516 solrProcessBuilder.redirectOutput(solrStdoutPath.toFile());
517
518 Path solrStderrPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stderr"); //NON-NLS
519 solrProcessBuilder.redirectError(solrStderrPath.toFile());
520
521 // get the path to the JRE folder. That's what Solr needs as SOLR_JAVA_HOME
522 String jreFolderPath = new File(javaPath).getParentFile().getParentFile().getAbsolutePath();
523
524 solrProcessBuilder.environment().put("SOLR_JAVA_HOME", jreFolderPath); // NON-NLS
525 solrProcessBuilder.environment().put("SOLR_HOME", solr8Home.toString()); // NON-NLS
526 solrProcessBuilder.environment().put("STOP_KEY", KEY); // NON-NLS
527 solrProcessBuilder.environment().put("SOLR_JAVA_MEM", MAX_SOLR_MEM_MB_PAR); // NON-NLS
528 logger.log(Level.INFO, "Setting Solr 8 directory: {0}", solr8Folder.toString()); //NON-NLS
529 logger.log(Level.INFO, "Running Solr 8 command: {0} from {1}", new Object[]{solrProcessBuilder.command(), solr8Folder.toString()}); //NON-NLS
530 Process process = solrProcessBuilder.start();
531 logger.log(Level.INFO, "Finished running Solr 8 command"); //NON-NLS
532 return process;
533 }
534
544 private Process runLocalSolr4ControlCommand(List<String> solrArguments) throws IOException {
545 final String MAX_SOLR_MEM_MB_PAR = "-Xmx" + UserPreferences.getMaxSolrVMSize() + "m"; //NON-NLS
546 File solr4Folder = InstalledFileLocator.getDefault().locate("solr4", Server.class.getPackage().getName(), false); //NON-NLS
547
548 List<String> commandLine = new ArrayList<>();
549 commandLine.add(javaPath);
550 commandLine.add(MAX_SOLR_MEM_MB_PAR);
551 commandLine.add("-DSTOP.PORT=" + localSolrStopPort); //NON-NLS
552 commandLine.add("-Djetty.port=" + localSolrServerPort); //NON-NLS
553 commandLine.add("-DSTOP.KEY=" + KEY); //NON-NLS
554 commandLine.add("-jar"); //NON-NLS
555 commandLine.add("start.jar"); //NON-NLS
556
557 commandLine.addAll(solrArguments);
558
559 ProcessBuilder solrProcessBuilder = new ProcessBuilder(commandLine);
560 solrProcessBuilder.directory(solr4Folder);
561
562 // Redirect stdout and stderr to files to prevent blocking.
563 Path solrStdoutPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stdout"); //NON-NLS
564 solrProcessBuilder.redirectOutput(solrStdoutPath.toFile());
565
566 Path solrStderrPath = Paths.get(Places.getUserDirectory().getAbsolutePath(), "var", "log", "solr.log.stderr"); //NON-NLS
567 solrProcessBuilder.redirectError(solrStderrPath.toFile());
568
569 logger.log(Level.INFO, "Running Solr 4 command: {0}", solrProcessBuilder.command()); //NON-NLS
570 Process process = solrProcessBuilder.start();
571 logger.log(Level.INFO, "Finished running Solr 4 command"); //NON-NLS
572 return process;
573 }
574
580 List<Long> getSolrPIDs() {
581 List<Long> pids = new ArrayList<>();
582
583 //NOTE: these needs to be in sync with process start string in start()
584 final String pidsQuery = "-DSTOP.KEY=" + KEY + "%start.jar"; //NON-NLS
585
586 long[] pidsArr = PlatformUtil.getJavaPIDs(pidsQuery);
587 if (pidsArr != null) {
588 for (int i = 0; i < pidsArr.length; ++i) {
589 pids.add(pidsArr[i]);
590 }
591 }
592
593 return pids;
594 }
595
600 void killSolr() {
601 List<Long> solrPids = getSolrPIDs();
602 for (long pid : solrPids) {
603 logger.log(Level.INFO, "Trying to kill old Solr process, PID: {0}", pid); //NON-NLS
604 PlatformUtil.killProcess(pid);
605 }
606 }
607
// Starts the default (Solr 8) local server.
608 void start() throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
609 startLocalSolr(SOLR_VERSION.SOLR8);
610 }
611
612 @Messages({
613 "# {0} - indexVersion",
614 "Server_configureSolrConnection_illegalSolrVersion=The solr version in the case: {0}, is not supported."
615 })
// Prepares the Solr connection for the given case: for a single-user case it
// validates the index's Solr version and (re)starts the local Solr 8 server;
// otherwise it connects to the remote/multi-user Solr server.
616 private void configureSolrConnection(Case theCase, Index index) throws KeywordSearchModuleException, SolrServerNoPortException {
617
618 try {
619 if (theCase.getCaseType() == CaseType.SINGLE_USER_CASE) {
620
621 // makes sure the proper local Solr server is running
622 if (!IndexFinder.getCurrentSolrVersion().equals(index.getSolrVersion())) {
623 throw new KeywordSearchModuleException(Bundle.Server_configureSolrConnection_illegalSolrVersion(index.getSolrVersion()));
624 }
625
626 startLocalSolr(SOLR_VERSION.SOLR8);
627
628 // check if the local Solr server is running
629 if (!this.isLocalSolrRunning()) {
630 logger.log(Level.SEVERE, "Local Solr server is not running"); //NON-NLS
631 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.msg"));
632 }
633 } else {
634 // create SolrJ client to connect to remore Solr server
// NOTE(review): the statement assigning remoteSolrServer (presumably via
// configureMultiUserConnection) appears to be missing here in this extract.
636
637 // test the connection
638 connectToSolrServer(remoteSolrServer);
639 }
640 } catch (SolrServerException | IOException ex) {
641 throw new KeywordSearchModuleException(NbBundle.getMessage(Server.class, "Server.connect.exception.msg", ex.getLocalizedMessage()), ex);
642 }
643 }
644
// Builds an HTTP Solr client URL from the multi-user server connection
// properties, optionally appending a collection name, and returns a client
// for it. Throws if host or port is not configured.
659 private HttpSolrClient configureMultiUserConnection(Case theCase, Index index, String name) throws KeywordSearchModuleException {
660
661 // read Solr connection info from user preferences, unless "solrserver.txt" is present
// NOTE(review): the declaration/initialization of `properties` appears to be
// missing here in this extract -- confirm against the original source.
663 if (properties.host.isEmpty() || properties.port.isEmpty()) {
664 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.connectionInfoMissing.exception.msg", index.getSolrVersion()));
665 }
666 String solrUrl = "http://" + properties.host + ":" + properties.port + "/solr";
667
668 if (!name.isEmpty()) {
669 solrUrl = solrUrl + "/" + name;
670 }
671
672 // create SolrJ client to connect to remore Solr server
673 return getSolrClient(solrUrl);
674 }
675
681 @NbBundle.Messages({
682 "Server.status.failed.msg=Local Solr server did not respond to status request. This may be because the server failed to start or is taking too long to initialize.",})
// Starts the local Solr server of the requested version (only SOLR8 is
// accepted here), stopping a wrong-version instance, killing stale Solr
// processes holding the port, and then polling until the server responds
// (up to NUM_EMBEDDED_SERVER_RETRIES * EMBEDDED_SERVER_RETRY_WAIT_SEC seconds).
683 synchronized void startLocalSolr(SOLR_VERSION version) throws KeywordSearchModuleException, SolrServerNoPortException, SolrServerException {
684
685 logger.log(Level.INFO, "Starting local Solr " + version + " server"); //NON-NLS
686 if (version == SOLR_VERSION.SOLR8) {
687 localSolrFolder = InstalledFileLocator.getDefault().locate("solr", Server.class.getPackage().getName(), false); //NON-NLS
688 } else {
689 throw new KeywordSearchModuleException(Bundle.Server_configureSolrConnection_illegalSolrVersion(version.name()));
690 }
691
692 if (isLocalSolrRunning()) {
693 if (localServerVersion.equals(version)) {
694 // this version of local server is already running
695 logger.log(Level.INFO, "Local Solr " + version + " server is already running"); //NON-NLS
696 return;
697 } else {
698 // wrong version of local server is running, stop it
699 stop();
700 }
701 }
702
703 // set which version of local server is currently running
704 localServerVersion = version;
705
706 if (!isPortAvailable(localSolrServerPort)) {
707 // There is something already listening on our port. Let's see if
708 // this is from an earlier run that didn't successfully shut down
709 // and if so kill it.
710 final List<Long> pids = this.getSolrPIDs();
711
712 // If the culprit listening on the port is not a Solr process
713 // we refuse to start.
714 if (pids.isEmpty()) {
715 throw new SolrServerNoPortException(localSolrServerPort);
716 }
717
718 // Ok, we've tried to stop it above but there still appears to be
719 // a Solr process listening on our port so we forcefully kill it.
720 killSolr();
721
722 // If either of the ports are still in use after our attempt to kill
723 // previously running processes we give up and throw an exception.
724 if (!isPortAvailable(localSolrServerPort)) {
725 throw new SolrServerNoPortException(localSolrServerPort);
726 }
727 if (!isPortAvailable(localSolrStopPort)) {
728 throw new SolrServerNoPortException(localSolrStopPort);
729 }
730 }
731
732 if (isPortAvailable(localSolrServerPort)) {
733 logger.log(Level.INFO, "Port [{0}] available, starting Solr", localSolrServerPort); //NON-NLS
734 try {
735 if (version == SOLR_VERSION.SOLR8) {
736 logger.log(Level.INFO, "Starting Solr 8 server"); //NON-NLS
737 curSolrProcess = runLocalSolr8ControlCommand(new ArrayList<>(Arrays.asList("start", "-p", //NON-NLS
738 Integer.toString(localSolrServerPort)))); //NON-NLS
739 } else {
740 // solr4
741 logger.log(Level.INFO, "Starting Solr 4 server"); //NON-NLS
// NOTE(review): the statement launching Solr 4 (presumably
// curSolrProcess = runLocalSolr4ControlCommand(new ArrayList<>(...)
// appears to be missing here in this extract.
743 Arrays.asList("-Dbootstrap_confdir=../solr/configsets/AutopsyConfig/conf", //NON-NLS
744 "-Dcollection.configName=AutopsyConfig"))); //NON-NLS
745 }
746
747 // Wait for the Solr server to start and respond to a statusRequest request.
748 for (int numRetries = 0; numRetries < NUM_EMBEDDED_SERVER_RETRIES; numRetries++) {
749 if (isLocalSolrRunning()) {
750 final List<Long> pids = this.getSolrPIDs();
751 logger.log(Level.INFO, "New Solr process PID: {0}", pids); //NON-NLS
752 return;
753 }
754
755 // Local Solr server did not respond so we sleep for
756 // 5 seconds before trying again.
757 try {
758 TimeUnit.SECONDS.sleep(EMBEDDED_SERVER_RETRY_WAIT_SEC);
759 } catch (InterruptedException ex) {
760 logger.log(Level.WARNING, "Timer interrupted"); //NON-NLS
761 }
762 }
763
764 // If we get here the Solr server has not responded to connection
765 // attempts in a timely fashion.
766 logger.log(Level.WARNING, "Local Solr server failed to respond to status requests.");
767 WindowManager.getDefault().invokeWhenUIReady(new Runnable() {
768 @Override
769 public void run() {
770 MessageNotifyUtil.Notify.error(
771 NbBundle.getMessage(this.getClass(), "Installer.errorInitKsmMsg"),
772 Bundle.Server_status_failed_msg());
773 }
774 });
775 } catch (SecurityException ex) {
776 throw new KeywordSearchModuleException(
777 NbBundle.getMessage(this.getClass(), "Server.start.exception.cantStartSolr.msg"), ex);
778 } catch (IOException ex) {
779 throw new KeywordSearchModuleException(
780 NbBundle.getMessage(this.getClass(), "Server.start.exception.cantStartSolr.msg2"), ex);
781 }
782 }
783 }
784
791 static boolean isPortAvailable(int port) {
792 // implementation taken from https://stackoverflow.com/a/435579
793 if (port < 1 || port > 65535) {
794 throw new IllegalArgumentException("Invalid start port: " + port);
795 }
796
797 ServerSocket ss = null;
798 DatagramSocket ds = null;
799 try {
800 ss = new ServerSocket(port);
801 ss.setReuseAddress(true);
802 ds = new DatagramSocket(port);
803 ds.setReuseAddress(true);
804 return true;
805 } catch (IOException e) {
806 } finally {
807 if (ds != null) {
808 ds.close();
809 }
810
811 if (ss != null) {
812 try {
813 ss.close();
814 } catch (IOException e) {
815 /* should not be thrown */
816 }
817 }
818 }
819
820 return false;
821 }
822
823
// Changes the local Solr server port and persists it to module settings.
829 void changeSolrServerPort(int port) {
830 localSolrServerPort = port;
831 ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_SERVER_PORT, String.valueOf(port));
832 }
833
// Changes the local Solr stop port and persists it to module settings.
839 void changeSolrStopPort(int port) {
840 localSolrStopPort = port;
841 ModuleSettings.setConfigSetting(PROPERTIES_FILE, PROPERTIES_CURRENT_STOP_PORT, String.valueOf(port));
842 }
843
// Stops the server: closes any open core first (logging, not propagating,
// failures) so the index is released before the server goes down.
849 synchronized void stop() {
850
851 try {
852 // Close any open core before stopping server
853 closeCore();
854 } catch (KeywordSearchModuleException e) {
855 logger.log(Level.WARNING, "Failed to close core: ", e); //NON-NLS
856 }
857
// NOTE(review): a statement (presumably stopLocalSolr()) appears to be
// missing here in this extract -- confirm against the original source.
859 }
860
// Attempts a graceful shutdown of the local Solr server via the version-
// specific stop command, then force-destroys the tracked process, stops the
// stream-redirect thread, and finally kills any remaining Solr processes.
864 private void stopLocalSolr() {
865 try {
866 //try graceful shutdown
867 Process process;
// NOTE(review): an if-condition (presumably selecting the Solr 8 branch by
// localServerVersion) appears to be missing here in this extract -- the
// "} else {" below has no visible "if".
869 logger.log(Level.INFO, "Stopping Solr 8 server"); //NON-NLS
870 process = runLocalSolr8ControlCommand(new ArrayList<>(Arrays.asList("stop", "-k", KEY, "-p", Integer.toString(localSolrServerPort)))); //NON-NLS
871 } else {
872 // solr 4
873 logger.log(Level.INFO, "Stopping Solr 4 server"); //NON-NLS
874 process = runLocalSolr4ControlCommand(new ArrayList<>(Arrays.asList("--stop"))); //NON-NLS
875 }
876
877 logger.log(Level.INFO, "Waiting for Solr server to stop"); //NON-NLS
878 process.waitFor();
879
880 //if still running, forcefully stop it
881 if (curSolrProcess != null) {
882 curSolrProcess.destroy();
883 curSolrProcess = null;
884 }
885
886 } catch (IOException | InterruptedException ex) {
887 logger.log(Level.WARNING, "Error while attempting to stop Solr server", ex);
888 } finally {
889 //stop Solr stream -> log redirect threads
890 try {
// NOTE(review): errorRedirectThread is not declared in this extract;
// presumably a field of this class holding an InputStreamPrinterThread.
891 if (errorRedirectThread != null) {
892 errorRedirectThread.stopRun();
893 errorRedirectThread = null;
894 }
895 } finally {
896 //if still running, kill it
897 killSolr();
898 }
899
900 logger.log(Level.INFO, "Finished stopping Solr server"); //NON-NLS
901 }
902 }
903
// Checks whether the local Solr server is up: the port must be bound and the
// server must answer a status request. Connection-type failures are treated
// as "not running"; other failures are wrapped and rethrown.
911 synchronized boolean isLocalSolrRunning() throws KeywordSearchModuleException {
912 try {
913
914 if (isPortAvailable(localSolrServerPort)) {
915 return false;
916 }
917
918 // making a statusRequest request here instead of just doing solrServer.ping(), because
919 // that doesn't work when there are no cores
920 //TODO handle timeout in cases when some other type of server on that port
// NOTE(review): the statement issuing the status request (presumably a
// CoreAdminRequest.getStatus(...) call against localSolrServer) appears to
// be missing here in this extract.
922
923 logger.log(Level.INFO, "Solr server is running"); //NON-NLS
924 } catch (SolrServerException ex) {
925
926 Throwable cause = ex.getRootCause();
927
928 // TODO: check if SocketExceptions should actually happen (is
929 // probably caused by starting a connection as the server finishes
930 // shutting down)
931 if (cause instanceof ConnectException || cause instanceof SocketException) { //|| cause instanceof NoHttpResponseException) {
932 logger.log(Level.INFO, "Solr server is not running, cause: {0}", cause.getMessage()); //NON-NLS
933 return false;
934 } else {
935 throw new KeywordSearchModuleException(
936 NbBundle.getMessage(this.getClass(), "Server.isRunning.exception.errCheckSolrRunning.msg"), ex);
937 }
938 } catch (SolrException ex) {
939 // Just log 404 errors for now...
940 logger.log(Level.INFO, "Solr server is not running", ex); //NON-NLS
941 return false;
942 } catch (IOException ex) {
943 throw new KeywordSearchModuleException(
944 NbBundle.getMessage(this.getClass(), "Server.isRunning.exception.errCheckSolrRunning.msg2"), ex);
945 }
946
947 return true;
948 }
949
950 /*
951 * ** Convenience methods for use while we only open one case at a time ***
952 */
    /**
     * Opens the Solr collection/core for the given case and makes it the
     * current collection. Holds the write lock for the duration so readers
     * never observe a partially opened collection.
     *
     * @param theCase The case whose collection is to be opened.
     * @param index   The text index to open.
     *
     * @throws KeywordSearchModuleException If the collection cannot be opened
     *                                      or the test query fails.
     */
    void openCoreForCase(Case theCase, Index index) throws KeywordSearchModuleException {
        currentCoreLock.writeLock().lock();
        try {
            currentCollection = openCore(theCase, index);

            try {
                // execute a test query. if it fails, an exception will be thrown
            } catch (NoOpenCoreException ex) {
                throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex);
            }

        } finally {
            currentCoreLock.writeLock().unlock();
        }
    }
979
985 boolean coreIsOpen() {
986 currentCoreLock.readLock().lock();
987 try {
988 return (null != currentCollection);
989 } finally {
990 currentCoreLock.readLock().unlock();
991 }
992 }
993
994 Index getIndexInfo() throws NoOpenCoreException {
995 currentCoreLock.readLock().lock();
996 try {
997 if (null == currentCollection) {
998 throw new NoOpenCoreException();
999 }
1000 return currentCollection.getIndexInfo();
1001 } finally {
1002 currentCoreLock.readLock().unlock();
1003 }
1004 }
1005
    /**
     * Closes the currently open collection, if any.
     *
     * The reference to the collection is cleared in the finally block so that
     * the server does not keep pointing at a collection whose close attempt
     * failed.
     *
     * @throws KeywordSearchModuleException If closing the collection fails.
     */
    void closeCore() throws KeywordSearchModuleException {
        currentCoreLock.writeLock().lock();
        try {
            if (null != currentCollection) {
                currentCollection.close();
            }
        } finally {
            currentCollection = null;
            currentCoreLock.writeLock().unlock();
        }
    }
1018
1019 void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException, NoOpenCoreException {
1020 currentCoreLock.readLock().lock();
1021 try {
1022 if (null == currentCollection) {
1023 throw new NoOpenCoreException();
1024 }
1025 TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Index chunk");
1026 currentCollection.addDocument(doc);
1027 HealthMonitor.submitTimingMetric(metric);
1028 } finally {
1029 currentCoreLock.readLock().unlock();
1030 }
1031 }
1032
1041 @NbBundle.Messages({
1042 "# {0} - colelction name", "Server.deleteCore.exception.msg=Failed to delete Solr colelction {0}",})
1043 void deleteCollection(String coreName, CaseMetadata metadata) throws KeywordSearchServiceException, KeywordSearchModuleException {
1044 try {
1045 HttpSolrClient solrServer;
1046 if (metadata.getCaseType() == CaseType.SINGLE_USER_CASE) {
1047 solrServer = getSolrClient("http://localhost:" + localSolrServerPort + "/solr"); //NON-NLS
1048 CoreAdminResponse response = CoreAdminRequest.getStatus(coreName, solrServer);
1049 if (null != response.getCoreStatus(coreName).get("instanceDir")) { //NON-NLS
1050 /*
1051 * Send a core unload request to the Solr server, with the
1052 * parameter set that request deleting the index and the
1053 * instance directory (deleteInstanceDir = true). Note that
1054 * this removes everything related to the core on the server
1055 * (the index directory, the configuration files, etc.), but
1056 * does not delete the actual Solr text index because it is
1057 * currently stored in the case directory.
1058 */
1059 org.apache.solr.client.solrj.request.CoreAdminRequest.unloadCore(coreName, true, true, solrServer);
1060 }
1061 } else {
1062 IndexingServerProperties properties = getMultiUserServerProperties(metadata.getCaseDirectory());
1063 solrServer = getSolrClient("http://" + properties.getHost() + ":" + properties.getPort() + "/solr");
1064 connectToSolrServer(solrServer);
1065
1066 CollectionAdminRequest.Delete deleteCollectionRequest = CollectionAdminRequest.deleteCollection(coreName);
1067 CollectionAdminResponse response = deleteCollectionRequest.process(solrServer);
1068 if (response.isSuccess()) {
1069 logger.log(Level.INFO, "Deleted collection {0}", coreName); //NON-NLS
1070 } else {
1071 logger.log(Level.WARNING, "Unable to delete collection {0}", coreName); //NON-NLS
1072 }
1073 }
1074 } catch (SolrServerException | IOException ex) {
1075 // We will get a RemoteSolrException with cause == null and detailsMessage
1076 // == "Already closed" if the core is not loaded. This is not an error in this scenario.
1077 if (!ex.getMessage().equals("Already closed")) { // NON-NLS
1078 throw new KeywordSearchServiceException(Bundle.Server_deleteCore_exception_msg(coreName), ex);
1079 }
1080 }
1081 }
1082
1094 @NbBundle.Messages({
1095 "Server.exceptionMessage.unableToCreateCollection=Unable to create Solr collection",
1096 "Server.exceptionMessage.unableToBackupCollection=Unable to backup Solr collection",
1097 "Server.exceptionMessage.unableToRestoreCollection=Unable to restore Solr collection",
1098 })
1099 private Collection openCore(Case theCase, Index index) throws KeywordSearchModuleException {
1100
1101 int numShardsToUse = 1;
1102 try {
1103 // connect to proper Solr server
1104 configureSolrConnection(theCase, index);
1105
1106 if (theCase.getCaseType() == CaseType.MULTI_USER_CASE) {
1107 // select number of shards to use
1108 numShardsToUse = getNumShardsToUse();
1109 }
1110 } catch (Exception ex) {
1111 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1112 throw new KeywordSearchModuleException(NbBundle.getMessage(Server.class, "Server.connect.exception.msg", ex.getLocalizedMessage()), ex);
1113 }
1114
1115 try {
1116 String collectionName = index.getIndexName();
1117
1118 if (theCase.getCaseType() == CaseType.MULTI_USER_CASE) {
1119 if (!collectionExists(collectionName)) {
1120 /*
1121 * The collection does not exist. Make a request that will cause the colelction to be created.
1122 */
1123 boolean doRetry = false;
1124 for (int reTryAttempt = 0; reTryAttempt < NUM_COLLECTION_CREATION_RETRIES; reTryAttempt++) {
1125 try {
1126 doRetry = false;
1127 createMultiUserCollection(collectionName, numShardsToUse);
1128 } catch (Exception ex) {
1129 if (reTryAttempt >= NUM_COLLECTION_CREATION_RETRIES) {
1130 logger.log(Level.SEVERE, "Unable to create Solr collection " + collectionName, ex); //NON-NLS
1131 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex);
1132 } else {
1133 logger.log(Level.SEVERE, "Unable to create Solr collection " + collectionName + ". Re-trying...", ex); //NON-NLS
1134 Thread.sleep(1000L);
1135 doRetry = true;
1136 }
1137 }
1138 if (!doRetry) {
1139 break;
1140 }
1141 }
1142 }
1143 } else {
1144 if (!coreIsLoaded(collectionName)) {
1145 // In single user mode, the index is stored in case output directory
1146 File dataDir = new File(new File(index.getIndexPath()).getParent()); // "data dir" is the parent of the index directory
1147 if (!dataDir.exists()) {
1148 dataDir.mkdirs();
1149 }
1150
1151 // In single user mode, if there is a core.properties file already,
1152 // we've hit a solr bug. Compensate by deleting it.
1153 if (theCase.getCaseType() == CaseType.SINGLE_USER_CASE) {
1154 Path corePropertiesFile = Paths.get(localSolrFolder.toString(), SOLR, collectionName, CORE_PROPERTIES);
1155 if (corePropertiesFile.toFile().exists()) {
1156 try {
1157 corePropertiesFile.toFile().delete();
1158 } catch (Exception ex) {
1159 logger.log(Level.INFO, "Could not delete pre-existing core.properties prior to opening the core."); //NON-NLS
1160 }
1161 }
1162 }
1163
1164 // for single user cases, we unload the core when we close the case. So we have to load the core again.
1165 CoreAdminRequest.Create createCoreRequest = new CoreAdminRequest.Create();
1166 createCoreRequest.setDataDir(dataDir.getAbsolutePath());
1167 createCoreRequest.setCoreName(collectionName);
1168 createCoreRequest.setConfigSet("AutopsyConfig"); //NON-NLS
1169 createCoreRequest.setIsLoadOnStartup(false);
1170 createCoreRequest.setIsTransient(true);
1171 localSolrServer.request(createCoreRequest);
1172
1173 if (!coreIndexFolderExists(collectionName)) {
1174 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.noIndexDir.msg"));
1175 }
1176 }
1177 }
1178
1179 return new Collection(collectionName, theCase, index);
1180
1181 } catch (Exception ex) {
1182 logger.log(Level.SEVERE, "Exception during Solr collection creation.", ex); //NON-NLS
1183 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.cantOpen.msg"), ex);
1184 }
1185 }
1186
1188
1189 // if we want to use a specific sharding strategy, use that
1190 if (org.sleuthkit.autopsy.keywordsearch.UserPreferences.getMaxNumShards() > 0) {
1191 return org.sleuthkit.autopsy.keywordsearch.UserPreferences.getMaxNumShards();
1192 }
1193
1194 // otherwise get list of all live Solr servers in the cluster
1195 List<String> solrServerList = getSolrServerList(remoteSolrServer);
1196 // shard across all available servers
1197 return solrServerList.size();
1198 }
1199
1200 /*
1201 * Poll the remote Solr server for list of existing collections, and check if
1202 * the collection of interest exists.
1203 *
1204 * @param collectionName The name of the collection.
1205 *
1206 * @return True if the collection exists, false otherwise.
1207 *
1208 * @throws SolrServerException If there is a problem communicating with the
1209 * Solr server.
1210 * @throws IOException If there is a problem communicating with the Solr
1211 * server.
1212 */
1213 private boolean collectionExists(String collectionName) throws SolrServerException, IOException {
1214 CollectionAdminRequest.List req = new CollectionAdminRequest.List();
1215 CollectionAdminResponse response = req.process(remoteSolrServer);
1216 List<?> existingCollections = (List<?>) response.getResponse().get("collections");
1217 if (existingCollections == null) {
1218 existingCollections = new ArrayList<>();
1219 }
1220 return existingCollections.contains(collectionName);
1221 }
1222
1223 /* NOTE: Keeping this code for reference, since it works.
1224 private boolean collectionExists(String collectionName) throws SolrServerException, IOException {
1225
1226 // TODO we could potentially use this API. Currently set exception "Solr instance is not running in SolrCloud mode"
1227 // List<String> list = CollectionAdminRequest.listCollections(localSolrServer);
1228
1229 CollectionAdminRequest.ClusterStatus statusRequest = CollectionAdminRequest.getClusterStatus().setCollectionName(collectionName);
1230 CollectionAdminResponse statusResponse;
1231 try {
1232 statusResponse = statusRequest.process(remoteSolrServer);
1233 } catch (RemoteSolrException ex) {
1234 // collection doesn't exist
1235 return false;
1236 }
1237
1238 if (statusResponse == null) {
1239 return false;
1240 }
1241
1242 NamedList error = (NamedList) statusResponse.getResponse().get("error");
1243 if (error != null) {
1244 return false;
1245 }
1246
1247 // For some reason this returns info about all collections even though it's supposed to only return about the one we are requesting
1248 NamedList cluster = (NamedList) statusResponse.getResponse().get("cluster");
1249 NamedList collections = (NamedList) cluster.get("collections");
1250 if (collections != null) {
1251 Object collection = collections.get(collectionName);
1252 return (collection != null);
1253 } else {
1254 return false;
1255 }
1256 }*/
1257
1258 private void createMultiUserCollection(String collectionName, int numShardsToUse) throws KeywordSearchModuleException, SolrServerException, IOException {
1259 /*
1260 * The core either does not exist or it is not loaded. Make a
1261 * request that will cause the core to be created if it does not
1262 * exist or loaded if it already exists.
1263 */
1264
1265 Integer numShards = numShardsToUse;
1266 logger.log(Level.INFO, "numShardsToUse: {0}", numShardsToUse);
1267 Integer numNrtReplicas = 1;
1268 Integer numTlogReplicas = 0;
1269 Integer numPullReplicas = 0;
1270 CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest.createCollection(collectionName, "AutopsyConfig", numShards, numNrtReplicas, numTlogReplicas, numPullReplicas);
1271
1272 CollectionAdminResponse createResponse = createCollectionRequest.process(remoteSolrServer);
1273 if (createResponse.isSuccess()) {
1274 logger.log(Level.INFO, "Collection {0} successfully created.", collectionName);
1275 } else {
1276 logger.log(Level.SEVERE, "Unable to create Solr collection {0}", collectionName); //NON-NLS
1277 throw new KeywordSearchModuleException(Bundle.Server_exceptionMessage_unableToCreateCollection());
1278 }
1279
1280 /* If we need core name:
1281 Map<String, NamedList<Integer>> status = createResponse.getCollectionCoresStatus();
1282 existingCoreName = status.keySet().iterator().next();*/
1283 if (!collectionExists(collectionName)) {
1284 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.openCore.exception.noIndexDir.msg"));
1285 }
1286 }
1287
1288 private void backupCollection(String collectionName, String backupName, String pathToBackupLocation) throws SolrServerException, IOException, KeywordSearchModuleException {
1289 CollectionAdminRequest.Backup backup = CollectionAdminRequest.backupCollection(collectionName, backupName)
1290 .setLocation(pathToBackupLocation);
1291
1292 CollectionAdminResponse backupResponse = backup.process(remoteSolrServer);
1293 if (backupResponse.isSuccess()) {
1294 logger.log(Level.INFO, "Collection {0} successfully backep up.", collectionName);
1295 } else {
1296 logger.log(Level.SEVERE, "Unable to back up Solr collection {0}", collectionName); //NON-NLS
1297 throw new KeywordSearchModuleException(Bundle.Server_exceptionMessage_unableToBackupCollection());
1298 }
1299 }
1300
1301 private void restoreCollection(String backupName, String restoreCollectionName, String pathToBackupLocation) throws SolrServerException, IOException, KeywordSearchModuleException {
1302
1303 CollectionAdminRequest.Restore restore = CollectionAdminRequest.restoreCollection(restoreCollectionName, backupName)
1304 .setLocation(pathToBackupLocation);
1305
1306 CollectionAdminResponse restoreResponse = restore.process(remoteSolrServer);
1307 if (restoreResponse.isSuccess()) {
1308 logger.log(Level.INFO, "Collection {0} successfully resored.", restoreCollectionName);
1309 } else {
1310 logger.log(Level.SEVERE, "Unable to restore Solr collection {0}", restoreCollectionName); //NON-NLS
1311 throw new KeywordSearchModuleException(Bundle.Server_exceptionMessage_unableToRestoreCollection());
1312 }
1313 }
1314
1329 private boolean coreIsLoaded(String coreName) throws SolrServerException, IOException {
1330 CoreAdminResponse response = CoreAdminRequest.getStatus(coreName, localSolrServer);
1331 return response.getCoreStatus(coreName).get("instanceDir") != null; //NON-NLS
1332 }
1333
1346 private boolean coreIndexFolderExists(String coreName) throws SolrServerException, IOException {
1347 CoreAdminResponse response = CoreAdminRequest.getStatus(coreName, localSolrServer);
1348 Object dataDirPath = response.getCoreStatus(coreName).get("dataDir"); //NON-NLS
1349 if (null != dataDirPath) {
1350 File indexDir = Paths.get((String) dataDirPath, "index").toFile(); //NON-NLS
1351 return indexDir.exists();
1352 } else {
1353 return false;
1354 }
1355 }
1356
1368 public static IndexingServerProperties getMultiUserServerProperties(String caseDirectory) {
1369
1370 // if "solrserver.txt" is present, use those connection settings
1371 Path serverFilePath = Paths.get(caseDirectory, "solrserver.txt"); //NON-NLS
1372 if (serverFilePath.toFile().exists()) {
1373 try {
1374 List<String> lines = Files.readAllLines(serverFilePath);
1375 if (lines.isEmpty()) {
1376 logger.log(Level.SEVERE, "solrserver.txt file does not contain any data"); //NON-NLS
1377 } else if (!lines.get(0).contains(",")) {
1378 logger.log(Level.SEVERE, "solrserver.txt file is corrupt - could not read host/port from " + lines.get(0)); //NON-NLS
1379 } else {
1380 String[] parts = lines.get(0).split(",");
1381 if (parts.length != 2) {
1382 logger.log(Level.SEVERE, "solrserver.txt file is corrupt - could not read host/port from " + lines.get(0)); //NON-NLS
1383 } else {
1384 return new IndexingServerProperties(parts[0], parts[1]);
1385 }
1386 }
1387 } catch (IOException ex) {
1388 logger.log(Level.SEVERE, "solrserver.txt file could not be read", ex); //NON-NLS
1389 }
1390 }
1391
1392 // otherwise (or if an error occurred) determine Solr version of the current case
1393 List<Index> indexes = new ArrayList<>();
1394 try {
1395 IndexMetadata indexMetadata = new IndexMetadata(caseDirectory);
1396 indexes = indexMetadata.getIndexes();
1397 } catch (IndexMetadata.TextIndexMetadataException ex) {
1398 logger.log(Level.SEVERE, "Unable to read text index metadata file: " + caseDirectory, ex);
1399
1400 // default to using the latest Solr version settings
1401 String host = UserPreferences.getIndexingServerHost();
1402 String port = UserPreferences.getIndexingServerPort();
1403 return new IndexingServerProperties(host, port);
1404 }
1405
1406 // select which index to use. In practice, all cases always have only one
1407 // index but there is support for having multiple indexes.
1408 Index indexToUse = IndexFinder.identifyIndexToUse(indexes);
1409 if (indexToUse == null) {
1410 // unable to find index that can be used
1411 logger.log(Level.SEVERE, "Unable to find index that can be used for case: {0}", caseDirectory);
1412
1413 // default to using the latest Solr version settings
1414 String host = UserPreferences.getIndexingServerHost();
1415 String port = UserPreferences.getIndexingServerPort();
1416 return new IndexingServerProperties(host, port);
1417 }
1418
1419 // return connection info for the Solr version of the current index
1420 if (IndexFinder.getCurrentSolrVersion().equals(indexToUse.getSolrVersion())) {
1421 // Solr 8
1422 String host = UserPreferences.getIndexingServerHost();
1423 String port = UserPreferences.getIndexingServerPort();
1424 return new IndexingServerProperties(host, port);
1425 } else {
1426 // Solr 4
1427 String host = UserPreferences.getSolr4ServerHost().trim();
1428 String port = UserPreferences.getSolr4ServerPort().trim();
1429 return new IndexingServerProperties(host, port);
1430 }
1431 }
1432
1444 public static void selectSolrServerForCase(Path rootOutputDirectory, Path caseDirectoryPath) throws KeywordSearchModuleException {
1445 // Look for the solr server list file
1446 String serverListName = "solrServerList.txt"; //NON-NLS
1447 Path serverListPath = Paths.get(rootOutputDirectory.toString(), serverListName);
1448 if (serverListPath.toFile().exists()) {
1449
1450 // Read the list of solr servers
1451 List<String> lines;
1452 try {
1453 lines = Files.readAllLines(serverListPath);
1454 } catch (IOException ex) {
1455 throw new KeywordSearchModuleException(serverListName + " could not be read", ex); //NON-NLS
1456 }
1457
1458 // Remove any lines that don't contain a comma (these are likely just whitespace)
1459 for (Iterator<String> iterator = lines.iterator(); iterator.hasNext();) {
1460 String line = iterator.next();
1461 if (!line.contains(",")) {
1462 // Remove the current element from the iterator and the list.
1463 iterator.remove();
1464 }
1465 }
1466 if (lines.isEmpty()) {
1467 throw new KeywordSearchModuleException(serverListName + " had no valid server information"); //NON-NLS
1468 }
1469
1470 // Choose which server to use
1471 int rnd = new Random().nextInt(lines.size());
1472 String[] parts = lines.get(rnd).split(",");
1473 if (parts.length != 2) {
1474 throw new KeywordSearchModuleException("Invalid server data: " + lines.get(rnd)); //NON-NLS
1475 }
1476
1477 // Split it up just to do a sanity check on the data
1478 String host = parts[0];
1479 String port = parts[1];
1480 if (host.isEmpty() || port.isEmpty()) {
1481 throw new KeywordSearchModuleException("Invalid server data: " + lines.get(rnd)); //NON-NLS
1482 }
1483
1484 // Write the server data to a file
1485 Path serverFile = Paths.get(caseDirectoryPath.toString(), "solrserver.txt"); //NON-NLS
1486 try {
1487 caseDirectoryPath.toFile().mkdirs();
1488 if (!caseDirectoryPath.toFile().exists()) {
1489 throw new KeywordSearchModuleException("Case directory " + caseDirectoryPath.toString() + " does not exist"); //NON-NLS
1490 }
1491 Files.write(serverFile, lines.get(rnd).getBytes());
1492 } catch (IOException ex) {
1493 throw new KeywordSearchModuleException(serverFile.toString() + " could not be written", ex); //NON-NLS
1494 }
1495 }
1496 }
1497
    /**
     * Immutable holder for the host and port of an indexing (Solr) server.
     */
    public static class IndexingServerProperties {

        // Host name or IP address of the Solr server.
        private final String host;
        // Port number of the Solr server, kept as a string as read from
        // configuration.
        private final String port;

        /**
         * Constructs the properties object.
         *
         * @param host The Solr server host.
         * @param port The Solr server port.
         */
        IndexingServerProperties(String host, String port) {
            this.host = host;
            this.port = port;
        }

        /**
         * Gets the Solr server host.
         *
         * @return The host.
         */
        public String getHost() {
            return host;
        }

        /**
         * Gets the Solr server port.
         *
         * @return The port.
         */
        public String getPort() {
            return port;
        }
    }
1529
1535 void commit() throws SolrServerException, NoOpenCoreException {
1536 currentCoreLock.readLock().lock();
1537 try {
1538 if (null == currentCollection) {
1539 throw new NoOpenCoreException();
1540 }
1541 currentCollection.commit();
1542 } finally {
1543 currentCoreLock.readLock().unlock();
1544 }
1545 }
1546
1547 NamedList<Object> request(SolrRequest<?> request) throws SolrServerException, RemoteSolrException, NoOpenCoreException {
1548 currentCoreLock.readLock().lock();
1549 try {
1550 if (null == currentCollection) {
1551 throw new NoOpenCoreException();
1552 }
1553 return currentCollection.request(request);
1554 } finally {
1555 currentCoreLock.readLock().unlock();
1556 }
1557 }
1558
1570 currentCoreLock.readLock().lock();
1571 try {
1572 if (null == currentCollection) {
1573 throw new NoOpenCoreException();
1574 }
1575 try {
1576 return currentCollection.queryNumIndexedFiles();
1577 } catch (Exception ex) {
1578 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1579 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumIdxFiles.exception.msg"), ex);
1580 }
1581 } finally {
1582 currentCoreLock.readLock().unlock();
1583 }
1584 }
1585
1596 currentCoreLock.readLock().lock();
1597 try {
1598 if (null == currentCollection) {
1599 throw new NoOpenCoreException();
1600 }
1601 try {
1602 return currentCollection.queryNumIndexedChunks();
1603 } catch (Exception ex) {
1604 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1605 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumIdxChunks.exception.msg"), ex);
1606 }
1607 } finally {
1608 currentCoreLock.readLock().unlock();
1609 }
1610 }
1611
1622 currentCoreLock.readLock().lock();
1623 try {
1624 if (null == currentCollection) {
1625 throw new NoOpenCoreException();
1626 }
1627 try {
1628 return currentCollection.queryNumIndexedDocuments();
1629 } catch (Exception ex) {
1630 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1631 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumIdxDocs.exception.msg"), ex);
1632 }
1633 } finally {
1634 currentCoreLock.readLock().unlock();
1635 }
1636 }
1637
1649 currentCoreLock.readLock().lock();
1650 try {
1651 if (null == currentCollection) {
1652 throw new NoOpenCoreException();
1653 }
1654 try {
1655 int totalNumChunks = currentCollection.queryTotalNumFileChunks(contentID);
1656 if (totalNumChunks == 0) {
1657 return false;
1658 }
1659
1660 int numIndexedChunks = currentCollection.queryNumIndexedChunks(contentID);
1661 return numIndexedChunks == totalNumChunks;
1662 } catch (Exception ex) {
1663 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1664 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryIsIdxd.exception.msg"), ex);
1665 }
1666
1667 } finally {
1668 currentCoreLock.readLock().unlock();
1669 }
1670 }
1671
1684 currentCoreLock.readLock().lock();
1685 try {
1686 if (null == currentCollection) {
1687 throw new NoOpenCoreException();
1688 }
1689 try {
1690 return currentCollection.queryTotalNumFileChunks(fileID);
1691 } catch (Exception ex) {
1692 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1693 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryNumFileChunks.exception.msg"), ex);
1694 }
1695 } finally {
1696 currentCoreLock.readLock().unlock();
1697 }
1698 }
1699
1710 public QueryResponse query(SolrQuery sq) throws KeywordSearchModuleException, NoOpenCoreException, IOException {
1711 currentCoreLock.readLock().lock();
1712 try {
1713 if (null == currentCollection) {
1714 throw new NoOpenCoreException();
1715 }
1716 try {
1717 return currentCollection.query(sq);
1718 } catch (Exception ex) {
1719 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1720 logger.log(Level.SEVERE, "Solr query failed: " + sq.getQuery(), ex); //NON-NLS
1721 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.query.exception.msg", sq.getQuery()), ex);
1722 }
1723 } finally {
1724 currentCoreLock.readLock().unlock();
1725 }
1726 }
1727
1739 public QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws KeywordSearchModuleException, NoOpenCoreException {
1740 currentCoreLock.readLock().lock();
1741 try {
1742 if (null == currentCollection) {
1743 throw new NoOpenCoreException();
1744 }
1745 try {
1746 return currentCollection.query(sq, method);
1747 } catch (Exception ex) {
1748 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1749 logger.log(Level.SEVERE, "Solr query failed: " + sq.getQuery(), ex); //NON-NLS
1750 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.query2.exception.msg", sq.getQuery()), ex);
1751 }
1752 } finally {
1753 currentCoreLock.readLock().unlock();
1754 }
1755 }
1756
1767 public TermsResponse queryTerms(SolrQuery sq) throws KeywordSearchModuleException, NoOpenCoreException {
1768 currentCoreLock.readLock().lock();
1769 try {
1770 if (null == currentCollection) {
1771 throw new NoOpenCoreException();
1772 }
1773 try {
1774 return currentCollection.queryTerms(sq);
1775 } catch (Exception ex) {
1776 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1777 logger.log(Level.SEVERE, "Solr terms query failed: " + sq.getQuery(), ex); //NON-NLS
1778 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.queryTerms.exception.msg", sq.getQuery()), ex);
1779 }
1780 } finally {
1781 currentCoreLock.readLock().unlock();
1782 }
1783 }
1784
1792 void deleteDataSource(Long dataSourceId) throws IOException, KeywordSearchModuleException, NoOpenCoreException, SolrServerException {
1793 try {
1794 currentCoreLock.writeLock().lock();
1795 if (null == currentCollection) {
1796 throw new NoOpenCoreException();
1797 }
1798 currentCollection.deleteDataSource(dataSourceId);
1799 currentCollection.commit();
1800 } finally {
1801 currentCoreLock.writeLock().unlock();
1802 }
1803 }
1804
1813 @NbBundle.Messages({
1814 "Server.getAllTerms.error=Extraction of all unique Solr terms failed:"})
1815 void extractAllTermsForDataSource(Path outputFile, ReportProgressPanel progressPanel) throws KeywordSearchModuleException, NoOpenCoreException {
1816 try {
1817 currentCoreLock.writeLock().lock();
1818 if (null == currentCollection) {
1819 throw new NoOpenCoreException();
1820 }
1821 try {
1822 currentCollection.extractAllTermsForDataSource(outputFile, progressPanel);
1823 } catch (Exception ex) {
1824 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
1825 logger.log(Level.SEVERE, "Extraction of all unique Solr terms failed: ", ex); //NON-NLS
1826 throw new KeywordSearchModuleException(Bundle.Server_getAllTerms_error(), ex);
1827 }
1828 } finally {
1829 currentCoreLock.writeLock().unlock();
1830 }
1831 }
1832
1842 public String getSolrContent(final Content content) throws NoOpenCoreException {
1843 currentCoreLock.readLock().lock();
1844 try {
1845 if (null == currentCollection) {
1846 throw new NoOpenCoreException();
1847 }
1848 return currentCollection.getSolrContent(content.getId(), 0);
1849 } finally {
1850 currentCoreLock.readLock().unlock();
1851 }
1852 }
1853
1866 public String getSolrContent(final Content content, int chunkID) throws NoOpenCoreException {
1867 currentCoreLock.readLock().lock();
1868 try {
1869 if (null == currentCollection) {
1870 throw new NoOpenCoreException();
1871 }
1872 return currentCollection.getSolrContent(content.getId(), chunkID);
1873 } finally {
1874 currentCoreLock.readLock().unlock();
1875 }
1876 }
1877
1887 public String getSolrContent(final long objectID) throws NoOpenCoreException {
1888 currentCoreLock.readLock().lock();
1889 try {
1890 if (null == currentCollection) {
1891 throw new NoOpenCoreException();
1892 }
1893 return currentCollection.getSolrContent(objectID, 0);
1894 } finally {
1895 currentCoreLock.readLock().unlock();
1896 }
1897 }
1898
1909 public String getSolrContent(final long objectID, final int chunkID) throws NoOpenCoreException {
1910 currentCoreLock.readLock().lock();
1911 try {
1912 if (null == currentCollection) {
1913 throw new NoOpenCoreException();
1914 }
1915 return currentCollection.getSolrContent(objectID, chunkID);
1916 } finally {
1917 currentCoreLock.readLock().unlock();
1918 }
1919 }
1920
1930 public static String getChunkIdString(long parentID, int childID) {
1931 return Long.toString(parentID) + Server.CHUNK_ID_SEPARATOR + Integer.toString(childID);
1932 }
1933
    /**
     * Checks connectivity to the embedded (local) Solr server by issuing a
     * core status request; a failure surfaces as one of the declared
     * exceptions. A health monitor timing metric is started for the check.
     *
     * @throws SolrServerException If the status request fails.
     * @throws IOException         If there is a communication problem.
     */
    private void connectToEmbeddedSolrServer() throws SolrServerException, IOException {
        TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check");
        CoreAdminRequest.getStatus(null, localSolrServer);
    }
1945
1957 void connectToSolrServer(String host, String port) throws SolrServerException, IOException {
1958 try (HttpSolrClient solrServer = getSolrClient("http://" + host + ":" + port + "/solr")) {
1959 connectToSolrServer(solrServer);
1960 }
1961 }
1962
    /**
     * Attempts to connect to the given Solr server by issuing a cluster
     * status request and inspecting the response status code. A non-zero
     * status is logged as a warning but not thrown; request-level failures
     * surface as the declared exceptions.
     *
     * @param solrServer The Solr client to use for the connectivity check.
     *
     * @throws SolrServerException If the status request fails.
     * @throws IOException         If there is a communication problem.
     */
    private void connectToSolrServer(HttpSolrClient solrServer) throws SolrServerException, IOException {
        // Health monitor metric for the connectivity check.
        TimingMetric metric = HealthMonitor.getTimingMetric("Solr: Connectivity check");
        CollectionAdminRequest.ClusterStatus statusRequest = CollectionAdminRequest.getClusterStatus();
        CollectionAdminResponse statusResponse = statusRequest.process(solrServer);
        // Status 0 in the response header indicates success.
        int statusCode = Integer.valueOf(((NamedList) statusResponse.getResponse().get("responseHeader")).get("status").toString());
        if (statusCode != 0) {
            logger.log(Level.WARNING, "Could not connect to Solr server "); //NON-NLS
        } else {
            logger.log(Level.INFO, "Connected to Solr server "); //NON-NLS
        }
    }
1984
1985 private List<String> getSolrServerList(String host, String port) throws KeywordSearchModuleException {
1986 HttpSolrClient solrServer = getSolrClient("http://" + host + ":" + port + "/solr");
1987 return getSolrServerList(solrServer);
1988 }
1989
    /**
     * Gets the list of live Solr nodes in the cluster, with any trailing
     * "_solr" suffix stripped from each node name. Returns an empty list if
     * the cluster status cannot be obtained or reports an error.
     *
     * @param solrServer The Solr client to query for cluster status.
     *
     * @return List of live node identifiers; may be empty.
     *
     * @throws KeywordSearchModuleException If the request fails unexpectedly.
     */
    private List<String> getSolrServerList(HttpSolrClient solrServer) throws KeywordSearchModuleException {

        try {
            CollectionAdminRequest.ClusterStatus statusRequest = CollectionAdminRequest.getClusterStatus();
            CollectionAdminResponse statusResponse;
            try {
                statusResponse = statusRequest.process(solrServer);
            } catch (RemoteSolrException ex) {
                // collection doesn't exist
                return Collections.emptyList();
            }

            if (statusResponse == null) {
                return Collections.emptyList();
            }

            NamedList<?> error = (NamedList) statusResponse.getResponse().get("error");
            if (error != null) {
                return Collections.emptyList();
            }

            NamedList<?> cluster = (NamedList) statusResponse.getResponse().get("cluster");
            @SuppressWarnings("unchecked")
            List<String> liveNodes = (ArrayList) cluster.get("live_nodes");

            if (liveNodes != null) {
                // Strip the "_solr" suffix that Solr appends to node names.
                liveNodes = liveNodes.stream()
                        .map(serverStr -> serverStr.endsWith("_solr")
                        ? serverStr.substring(0, serverStr.length() - "_solr".length())
                        : serverStr)
                        .collect(Collectors.toList());
            }
            return liveNodes;
        } catch (Exception ex) {
            // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
            NbBundle.getMessage(this.getClass(), "Server.serverList.exception.msg", solrServer.getBaseURL());
        }
    }
2029
2030 class Collection {
2031
2032 // handle to the collection in Solr
2033 private final String name;
2034
2035 private final CaseType caseType;
2036
2037 private final Index textIndex;
2038
2039 // We use different Solr clients for different operations. HttpSolrClient is geared towards query performance.
2040 // ConcurrentUpdateSolrClient is geared towards batching solr documents for better indexing throughput. We
2041 // have implemented our own batching algorithm so we will probably not use ConcurrentUpdateSolrClient.
2042 // CloudSolrClient is geared towards SolrCloud deployments. These are only good for collection-specific operations.
2043 private HttpSolrClient queryClient;
2044 private SolrClient indexingClient;
2045
2046 private final int maxBufferSize;
2047 private final List<SolrInputDocument> buffer;
2048 private final Object bufferLock;
2049
2050 /* (JIRA-7521) Sometimes we get into a situation where Solr server is no longer able to index new data.
2051 * Typically main reason for this is Solr running out of memory. In this case we will stop trying to send new
2052 * data to Solr (for this collection) after certain number of consecutive batches have failed. */
2053 private static final int MAX_NUM_CONSECUTIVE_FAILURES = 5;
2054 private AtomicInteger numConsecutiveFailures = new AtomicInteger(0);
2055 private AtomicBoolean skipIndexing = new AtomicBoolean(false);
2056
2057 private final ScheduledThreadPoolExecutor periodicTasksExecutor;
2058 private static final long PERIODIC_BATCH_SEND_INTERVAL_MINUTES = 10;
2059 private static final int NUM_BATCH_UPDATE_RETRIES = 10;
2060 private static final long SLEEP_BETWEEN_RETRIES_MS = 10000; // 10 seconds
2061
2062 private Collection(String name, Case theCase, Index index) throws TimeoutException, InterruptedException, KeywordSearchModuleException {
2063 this.name = name;
2064 this.caseType = theCase.getCaseType();
2065 this.textIndex = index;
2066 bufferLock = new Object();
2067
2068 if (caseType == CaseType.SINGLE_USER_CASE) {
2069 // get SolrJ client
2070 queryClient = getSolrClient("http://localhost:" + localSolrServerPort + "/solr/" + name); // HttpClient
2071 indexingClient = getSolrClient("http://localhost:" + localSolrServerPort + "/solr/" + name); // HttpClient
2072 } else {
2073 // read Solr connection info from user preferences, unless "solrserver.txt" is present
2074 queryClient = configureMultiUserConnection(theCase, index, name);
2075
2076 // for MU cases, use CloudSolrClient for indexing. Indexing is only supported for Solr 8.
2077 if (IndexFinder.getCurrentSolrVersion().equals(index.getSolrVersion())) {
2078 IndexingServerProperties properties = getMultiUserServerProperties(theCase.getCaseDirectory());
2079 indexingClient = getCloudSolrClient(properties.getHost(), properties.getPort(), name); // CloudClient
2080 } else {
2081 indexingClient = configureMultiUserConnection(theCase, index, name); // HttpClient
2082 }
2083 }
2084
2085 // document batching
2086 maxBufferSize = org.sleuthkit.autopsy.keywordsearch.UserPreferences.getDocumentsQueueSize();
2087 logger.log(Level.INFO, "Using Solr document queue size = {0}", maxBufferSize); //NON-NLS
2088 buffer = new ArrayList<>(maxBufferSize);
2089 periodicTasksExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().setNameFormat("periodic-batched-document-task-%d").build()); //NON-NLS
2090 periodicTasksExecutor.scheduleWithFixedDelay(new SendBatchedDocumentsTask(), PERIODIC_BATCH_SEND_INTERVAL_MINUTES, PERIODIC_BATCH_SEND_INTERVAL_MINUTES, TimeUnit.MINUTES);
2091 }
2092
2099 private final class SendBatchedDocumentsTask implements Runnable {
2100
2101 @Override
2102 public void run() {
2103
2104 if (skipIndexing.get()) {
2105 return;
2106 }
2107
2108 List<SolrInputDocument> clone;
2109 synchronized (bufferLock) {
2110
2111 if (buffer.isEmpty()) {
2112 return;
2113 }
2114
2115 // Buffer is full. Make a clone and release the lock, so that we don't
2116 // hold other ingest threads
2117 clone = buffer.stream().collect(toList());
2118 buffer.clear();
2119 }
2120
2121 try {
2122 // send the cloned list to Solr
2123 sendBufferedDocs(clone);
2124 } catch (KeywordSearchModuleException ex) {
2125 logger.log(Level.SEVERE, "Periodic batched document update failed", ex); //NON-NLS
2126 }
2127 }
2128 }
2129
2135 String getName() {
2136 return name;
2137 }
2138
2139 private Index getIndexInfo() {
2140 return this.textIndex;
2141 }
2142
2143 private QueryResponse query(SolrQuery sq) throws SolrServerException, IOException {
2144 return queryClient.query(sq);
2145 }
2146
2147 private NamedList<Object> request(SolrRequest<?> request) throws SolrServerException, RemoteSolrException {
2148 try {
2149 return queryClient.request(request);
2150 } catch (Exception e) {
2151 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2152 logger.log(Level.WARNING, "Could not issue Solr request. ", e); //NON-NLS
2153 throw new SolrServerException(
2154 NbBundle.getMessage(this.getClass(), "Server.request.exception.exception.msg"), e);
2155 }
2156
2157 }
2158
2159 private QueryResponse query(SolrQuery sq, SolrRequest.METHOD method) throws SolrServerException, IOException {
2160 return queryClient.query(sq, method);
2161 }
2162
2163 private TermsResponse queryTerms(SolrQuery sq) throws SolrServerException, IOException {
2164 QueryResponse qres = queryClient.query(sq);
2165 return qres.getTermsResponse();
2166 }
2167
2168 private void commit() throws SolrServerException {
2169 List<SolrInputDocument> clone;
2170 synchronized (bufferLock) {
2171 // Make a clone and release the lock, so that we don't
2172 // hold other ingest threads
2173 clone = buffer.stream().collect(toList());
2174 buffer.clear();
2175 }
2176
2177 try {
2178 sendBufferedDocs(clone);
2179 } catch (KeywordSearchModuleException ex) {
2180 throw new SolrServerException(NbBundle.getMessage(this.getClass(), "Server.commit.exception.msg"), ex);
2181 }
2182
2183 try {
2184 //commit and block
2185 indexingClient.commit(true, true);
2186 } catch (Exception e) {
2187 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2188 logger.log(Level.WARNING, "Could not commit index. ", e); //NON-NLS
2189 throw new SolrServerException(NbBundle.getMessage(this.getClass(), "Server.commit.exception.msg"), e);
2190 }
2191 }
2192
2193 private void deleteDataSource(Long dsObjId) throws IOException, SolrServerException {
2194 String dataSourceId = Long.toString(dsObjId);
2195 String deleteQuery = "image_id:" + dataSourceId;
2196
2197 queryClient.deleteByQuery(deleteQuery);
2198 }
2199
2211 @NbBundle.Messages({
2212 "# {0} - Number of extracted terms",
2213 "ExtractAllTermsReport.numberExtractedTerms=Extracted {0} terms..."
2214 })
2215 private void extractAllTermsForDataSource(Path outputFile, ReportProgressPanel progressPanel) throws IOException, SolrServerException, NoCurrentCaseException, KeywordSearchModuleException {
2216
2217 Files.deleteIfExists(outputFile);
2218 OpenOption[] options = new OpenOption[] { java.nio.file.StandardOpenOption.CREATE, java.nio.file.StandardOpenOption.APPEND };
2219
2220 // step through the terms
2221 int termStep = 1000;
2222 long numExtractedTerms = 0;
2223 String firstTerm = "";
2224 while (true) {
2225 SolrQuery query = new SolrQuery();
2226 query.setRequestHandler("/terms");
2227 query.setTerms(true);
2228 query.setTermsLimit(termStep);
2229 query.setTermsLower(firstTerm);
2230 query.setTermsLowerInclusive(false);
2231
2232 // Returned terms sorted by "index" order, which is the fastest way. Per Solr documentation:
2233 // "Retrieving terms in index order is very fast since the implementation directly uses Lucene’s TermEnum to iterate over the term dictionary."
2234 // All other sort criteria return very inconsistent and overlapping resuts.
2235 query.setTermsSortString("index");
2236
2237 // "text" field is the schema field that we populate with (lowercased) terms
2238 query.addTermsField(Server.Schema.TEXT.toString());
2239 query.setTermsMinCount(0);
2240
2241 // Unfortunatelly Solr "terms queries" do not support any filtering so we can't filter by data source this way.
2242 // query.addFilterQuery(Server.Schema.IMAGE_ID.toString() + ":" + dataSourceId);
2243
2244 QueryRequest request = new QueryRequest(query);
2245 TermsResponse response = request.process(queryClient).getTermsResponse();
2246 List<Term> terms = response.getTerms(Server.Schema.TEXT.toString());
2247
2248 if (terms == null || terms.isEmpty()) {
2249 numExtractedTerms += terms.size();
2250 progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_numberExtractedTerms(numExtractedTerms));
2251 break;
2252 }
2253
2254 // set the first term for the next query
2255 firstTerm = terms.get(terms.size()-1).getTerm();
2256
2257 List<String> listTerms = terms.stream().map(Term::getTerm).collect(Collectors.toList());
2258 Files.write(outputFile, listTerms, options);
2259
2260 numExtractedTerms += termStep;
2261 progressPanel.updateStatusLabel(Bundle.ExtractAllTermsReport_numberExtractedTerms(numExtractedTerms));
2262 }
2263 }
2264
2273 void addDocument(SolrInputDocument doc) throws KeywordSearchModuleException {
2274
2275 if (skipIndexing.get()) {
2276 return;
2277 }
2278
2279 List<SolrInputDocument> clone;
2280 synchronized (bufferLock) {
2281 buffer.add(doc);
2282 // buffer documents if the buffer is not full
2283 if (buffer.size() < maxBufferSize) {
2284 return;
2285 }
2286
2287 // Buffer is full. Make a clone and release the lock, so that we don't
2288 // hold other ingest threads
2289 clone = buffer.stream().collect(toList());
2290 buffer.clear();
2291 }
2292
2293 // send the cloned list to Solr
2294 sendBufferedDocs(clone);
2295 }
2296
2304 @NbBundle.Messages({
2305 "Collection.unableToIndexData.error=Unable to add data to text index. All future text indexing for the current case will be skipped.",
2306
2307 })
2308 private void sendBufferedDocs(List<SolrInputDocument> docBuffer) throws KeywordSearchModuleException {
2309
2310 if (docBuffer.isEmpty()) {
2311 return;
2312 }
2313
2314 try {
2315 boolean success = true;
2316 for (int reTryAttempt = 0; reTryAttempt < NUM_BATCH_UPDATE_RETRIES; reTryAttempt++) {
2317 try {
2318 success = true;
2319 indexingClient.add(docBuffer);
2320 } catch (Exception ex) {
2321 success = false;
2322 if (reTryAttempt < NUM_BATCH_UPDATE_RETRIES - 1) {
2323 logger.log(Level.WARNING, "Unable to send document batch to Solr. Re-trying...", ex); //NON-NLS
2324 try {
2325 Thread.sleep(SLEEP_BETWEEN_RETRIES_MS);
2326 } catch (InterruptedException ignore) {
2327 throw new KeywordSearchModuleException(
2328 NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
2329 }
2330 }
2331 }
2332 if (success) {
2333 numConsecutiveFailures.set(0);
2334 if (reTryAttempt > 0) {
2335 logger.log(Level.INFO, "Batch update suceeded after {0} re-try", reTryAttempt); //NON-NLS
2336 }
2337 return;
2338 }
2339 }
2340 // if we are here, it means all re-try attempts failed
2341 logger.log(Level.SEVERE, "Unable to send document batch to Solr. All re-try attempts failed!"); //NON-NLS
2342 throw new KeywordSearchModuleException(NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg")); //NON-NLS
2343 } catch (Exception ex) {
2344 // Solr throws a lot of unexpected exception types
2345 numConsecutiveFailures.incrementAndGet();
2346 logger.log(Level.SEVERE, "Could not add batched documents to index", ex); //NON-NLS
2347
2348 // display message to user that that a document batch is missing from the index
2349 MessageNotifyUtil.Notify.error(
2350 NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
2351 NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"));
2352 throw new KeywordSearchModuleException(
2353 NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"), ex); //NON-NLS
2354 } finally {
2355 if (numConsecutiveFailures.get() >= MAX_NUM_CONSECUTIVE_FAILURES) {
2356 // skip all future indexing
2357 skipIndexing.set(true);
2358 logger.log(Level.SEVERE, "Unable to add data to text index. All future text indexing for the current case will be skipped!"); //NON-NLS
2359
2360 // display message to user that no more data will be added to the index
2361 MessageNotifyUtil.Notify.error(
2362 NbBundle.getMessage(this.getClass(), "Server.addDocBatch.exception.msg"),
2363 Bundle.Collection_unableToIndexData_error());
2364 if (RuntimeProperties.runningWithGUI()) {
2365 MessageNotifyUtil.Message.error(Bundle.Collection_unableToIndexData_error());
2366 }
2367 }
2368 docBuffer.clear();
2369 }
2370 }
2371
2382 private String getSolrContent(long contentID, int chunkID) {
2383 final SolrQuery q = new SolrQuery();
2384 q.setQuery("*:*");
2385 String filterQuery = Schema.ID.toString() + ":" + KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID));
2386 if (chunkID != 0) {
2387 filterQuery = filterQuery + Server.CHUNK_ID_SEPARATOR + chunkID;
2388 }
2389 q.addFilterQuery(filterQuery);
2390 q.setFields(Schema.TEXT.toString());
2391 try {
2392 // Get the first result.
2393 SolrDocumentList solrDocuments = queryClient.query(q).getResults();
2394
2395 if (!solrDocuments.isEmpty()) {
2396 SolrDocument solrDocument = solrDocuments.get(0);
2397 if (solrDocument != null) {
2398 java.util.Collection<Object> fieldValues = solrDocument.getFieldValues(Schema.TEXT.toString());
2399 if (fieldValues.size() == 1) // The indexed text field for artifacts will only have a single value.
2400 {
2401 return fieldValues.toArray(new String[0])[0];
2402 } else // The indexed text for files has 2 values, the file name and the file content.
2403 // We return the file content value.
2404 {
2405 return fieldValues.toArray(new String[0])[1];
2406 }
2407 }
2408 }
2409 } catch (Exception ex) {
2410 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2411 logger.log(Level.SEVERE, "Error getting content from Solr. Solr document id " + contentID + ", chunk id " + chunkID + ", query: " + filterQuery, ex); //NON-NLS
2412 return null;
2413 }
2414
2415 return null;
2416 }
2417
2418 synchronized void close() throws KeywordSearchModuleException {
2419 try {
2420
2421 // stop the periodic batch update task. If the task is already running,
2422 // allow it to finish.
2423 ThreadUtils.shutDownTaskExecutor(periodicTasksExecutor);
2424
2425 // We only unload cores for "single-user" cases.
2426 if (this.caseType == CaseType.MULTI_USER_CASE) {
2427 return;
2428 }
2429
2430 CoreAdminRequest.unloadCore(this.name, localSolrServer);
2431 } catch (Exception ex) {
2432 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2433 throw new KeywordSearchModuleException(
2434 NbBundle.getMessage(this.getClass(), "Server.close.exception.msg"), ex);
2435 } finally {
2436 try {
2437 queryClient.close();
2438 queryClient = null;
2439 indexingClient.close();
2440 indexingClient = null;
2441 } catch (IOException ex) {
2442 throw new KeywordSearchModuleException(
2443 NbBundle.getMessage(this.getClass(), "Server.close.exception.msg2"), ex);
2444 }
2445 }
2446 }
2447
2457 private int queryNumIndexedFiles() throws SolrServerException, IOException {
2459 }
2460
2470 private int queryNumIndexedChunks() throws SolrServerException, IOException {
2471 SolrQuery q = new SolrQuery(Server.Schema.ID + ":*" + Server.CHUNK_ID_SEPARATOR + "*");
2472 q.setRows(0);
2473 int numChunks = (int) query(q).getResults().getNumFound();
2474 return numChunks;
2475 }
2476
2487 private int queryNumIndexedDocuments() throws SolrServerException, IOException {
2488 SolrQuery q = new SolrQuery("*:*");
2489 q.setRows(0);
2490 return (int) query(q).getResults().getNumFound();
2491 }
2492
2502 private boolean queryIsIndexed(long contentID) throws SolrServerException, IOException {
2503 String id = KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID));
2504 SolrQuery q = new SolrQuery("*:*");
2505 q.addFilterQuery(Server.Schema.ID.toString() + ":" + id);
2506 //q.setFields(Server.Schema.ID.toString());
2507 q.setRows(0);
2508 return (int) query(q).getResults().getNumFound() != 0;
2509 }
2510
2525 private int queryTotalNumFileChunks(long contentID) throws SolrServerException, IOException {
2526 final SolrQuery q = new SolrQuery();
2527 q.setQuery("*:*");
2528 String filterQuery = Schema.ID.toString() + ":" + KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID));
2529 q.addFilterQuery(filterQuery);
2530 q.setFields(Schema.NUM_CHUNKS.toString());
2531 try {
2532 SolrDocumentList solrDocuments = query(q).getResults();
2533 if (!solrDocuments.isEmpty()) {
2534 SolrDocument solrDocument = solrDocuments.get(0);
2535 if (solrDocument != null && !solrDocument.isEmpty()) {
2536 Object fieldValue = solrDocument.getFieldValue(Schema.NUM_CHUNKS.toString());
2537 return (Integer)fieldValue;
2538 }
2539 }
2540 } catch (Exception ex) {
2541 // intentional "catch all" as Solr is known to throw all kinds of Runtime exceptions
2542 logger.log(Level.SEVERE, "Error getting content from Solr. Solr document id " + contentID + ", query: " + filterQuery, ex); //NON-NLS
2543 return 0;
2544 }
2545 // File not indexed
2546 return 0;
2547 }
2548
2560 int queryNumIndexedChunks(long contentID) throws SolrServerException, IOException {
2561 SolrQuery q = new SolrQuery(Server.Schema.ID + ":" + KeywordSearchUtil.escapeLuceneQuery(Long.toString(contentID)) + Server.CHUNK_ID_SEPARATOR + "*");
2562 q.setRows(0);
2563 int numChunks = (int) query(q).getResults().getNumFound();
2564 return numChunks;
2565 }
2566 }
2567
2568 class ServerAction extends AbstractAction {
2569
2570 private static final long serialVersionUID = 1L;
2571
2572 @Override
2573 public void actionPerformed(ActionEvent e) {
2574 logger.log(Level.INFO, e.paramString().trim());
2575 }
2576 }
2577
2581 class SolrServerNoPortException extends SocketException {
2582
2583 private static final long serialVersionUID = 1L;
2584
2588 private final int port;
2589
2590 SolrServerNoPortException(int port) {
2591 super(NbBundle.getMessage(Server.class, "Server.solrServerNoPortException.msg", port,
2592 Server.PROPERTIES_CURRENT_SERVER_PORT));
2593 this.port = port;
2594 }
2595
2596 int getPortNumber() {
2597 return port;
2598 }
2599 }
2600}
static boolean deleteDir(File dirPath)
Definition FileUtil.java:47
synchronized static Logger getLogger(String name)
Definition Logger.java:124
static synchronized void setConfigSetting(String moduleName, String settingName, String settingVal)
static synchronized String getConfigSetting(String moduleName, String settingName)
static synchronized boolean settingExists(String moduleName, String settingName)
static synchronized long[] getJavaPIDs(String argsSubQuery)
static TimingMetric getTimingMetric(String name)
static void submitTimingMetric(TimingMetric metric)
boolean collectionExists(String collectionName)
Definition Server.java:1213
String getSolrContent(final Content content, int chunkID)
Definition Server.java:1866
String getSolrContent(final Content content)
Definition Server.java:1842
void backupCollection(String collectionName, String backupName, String pathToBackupLocation)
Definition Server.java:1288
void configureSolrConnection(Case theCase, Index index)
Definition Server.java:616
TermsResponse queryTerms(SolrQuery sq)
Definition Server.java:1767
HttpSolrClient configureMultiUserConnection(Case theCase, Index index, String name)
Definition Server.java:659
static void selectSolrServerForCase(Path rootOutputDirectory, Path caseDirectoryPath)
Definition Server.java:1444
static final String HL_ANALYZE_CHARS_UNLIMITED
Definition Server.java:228
static final Charset DEFAULT_INDEXED_TEXT_CHARSET
default Charset to index text as
Definition Server.java:237
void addServerActionListener(PropertyChangeListener l)
Definition Server.java:409
Process runLocalSolr4ControlCommand(List< String > solrArguments)
Definition Server.java:544
QueryResponse query(SolrQuery sq, SolrRequest.METHOD method)
Definition Server.java:1739
boolean coreIndexFolderExists(String coreName)
Definition Server.java:1346
String getSolrContent(final long objectID, final int chunkID)
Definition Server.java:1909
boolean queryIsFullyIndexed(long contentID)
Definition Server.java:1648
Process runLocalSolr8ControlCommand(List< String > solrArguments)
Definition Server.java:494
void createMultiUserCollection(String collectionName, int numShardsToUse)
Definition Server.java:1258
QueryResponse query(SolrQuery sq)
Definition Server.java:1710
Collection openCore(Case theCase, Index index)
Definition Server.java:1099
List< String > getSolrServerList(HttpSolrClient solrServer)
Definition Server.java:1990
boolean coreIsLoaded(String coreName)
Definition Server.java:1329
CloudSolrClient getCloudSolrClient(String host, String port, String defaultCollectionName)
Definition Server.java:382
static String getChunkIdString(long parentID, int childID)
Definition Server.java:1930
List< String > getSolrServerList(String host, String port)
Definition Server.java:1985
void connectToSolrServer(HttpSolrClient solrServer)
Definition Server.java:1972
String getSolrContent(final long objectID)
Definition Server.java:1887
HttpSolrClient getSolrClient(String solrUrl)
Definition Server.java:358
InputStreamPrinterThread errorRedirectThread
Definition Server.java:277
ConcurrentUpdateSolrClient getConcurrentClient(String solrUrl)
Definition Server.java:366
static IndexingServerProperties getMultiUserServerProperties(String caseDirectory)
Definition Server.java:1368
final ReentrantReadWriteLock currentCoreLock
Definition Server.java:274
void restoreCollection(String backupName, String restoreCollectionName, String pathToBackupLocation)
Definition Server.java:1301

Copyright © 2012-2024 Sleuth Kit Labs. Generated on:
This work is licensed under a Creative Commons Attribution-Share Alike 3.0 United States License.