Code example #1
 /** Make this TopComponent a listener to various change events. */
 private void subscribeToChangeEvents() {
   UserPreferences.addChangeListener(
       new PreferenceChangeListener() {
         @Override
         public void preferenceChange(PreferenceChangeEvent evt) {
           switch (evt.getKey()) {
             case UserPreferences.HIDE_KNOWN_FILES_IN_DATA_SOURCES_TREE:
               refreshContentTreeSafe();
               break;
             case UserPreferences.HIDE_KNOWN_FILES_IN_VIEWS_TREE:
               // TODO: Need a way to refresh the Views subtree
               break;
           }
         }
       });
   Case.addPropertyChangeListener(this);
   this.em.addPropertyChangeListener(this);
   IngestManager.getInstance().addIngestJobEventListener(this);
   IngestManager.getInstance().addIngestModuleEventListener(this);
 }
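The matching cleanup for this kind of subscription appears in example #6 below. A minimal sketch of what the counterpart for this component could look like, using only the remove methods shown in these examples (the method name here is hypothetical):

 /** Hypothetical counterpart: detach this TopComponent from the same event sources. */
 private void unsubscribeFromChangeEvents() {
   // The anonymous PreferenceChangeListener above would need to be kept in a
   // field to be removable; it is omitted here for that reason.
   Case.removePropertyChangeListener(this);
   this.em.removePropertyChangeListener(this);
   IngestManager.getInstance().removeIngestJobEventListener(this);
   IngestManager.getInstance().removeIngestModuleEventListener(this);
 }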
Code example #2
File: HashDbManager.java Project: lorz/autopsy
 /**
  * Removes a hash database from the set of hash databases used to classify files as known or
  * known bad, and saves the configuration.
  *
  * @param hashDb The hash database to remove.
  * @throws HashDbManagerException if ingest is running or the updated configuration cannot be
  *     saved.
  */
 public synchronized void removeHashDatabase(HashDb hashDb) throws HashDbManagerException {
   // Don't remove a database if ingest is running
   boolean ingestIsRunning = IngestManager.getInstance().isIngestRunning();
   if (ingestIsRunning) {
     throw new HashDbManagerException(
         NbBundle.getMessage(this.getClass(), "HashDbManager.ingestRunningExceptionMsg"));
   }
   removeHashDatabaseInternal(hashDb);
   if (!save()) {
     throw new HashDbManagerException(
         NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
   }
 }
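A caller has to be prepared for both failure modes the method can signal (ingest running, save failure). A minimal usage sketch, where hashDbManager, hashDb, and logger are hypothetical references:

  // hashDbManager, hashDb, and logger are hypothetical references.
  void removeSelectedDatabase() {
    try {
      hashDbManager.removeHashDatabase(hashDb);
    } catch (HashDbManagerException ex) {
      // Covers both the ingest-running and the save-failure case; the
      // user-facing text comes from the NbBundle keys shown above.
      logger.log(Level.WARNING, "Could not remove hash database", ex);
    }
  }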
Code example #3
File: IngestMonitor.java Project: nrv3/autopsy
    @Override
    public void actionPerformed(ActionEvent e) {
      final IngestManager manager = IngestManager.getDefault();

      // runs checks only if ingest is running
      if (!manager.isIngestRunning()) {
        return;
      }

      if (!checkDiskSpace()) {
        // stop ingest if running
        final String diskPath = root.getAbsolutePath();
        logger.log(Level.SEVERE, "Stopping ingest due to low disk space on disk " + diskPath);
        manager.stopAll();
        manager.postMessage(
            IngestMessage.createManagerMessage(
                "Stopping ingest due to low disk space on disk " + diskPath,
                "Stopping ingest due to low disk space on disk "
                    + diskPath
                    + ". Please ensure the drive where Case is located has at least 1GB free space (more for large images) and restart ingest."));
      }
    }
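This actionPerformed is presumably fired by a periodic javax.swing.Timer, which is the usual way an IngestMonitor-style check gets invoked on the EDT. A minimal sketch of that wiring; the 60-second period and class name are illustrative, not taken from Autopsy:

  import java.awt.event.ActionEvent;
  import javax.swing.Timer;

  class MonitorWiring {
    // Fire the monitor's check once a minute; callbacks arrive on the EDT.
    private final Timer monitorTimer =
        new Timer(60_000, (ActionEvent e) -> {
          // invoke the ingest-running and disk-space checks shown above
        });

    void start() {
      monitorTimer.start();
    }
  }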
Code example #4
File: IngestManager.java Project: raman-bt/autopsy
 private void handleInterruption() {
   for (IngestModuleAbstractFile s : abstractFileModules) {
     if (isModuleRunning(s)) {
       try {
         s.stop();
       } catch (Exception e) {
         logger.log(Level.WARNING, "Exception while stopping module: " + s.getName(), e);
       }
     }
     IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), s.getName());
   }
   // empty queues
   scheduler.getFileScheduler().empty();
 }
Code example #5
File: IngestManager.java Project: raman-bt/autopsy
    @Override
    protected void done() {
      try {
        super.get(); // block and get all exceptions thrown while doInBackground()
        // notify modules of completion
        if (!this.isCancelled()) {
          for (IngestModuleAbstractFile s : abstractFileModules) {
            s.complete();
            IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), s.getName());
          }
        }

        logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
        logger.log(Level.INFO, "Freeing jvm heap resources post file pipeline run");
        System.gc();
        logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());

      } catch (CancellationException e) {
        // task was cancelled
        handleInterruption();

      } catch (InterruptedException ex) {
        handleInterruption();
      } catch (ExecutionException ex) {
        handleInterruption();
        logger.log(Level.SEVERE, "Fatal error during ingest.", ex);

      } catch (Exception ex) {
        handleInterruption();
        logger.log(Level.SEVERE, "Fatal error during ingest.", ex);
      } finally {
        stats.end();
        progress.finish();

        if (!this.isCancelled()) {
          logger.log(Level.INFO, "Summary Report: " + stats.toString());
          logger.log(Level.INFO, "File module timings: " + stats.getFileModuleStats());
          if (ui != null) {
            logger.log(Level.INFO, "Ingest messages count: " + ui.getMessagesCount());
          }

          IngestManager.this.postMessage(
              IngestMessage.createManagerMessage("File Ingest Complete", stats.toHtmlString()));
        }
      }
    }
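The super.get() call at the top of done() is the standard SwingWorker idiom for surfacing exceptions thrown in doInBackground(), which are otherwise silently swallowed. The idiom in isolation, outside of any Autopsy code:

  import java.util.concurrent.CancellationException;
  import java.util.concurrent.ExecutionException;
  import javax.swing.SwingWorker;

  class ExampleWorker extends SwingWorker<Void, Void> {
    @Override
    protected Void doInBackground() throws Exception {
      // Exceptions thrown here are captured by the worker, not rethrown.
      return null;
    }

    @Override
    protected void done() {
      try {
        get(); // rethrows any doInBackground() failure as ExecutionException
      } catch (CancellationException ex) {
        // the task was cancelled before completion
      } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
      } catch (ExecutionException ex) {
        ex.getCause().printStackTrace(); // the real failure from doInBackground()
      }
    }
  }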
Code example #6
File: FileSize.java Project: sleuthkit/autopsy
 private void removeListeners() {
   deleteObservers();
   IngestManager.getInstance().removeIngestJobEventListener(pcl);
   IngestManager.getInstance().removeIngestModuleEventListener(pcl);
   Case.removePropertyChangeListener(pcl);
 }
Code example #7
File: FileSize.java Project: sleuthkit/autopsy
 FileSizeRootChildrenObservable() {
   IngestManager.getInstance().addIngestJobEventListener(pcl);
   IngestManager.getInstance().addIngestModuleEventListener(pcl);
   Case.addPropertyChangeListener(pcl);
 }
Code example #8
 /**
  * Provides a callback for completed data source ingest jobs, allowing this ingest job to notify
  * the ingest manager when it is complete.
  *
  * @param dataSourceIngestJob A completed data source ingest job.
  */
 void dataSourceJobFinished(DataSourceIngestJob dataSourceIngestJob) {
   if (incompleteJobsCount.decrementAndGet() == 0) {
     IngestManager.getInstance().finishIngestJob(this);
   }
 }
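The decrementAndGet() == 0 check is the atomic countdown idiom: of all the concurrently finishing sub-jobs, exactly one thread observes the count reach zero, so the completion hook runs exactly once. The same pattern in isolation (names here are illustrative, not Autopsy API):

  import java.util.concurrent.atomic.AtomicInteger;

  class JobTracker {
    private final AtomicInteger incompleteJobsCount;

    JobTracker(int subJobCount) {
      incompleteJobsCount = new AtomicInteger(subJobCount);
    }

    /** Called once per finished sub-job, possibly from several threads at once. */
    void subJobFinished() {
      if (incompleteJobsCount.decrementAndGet() == 0) {
        // Only the thread that takes the count to zero gets here.
        onAllJobsFinished();
      }
    }

    private void onAllJobsFinished() {
      // notify whoever is waiting on the whole job
    }
  }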
Code example #9
File: IngestManager.java Project: raman-bt/autopsy
    @Override
    protected Object doInBackground() throws Exception {

      logger.log(Level.INFO, "Starting background ingest file processor");
      logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());

      stats.start();

      // notify main thread modules started
      for (IngestModuleAbstractFile s : abstractFileModules) {
        IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), s.getName());
      }

      final String displayName = "File Ingest";
      progress =
          ProgressHandleFactory.createHandle(
              displayName,
              new Cancellable() {
                @Override
                public boolean cancel() {
                  logger.log(Level.INFO, "Filed ingest cancelled by user.");
                  if (progress != null) {
                    progress.setDisplayName(displayName + " (Cancelling...)");
                  }
                  return IngestAbstractFileProcessor.this.cancel(true);
                }
              });

      final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();

      // initialize the progress bar
      progress.start();
      progress.switchToIndeterminate();
      // set initial totals and processed (to be updated as we process or new files are scheduled)
      int totalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst();
      progress.switchToDeterminate(totalEnqueuedFiles);
      int processedFiles = 0;
      // process AbstractFiles queue
      while (fileScheduler.hasNext()) {
        final ProcessTask fileTask = fileScheduler.next();
        final PipelineContext<IngestModuleAbstractFile> filepipelineContext = fileTask.context;
        final ScheduledTask<IngestModuleAbstractFile> fileIngestTask =
            filepipelineContext.getScheduledTask();
        final AbstractFile fileToProcess = fileTask.file;

        // clear return values from modules for last file
        synchronized (abstractFileModulesRetValues) {
          abstractFileModulesRetValues.clear();
        }

        // logger.log(Level.INFO, "IngestManager: Processing: {0}", fileToProcess.getName());

        for (IngestModuleAbstractFile module : fileIngestTask.getModules()) {
          // process the file with every file module
          if (isCancelled()) {
            logger.log(Level.INFO, "Terminating file ingest due to cancellation.");
            return null;
          }
          progress.progress(
              fileToProcess.getName() + " (" + module.getName() + ")", processedFiles);

          try {
            stats.logFileModuleStartProcess(module);
            IngestModuleAbstractFile.ProcessResult result =
                module.process(filepipelineContext, fileToProcess);
            stats.logFileModuleEndProcess(module);

            // store the result for subsequent modules for this file
            synchronized (abstractFileModulesRetValues) {
              abstractFileModulesRetValues.put(module.getName(), result);
            }

          } catch (Exception e) {
            logger.log(
                Level.SEVERE, "Error: unexpected exception from module: " + module.getName(), e);
            stats.addError(module);
          } catch (OutOfMemoryError e) {
            logger.log(Level.SEVERE, "Error: out of memory from module: " + module.getName(), e);
            stats.addError(module);
          }
        } // end for every module

        // free the internal file resource after done with every module
        fileToProcess.close();

        int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst();
        if (newTotalEnqueuedFiles > totalEnqueuedFiles) {
          // update if new enqueued
          totalEnqueuedFiles = newTotalEnqueuedFiles + 1; // + processedFiles + 1;
          // processedFiles = 0;
          // reset
          progress.switchToIndeterminate();
          progress.switchToDeterminate(totalEnqueuedFiles);
        }
        // fix for now to handle the same data source Content being enqueued twice
        if (processedFiles < totalEnqueuedFiles) {
          ++processedFiles;
        }
        // --totalEnqueuedFiles;

      } // end of for every AbstractFile
      logger.log(Level.INFO, "IngestManager: Finished processing files");
      return null;
    }
Code example #10
File: IngestManager.java Project: raman-bt/autopsy
  /**
   * Starts the needed worker threads.
   *
   * <p>If the AbstractFile worker is still running, it is left alone to keep consuming the queue;
   * otherwise the AbstractFile worker is started or restarted.
   *
   * <p>Data source ingest workers run per (module, content) pair. If a worker for the same pair
   * is already running it is left alone; otherwise a new worker is started.
   */
  private synchronized void startAll() {
    final IngestScheduler.DataSourceScheduler dataSourceScheduler =
        scheduler.getDataSourceScheduler();
    final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();

    logger.log(Level.INFO, "DataSource queue: " + dataSourceScheduler.toString());
    logger.log(Level.INFO, "File queue: " + fileScheduler.toString());

    if (!ingestMonitor.isRunning()) {
      ingestMonitor.start();
    }

    // data source ingesters
    // cycle through each data source content in the queue
    while (dataSourceScheduler.hasNext()) {
      // dequeue
      // get next data source content and set of modules
      final ScheduledTask<IngestModuleDataSource> dataSourceTask = dataSourceScheduler.next();

      // check if each module for this data source content is already running
      for (IngestModuleDataSource taskModule : dataSourceTask.getModules()) {
        boolean alreadyRunning = false;
        for (IngestDataSourceThread worker : dataSourceIngesters) {
          // ignore threads that are on different data sources
          if (!worker.getContent().equals(dataSourceTask.getContent())) {
            continue; // check next worker
          }
          // same data source, check module (by name, not id, since different instances)
          if (worker.getModule().getName().equals(taskModule.getName())) {
            alreadyRunning = true;
            logger.log(
                Level.INFO,
                "Data Source Ingester <"
                    + dataSourceTask.getContent()
                    + ", "
                    + taskModule.getName()
                    + "> is already running");
            break;
          }
        }
        // checked all workers
        if (!alreadyRunning) {
          logger.log(
              Level.INFO,
              "Starting new data source Ingester <"
                  + dataSourceTask.getContent()
                  + ", "
                  + taskModule.getName()
                  + ">");
          // data source modules are now initialized per instance

          IngestModuleInit moduleInit = new IngestModuleInit();

          PipelineContext<IngestModuleDataSource> dataSourcepipelineContext =
              new PipelineContext<IngestModuleDataSource>(dataSourceTask, getProcessUnallocSpace());
          final IngestDataSourceThread newDataSourceWorker =
              new IngestDataSourceThread(
                  this,
                  dataSourcepipelineContext,
                  dataSourceTask.getContent(),
                  taskModule,
                  moduleInit);

          dataSourceIngesters.add(newDataSourceWorker);

          // wrap the module in a worker, that will run init, process and complete on the module
          newDataSourceWorker.execute();
          IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), taskModule.getName());
        }
      }
    }

    // AbstractFile ingester
    boolean startAbstractFileIngester = false;
    if (fileScheduler.hasNext()) {
      if (abstractFileIngester == null) {
        startAbstractFileIngester = true;
        logger.log(Level.INFO, "Starting initial AbstractFile ingester");
      }
      // if worker had completed, restart it in case data is still enqueued
      else if (abstractFileIngester.isDone()) {
        startAbstractFileIngester = true;
        logger.log(Level.INFO, "Restarting AbstractFile ingester");
      }
    } else {
      logger.log(Level.INFO, "no new AbstractFile enqueued, no ingester needed");
    }

    if (startAbstractFileIngester) {
      stats = new IngestManagerStats();
      abstractFileIngester = new IngestAbstractFileProcessor();
      // init all fs modules, every time a new worker starts
      /* @@@ I don't understand why we do an init on each module.  Should do only modules
       * that we are going to be using in the pipeline
       */
      for (IngestModuleAbstractFile s : abstractFileModules) {
        IngestModuleInit moduleInit = new IngestModuleInit();
        try {
          s.init(moduleInit);
        } catch (Exception e) {
          logger.log(Level.SEVERE, "File ingest module failed init(): " + s.getName());
        }
      }
      abstractFileIngester.execute();
    }
  }
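The nested loop that sets alreadyRunning is a duplicate-worker guard keyed on the (data source content, module name) pair. The same check can be expressed with a set of composite keys; this is only a sketch of the idea, not how Autopsy actually tracks its workers:

  import java.util.HashSet;
  import java.util.Set;

  class WorkerGuard {
    private final Set<String> runningKeys = new HashSet<>();

    /** Returns true if the caller may start a worker for this (content, module) pair. */
    synchronized boolean tryClaim(Object content, String moduleName) {
      // Same composite identity the loop above compares on: data source + module name.
      return runningKeys.add(content.toString() + "|" + moduleName);
    }

    synchronized void release(Object content, String moduleName) {
      runningKeys.remove(content.toString() + "|" + moduleName);
    }
  }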
Code example #11
    @Override
    protected Object doInBackground() throws Exception {
      logger.log(Level.INFO, "Pending start of new searcher");

      final String displayName = "Keyword Search" + (finalRun ? " - Finalizing" : "");
      progress =
          ProgressHandleFactory.createHandle(
              displayName + (" (Pending)"),
              new Cancellable() {

                @Override
                public boolean cancel() {
                  logger.log(Level.INFO, "Cancelling the searcher by user.");
                  if (progress != null) {
                    progress.setDisplayName(displayName + " (Cancelling...)");
                  }
                  return Searcher.this.cancel(true);
                }
              });

      progress.start();
      progress.switchToIndeterminate();

      // block to ensure previous searcher is completely done with doInBackground()
      // even after previous searcher cancellation, we need to check this
      searcherLock.lock();
      try {
        logger.log(Level.INFO, "Started a new searcher");
        progress.setDisplayName(displayName);
        // make sure other searchers are not spawned
        searcherDone = false;
        runSearcher = false;
        if (searchTimer.isRunning()) {
          searchTimer.stop();
        }

        int numSearched = 0;

        updateKeywords();
        progress.switchToDeterminate(keywords.size());

        for (Keyword keywordQuery : keywords) {
          if (this.isCancelled()) {
            logger.log(
                Level.INFO,
                "Cancel detected, bailing before new keyword processed: "
                    + keywordQuery.getQuery());
            return null;
          }
          final String queryStr = keywordQuery.getQuery();
          final KeywordSearchList list = keywordToList.get(queryStr);
          final String listName = list.getName();

          // DEBUG
          // logger.log(Level.INFO, "Searching: " + queryStr);

          progress.progress(queryStr, numSearched);

          KeywordSearchQuery del = null;

          boolean isRegex = !keywordQuery.isLiteral();
          if (!isRegex) {
            del = new LuceneQuery(keywordQuery);
            del.escape();
          } else {
            del = new TermComponentQuery(keywordQuery);
          }

          Map<String, List<ContentHit>> queryResult = null;

          try {
            queryResult = del.performQuery();
          } catch (NoOpenCoreException ex) {
            logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
            // No reason to continue with the next query if recovery failed, or to wait for
            // recovery to kick in and run again later; most likely the case has closed and
            // threads are being interrupted.
            return null;
          } catch (CancellationException e) {
            logger.log(
                Level.INFO,
                "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
            return null;
          } catch (Exception e) {
            logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
            continue;
          }

          // calculate new results by subtracting results already obtained in this run
          Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();

          for (String termResult : queryResult.keySet()) {
            List<ContentHit> queryTermResults = queryResult.get(termResult);
            Keyword termResultK = new Keyword(termResult, !isRegex);
            List<ContentHit> curTermResults = currentResults.get(termResultK);
            if (curTermResults == null) {
              currentResults.put(termResultK, queryTermResults);
              newResults.put(termResultK, queryTermResults);
            } else {
              // some AbstractFile hits already exist for this keyword
              for (ContentHit res : queryTermResults) {
                if (!previouslyHit(curTermResults, res)) {
                  // add to new results
                  List<ContentHit> newResultsFs = newResults.get(termResultK);
                  if (newResultsFs == null) {
                    newResultsFs = new ArrayList<ContentHit>();
                    newResults.put(termResultK, newResultsFs);
                  }
                  newResultsFs.add(res);
                  curTermResults.add(res);
                }
              }
            }
          }

          if (!newResults.isEmpty()) {

            // write results to BB

            // new artifacts created, to report to listeners
            Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();

            for (final Keyword hitTerm : newResults.keySet()) {
              List<ContentHit> contentHitsAll = newResults.get(hitTerm);
              Map<AbstractFile, Integer> contentHitsFlattened =
                  ContentHit.flattenResults(contentHitsAll);
              for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
                String snippet = null;
                final String snippetQuery =
                    KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery(), true, false);
                int chunkId = contentHitsFlattened.get(hitFile);
                try {
                  snippet =
                      LuceneQuery.querySnippet(
                          snippetQuery, hitFile.getId(), chunkId, isRegex, true);
                } catch (NoOpenCoreException e) {
                  logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
                  // no reason to continue
                  return null;
                } catch (Exception e) {
                  logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
                  continue;
                }

                KeywordWriteResult written =
                    del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);

                if (written == null) {
                  logger.log(
                      Level.WARNING,
                      "BB artifact for keyword hit not written, file: "
                          + hitFile
                          + ", hit: "
                          + hitTerm.toString());
                  continue;
                }

                newArtifacts.add(written.getArtifact());

                // generate a data message for each artifact
                StringBuilder subjectSb = new StringBuilder();
                StringBuilder detailsSb = new StringBuilder();
                // final int hitFiles = newResults.size();

                if (!keywordQuery.isLiteral()) {
                  subjectSb.append("RegExp hit: ");
                } else {
                  subjectSb.append("Keyword hit: ");
                }
                // subjectSb.append("<");
                String uniqueKey = null;
                BlackboardAttribute attr =
                    written.getAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
                if (attr != null) {
                  final String keyword = attr.getValueString();
                  subjectSb.append(keyword);
                  uniqueKey = keyword.toLowerCase();
                }

                // subjectSb.append(">");
                // String uniqueKey = queryStr;

                // details
                detailsSb.append("<table border='0' cellpadding='4' width='280'>");
                // hit
                detailsSb.append("<tr>");
                detailsSb.append("<th>Keyword hit</th>");
                detailsSb
                    .append("<td>")
                    .append(StringEscapeUtils.escapeHtml(attr.getValueString()))
                    .append("</td>");
                detailsSb.append("</tr>");

                // preview
                attr =
                    written.getAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
                if (attr != null) {
                  detailsSb.append("<tr>");
                  detailsSb.append("<th>Preview</th>");
                  detailsSb
                      .append("<td>")
                      .append(StringEscapeUtils.escapeHtml(attr.getValueString()))
                      .append("</td>");
                  detailsSb.append("</tr>");
                }

                // file
                detailsSb.append("<tr>");
                detailsSb.append("<th>File</th>");
                if (hitFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.FS)) {
                  detailsSb
                      .append("<td>")
                      .append(((FsContent) hitFile).getParentPath())
                      .append(hitFile.getName())
                      .append("</td>");
                } else {
                  detailsSb.append("<td>").append(hitFile.getName()).append("</td>");
                }
                detailsSb.append("</tr>");

                // list
                attr =
                    written.getAttribute(
                        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
                detailsSb.append("<tr>");
                detailsSb.append("<th>List</th>");
                detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                detailsSb.append("</tr>");

                // regex
                if (!keywordQuery.isLiteral()) {
                  attr =
                      written.getAttribute(
                          BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
                  if (attr != null) {
                    detailsSb.append("<tr>");
                    detailsSb.append("<th>RegEx</th>");
                    detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
                    detailsSb.append("</tr>");
                  }
                }
                detailsSb.append("</table>");

                // post an ingest inbox message if hits on this list are configured to send them
                if (list.getIngestMessages()) {
                  managerProxy.postMessage(
                      IngestMessage.createDataMessage(
                          ++messageID,
                          instance,
                          subjectSb.toString(),
                          detailsSb.toString(),
                          uniqueKey,
                          written.getArtifact()));
                }
              } // for each file hit
            } // for each term hit

            // update artifact browser
            if (!newArtifacts.isEmpty()) {
              IngestManager.fireServiceDataEvent(
                  new ServiceDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
            }
          }
          progress.progress(queryStr, ++numSearched);
        }

      } // end try block
      catch (Exception ex) {
        logger.log(Level.WARNING, "searcher exception occurred", ex);
      } finally {
        finalizeSearcher();
        searcherLock.unlock();
      }

      return null;
    }
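The newResults/currentResults bookkeeping above reports only hits that were not seen in a previous searcher pass while folding everything into the cumulative map. The same merge in isolation, with contains() standing in for the previouslyHit() check used above (illustrative names, not the module's actual helpers):

  import java.util.ArrayList;
  import java.util.HashMap;
  import java.util.List;
  import java.util.Map;

  class HitMerger {
    /** Merges batch into cumulative and returns only the entries that were new. */
    static <K, V> Map<K, List<V>> mergeNewHits(Map<K, List<V>> cumulative, Map<K, List<V>> batch) {
      Map<K, List<V>> fresh = new HashMap<>();
      for (Map.Entry<K, List<V>> entry : batch.entrySet()) {
        List<V> seen = cumulative.computeIfAbsent(entry.getKey(), k -> new ArrayList<>());
        for (V hit : entry.getValue()) {
          if (!seen.contains(hit)) { // previouslyHit() in the code above
            seen.add(hit);
            fresh.computeIfAbsent(entry.getKey(), k -> new ArrayList<>()).add(hit);
          }
        }
      }
      return fresh;
    }
  }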