/**
   * Initializes the service for a new ingest run. Sets up threads and timers, and retrieves
   * settings and the keyword lists to run on.
   *
   * @param managerProxy proxy used to communicate with the ingest manager
   */
  @Override
  public void init(IngestManagerProxy managerProxy) {
    logger.log(Level.INFO, "init()");
    initialized = false;

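    // handle to the case database for the currently open case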
    caseHandle = Case.getCurrentCase().getSleuthkitCase();

    this.managerProxy = managerProxy;

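    // get the Solr server backing the keyword index and the ingester used to add content to it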
    Server solrServer = KeywordSearch.getServer();

    ingester = solrServer.getIngester();

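    // per-file indexing outcome for this run, keyed by the file's id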
    ingestStatus = new HashMap<Long, IngestStatus>();

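    // per-run keyword state: the keywords to search for, the enabled list names,
    // and a mapping from each keyword to the list it came from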
    keywords = new ArrayList<Keyword>();
    keywordLists = new ArrayList<String>();
    keywordToList = new HashMap<String, KeywordSearchList>();

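    // populate the keyword collections from the saved keyword search settings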
    initKeywords();

    if (keywords.isEmpty() || keywordLists.isEmpty()) {
      managerProxy.postMessage(
          IngestMessage.createWarningMessage(
              ++messageID,
              instance,
              "No keywords in keyword list.",
              "Only indexing will be done and and keyword search will be skipped (it can be executed later again as ingest or using toolbar search feature)."));
    }

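    // reset per-run progress flags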
    processedFiles = false;
    finalSearcherDone = false;
    searcherDone = true; // make sure to start the initial currentSearcher
    // keeps track of all results per run, so the same hits are not reported twice
    currentResults = new HashMap<Keyword, List<ContentHit>>();

    indexer = new Indexer();

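    // the manager reports its update frequency in minutes; the timers take milliseconds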
    final int updateIntervalMs = managerProxy.getUpdateFrequency() * 60 * 1000;
    logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs);
    logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs);

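    // one timer periodically commits the index, the other periodically kicks off a searcher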
    commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
    searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());

    initialized = true;

    commitTimer.start();
    searchTimer.start();

    managerProxy.postMessage(
        IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Started"));
  }

  /** Posts an inbox message with a summary of the indexed files. */
  private void postIndexSummary() {
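    // counters for each per-file ingest outcome recorded during the run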
    int indexed = 0;
    int indexedExtracted = 0;
    int skipped = 0;
    for (IngestStatus s : ingestStatus.values()) {
      switch (s) {
        case INGESTED:
          ++indexed;
          break;
        case EXTRACTED_INGESTED:
          ++indexedExtracted;
          break;
        case SKIPPED:
          ++skipped;
          break;
        default:
          break;
      }
    }

    StringBuilder msg = new StringBuilder();
    msg.append("Indexed files: ")
        .append(indexed)
        .append("<br />Indexed strings: ")
        .append(indexedExtracted);
    msg.append("<br />Skipped files: ").append(skipped).append("<br />");
    String indexStats = msg.toString();
    logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats);
    managerProxy.postMessage(
        IngestMessage.createMessage(
            ++messageID, MessageType.INFO, this, "Keyword Indexing Completed", indexStats));
  }

  /**
   * Starts processing of every file provided by the IngestManager. Also checks whether it is time
   * to commit the index and run a search.
   *
   * @param abstractFile file/unallocated file/directory to process
   * @return ProcessResult.OK in most cases, which does not advise stopping the pipeline; ERROR only
   *     if there was an error processing the file in the pipeline
   */
  @Override
  public ProcessResult process(AbstractFile abstractFile) {

    // error initializing indexing/Solr
    if (!initialized) {
      return ProcessResult.OK;
    }

    // check if we should skip this file according to the HashDb service;
    // if so, do not index it, and postpone the indexing and keyword search threads until later
    IngestServiceAbstractFile.ProcessResult hashDBResult =
        managerProxy.getAbstractFileServiceResult(hashDBServiceName);
    // logger.log(Level.INFO, "hashdb result: " + hashDBResult + "file: " + AbstractFile.getName());
    if (hashDBResult == IngestServiceAbstractFile.ProcessResult.COND_STOP && skipKnown) {
      return ProcessResult.OK;
    } else if (hashDBResult == IngestServiceAbstractFile.ProcessResult.ERROR) {
      // notify dependent services that keyword search encountered an error for this file
      return ProcessResult.ERROR;
    }

    processedFiles = true;

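    // commit the index and run a search now if a timer has signaled that it is time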
    checkRunCommitSearch();

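    // add the file's content to the Solr index (falling back to extracted strings when
    // direct indexing is not possible, per the EXTRACTED_INGESTED status)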
    indexer.indexFile(abstractFile);
    return ProcessResult.OK;
  }

  /**
   * After all files are ingested, executes the final index commit and final search, and cleans up
   * resources, threads, and timers.
   */
  @Override
  public void complete() {
    if (!initialized) {
      return;
    }

    // logger.log(Level.INFO, "complete()");
    commitTimer.stop();

    // if a previous search is still running, cancel it; it will re-run after the final commit
    // note: cancellation of the Searcher worker is graceful (between keywords)
    if (currentSearcher != null) {
      currentSearcher.cancel(false);
    }

    // cancel the searcher timer to ensure an unwanted searcher does not start
    // before we start the final one
    if (searchTimer.isRunning()) {
      searchTimer.stop();
    }
    runSearcher = false;

    logger.log(Level.INFO, "Running final index commit and search");
    // final commit
    commit();

    postIndexSummary();

    // run one last search as there are probably some new files committed
    if (keywords != null && !keywords.isEmpty() && processedFiles) {
      finalSearcher = new Searcher(keywords, true); // final searcher run
      finalSearcher.execute();
    } else {
      finalSearcherDone = true;
      managerProxy.postMessage(
          IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Completed"));
    }

    // postSummary();
  }