public void testListenerCalled() throws Exception {
    Directory directory = newDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
    final AtomicBoolean refreshed = new AtomicBoolean(false);
    SearcherManager manager = new SearcherManager(writer, true, new SearcherFactory());
    // Flip the flag only when a refresh actually swapped in a new searcher.
    manager.addListener(
        new ReferenceManager.RefreshListener() {
          @Override
          public void beforeRefresh() {}

          @Override
          public void afterRefresh(boolean didRefresh) {
            if (didRefresh) {
              refreshed.set(true);
            }
          }
        });
    writer.addDocument(new Document());
    writer.commit();
    // Committing alone must not fire the listener...
    assertFalse(refreshed.get());
    manager.maybeRefreshBlocking();
    // ...but an explicit blocking refresh must.
    assertTrue(refreshed.get());
    manager.close();
    writer.close();
    directory.close();
  }
 @Override
 public long getCount() throws IOException {
   // Borrow a searcher just long enough to read the live doc count,
   // and always hand it back so the reader refcount stays balanced.
   final IndexSearcher borrowed = searcherMgr.acquire();
   try {
     return borrowed.getIndexReader().numDocs();
   } finally {
     searcherMgr.release(borrowed);
   }
 }
 /**
  * Returns the total number of deleted {@link Document}s in this index.
  *
  * @return The total number of deleted {@link Document}s in this index.
  * @throws IOException If Lucene throws IO errors.
  */
 @Override
 public long getNumDeletedDocs() throws IOException {
   Log.debug("%s get num deleted docs", logName);
   // Acquire/release must be paired so the underlying reader's refcount stays correct.
   final IndexSearcher acquired = searcherManager.acquire();
   try {
     return acquired.getIndexReader().numDeletedDocs();
   } finally {
     searcherManager.release(acquired);
   }
 }
 @Override
 protected void doClose() throws Exception {
   // Sanity check: the warm() hook must have fired at least once during the test.
   assertTrue(warmCalled);
   if (VERBOSE) {
     System.out.println("TEST: now close SearcherManagers");
   }
   // Shut down each reopen thread, then its SearcherManager, for both managers.
   nrtDeletesThread.close();
   nrtDeletes.close();
   nrtNoDeletesThread.close();
   nrtNoDeletes.close();
 }
  @Override
  protected IndexSearcher getCurrentSearcher() throws Exception {
    // Test doesn't assert deletions until the end, so we
    // can randomize whether dels must be applied
    final SearcherManager manager = random().nextBoolean() ? nrtDeletes : nrtNoDeletes;
    return manager.acquire();
  }
// 예제 #6 (Example #6) — source-aggregator artifact; original vote count: 0
  @Override
  public void refresh(String source) throws EngineException {
    // we obtain a read lock here, since we don't want a flush to happen while we are refreshing
    // since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
    try (ReleasableLock lock = readLock.acquire()) {
      ensureOpen();
      searcherManager.maybeRefreshBlocking();
    } catch (AlreadyClosedException e) {
      // Manager was closed under us: re-check engine state, then decide whether to fail the engine.
      ensureOpen();
      maybeFailEngine("refresh", e);
    } catch (EngineClosedException e) {
      // Already the well-typed "engine closed" error; propagate untouched.
      throw e;
    } catch (Throwable t) {
      // Any other failure is treated as fatal for the engine.
      failEngine("refresh failed", t);
      throw new RefreshFailedEngineException(shardId, t);
    }

    // TODO: maybe we should just put a scheduled job in threadPool?
    // We check for pruning in each delete request, but we also prune here e.g. in case a delete
    // burst comes in and then no more deletes
    // for a long time:
    maybePruneDeletedTombstones();
    versionMapRefreshPending.set(false);
    mergeScheduler.refreshConfig();
  }
// 예제 #7 (Example #7) — source-aggregator artifact; original vote count: 0
  @Override
  public void reopen() throws IOException {
    // Collapse the index down to a single segment (the old optimize()), commit it,
    // then nudge the NRT manager so new searchers can pick up the merged index.
    this.writer.forceMerge(1);
    this.writer.commit();

    nrt_manager.maybeRefresh();
  }
// 예제 #8 (Example #8) — source-aggregator artifact; original vote count: 0
 @Override
 public int size() throws IOException {
   // Count live documents via a briefly-borrowed NRT searcher.
   IndexSearcher borrowed = null;
   try {
     borrowed = nrt_manager.acquire();
     return borrowed.getIndexReader().numDocs();
   } finally {
     if (borrowed != null) {
       try {
         nrt_manager.release(borrowed);
       } catch (IOException e) {
         // Release failed: log and swallow so the already-computed size is still returned.
         logger.error("", e);
       }
     }
   }
 }
 @Override
 protected void releaseSearcher(IndexSearcher s) throws Exception {
   // Hands a previously acquired searcher back to a manager; see the NOTE for why
   // releasing through one fixed manager is tolerated here.
   // NOTE: a bit iffy... technically you should release
   // against the same SearcherManager you acquired from... but
   // both impls just decRef the underlying reader so we
   // can get away w/ cheating:
   nrtNoDeletes.release(s);
 }
 @Override
 protected IndexSearcher getFinalSearcher() throws Exception {
   if (VERBOSE) {
     System.out.println("TEST: finalSearcher maxGen=" + maxGen);
   }
   // Block until the reopen thread has made generation maxGen visible,
   // then acquire a searcher from the deletes-applied manager.
   nrtDeletesThread.waitForGeneration(maxGen);
   return nrtDeletes.acquire();
 }
  /**
   * Runs a free-text query, optionally restricted to one entity type, and returns matching ids.
   *
   * @param queryString the query string parsed against the "all" field
   * @param count maximum number of hits to return
   * @param entityName optional entity class simple name used to filter hits; may be null
   * @return ids of the matching documents plus the last ScoreDoc for paging
   * @throws IcatException BAD_PARAMETER for an unknown entity name, INTERNAL for anything else
   */
  public LuceneSearchResult search(String queryString, int count, String entityName)
      throws IcatException {
    if (entityName != null) {
      // Validate that the supplied name maps to a known EntityBaseBean subclass.
      // (Fix: removed a stray System.out.println(klass) debug leftover.)
      try {
        Class<?> klass = Class.forName(Constants.ENTITY_PREFIX + entityName);
        if (!EntityBaseBean.class.isAssignableFrom(klass)) {
          throw new IcatException(
              IcatExceptionType.BAD_PARAMETER, "Invalid entity name " + entityName);
        }
      } catch (ClassNotFoundException e) {
        throw new IcatException(
            IcatExceptionType.BAD_PARAMETER, "Invalid entity name " + entityName);
      }
    }
    try {

      // Swap in a fresh searcher if the index has changed since we last acquired one.
      if (!searcherManager.isSearcherCurrent()) {
        IndexSearcher oldSearcher = isearcher;
        searcherManager.maybeRefreshBlocking();
        isearcher = searcherManager.acquire();
        searcherManager.release(oldSearcher);
        logger.debug("Got a new IndexSearcher " + isearcher);
      }

      List<String> results = new ArrayList<String>();
      Query query = parser.parse(queryString, "all");
      if (entityName != null) {
        // AND the user query with an exact match on the entity field.
        BooleanQuery bquery = new BooleanQuery();
        bquery.add(query, Occur.MUST);
        bquery.add(new TermQuery(new Term("entity", entityName)), Occur.MUST);
        query = bquery;
      }
      ScoreDoc[] hits = isearcher.search(query, count).scoreDocs;
      for (int i = 0; i < hits.length; i++) {
        Document doc = isearcher.doc(hits[i].doc);
        results.add(doc.get("id"));
      }
      // Remember the last hit so the caller can continue paging from it.
      ScoreDoc lastDoc = results.isEmpty() ? null : hits[hits.length - 1];
      return new LuceneSearchResult(results, lastDoc, query);
    } catch (Exception e) {
      throw new IcatException(IcatExceptionType.INTERNAL, e.getMessage());
    }
  }
 public void release(LuceneConnection inConnection) {
   // Route the release back to whichever manager produced this connection:
   // a connection carrying a SearcherAndTaxonomy came from the taxonomy manager.
   try {
     if (inConnection.getSearcherAndTaxonomy() != null) {
       fieldSearcherTaxonomyManager.release(inConnection.getSearcherAndTaxonomy());
     } else {
       fieldSearcherManager.release(inConnection.getIndexSearcher());
     }
   } catch (IOException ex) {
     throw new OpenEditException(ex);
   }
 }
 public void maybeRefresh() {
   // Ask whichever manager is active (plain or taxonomy-aware) to refresh if needed.
   try {
     if (fieldSearcherManager == null) {
       fieldSearcherTaxonomyManager.maybeRefresh();
     } else {
       fieldSearcherManager.maybeRefresh();
     }
   } catch (IOException ex) {
     throw new OpenEditException(ex);
   }
 }
 @Override
 protected void deleteDocuments(Term id) throws Exception {
   // Issue the delete through the generation-tracking writer.
   final long gen = genWriter.deleteDocuments(id);
   // randomly verify the delete "took":
   if (random().nextInt(20) == 7) {
     if (VERBOSE) {
       System.out.println(Thread.currentThread().getName() + ": nrt: verify del " + id);
     }
     // Wait until a reopen has made this generation visible, then expect zero hits.
     nrtDeletesThread.waitForGeneration(gen);
     final IndexSearcher s = nrtDeletes.acquire();
     if (VERBOSE) {
       System.out.println(Thread.currentThread().getName() + ": nrt: got searcher=" + s);
     }
     try {
       assertEquals(0, s.search(new TermQuery(id), 10).totalHits);
     } finally {
       nrtDeletes.release(s);
     }
   }
   // Remember the last generation issued.
   lastGens.set(gen);
 }
 /**
  * Commits all changes to the index, waits for pending merges to complete, and closes all
  * associated resources.
  *
  * @throws IOException If Lucene throws IO errors.
  */
 public void close() throws IOException {
   // Stop the background reopen thread before tearing anything down.
   searcherReopener.interrupt();
   // NOTE(review): if any of these closes throws, the remaining ones are skipped and the
   // MBean stays registered — confirm whether a try/finally chain is wanted here.
   searcherManager.close();
   indexWriter.close();
   directory.close();
   try {
     ManagementFactory.getPlatformMBeanServer().unregisterMBean(objectName);
   } catch (MBeanException | OperationsException e) {
     Log.error(e, "Error while removing MBean");
   }
   Log.info("%s closed", logName);
 }
 /**
  * Closes the searcher manager, writer, and directory, releasing each resource even when an
  * earlier close fails.
  *
  * <p>Fix: the original skipped closing {@code writer}/{@code dir} when
  * {@code searcherMgr.close()} threw, and leaked {@code dir} when {@code writer.close()} threw.
  *
  * @throws IOException If Lucene throws IO errors while closing.
  */
 @Override
 public void close() throws IOException {
   try {
     if (searcherMgr != null) {
       searcherMgr.close();
       searcherMgr = null;
     }
   } finally {
     if (writer != null) {
       try {
         writer.close();
       } finally {
         // Null the field first so a failing dir.close() cannot cause a double writer.close().
         writer = null;
         dir.close();
       }
     }
   }
 }
  @Override
  protected void addDocument(Term id, Iterable<? extends IndexableField> doc) throws Exception {
    // Add through the generation-tracking writer.
    final long gen = genWriter.addDocument(doc);

    // Randomly verify the add "took":
    if (random().nextInt(20) == 2) {
      if (VERBOSE) {
        System.out.println(Thread.currentThread().getName() + ": nrt: verify " + id);
      }
      // Wait for a reopen covering this generation, then expect exactly one hit.
      nrtNoDeletesThread.waitForGeneration(gen);
      final IndexSearcher s = nrtNoDeletes.acquire();
      if (VERBOSE) {
        System.out.println(Thread.currentThread().getName() + ": nrt: got searcher=" + s);
      }
      try {
        assertEquals(1, s.search(new TermQuery(id), 10).totalHits);
      } finally {
        nrtNoDeletes.release(s);
      }
    }
    // Remember the last generation issued.
    lastGens.set(gen);
  }
 /**
  * Estimates heap usage: this object's shallow size plus the RAM reported by each SegmentReader
  * backing the currently acquired searcher.
  *
  * <p>Fix: the original checked {@code reader instanceof SegmentReader} on the unwrapped reader
  * but then cast {@code context.reader()} — a wrapped (FilterAtomicReader) reader would pass the
  * check and then throw ClassCastException. Cast the unwrapped reader instead.
  *
  * @return estimated size in bytes
  */
 @Override
 public long sizeInBytes() {
   long mem = RamUsageEstimator.shallowSizeOf(this);
   try {
     if (searcherMgr != null) {
       IndexSearcher searcher = searcherMgr.acquire();
       try {
         for (AtomicReaderContext context : searcher.getIndexReader().leaves()) {
           AtomicReader reader = FilterAtomicReader.unwrap(context.reader());
           if (reader instanceof SegmentReader) {
             mem += ((SegmentReader) reader).ramBytesUsed();
           }
         }
       } finally {
         searcherMgr.release(searcher);
       }
     }
     return mem;
   } catch (IOException ioe) {
     throw new RuntimeException(ioe);
   }
 }
 public LuceneConnection acquire() {
   // Hand out a connection backed by whichever manager is active; release() later
   // inspects the connection to return it to the matching manager.
   try {
     final LuceneConnection connection = new LuceneConnection();
     if (fieldSearcherManager == null) {
       connection.setSearcherAndTaxonomy(fieldSearcherTaxonomyManager.acquire());
     } else {
       connection.setIndexSearcher(fieldSearcherManager.acquire());
     }
     return connection;
   } catch (IOException ex) {
     throw new OpenEditException(ex);
   }
 }
 @PreDestroy
 private void exit() {
   // Stop the periodic commit task before closing anything it uses.
   timer.cancel();
   try {
     logger.debug("Closing IndexWriter for directory lockid " + directory.getLockID());
     iwriter.commit();
     iwriter.close();
     iwriter = null;
     logger.debug("IndexWriter closed for directory lockid " + directory.getLockID());
     searcherManager.close();
     logger.debug("SearcherManager closed for directory lockid " + directory.getLockID());
     directory.close();
     directory = null;
     logger.info("Directory closed");
   } catch (Exception e) {
     // Shutdown path: log the full stack trace but never propagate out of @PreDestroy.
     StringWriter errors = new StringWriter();
     e.printStackTrace(new PrintWriter(errors));
     logger.fatal(errors.toString());
   }
 }
  /**
   * Builds the suggester index from scratch: closes any existing searcher/writer, indexes every
   * suggestion from {@code iter} into a fresh Lucene index, then opens a SearcherManager over it.
   *
   * <p>Fix: removed the {@code AtomicReader r} local that was declared, never assigned, and dead
   * in both branches of the finally block.
   *
   * @param iter source of suggestion text, weights, and optional payloads
   * @throws IOException if Lucene fails while writing or opening the index
   */
  @Override
  public void build(InputIterator iter) throws IOException {

    // Tear down any previous state so OpenMode.CREATE starts from an empty index.
    if (searcherMgr != null) {
      searcherMgr.close();
      searcherMgr = null;
    }

    if (writer != null) {
      writer.close();
      writer = null;
    }

    boolean success = false;
    try {
      // First pass: build a temporary normal Lucene index,
      // just indexing the suggestions as they iterate:
      writer =
          new IndexWriter(
              dir,
              getIndexWriterConfig(
                  matchVersion, getGramAnalyzer(), IndexWriterConfig.OpenMode.CREATE));
      BytesRef text;
      // One reusable Document; field values are overwritten for every suggestion.
      Document doc = new Document();
      FieldType ft = getTextFieldType();
      Field textField = new Field(TEXT_FIELD_NAME, "", ft);
      doc.add(textField);

      Field textGramField = new Field("textgrams", "", ft);
      doc.add(textGramField);

      Field exactTextField = new StringField(EXACT_TEXT_FIELD_NAME, "", Field.Store.NO);
      doc.add(exactTextField);

      Field textDVField = new BinaryDocValuesField(TEXT_FIELD_NAME, new BytesRef());
      doc.add(textDVField);

      // TODO: use threads...?
      Field weightField = new NumericDocValuesField("weight", 0L);
      doc.add(weightField);

      // Payload field is only added when the iterator supplies payloads.
      Field payloadField;
      if (iter.hasPayloads()) {
        payloadField = new BinaryDocValuesField("payloads", new BytesRef());
        doc.add(payloadField);
      } else {
        payloadField = null;
      }
      // long t0 = System.nanoTime();
      while ((text = iter.next()) != null) {
        String textString = text.utf8ToString();
        textField.setStringValue(textString);
        exactTextField.setStringValue(textString);
        textGramField.setStringValue(textString);
        textDVField.setBytesValue(text);
        weightField.setLongValue(iter.weight());
        if (iter.hasPayloads()) {
          payloadField.setBytesValue(iter.payload());
        }
        writer.addDocument(doc);
      }
      // System.out.println("initial indexing time: " + ((System.nanoTime()-t0)/1000000) + " msec");

      searcherMgr = new SearcherManager(writer, true, null);
      success = true;
    } finally {
      // On failure, close the half-built writer without masking the original exception.
      if (!success) {
        IOUtils.closeWhileHandlingException(writer);
        writer = null;
      }
    }
  }
  /**
   * Executes a search against the collection named in the request wrapper: runs the query with
   * optional filter and sort, logs the request, highlights stored fields for the requested page,
   * and returns everything bundled in a {@link SearchResult}.
   *
   * <p>Fix: the finally block released {@code indexSearcher} unconditionally — when
   * {@code acquire()} itself threw, {@code indexSearcher} was still null and the release would
   * fail inside the manager. Release now happens only for a searcher actually acquired.
   *
   * @param csrw wrapper carrying query, filter, sort, paging, and collection information
   * @return the populated search result; on error, a result carrying the error code/message
   * @throws IOException declared for interface compatibility; failures are caught and reported
   *     through the returned {@link SearchResult}
   */
  @Override
  public SearchResult search(CrescentSearchRequestWrapper csrw) throws IOException {

    SearchResult searchResult = new SearchResult();
    int totalHitsCount = 0;
    String errorMessage = "SUCCESS";
    int errorCode = 0;

    // Initial fetch window: default pages * hits per page (e.g. 5 pages * 50 hits).
    int numOfHits = csrw.getDefaultHitsPage() * csrw.getHitsForPage();
    IndexSearcher indexSearcher = null;
    SearcherManager searcherManager =
        crescentSearcherManager.getSearcherManager(csrw.getCollectionName());

    try {
      indexSearcher = searcherManager.acquire();

      Query query = csrw.getQuery();
      Filter filter = csrw.getFilter();
      Sort sort = csrw.getSort();

      logger.debug("query : {}", query);
      logger.debug("filter : {}", filter);
      logger.debug("sort : {}", sort);

      long startTime = System.currentTimeMillis();
      TopDocs topDocs = null;

      if (sort == null) {
        topDocs = indexSearcher.search(query, filter, numOfHits);
      } else {
        topDocs = indexSearcher.search(query, filter, numOfHits, sort);
      }

      long endTime = System.currentTimeMillis();

      // Total number of hits for the query.
      totalHitsCount = topDocs.totalHits;

      // Record the request in the search log.
      LogInfo logInfo = new LogInfo();
      logInfo.setCollectionName(csrw.getCollectionName());
      logInfo.setElaspedTimeMil(endTime - startTime);
      logInfo.setKeyword(csrw.getKeyword());
      logInfo.setPageNum(csrw.getPageNum());
      logInfo.setPcid(csrw.getPcId());
      logInfo.setQuery(query);
      logInfo.setSort(csrw.getSort());
      logInfo.setTotalCount(totalHitsCount);
      logInfo.setUserId(csrw.getUserId());
      logInfo.setUserIp(csrw.getUserIp());
      logInfo.setFilter(csrw.getFilter());

      CrescentLogger.logging(logInfo);

      logger.debug("Total Hits Count : {} ", totalHitsCount);

      ScoreDoc[] hits = topDocs.scoreDocs;

      // Take the smaller of the total hit count and the requested offset window,
      // so we never read past the end of the result set.
      int endOffset = Math.min(totalHitsCount, csrw.getStartOffSet() + csrw.getHitsForPage());

      if (endOffset > hits.length) {
        logger.debug("기본 설정된 검색건수보다 더 검색을 원하므로, 전체를 대상으로 검색합니다.");

        // The requested page lies beyond the initial window: re-run over all hits.
        if (sort == null) {
          topDocs = indexSearcher.search(query, filter, totalHitsCount);
        } else {
          topDocs = indexSearcher.search(query, filter, totalHitsCount, sort);
        }

        hits = topDocs.scoreDocs;
      }

      int startOffset = csrw.getStartOffSet();
      endOffset = Math.min(hits.length, startOffset + csrw.getHitsForPage());

      // for(int i = startOffset; i < endOffset; i++) {
      //	Document doc = indexSearcher.doc(hits[i].doc);
      //	resultDocumentList.add(doc);
      // }

      logger.debug(
          "start offset : [{}], end offset : [{}], total : [{}], numOfHits :[{}]",
          new Object[] {csrw.getStartOffSet(), endOffset, totalHitsCount, numOfHits});
      logger.debug("hits count : [{}]", hits.length);
      logger.debug(
          "startOffset + hitsPerPage : [{}]", csrw.getStartOffSet() + csrw.getHitsForPage());

      if (totalHitsCount > 0) {
        List<Map<String, String>> resultList = new ArrayList<Map<String, String>>();
        Map<String, Object> result = new HashMap<String, Object>();

        CrescentFastVectorHighlighter highlighter = new CrescentFastVectorHighlighter();

        CrescentCollectionHandler collectionHandler =
            SpringApplicationContext.getBean(
                "crescentCollectionHandler", CrescentCollectionHandler.class);
        CrescentCollection collection =
            collectionHandler
                .getCrescentCollections()
                .getCrescentCollection(csrw.getCollectionName());

        // int docnum = 0;
        for (int i = startOffset; i < endOffset; i++) {

          Map<String, String> resultMap = new HashMap<String, String>();

          for (CrescentCollectionField field : collection.getFields()) {
            String value = null;

            if (field.isStore() && !field.isNumeric()) {

              // Fetch the highlighted fragment for this field.
              value =
                  highlighter.getBestFragment(
                      indexSearcher.getIndexReader(), hits[i].doc, query, field.getName());
            }

            // Fall back to the stored value when highlighting produced nothing.
            if (value == null || value.length() == 0) {
              Document doc = indexSearcher.doc(hits[i].doc);
              value = doc.get(field.getName());
            }

            resultMap.put(field.getName(), value);
          }

          resultList.add(resultMap);
        }

        result.put("total_count", totalHitsCount);
        result.put("result_list", resultList);
        result.put("error_code", errorCode);
        result.put("error_msg", errorMessage);

        logger.debug("result list {}", resultList);

        searchResult.setResultList(resultList);
        searchResult.setTotalHitsCount(totalHitsCount);
        searchResult.setSearchResult(result);

      } else {

        // No results: return an empty list with the success status.
        Map<String, Object> result = new HashMap<String, Object>();
        List<Map<String, String>> resultList = new ArrayList<Map<String, String>>();

        result.put("total_count", totalHitsCount);
        result.put("result_list", resultList);
        result.put("error_code", errorCode);
        result.put("error_msg", errorMessage);

        logger.debug("result list {}", resultList);

        searchResult.setResultList(resultList);
        searchResult.setTotalHitsCount(0);
        searchResult.setSearchResult(result);
      }

    } catch (Exception e) {

      logger.error("error in CrescentDefaultDocSearcher : ", e);

      Map<String, Object> result = new HashMap<String, Object>();
      List<Map<String, String>> resultList = new ArrayList<Map<String, String>>();

      result.put("total_count", totalHitsCount);
      result.put("result_list", resultList);
      result.put("error_code", errorCode);
      result.put("error_msg", errorMessage);

      logger.error("검색 중 에러 발생함. {}", e);

      searchResult.setErrorCode(errorCode);
      searchResult.setErrorMsg(errorMessage);
      searchResult.setSearchResult(result);
      searchResult.setResultList(resultList);

      return searchResult;

    } finally {
      // Fix: only release a searcher we actually acquired.
      if (indexSearcher != null) {
        searcherManager.release(indexSearcher);
        indexSearcher = null;
      }
    }

    return searchResult;
  }
  /**
   * Retrieve suggestions, specifying whether all terms must match ({@code allTermsRequired}) and
   * whether the hits should be highlighted ({@code doHighlight}).
   */
  public List<LookupResult> lookup(
      CharSequence key, int num, boolean allTermsRequired, boolean doHighlight) throws IOException {

    if (searcherMgr == null) {
      throw new IllegalStateException("suggester was not built");
    }

    // Whether each analyzed token is a mandatory or optional clause.
    final BooleanClause.Occur occur;
    if (allTermsRequired) {
      occur = BooleanClause.Occur.MUST;
    } else {
      occur = BooleanClause.Occur.SHOULD;
    }

    BooleanQuery query;
    Set<String> matchedTokens = new HashSet<>();
    String prefixToken = null;

    // Tokenize the key: every fully-consumed token becomes a TermQuery clause; the
    // trailing token may instead be treated as a prefix (see below).
    try (TokenStream ts = queryAnalyzer.tokenStream("", new StringReader(key.toString()))) {
      // long t0 = System.currentTimeMillis();
      ts.reset();
      final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      final OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
      String lastToken = null;
      query = new BooleanQuery();
      int maxEndOffset = -1;
      matchedTokens = new HashSet<>();
      while (ts.incrementToken()) {
        if (lastToken != null) {
          matchedTokens.add(lastToken);
          query.add(new TermQuery(new Term(TEXT_FIELD_NAME, lastToken)), occur);
        }
        lastToken = termAtt.toString();
        if (lastToken != null) {
          maxEndOffset = Math.max(maxEndOffset, offsetAtt.endOffset());
        }
      }
      ts.end();

      if (lastToken != null) {
        Query lastQuery;
        // After ts.end(), offsetAtt holds the final offset: equal to maxEndOffset means
        // nothing (e.g. whitespace) was discarded after the last token.
        if (maxEndOffset == offsetAtt.endOffset()) {
          // Use PrefixQuery (or the ngram equivalent) when
          // there was no trailing discarded chars in the
          // string (e.g. whitespace), so that if query does
          // not end with a space we show prefix matches for
          // that token:
          lastQuery = getLastTokenQuery(lastToken);
          prefixToken = lastToken;
        } else {
          // Use TermQuery for an exact match if there were
          // trailing discarded chars (e.g. whitespace), so
          // that if query ends with a space we only show
          // exact matches for that term:
          matchedTokens.add(lastToken);
          lastQuery = new TermQuery(new Term(TEXT_FIELD_NAME, lastToken));
        }
        if (lastQuery != null) {
          query.add(lastQuery, occur);
        }
      }
    }

    // TODO: we could allow blended sort here, combining
    // weight w/ score.  Now we ignore score and sort only
    // by weight:

    // System.out.println("INFIX query=" + query);

    Query finalQuery = finishQuery(query, allTermsRequired);

    // System.out.println("finalQuery=" + query);

    // Sort by weight, descending:
    TopFieldCollector c = TopFieldCollector.create(SORT, num, true, false, false, false);

    // We sorted postings by weight during indexing, so we
    // only retrieve the first num hits now:
    Collector c2 = new EarlyTerminatingSortingCollector(c, SORT, num);
    IndexSearcher searcher = searcherMgr.acquire();
    List<LookupResult> results = null;
    try {
      // System.out.println("got searcher=" + searcher);
      searcher.search(finalQuery, c2);

      TopFieldDocs hits = (TopFieldDocs) c.topDocs();

      // Slower way if postings are not pre-sorted by weight:
      // hits = searcher.search(query, null, num, SORT);
      results = createResults(searcher, hits, num, key, doHighlight, matchedTokens, prefixToken);
    } finally {
      searcherMgr.release(searcher);
    }

    // System.out.println((System.currentTimeMillis() - t0) + " msec for infix suggest");
    // System.out.println(results);

    return results;
  }
 /**
  * Reopens the underlying searcher; it's best to "batch up" many additions/updates, and then call
  * refresh once in the end.
  *
  * @throws IOException If Lucene throws IO errors while refreshing.
  */
 public void refresh() throws IOException {
   searcherMgr.maybeRefreshBlocking();
 }
// 예제 #25 (Example #25) — source-aggregator artifact; original vote count: 0
  /**
   * Finds all ad definitions matching the request: an ad must match one of the requested types,
   * one of the requested formats, any conditional query, and the product flag.
   *
   * <p>Fix: removed the stray {@code System.out.println(mainQuery.toString())} that duplicated the
   * debug log line. German comments translated to English.
   *
   * @param request the ad request carrying types, formats, and product constraints
   * @return the matching ad definitions (possibly empty)
   * @throws IOException if Lucene fails while searching
   */
  @Override
  public List<AdDefinition> search(AdRequest request) throws IOException {
    IndexSearcher searcher = nrt_manager.acquire();
    List<AdDefinition> result = new ArrayList<AdDefinition>();
    try {
      // Collector for the banners, sized to the current doc count.
      AdCollector collector = new AdCollector(searcher.getIndexReader().numDocs());

      // Main query: all sub-queries below are AND-ed together.
      BooleanQuery mainQuery = new BooleanQuery();
      // Query for the banner type(s): any one of the requested types may match.
      BooleanQuery typeQuery = new BooleanQuery();
      for (AdType type : request.types()) {
        TermQuery tq = new TermQuery(new Term(AdDBConstants.ADDB_AD_TYPE, type.getType()));
        typeQuery.add(tq, Occur.SHOULD);
      }
      mainQuery.add(typeQuery, Occur.MUST);

      // Query for the banner format(s): any one of the requested formats may match.
      BooleanQuery formatQuery = new BooleanQuery();
      for (AdFormat format : request.formats()) {
        TermQuery tq =
            new TermQuery(new Term(AdDBConstants.ADDB_AD_FORMAT, format.getCompoundName()));
        formatQuery.add(tq, Occur.SHOULD);
      }
      mainQuery.add(formatQuery, Occur.MUST);

      // Query for the conditions under which a banner may be shown.
      Query cq = LuceneQueryHelper.getInstance().getConditionalQuery(request, this.addb);
      if (cq != null) {
        mainQuery.add(cq, Occur.MUST);
      }

      /*
       * Only products should be returned.
       */
      if (request.products()) {
        // search online for products
        mainQuery.add(
            new TermQuery(
                new Term(AdDBConstants.ADDB_AD_PRODUCT, AdDBConstants.ADDB_AD_PRODUCT_TRUE)),
            Occur.MUST);

        // if possible add the product name, so online ads for that product will be found
        if (!Strings.isNullOrEmpty(request.product())) {
          mainQuery.add(
              new TermQuery(new Term(AdDBConstants.ADDB_AD_PRODUCT_NAME, request.product())),
              Occur.MUST);
        }

      } else {
        mainQuery.add(
            new TermQuery(
                new Term(AdDBConstants.ADDB_AD_PRODUCT, AdDBConstants.ADDB_AD_PRODUCT_FALSE)),
            Occur.MUST);
      }

      logger.debug(mainQuery.toString());

      searcher.search(mainQuery, collector);

      // Resolve each hit bit back to its ad definition.
      BitSet hits = collector.getHits();
      for (int i = hits.nextSetBit(0); i != -1; i = hits.nextSetBit(i + 1)) {
        Document doc = searcher.doc(i);
        result.add(addb.getBanner(doc.get(AdDBConstants.ADDB_AD_ID)));
      }
    } finally {
      nrt_manager.release(searcher);
    }

    return result;
  }
 /** Refreshes the index readers. */
 @Override
 public void refresh() throws IOException {
   Log.info("%s refreshing readers", logName);
   // Persist pending changes first so the refreshed readers see them.
   commit();
   searcherManager.maybeRefreshBlocking();
 }
  @PostConstruct
  private void init() {

    luceneDirectory = propertyHandler.getLuceneDirectory();
    // The whole subsystem is optional: inactive when no directory is configured.
    active = luceneDirectory != null;
    if (active) {
      luceneRefreshSeconds = propertyHandler.getLuceneRefreshSeconds() * 1000L;
      luceneCommitCount = propertyHandler.getLuceneCommitCount();
      ei = EntityInfoHandler.getInstance();

      try {
        directory = FSDirectory.open(new File(luceneDirectory));
        logger.debug("Opened FSDirectory with lockid " + directory.getLockID());
        Analyzer analyzer = new IcatAnalyzer(Version.LUCENE_43);
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, analyzer);
        iwriter = new IndexWriter(directory, config);
        String[] files = directory.listAll();
        // A brand-new directory contains only the lock file; commit once so a usable
        // index exists, then reopen.
        // NOTE(review): `config` is reused for the second IndexWriter — acceptable on
        // Lucene 4.3, but disallowed on later Lucene versions; confirm before upgrading.
        if (files.length == 1 && files[0].equals("write.lock")) {
          logger.debug("Directory only has the write.lock file so commit and reopen");
          iwriter.commit();
          iwriter.close();
          iwriter = new IndexWriter(directory, config);
        }

        searcherManager = new SearcherManager(directory, new SearcherFactory());
        isearcher = searcherManager.acquire();
        logger.debug("Got a new IndexSearcher " + isearcher);

        parser = new StandardQueryParser();
        StandardQueryConfigHandler qpConf =
            (StandardQueryConfigHandler) parser.getQueryConfigHandler();
        qpConf.set(ConfigurationKeys.ANALYZER, analyzer);
        qpConf.set(ConfigurationKeys.ALLOW_LEADING_WILDCARD, true);
      } catch (Exception e) {
        // On any startup failure: log the stack trace, try to clear a stale lock,
        // then abort construction.
        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.fatal(errors.toString());
        if (directory != null) {
          try {
            String lockId = directory.getLockID();
            directory.clearLock(lockId);
            logger.warn("Cleared lock " + lockId);
          } catch (IOException e1) {
            // Ignore
          }
        }
        throw new IllegalStateException(errors.toString());
      }

      // Periodically commit pending changes in the background.
      timer = new Timer("Lucene");
      timer.schedule(
          new TimerTask() {

            @Override
            public void run() {
              try {
                commit();
              } catch (IcatException e) {
                logger.error(e.getMessage());
              } catch (Throwable t) {
                logger.error(t.getMessage());
              }
            }
          },
          luceneRefreshSeconds,
          luceneRefreshSeconds);

      logger.debug("Created LuceneSingleton");
    }
  }
  // Relies on wall clock time, so it can easily false-fail when the machine is otherwise busy:
  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-5737")
  // LUCENE-5461
  public void testCRTReopen() throws Exception {
    // test behaving badly

    // should be high enough
    int maxStaleSecs = 20;

    // build crap data just to store it.
    String s = "        abcdefghijklmnopqrstuvwxyz     ";
    char[] chars = s.toCharArray();
    StringBuilder builder = new StringBuilder(2048);
    for (int i = 0; i < 2048; i++) {
      builder.append(chars[random().nextInt(chars.length)]);
    }
    String content = builder.toString();

    // Snapshot policy so the commit threads below can snapshot commit points and
    // verify their files exist.
    final SnapshotDeletionPolicy sdp =
        new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    final Directory dir = new NRTCachingDirectory(newFSDirectory(createTempDir("nrt")), 5, 128);
    IndexWriterConfig config =
        new IndexWriterConfig(Version.LUCENE_4_6, new MockAnalyzer(random()));
    config.setIndexDeletionPolicy(sdp);
    config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    final IndexWriter iw = new IndexWriter(dir, config);
    SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
    final TrackingIndexWriter tiw = new TrackingIndexWriter(iw);
    ControlledRealTimeReopenThread<IndexSearcher> controlledRealTimeReopenThread =
        new ControlledRealTimeReopenThread<>(tiw, sm, maxStaleSecs, 0);

    controlledRealTimeReopenThread.setDaemon(true);
    controlledRealTimeReopenThread.start();

    List<Thread> commitThreads = new ArrayList<>();

    // Every 50 docs, fork a thread that commits and snapshots the commit point
    // while indexing continues.
    for (int i = 0; i < 500; i++) {
      if (i > 0 && i % 50 == 0) {
        Thread commitThread =
            new Thread(
                new Runnable() {
                  @Override
                  public void run() {
                    try {
                      iw.commit();
                      IndexCommit ic = sdp.snapshot();
                      for (String name : ic.getFileNames()) {
                        // distribute, and backup
                        // System.out.println(names);
                        assertTrue(slowFileExists(dir, name));
                      }
                    } catch (Exception e) {
                      throw new RuntimeException(e);
                    }
                  }
                });
        commitThread.start();
        commitThreads.add(commitThread);
      }
      Document d = new Document();
      d.add(new TextField("count", i + "", Field.Store.NO));
      d.add(new TextField("content", content, Field.Store.YES));
      long start = System.currentTimeMillis();
      long l = tiw.addDocument(d);
      // The reopen thread must surface each generation within maxStaleSecs.
      controlledRealTimeReopenThread.waitForGeneration(l);
      long wait = System.currentTimeMillis() - start;
      assertTrue("waited too long for generation " + wait, wait < (maxStaleSecs * 1000));
      IndexSearcher searcher = sm.acquire();
      TopDocs td = searcher.search(new TermQuery(new Term("count", i + "")), 10);
      sm.release(searcher);
      assertEquals(1, td.totalHits);
    }

    for (Thread commitThread : commitThreads) {
      commitThread.join();
    }

    controlledRealTimeReopenThread.close();
    sm.close();
    iw.close();
    dir.close();
  }
  /*
   * LUCENE-3528 - NRTManager hangs in certain situations
   */
  public void testThreadStarvationNoDeleteNRTReader() throws IOException, InterruptedException {
    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    Directory d = newDirectory();
    final CountDownLatch latch = new CountDownLatch(1);
    final CountDownLatch signal = new CountDownLatch(1);

    // Writer that can be made to block during an update (see waitAfterUpdate below).
    LatchedIndexWriter _writer = new LatchedIndexWriter(d, conf, latch, signal);
    final TrackingIndexWriter writer = new TrackingIndexWriter(_writer);
    final SearcherManager manager = new SearcherManager(_writer, false, null);
    Document doc = new Document();
    doc.add(newTextField("test", "test", Field.Store.YES));
    writer.addDocument(doc);
    manager.maybeRefresh();
    // Background thread: once signalled, force reopens around a queued delete.
    Thread t =
        new Thread() {
          @Override
          public void run() {
            try {
              signal.await();
              manager.maybeRefresh();
              writer.deleteDocuments(new TermQuery(new Term("foo", "barista")));
              manager.maybeRefresh(); // kick off another reopen so we inc. the internal gen
            } catch (Exception e) {
              e.printStackTrace();
            } finally {
              latch.countDown(); // let the add below finish
            }
          }
        };
    t.start();
    _writer.waitAfterUpdate = true; // wait in addDocument to let some reopens go through
    final long lastGen =
        writer.updateDocument(
            new Term("foo", "bar"),
            doc); // once this returns the doc is already reflected in the last reopen

    assertFalse(manager.isSearcherCurrent()); // false since there is a delete in the queue

    IndexSearcher searcher = manager.acquire();
    try {
      assertEquals(2, searcher.getIndexReader().numDocs());
    } finally {
      manager.release(searcher);
    }
    // Now verify waitForGeneration(lastGen) completes instead of starving.
    final ControlledRealTimeReopenThread<IndexSearcher> thread =
        new ControlledRealTimeReopenThread<>(writer, manager, 0.01, 0.01);
    thread.start(); // start reopening
    if (VERBOSE) {
      System.out.println("waiting now for generation " + lastGen);
    }

    final AtomicBoolean finished = new AtomicBoolean(false);
    Thread waiter =
        new Thread() {
          @Override
          public void run() {
            try {
              thread.waitForGeneration(lastGen);
            } catch (InterruptedException ie) {
              Thread.currentThread().interrupt();
              throw new RuntimeException(ie);
            }
            finished.set(true);
          }
        };
    waiter.start();
    manager.maybeRefresh();
    waiter.join(1000);
    if (!finished.get()) {
      waiter.interrupt();
      fail("thread deadlocked on waitForGeneration");
    }
    thread.close();
    thread.join();
    IOUtils.close(manager, _writer, d);
  }