public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
   super(searcher.reader());
   // delegate to the engine-level searcher and keep a handle on it for the
   // lifetime of the context
   in = searcher.searcher();
   engineSearcher = searcher;
   // inherit the engine searcher's similarity and wire in the shard's query cache
   setSimilarity(searcher.searcher().getSimilarity(true));
   setQueryCache(searchContext.indexShard().indexService().cache().query());
   setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy());
 }
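A minimal usage sketch, assuming the acquire/wrap/release pattern the later examples use (the "my_phase" source tag and the surrounding variables are hypothetical; whether the release call is close() or release() depends on the Elasticsearch version):

Engine.Searcher engineSearcher = indexShard.acquireSearcher("my_phase"); // hypothetical tag
try {
  ContextIndexSearcher contextSearcher = new ContextIndexSearcher(searchContext, engineSearcher);
  // queries run through contextSearcher now go through the shard's query cache
  // and caching policy configured in the constructor above
} finally {
  engineSearcher.close(); // always release the engine searcher when the phase is done
}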
 private void loadQueries(IndexShard shard) {
   try {
     shard.refresh(new Engine.Refresh("percolator_load_queries").force(true));
     // Maybe add a mode load? This isn't really a write. We need write b/c state=post_recovery
     Engine.Searcher searcher =
         shard.acquireSearcher("percolator_load_queries", IndexShard.Mode.WRITE);
     try {
       Query query =
           new XConstantScoreQuery(
               indexCache
                   .filter()
                   .cache(
                       new TermFilter(
                           new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME))));
       QueriesLoaderCollector queryCollector =
           new QueriesLoaderCollector(
               PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService);
       searcher.searcher().search(query, queryCollector);
       Map<HashedBytesRef, Query> queries = queryCollector.queries();
       for (Map.Entry<HashedBytesRef, Query> entry : queries.entrySet()) {
         Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue());
         shardPercolateService.addedQuery(entry.getKey(), previousQuery, entry.getValue());
       }
     } finally {
       searcher.release();
     }
   } catch (Exception e) {
     throw new PercolatorException(
         shardId.index(), "failed to load queries from percolator index", e);
   }
 }
 @Override
 public boolean matchesSafely(Engine.Searcher searcher) {
   try {
     long count = Lucene.count(searcher.searcher(), query);
     return count == totalHits;
   } catch (IOException e) {
     return false;
   }
 }
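This matchesSafely fragment presumably sits inside a Hamcrest TypeSafeMatcher over Engine.Searcher; a hedged reconstruction of the enclosing matcher (the factory-method name and the describeTo wording are assumptions):

public static Matcher<Engine.Searcher> engineSearcherTotalHits(final Query query, final long totalHits) {
  return new TypeSafeMatcher<Engine.Searcher>() {
    @Override
    public boolean matchesSafely(Engine.Searcher searcher) {
      try {
        // count the matches for the query and compare with the expected total
        return Lucene.count(searcher.searcher(), query) == totalHits;
      } catch (IOException e) {
        return false;
      }
    }

    @Override
    public void describeTo(Description description) {
      description.appendText("total hits of [").appendValue(totalHits)
          .appendText("] for query [").appendValue(query).appendText("]");
    }
  };
}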
  public void run() throws Exception {
    for (Thread t : searcherThreads) {
      t.start();
    }
    for (Thread t : writerThreads) {
      t.start();
    }
    barrier1.await();

    Refresher refresher = new Refresher();
    scheduledExecutorService.scheduleWithFixedDelay(
        refresher, refreshSchedule.millis(), refreshSchedule.millis(), TimeUnit.MILLISECONDS);
    Flusher flusher = new Flusher();
    scheduledExecutorService.scheduleWithFixedDelay(
        flusher, flushSchedule.millis(), flushSchedule.millis(), TimeUnit.MILLISECONDS);

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    barrier2.await();

    latch.await();
    stopWatch.stop();

    System.out.println("Summary");
    System.out.println(
        "   -- Readers ["
            + searcherThreads.length
            + "] with ["
            + searcherIterations
            + "] iterations");
    System.out.println(
        "   -- Writers [" + writerThreads.length + "] with [" + writerIterations + "] iterations");
    System.out.println("   -- Took: " + stopWatch.totalTime());
    System.out.println(
        "   -- Refresh [" + refresher.id + "] took: " + refresher.stopWatch.totalTime());
    System.out.println("   -- Flush [" + flusher.id + "] took: " + flusher.stopWatch.totalTime());
    System.out.println("   -- Store size " + store.estimateSize());

    scheduledExecutorService.shutdown();

    engine.refresh(new Engine.Refresh(true));
    stopWatch = new StopWatch();
    stopWatch.start();
     Engine.Searcher searcher = engine.searcher();
     try {
       TopDocs topDocs =
           searcher.searcher().search(new MatchAllDocsQuery(), idGenerator.get() + 1);
       stopWatch.stop();
       System.out.println(
           "   -- Indexed ["
               + idGenerator.get()
               + "] docs, found ["
               + topDocs.totalHits
               + "] hits, took "
               + stopWatch.totalTime());
     } finally {
       // release even if the search throws, so the benchmark never leaks a searcher
       searcher.release();
     }
  }
 @Override
 public void run() {
   try {
     barrier1.await();
     barrier2.await();
      for (int i = 0; i < searcherIterations; i++) {
        Engine.Searcher searcher = engine.searcher();
        try {
          TopDocs topDocs =
              searcher.searcher().search(new TermQuery(new Term("content", content(i))), 10);
          // load the first hit so the benchmark also exercises stored-field reads
          searcher.searcher().doc(topDocs.scoreDocs[0].doc, new LoadFirstFieldSelector());
        } finally {
          // release even if the search throws, so the searcher is never leaked
          searcher.release();
        }
      }
   } catch (Exception e) {
     System.out.println("Searcher thread failed");
     e.printStackTrace();
   } finally {
     latch.countDown();
   }
 }
  @Override
  protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
    IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
    IndexShard indexShard = indexService.getShard(request.shardId().id());
    final QueryShardContext queryShardContext = indexService.newQueryShardContext();
    queryShardContext.setTypes(request.types());

    boolean valid;
    String explanation = null;
    String error = null;
    Engine.Searcher searcher = indexShard.acquireSearcher("validate_query");

    DefaultSearchContext searchContext =
        new DefaultSearchContext(
            0,
            new ShardSearchLocalRequest(
                request.types(), request.nowInMillis(), request.filteringAliases()),
            null,
            searcher,
            indexService,
            indexShard,
            scriptService,
            pageCacheRecycler,
            bigArrays,
            threadPool.estimatedTimeInMillisCounter(),
            parseFieldMatcher,
            SearchService.NO_TIMEOUT);
    SearchContext.setCurrent(searchContext);
    try {
      searchContext.parsedQuery(queryShardContext.toQuery(request.query()));
      searchContext.preProcess();

      valid = true;
      if (request.rewrite()) {
        explanation = getRewrittenQuery(searcher.searcher(), searchContext.query());
      } else if (request.explain()) {
        explanation = searchContext.filteredQuery().query().toString();
      }
    } catch (QueryShardException | ParsingException e) {
      valid = false;
      error = e.getDetailedMessage();
    } catch (AssertionError | IOException e) {
      valid = false;
      error = e.getMessage();
    } finally {
      searchContext.close();
      SearchContext.removeCurrent();
    }

    return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
  }
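getRewrittenQuery is not part of this listing; a minimal sketch of what it presumably does, built on Lucene's IndexSearcher.rewrite (the exact wording of the returned explanation is an assumption):

private String getRewrittenQuery(IndexSearcher searcher, Query query) throws IOException {
  // rewrite resolves e.g. multi-term and wildcard queries against the actual index terms
  Query rewritten = searcher.rewrite(query);
  return rewritten.toString();
}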
 private UidField.DocIdAndVersion loadCurrentVersionFromIndex(
     BloomCache bloomCache, Engine.Searcher searcher, Term uid) {
   UnicodeUtil.UTF8Result utf8 = Unicode.fromStringAsUtf8(uid.text());
   for (IndexReader reader : searcher.searcher().subReaders()) {
     BloomFilter filter = bloomCache.filter(reader, UidFieldMapper.NAME, true);
      // the bloom filter says the uid is definitely not in this reader, so skip it
     if (!filter.isPresent(utf8.result, 0, utf8.length)) {
       continue;
     }
     UidField.DocIdAndVersion docIdAndVersion = UidField.loadDocIdAndVersion(reader, uid);
      // docId is -1 when the uid is not actually in this reader; otherwise the version
      // is either -2 (it's there, but no version associated) or an actual version
     if (docIdAndVersion.docId != -1) {
       return docIdAndVersion;
     }
   }
   return null;
 }
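A hypothetical helper built on the lookup above, reusing only names from the snippet (the -1 "not found" convention here is an illustration, not the engine's actual contract):

private long currentVersion(BloomCache bloomCache, Engine.Searcher searcher, String uid) {
  UidField.DocIdAndVersion current =
      loadCurrentVersionFromIndex(bloomCache, searcher, new Term(UidFieldMapper.NAME, uid));
  // null means the uid is in no segment; otherwise version may be -2 (present, unversioned)
  return current == null ? -1 : current.version;
}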
 private void loadQueries(String indexName) {
   IndexService indexService = percolatorIndexService();
   IndexShard shard = indexService.shard(0);
   Engine.Searcher searcher = shard.searcher();
   try {
      // create a query to fetch all queries that are registered under the index name
      // (the index name is used as the type in the percolator index)
     Query query = new DeletionAwareConstantScoreQuery(indexQueriesFilter(indexName));
     QueriesLoaderCollector queries = new QueriesLoaderCollector();
     searcher.searcher().search(query, queries);
     percolator.addQueries(queries.queries());
   } catch (IOException e) {
      throw new PercolatorException(index, "failed to load queries from percolator index", e);
   } finally {
     searcher.release();
   }
 }
  public void initialize(Engine.Searcher docSearcher, ParsedDocument parsedDocument) {
    this.docSearcher = docSearcher;

     IndexReader indexReader = docSearcher.reader();
     // the document searcher is expected to be an in-memory index over a single
     // document, so leaf 0 covers the whole reader
     LeafReaderContext atomicReaderContext = indexReader.leaves().get(0);
    LeafSearchLookup leafLookup = lookup().getLeafSearchLookup(atomicReaderContext);
    leafLookup.setDocument(0);
    leafLookup.source().setSource(parsedDocument.source());

    Map<String, SearchHitField> fields = new HashMap<>();
    for (IndexableField field : parsedDocument.rootDoc().getFields()) {
      fields.put(field.name(), new InternalSearchHitField(field.name(), Collections.emptyList()));
    }
    hitContext()
        .reset(
            new InternalSearchHit(0, "unknown", new StringText(parsedDocument.type()), fields),
            atomicReaderContext,
            0,
            docSearcher.searcher());
  }
  private void queryBasedPercolating(
      Engine.Searcher percolatorSearcher,
      PercolateContext context,
      QueryCollector percolateCollector)
      throws IOException {
    Filter percolatorTypeFilter =
        context.indexService().mapperService().documentMapper(TYPE_NAME).typeFilter();
    percolatorTypeFilter = context.indexService().cache().filter().cache(percolatorTypeFilter);
    FilteredQuery query = new FilteredQuery(context.percolateQuery(), percolatorTypeFilter);
    percolatorSearcher.searcher().search(query, percolateCollector);

    for (Collector queryCollector : percolateCollector.facetAndAggregatorCollector) {
      if (queryCollector instanceof XCollector) {
        ((XCollector) queryCollector).postCollection();
      }
    }
    if (context.facets() != null) {
      facetPhase.execute(context);
    }
    if (context.aggregations() != null) {
      aggregationPhase.execute(context);
    }
  }
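FilteredQuery and Filter were removed in later Lucene versions; there, the equivalent of the filtered percolate query above is a BooleanQuery with a FILTER clause. A porting sketch reusing the names from this method (not what this listing's version actually runs):

Query filtered = new BooleanQuery.Builder()
    .add(context.percolateQuery(), BooleanClause.Occur.MUST)
    .add(percolatorTypeFilter, BooleanClause.Occur.FILTER) // filters matches without affecting scores
    .build();
percolatorSearcher.searcher().search(filtered, percolateCollector);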
  private void purgeShards(List<IndexShard> shardsToPurge) {
    for (IndexShard shardToPurge : shardsToPurge) {
      Query query =
          shardToPurge
              .mapperService()
              .fullName(TTLFieldMapper.NAME)
              // match every doc whose _ttl timestamp is at or before now, i.e. already expired
              .rangeQuery(null, System.currentTimeMillis(), false, true);
      Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
      try {
        logger.debug(
            "[{}][{}] purging shard",
            shardToPurge.routingEntry().index(),
            shardToPurge.routingEntry().id());
        ExpiredDocsCollector expiredDocsCollector = new ExpiredDocsCollector();
        searcher.searcher().search(query, expiredDocsCollector);
        List<DocToPurge> docsToPurge = expiredDocsCollector.getDocsToPurge();

        BulkRequest bulkRequest = new BulkRequest();
        for (DocToPurge docToPurge : docsToPurge) {
          bulkRequest.add(
              new DeleteRequest()
                  .index(shardToPurge.routingEntry().getIndexName())
                  .type(docToPurge.type)
                  .id(docToPurge.id)
                  .version(docToPurge.version)
                  .routing(docToPurge.routing));
          bulkRequest = processBulkIfNeeded(bulkRequest, false);
        }
        processBulkIfNeeded(bulkRequest, true);
      } catch (Exception e) {
        logger.warn("failed to purge", e);
      } finally {
        searcher.close();
      }
    }
  }
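processBulkIfNeeded is not shown in this listing; a minimal sketch of the flush-when-full behavior it presumably implements (the bulkSize threshold and client fields are assumptions):

private BulkRequest processBulkIfNeeded(BulkRequest bulkRequest, boolean force) {
  if (bulkRequest.numberOfActions() > 0
      && (force || bulkRequest.numberOfActions() >= bulkSize)) {
    // hypothetical synchronous flush; the real service may well execute the bulk asynchronously
    client.bulk(bulkRequest).actionGet();
    return new BulkRequest();
  }
  return bulkRequest;
}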
 public IndexSearcher docSearcher() {
   return docSearcher.searcher();
 }
 public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
   super(searcher.reader());
   in = searcher.searcher();
   this.searchContext = searchContext;
   setSimilarity(searcher.searcher().getSimilarity());
 }