@Override public Weight createWeight(IndexSearcher searcher) throws IOException {
    // Scratch map (parent id -> matched child docs) pooled via the recycler.
    // Ownership is handed to ParentWeight below, which is registered as a
    // releasable so the map is returned to the pool when the search ends.
    Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
    SearchContext searchContext = SearchContext.current();
    // Ensure the id cache covers every segment before resolving parent documents.
    searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());

    int parentHitsResolved;
    int requestedDocs = (searchContext.from() + searchContext.size());
    if (requestedDocs <= 0) {
        // Guard: always fetch at least one hit, even for from=0/size=0 requests.
        requestedDocs = 1;
    }
    // Over-fetch child docs by `factor`: multiple child hits can resolve to the
    // same parent, so more child docs than requested parents are needed.
    int numChildDocs = requestedDocs * factor;

    Query childQuery;
    if (rewrittenChildQuery == null) {
        // First use: rewrite once and cache the result on the query instance.
        childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
        // The cached rewrite must have been produced against this same reader.
        assert rewriteIndexReader == searcher.getIndexReader();
        childQuery = rewrittenChildQuery;
    }
    // NOTE(review): a plain IndexSearcher over the same reader — presumably to run
    // the child query without the outer searcher's wrapping/filtering; confirm.
    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    // Grow-and-retry loop: search top child docs, resolve them to parents, and
    // widen the child window until enough parents are found or docs run out.
    while (true) {
        parentDocs.v().clear();
        TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
        parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);

        // check if we found enough docs, if so, break
        if (parentHitsResolved >= requestedDocs) {
            break;
        }
        // if we did not find enough docs, check if it make sense to search further
        if (topChildDocs.totalHits <= numChildDocs) {
            break;
        }
        // if not, update numDocs, and search again
        numChildDocs *= incrementalFactor;
        if (numChildDocs > topChildDocs.totalHits) {
            // Never ask for more docs than actually exist.
            numChildDocs = topChildDocs.totalHits;
        }
    }
    ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
    // Register for cleanup so the recycled parentDocs map is released with the context.
    searchContext.addReleasable(parentWeight);
    return parentWeight;
}
@Override public Weight createWeight(IndexSearcher searcher) throws IOException { final Weight weight = query.createWeight(searcher); return new Weight() { @Override public String toString() { return "no-zero:" + weight.toString(); } @Override public Query getQuery() { return weight.getQuery(); } @Override public float getValueForNormalization() throws IOException { return weight.getValueForNormalization(); } @Override public void normalize(float queryNorm, float topLevelBoost) { weight.normalize(queryNorm, topLevelBoost); } @Override public Explanation explain(AtomicReaderContext context, int doc) throws IOException { return weight.explain(context, doc); } @Override public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException { final Scorer scorer = weight.scorer(context, acceptDocs); if (scorer == null) return scorer; return new Scorer(weight) { float current_score = -1f; @Override public int docID() { return scorer.docID(); } @Override public int freq() throws IOException { return scorer.freq(); } @Override public int nextDoc() throws IOException { while (true) { int n = scorer.nextDoc(); if (n == DocIdSetIterator.NO_MORE_DOCS) return n; current_score = scorer.score(); if (current_score != 0) return n; } } @Override public int advance(int target) throws IOException { int n = scorer.advance(target); if (n == DocIdSetIterator.NO_MORE_DOCS) return n; current_score = scorer.score(); if (current_score != 0) return n; // if the score is 0, we just return the next non-zero next doc return nextDoc(); } @Override public float score() throws IOException { return current_score; } @Override public long cost() { return scorer.cost(); } @Override public String toString() { return "no-zero:" + scorer.toString(); } }; } }; }