private void doTest(Random random, PrintWriter out, boolean useCompoundFiles, int MAX_DOCS) throws Exception {
  Directory directory = newDirectory();
  Analyzer analyzer = new MockAnalyzer(random);
  IndexWriterConfig conf = newIndexWriterConfig(analyzer);
  final MergePolicy mp = conf.getMergePolicy();
  mp.setNoCFSRatio(useCompoundFiles ? 1.0 : 0.0);
  IndexWriter writer = new IndexWriter(directory, conf);
  if (VERBOSE) {
    System.out.println("TEST: now build index MAX_DOCS=" + MAX_DOCS);
  }

  for (int j = 0; j < MAX_DOCS; j++) {
    Document d = new Document();
    d.add(newTextField(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES));
    d.add(newTextField(ID_FIELD, Integer.toString(j), Field.Store.YES));
    writer.addDocument(d);
  }
  writer.close();

  // try a search without OR
  IndexReader reader = DirectoryReader.open(directory);
  IndexSearcher searcher = newSearcher(reader);

  Query query = new TermQuery(new Term(PRIORITY_FIELD, HIGH_PRIORITY));
  out.println("Query: " + query.toString(PRIORITY_FIELD));
  if (VERBOSE) {
    System.out.println("TEST: search query=" + query);
  }

  final Sort sort = new Sort(SortField.FIELD_SCORE, new SortField(ID_FIELD, SortField.Type.INT));

  ScoreDoc[] hits = searcher.search(query, null, MAX_DOCS, sort).scoreDocs;
  printHits(out, hits, searcher);
  checkHits(hits, MAX_DOCS, searcher);

  // try a new search with OR
  searcher = newSearcher(reader);
  hits = null;

  BooleanQuery booleanQuery = new BooleanQuery();
  booleanQuery.add(new TermQuery(new Term(PRIORITY_FIELD, HIGH_PRIORITY)), BooleanClause.Occur.SHOULD);
  booleanQuery.add(new TermQuery(new Term(PRIORITY_FIELD, MED_PRIORITY)), BooleanClause.Occur.SHOULD);
  out.println("Query: " + booleanQuery.toString(PRIORITY_FIELD));

  hits = searcher.search(booleanQuery, null, MAX_DOCS, sort).scoreDocs;
  printHits(out, hits, searcher);
  checkHits(hits, MAX_DOCS, searcher);

  reader.close();
  directory.close();
}
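// A minimal sketch of how the helper above might be driven; the test name, doc count, and the
// StringWriter output target are assumptions for illustration, not taken from the original file.
public void testSearchWithAndWithoutCompoundFiles() throws Exception {
  StringWriter buffer = new StringWriter();
  try (PrintWriter out = new PrintWriter(buffer, true)) {
    doTest(random(), out, false, 100); // multi-file segments
    doTest(random(), out, true, 100);  // compound-file segments
  }
  if (VERBOSE) {
    System.out.println(buffer.toString());
  }
}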
@Override
public int hashCode() {
  int result = originalChildQuery.hashCode();
  // use childType so that hashCode() stays consistent with equals(), which compares childType
  result = 31 * result + childType.hashCode();
  result = 31 * result + incrementalFactor;
  result = 31 * result + Float.floatToIntBits(getBoost());
  return result;
}
public String toString(String field) {
  StringBuilder sb = new StringBuilder();
  sb.append("score_child[").append(childType).append("/").append(parentType).append("](")
      .append(originalChildQuery.toString(field)).append(')');
  sb.append(ToStringUtils.boost(getBoost()));
  return sb.toString();
}
// Rewrite invocation logic:
// 1) query_then_fetch (default): the rewrite is executed as part of the createWeight invocation,
//    when the child docs are searched.
// 2) dfs_query_then_fetch: the rewrite runs first and createWeight is executed afterwards. During
//    the query phase the rewrite isn't executed again, because searchContext#queryRewritten()
//    returns true.
@Override
public Query rewrite(IndexReader reader) throws IOException {
  if (rewrittenChildQuery == null) {
    rewrittenChildQuery = originalChildQuery.rewrite(reader);
    rewriteIndexReader = reader;
  }
  // We can always return the current instance, because the child query is executed separately,
  // before the main query (other scope), in a different IS#search() invocation than the main query.
  // In fact we only need to override rewrite so that the dfs phase also gets global document
  // frequencies for the child query.
  return this;
}
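// Illustrative sketch (not part of the original class; topChildrenQuery is a hypothetical
// instance): calling rewrite() more than once is cheap, because the rewritten child query is
// computed lazily and cached on the first call; later calls, e.g. during the dfs phase, do no
// further rewriting and return the same instance.
//
//   Query q1 = topChildrenQuery.rewrite(reader); // rewrites originalChildQuery and caches it
//   Query q2 = topChildrenQuery.rewrite(reader); // reuses the cached rewrite, returns this again
//   assert q1 == q2;                             // both are the TopChildrenQuery instance itself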
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
  Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
  SearchContext searchContext = SearchContext.current();
  searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());

  int parentHitsResolved;
  int requestedDocs = (searchContext.from() + searchContext.size());
  if (requestedDocs <= 0) {
    requestedDocs = 1;
  }
  int numChildDocs = requestedDocs * factor;

  Query childQuery;
  if (rewrittenChildQuery == null) {
    childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
  } else {
    assert rewriteIndexReader == searcher.getIndexReader();
    childQuery = rewrittenChildQuery;
  }

  IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
  while (true) {
    parentDocs.v().clear();
    TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
    parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);

    // check if we found enough docs; if so, break
    if (parentHitsResolved >= requestedDocs) {
      break;
    }
    // if we didn't find enough docs, check whether it makes sense to search further
    if (topChildDocs.totalHits <= numChildDocs) {
      break;
    }
    // if so, grow numChildDocs and search again
    numChildDocs *= incrementalFactor;
    if (numChildDocs > topChildDocs.totalHits) {
      numChildDocs = topChildDocs.totalHits;
    }
  }

  ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
  searchContext.addReleasable(parentWeight);
  return parentWeight;
}
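// Worked example for the expansion loop above (illustrative numbers only, not defaults taken
// from the original source): with from + size = 10, factor = 5 and incrementalFactor = 2, the
// first pass searches for 10 * 5 = 50 child docs. If fewer than 10 parent hits are resolved and
// topChildDocs.totalHits is still larger than 50, the next pass asks for 100, then 200, and so
// on, capped at topChildDocs.totalHits, until enough parents are found or the hits run out.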
@Override
public boolean equals(Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || obj.getClass() != this.getClass()) {
    return false;
  }

  TopChildrenQuery that = (TopChildrenQuery) obj;
  if (!originalChildQuery.equals(that.originalChildQuery)) {
    return false;
  }
  if (!childType.equals(that.childType)) {
    return false;
  }
  if (incrementalFactor != that.incrementalFactor) {
    return false;
  }
  if (getBoost() != that.getBoost()) {
    return false;
  }
  return true;
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
  final Weight weight = query.createWeight(searcher);
  return new Weight() {
    @Override
    public String toString() {
      return "no-zero:" + weight.toString();
    }

    @Override
    public Query getQuery() {
      return weight.getQuery();
    }

    @Override
    public float getValueForNormalization() throws IOException {
      return weight.getValueForNormalization();
    }

    @Override
    public void normalize(float queryNorm, float topLevelBoost) {
      weight.normalize(queryNorm, topLevelBoost);
    }

    @Override
    public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
      return weight.explain(context, doc);
    }

    @Override
    public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
      final Scorer scorer = weight.scorer(context, acceptDocs);
      if (scorer == null) {
        return null;
      }
      // wrap the scorer so that documents scoring 0 are skipped entirely
      return new Scorer(weight) {
        float current_score = -1f;

        @Override
        public int docID() {
          return scorer.docID();
        }

        @Override
        public int freq() throws IOException {
          return scorer.freq();
        }

        @Override
        public int nextDoc() throws IOException {
          while (true) {
            int n = scorer.nextDoc();
            if (n == DocIdSetIterator.NO_MORE_DOCS) return n;
            current_score = scorer.score();
            if (current_score != 0) return n;
          }
        }

        @Override
        public int advance(int target) throws IOException {
          int n = scorer.advance(target);
          if (n == DocIdSetIterator.NO_MORE_DOCS) return n;
          current_score = scorer.score();
          if (current_score != 0) return n;
          // the doc at the advance target scored 0, so fall through to nextDoc()
          // to find the next doc with a non-zero score
          return nextDoc();
        }

        @Override
        public float score() throws IOException {
          return current_score;
        }

        @Override
        public long cost() {
          return scorer.cost();
        }

        @Override
        public String toString() {
          return "no-zero:" + scorer.toString();
        }
      };
    }
  };
}
@Override
public String toString(String field) {
  return "no-zero:" + query.toString(field);
}
@Override
public void extractTerms(Set<Term> terms) {
  rewrittenChildQuery.extractTerms(terms);
}