@Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
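    // Fast path: when scores aren't needed and there is no minimum-score cutoff,
    // the sub query's own Weight is sufficient.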
    if (needsScores == false && minScore == null) {
      return subQuery.createWeight(searcher, needsScores);
    }

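    // Filters never need scores; the sub query only does when its score is
    // combined rather than replaced, or when one of the functions requires it.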
    boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE;
    Weight[] filterWeights = new Weight[filterFunctions.length];
    for (int i = 0; i < filterFunctions.length; ++i) {
      subQueryNeedsScores |= filterFunctions[i].function.needsScores();
      filterWeights[i] = searcher.createNormalizedWeight(filterFunctions[i].filter, false);
    }
    Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores);
    return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryNeedsScores);
  }
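For orientation, here is a minimal, self-contained sketch of how a caller typically drives these createWeight implementations. It is written against the Lucene 6.x API, the era most of these snippets target; the class name, field name, and index setup are illustrative assumptions, not taken from the examples above.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.RAMDirectory;

public class CreateWeightDemo {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    try (IndexWriter writer =
        new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      doc.add(new TextField("body", "weights drive scorers", Store.NO));
      writer.addDocument(doc);
    }
    try (IndexReader reader = DirectoryReader.open(dir)) {
      IndexSearcher searcher = new IndexSearcher(reader);
      Query query = new TermQuery(new Term("body", "weights"));
      // needsScores=true asks for a scoring Weight; several snippets above
      // short-circuit to a cheaper Weight when this flag is false.
      Weight weight = searcher.createNormalizedWeight(query, true);
      for (LeafReaderContext leaf : reader.leaves()) {
        Scorer scorer = weight.scorer(leaf); // null means no matches in this segment
        if (scorer == null) {
          continue;
        }
        DocIdSetIterator it = scorer.iterator();
        for (int d = it.nextDoc(); d != DocIdSetIterator.NO_MORE_DOCS; d = it.nextDoc()) {
          System.out.println("doc=" + d + " score=" + scorer.score());
        }
      }
    }
  }
}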
 @Override
 public Weight createWeight(IndexSearcher searcher) throws IOException {
   if (uidToScore == null) {
     throw new ElasticSearchIllegalStateException("has_parent query hasn't executed properly");
   }
   return new ChildWeight(rewrittenParentQuery.createWeight(searcher));
 }
  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
    return new Weight(TermsIncludingScoreQuery.this) {

      @Override
      public void extractTerms(Set<Term> terms) {}

      @Override
      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
        Terms terms = context.reader().terms(field);
        if (terms != null) {
          TermsEnum segmentTermsEnum = terms.iterator();
          BytesRef spare = new BytesRef();
          PostingsEnum postingsEnum = null;
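          // Walk every collected join term; if this segment contains the term and its
          // postings include this doc, the document matches with its pre-computed score.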
          for (int i = 0; i < TermsIncludingScoreQuery.this.terms.size(); i++) {
            if (segmentTermsEnum.seekExact(
                TermsIncludingScoreQuery.this.terms.get(ords[i], spare))) {
              postingsEnum = segmentTermsEnum.postings(postingsEnum, PostingsEnum.NONE);
              if (postingsEnum.advance(doc) == doc) {
                final float score = TermsIncludingScoreQuery.this.scores[ords[i]];
                return Explanation.match(
                    score, "Score based on join value " + segmentTermsEnum.term().utf8ToString());
              }
            }
          }
        }
        return Explanation.noMatch("Not a match");
      }

      @Override
      public float getValueForNormalization() throws IOException {
        return originalWeight.getValueForNormalization();
      }

      @Override
      public void normalize(float norm, float boost) {
        originalWeight.normalize(norm, boost);
      }

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        Terms terms = context.reader().terms(field);
        if (terms == null) {
          return null;
        }

        // Rough cost estimate: every collected term may be sought and its postings
        // advanced once per document, so bound it by maxDoc * terms in this segment.
        final long cost = context.reader().maxDoc() * terms.size();

        TermsEnum segmentTermsEnum = terms.iterator();
        if (multipleValuesPerDocument) {
          return new MVInOrderScorer(this, segmentTermsEnum, context.reader().maxDoc(), cost);
        } else {
          return new SVInOrderScorer(this, segmentTermsEnum, context.reader().maxDoc(), cost);
        }
      }
    };
  }
 public ReRankWeight(
     Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher)
     throws IOException {
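   // Only the main query's Weight is created eagerly; the re-rank query and its
   // weight factor are kept for the later re-ranking phase.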
   this.reRankQuery = reRankQuery;
   this.searcher = searcher;
   this.reRankWeight = reRankWeight;
   this.mainWeight = mainQuery.createWeight(searcher);
 }
 @Override
 public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
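   // When scores aren't needed, pass ScoreMode.None so the join skips
   // per-child score aggregation entirely.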
   return new BlockJoinWeight(
       this,
       childQuery.createWeight(searcher, needsScores),
       parentsFilter,
       needsScores ? scoreMode : ScoreMode.None);
 }
  @Override
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
    SearchContext searchContext = SearchContext.current();
    searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());

    int parentHitsResolved;
    int requestedDocs = (searchContext.from() + searchContext.size());
    if (requestedDocs <= 0) {
      requestedDocs = 1;
    }
    int numChildDocs = requestedDocs * factor;

    Query childQuery;
    if (rewrittenChildQuery == null) {
      childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
      assert rewriteIndexReader == searcher.getIndexReader();
      childQuery = rewrittenChildQuery;
    }

    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
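    // Grow-and-retry loop: fetch child hits in increasing batches until enough
    // parent docs are resolved or the child query's hits are exhausted.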
    while (true) {
      parentDocs.v().clear();
      TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
      parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);

      // check if we found enough docs, if so, break
      if (parentHitsResolved >= requestedDocs) {
        break;
      }
      // if we did not find enough docs, check if it makes sense to search further
      if (topChildDocs.totalHits <= numChildDocs) {
        break;
      }
      // if not, grow numChildDocs and search again
      numChildDocs *= incrementalFactor;
      if (numChildDocs > topChildDocs.totalHits) {
        numChildDocs = topChildDocs.totalHits;
      }
    }

    ParentWeight parentWeight =
        new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
    searchContext.addReleasable(parentWeight);
    return parentWeight;
  }
 private Map<String, Query> getSingleTermQueries(
     String termQuery, TokenType tokenType, boolean collapse) throws IOException {
   Map<String, Query> queriesMap = new HashMap<String, Query>();
   if (termQuery.contains(WILDCARD_ASTERISK)
       || termQuery.contains(WILDCARD_QUESTION)) { // contains a wildcard
     Term term = getTerm(termQuery, tokenType);
     Query query = getWildCardQuery(term);
     if (collapse) { // treat all wildcard variants as a single term
       queriesMap.put(termQuery, query);
     } else { // separate each wildcard term into its own query
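        // extractTerms exposes the concrete index terms the (rewritten) wildcard
        // query matches, so each can become its own term query.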
       Set<Term> terms = new HashSet<Term>();
       Weight weight = query.createWeight(indexSearcher, false);
       weight.extractTerms(terms);
       for (Term t : terms) {
         // we don't need to analyze term here since it's already from the index
         queriesMap.put(t.text(), getTermQuery(t));
       }
     }
   } else { // regular term (we hope)
     Term term = getAnalyzedTerm(tokenType, termQuery); // analyze it first
     queriesMap.put(termQuery, getTermQuery(term));
   }
   return queriesMap;
 }
  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    final Weight indexQueryWeight =
        indexQuery.createWeight(searcher, false); // scores aren't needed here
    final Map valueSourceContext = ValueSource.newContext(searcher);

    return new ConstantScoreWeight(this) {

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {

        final Scorer indexQueryScorer = indexQueryWeight.scorer(context);
        if (indexQueryScorer == null) {
          return null;
        }

        final FunctionValues predFuncValues =
            predicateValueSource.getValues(valueSourceContext, context);

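        // Two-phase iteration: the index query provides the approximation; the
        // predicate value source confirms each candidate in matches().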
        final TwoPhaseIterator twoPhaseIterator =
            new TwoPhaseIterator(indexQueryScorer.iterator()) {
              @Override
              public boolean matches() throws IOException {
                return predFuncValues.boolVal(indexQueryScorer.docID());
              }

              @Override
              public float matchCost() {
                return 100; // TODO: use cost of predFuncValues.boolVal()
              }
            };

        return new ConstantScoreScorer(this, score(), twoPhaseIterator);
      }
    };
  }
 @Override
 public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
   return new W(this, toQuery.createWeight(searcher, false));
 }
 /**
  * Construct the Weight for this Query searched by searcher. Recursively construct subquery
  * weights.
  */
 public DisjunctionMaxWeight(IndexSearcher searcher) throws IOException {
   for (Query disjunctQuery : disjuncts) {
     weights.add(disjunctQuery.createWeight(searcher));
   }
 }
  @Override
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    final Weight weight = query.createWeight(searcher);
    return new Weight() {
      @Override
      public String toString() {
        return "no-zero:" + weight.toString();
      }

      @Override
      public Query getQuery() {
        return weight.getQuery();
      }

      @Override
      public float getValueForNormalization() throws IOException {
        return weight.getValueForNormalization();
      }

      @Override
      public void normalize(float queryNorm, float topLevelBoost) {
        weight.normalize(queryNorm, topLevelBoost);
      }

      @Override
      public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
        return weight.explain(context, doc);
      }

      @Override
      public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
        final Scorer scorer = weight.scorer(context, acceptDocs);
        if (scorer == null) return null;
        return new Scorer(weight) {
          float current_score = -1f;

          @Override
          public int docID() {
            return scorer.docID();
          }

          @Override
          public int freq() throws IOException {
            return scorer.freq();
          }

          @Override
          public int nextDoc() throws IOException {
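            // Skip ahead until a document with a non-zero score is found.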
            while (true) {
              int n = scorer.nextDoc();
              if (n == DocIdSetIterator.NO_MORE_DOCS) return n;
              current_score = scorer.score();
              if (current_score != 0) return n;
            }
          }

          @Override
          public int advance(int target) throws IOException {
            int n = scorer.advance(target);
            if (n == DocIdSetIterator.NO_MORE_DOCS) return n;
            current_score = scorer.score();
            if (current_score != 0) return n;
            // the score at target was 0; fall through to the next doc with a non-zero score
            return nextDoc();
          }

          @Override
          public float score() throws IOException {
            return current_score;
          }

          @Override
          public long cost() {
            return scorer.cost();
          }

          @Override
          public String toString() {
            return "no-zero:" + scorer.toString();
          }
        };
      }
    };
  }
 public Weight createWeight(IndexSearcher indexSearcher, boolean needsScores)
     throws IOException {
   return q.createWeight(indexSearcher, needsScores);
 }
  @Override
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    SearchContext searchContext = SearchContext.current();
    assert rewrittenChildQuery != null;
    assert rewriteIndexReader == searcher.getIndexReader()
        : "not equal, rewriteIndexReader="
            + rewriteIndexReader
            + " searcher.getIndexReader()="
            + searcher.getIndexReader();
    final Query childQuery = rewrittenChildQuery;

    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    indexSearcher.setSimilarity(searcher.getSimilarity());

    final BytesRefHash parentIds;
    final FloatArray scores;
    final IntArray occurrences;
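    // Run the child query once, collecting parent ids and per-parent scores
    // according to the requested score type.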
    switch (scoreType) {
      case MAX:
        MaxCollector maxCollector =
            new MaxCollector(parentChildIndexFieldData, parentType, searchContext);
        try {
          indexSearcher.search(childQuery, maxCollector);
          parentIds = maxCollector.parentIds;
          scores = maxCollector.scores;
          occurrences = null;
        } finally {
          Releasables.release(maxCollector.parentIdsIndex);
        }
        break;
      case SUM:
        SumCollector sumCollector =
            new SumCollector(parentChildIndexFieldData, parentType, searchContext);
        try {
          indexSearcher.search(childQuery, sumCollector);
          parentIds = sumCollector.parentIds;
          scores = sumCollector.scores;
          occurrences = null;
        } finally {
          Releasables.release(sumCollector.parentIdsIndex);
        }
        break;
      case AVG:
        AvgCollector avgCollector =
            new AvgCollector(parentChildIndexFieldData, parentType, searchContext);
        try {
          indexSearcher.search(childQuery, avgCollector);
          parentIds = avgCollector.parentIds;
          scores = avgCollector.scores;
          occurrences = avgCollector.occurrences;
        } finally {
          Releasables.release(avgCollector.parentIdsIndex);
        }
        break;
      default:
        throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
    }

    int size = (int) parentIds.size();
    if (size == 0) {
      Releasables.release(parentIds, scores, occurrences);
      return Queries.newMatchNoDocsQuery().createWeight(searcher);
    }

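    // Pick the cheapest parent filter: an exact uid term for a single parent, an
    // id-set filter for small result sets, and the generic parent filter otherwise.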
    final Filter parentFilter;
    if (size == 1) {
      BytesRef id = parentIds.get(0, new BytesRef());
      if (nonNestedDocsFilter != null) {
        List<Filter> filters =
            Arrays.asList(
                new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
                nonNestedDocsFilter);
        parentFilter = new AndFilter(filters);
      } else {
        parentFilter =
            new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
      }
    } else if (size <= shortCircuitParentDocSet) {
      parentFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
    } else {
      parentFilter = new ApplyAcceptedDocsFilter(this.parentFilter);
    }
    ParentWeight parentWeight =
        new ParentWeight(
            rewrittenChildQuery.createWeight(searcher),
            parentFilter,
            size,
            parentIds,
            scores,
            occurrences);
    searchContext.addReleasable(parentWeight);
    return parentWeight;
  }
  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    if (needsScores == false) {
      return match.createWeight(searcher, needsScores);
    }
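    // Scores are needed: evaluate both queries and multiply the match score by
    // the boost whenever the context query also matches the document.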
    final Weight matchWeight = searcher.createWeight(match, needsScores);
    final Weight contextWeight = searcher.createWeight(context, false);
    return new Weight(this) {

      @Override
      public void extractTerms(Set<Term> terms) {
        matchWeight.extractTerms(terms);
        if (boost >= 1) {
          contextWeight.extractTerms(terms);
        }
      }

      @Override
      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
        final Explanation matchExplanation = matchWeight.explain(context, doc);
        final Explanation contextExplanation = contextWeight.explain(context, doc);
        if (matchExplanation.isMatch() == false || contextExplanation.isMatch() == false) {
          return matchExplanation;
        }
        return Explanation.match(
            matchExplanation.getValue() * boost,
            "product of:",
            matchExplanation,
            Explanation.match(boost, "boost"));
      }

      @Override
      public float getValueForNormalization() throws IOException {
        return matchWeight.getValueForNormalization();
      }

      @Override
      public void normalize(float norm, float boost) {
        matchWeight.normalize(norm, boost);
      }

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        final Scorer matchScorer = matchWeight.scorer(context);
        if (matchScorer == null) {
          return null;
        }
        final Scorer contextScorer = contextWeight.scorer(context);
        if (contextScorer == null) {
          return matchScorer;
        }
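        // Use the context scorer's two-phase view so the context match is only
        // confirmed lazily, inside score().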
        TwoPhaseIterator contextTwoPhase = contextScorer.twoPhaseIterator();
        DocIdSetIterator contextApproximation =
            contextTwoPhase == null ? contextScorer.iterator() : contextTwoPhase.approximation();
        return new FilterScorer(matchScorer) {
          @Override
          public float score() throws IOException {
            if (contextApproximation.docID() < docID()) {
              contextApproximation.advance(docID());
            }
            assert contextApproximation.docID() >= docID();
            float score = super.score();
            if (contextApproximation.docID() == docID()
                && (contextTwoPhase == null || contextTwoPhase.matches())) {
              score *= boost;
            }
            return score;
          }
        };
      }
    };
  }
  @Override
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    SearchContext searchContext = SearchContext.current();
    searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());

    Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore =
        searchContext.cacheRecycler().objectFloatMap(-1);
    Recycler.V<ObjectIntOpenHashMap<HashedBytesArray>> uidToCount = null;

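    // AVG needs per-parent occurrence counts in addition to accumulated scores;
    // the other score types track scores only.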
    final Collector collector;
    switch (scoreType) {
      case AVG:
        uidToCount = searchContext.cacheRecycler().objectIntMap(-1);
        collector =
            new AvgChildUidCollector(
                scoreType, searchContext, parentType, uidToScore.v(), uidToCount.v());
        break;
      default:
        collector = new ChildUidCollector(scoreType, searchContext, parentType, uidToScore.v());
    }
    final Query childQuery;
    if (rewrittenChildQuery == null) {
      childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
      assert rewriteIndexReader == searcher.getIndexReader();
      childQuery = rewrittenChildQuery;
    }
    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    indexSearcher.search(childQuery, collector);

    int size = uidToScore.v().size();
    if (size == 0) {
      uidToScore.release();
      if (uidToCount != null) {
        uidToCount.release();
      }
      return Queries.newMatchNoDocsQuery().createWeight(searcher);
    }

    final Filter parentFilter;
    if (size == 1) {
      BytesRef id = uidToScore.v().keys().iterator().next().value.toBytesRef();
      if (nonNestedDocsFilter != null) {
        List<Filter> filters =
            Arrays.asList(
                new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
                nonNestedDocsFilter);
        parentFilter = new AndFilter(filters);
      } else {
        parentFilter =
            new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
      }
    } else if (size <= shortCircuitParentDocSet) {
      parentFilter =
          new ParentIdsFilter(
              parentType, uidToScore.v().keys, uidToScore.v().allocated, nonNestedDocsFilter);
    } else {
      parentFilter = new ApplyAcceptedDocsFilter(this.parentFilter);
    }
    ParentWeight parentWeight =
        new ParentWeight(
            rewrittenChildQuery.createWeight(searcher),
            parentFilter,
            searchContext,
            size,
            uidToScore,
            uidToCount);
    searchContext.addReleasable(parentWeight);
    return parentWeight;
  }