Code Example #1
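A constructor for ImageHashScorer, an inner class of ImageHashLimitQuery (note the qualified ImageHashLimitQuery.this.getBoost() call). It passes luceneFieldName, lireFeature, the segment reader, and the query boost up to its superclass, and keeps the candidate bitset, live-docs filter, segment size, and doc base for later iteration.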
 ImageHashScorer(Weight weight, BitSet bitSet, AtomicReaderContext context, Bits liveDocs) {
   super(
       weight,
       luceneFieldName,
       lireFeature,
       context.reader(),
       ImageHashLimitQuery.this.getBoost());
   this.bitSet = bitSet;
   this.liveDocs = liveDocs;
   maxDoc = context.reader().maxDoc(); // number of documents in this segment
   docBase = context.docBase; // maps segment-local doc ids to top-level ids
 }
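The excerpt shows only the constructor. As a rough illustration (not from the original source), a scorer holding these fields would typically advance by walking the candidate bitset and skipping deleted documents; the sketch below assumes bitSet is a java.util.BitSet and adds a hypothetical doc cursor field:

  private int doc = -1; // hypothetical cursor; not part of the excerpt above

  @Override
  public int nextDoc() throws IOException {
    do {
      doc = bitSet.nextSetBit(doc + 1);
      if (doc < 0 || doc >= maxDoc) {
        // exhausted: org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS
        return doc = DocIdSetIterator.NO_MORE_DOCS;
      }
    } while (liveDocs != null && !liveDocs.get(doc)); // skip deleted documents
    return doc;
  }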
Code Example #2
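A Weight.explain() implementation: it rebuilds the scorer for the segment, advances it to the requested document, and, on a match, returns a ComplexExplanation that reports the query boost as a separate detail and factors it out of the raw image score.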
    @Override
    public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
      Scorer scorer = scorer(context, context.reader().getLiveDocs());
      if (scorer != null) {
        int newDoc = scorer.advance(doc);
        if (newDoc == doc) {
          float score = scorer.score();
          ComplexExplanation result = new ComplexExplanation();
          result.setDescription("ImageHashLimitQuery, product of:");
          result.setValue(score);
          if (getBoost() != 1.0f) {
            // surface the boost as its own detail and strip it from the raw score
            result.addDetail(new Explanation(getBoost(), "boost"));
            score = score / getBoost();
          }
          result.addDetail(new Explanation(score, "image score (1/distance)"));
          result.setMatch(true);
          return result;
        }
      }

      return new ComplexExplanation(false, 0.0f, "no matching term");
    }
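For context, a Weight.explain() like this is normally reached through IndexSearcher rather than called directly. A minimal, self-contained helper (class and method names are placeholders, not from the original):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;

class ExplainHelper {
  // IndexSearcher.explain() builds the query's Weight and delegates to
  // Weight.explain() for the segment that contains docId.
  static Explanation explain(DirectoryReader reader, Query query, int docId) throws IOException {
    return new IndexSearcher(reader).explain(query, docId);
  }
}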
Code Example #3
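A Weight.scorer() implementation, apparently from an Elasticsearch parent/child query: it looks up the pre-resolved ParentDoc array for this segment (keyed by the reader's core cache key) and returns a ParentScorer whose score() reflects the configured ScoreType; segments with no parent hits get an EmptyScorer.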
 @Override
 public Scorer scorer(
     AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs)
     throws IOException {
   ParentDoc[] readerParentDocs = parentDocs.v().get(context.reader().getCoreCacheKey());
   if (readerParentDocs != null) {
     if (scoreType == ScoreType.MAX) {
       return new ParentScorer(this, readerParentDocs) {
         @Override
         public float score() throws IOException {
            assert doc.docId >= 0 && doc.docId < NO_MORE_DOCS; // a real doc, not the sentinel
           return doc.maxScore;
         }
       };
     } else if (scoreType == ScoreType.AVG) {
       return new ParentScorer(this, readerParentDocs) {
         @Override
         public float score() throws IOException {
            assert doc.docId >= 0 && doc.docId < NO_MORE_DOCS;
           return doc.sumScores / doc.count;
         }
       };
     } else if (scoreType == ScoreType.SUM) {
       return new ParentScorer(this, readerParentDocs) {
         @Override
         public float score() throws IOException {
            assert doc.docId >= 0 && doc.docId < NO_MORE_DOCS;
           return doc.sumScores;
         }
       };
     }
     throw new ElasticsearchIllegalStateException(
         "No support for score type [" + scoreType + "]");
   }
   return new EmptyScorer(this);
 }
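ScoreType itself is not part of the excerpt; judging from the branches above, it is an enum covering at least these members (a sketch inferred from usage, not copied from the source):

public enum ScoreType {
  MAX, // parent score = highest child score
  AVG, // parent score = mean of the child scores
  SUM  // parent score = sum of the child scores
}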
Code Example #4
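The resolution step that produces the per-segment ParentDoc arrays consumed in Code Example #3: for every child hit in topDocs it resolves the parent id through the id cache, locates the live parent document across all segments, and aggregates count, max score, and score sum per parent; finally each per-reader map is sorted with PARENT_DOC_COMP and published into parentDocs.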
  int resolveParentDocuments(
      TopDocs topDocs,
      SearchContext context,
      Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs) {
    int parentHitsResolved = 0;
    Recycler.V<ObjectObjectOpenHashMap<Object, Recycler.V<IntObjectOpenHashMap<ParentDoc>>>>
        parentDocsPerReader =
            cacheRecycler.hashMap(context.searcher().getIndexReader().leaves().size());
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
      int readerIndex =
          ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves());
      AtomicReaderContext subContext =
          context.searcher().getIndexReader().leaves().get(readerIndex);
      int subDoc = scoreDoc.doc - subContext.docBase;

      // find the parent id
      HashedBytesArray parentId =
          context.idCache().reader(subContext.reader()).parentIdByDoc(parentType, subDoc);
      if (parentId == null) {
        // no parent found
        continue;
      }
      // now scan every segment for the live parent document carrying this id
      for (AtomicReaderContext atomicReaderContext : context.searcher().getIndexReader().leaves()) {
        AtomicReader indexReader = atomicReaderContext.reader();
        int parentDocId = context.idCache().reader(indexReader).docById(parentType, parentId);
        Bits liveDocs = indexReader.getLiveDocs();
        if (parentDocId != -1 && (liveDocs == null || liveDocs.get(parentDocId))) {
          // found a live parent doc in this segment; aggregate this child hit into it

          Recycler.V<IntObjectOpenHashMap<ParentDoc>> readerParentDocs =
              parentDocsPerReader.v().get(indexReader.getCoreCacheKey());
          if (readerParentDocs == null) {
            readerParentDocs = cacheRecycler.intObjectMap(indexReader.maxDoc());
            parentDocsPerReader.v().put(indexReader.getCoreCacheKey(), readerParentDocs);
          }

          ParentDoc parentDoc = readerParentDocs.v().get(parentDocId);
          if (parentDoc == null) {
            parentHitsResolved++; // we have a hit on a parent
            parentDoc = new ParentDoc();
            parentDoc.docId = parentDocId;
            parentDoc.count = 1;
            parentDoc.maxScore = scoreDoc.score;
            parentDoc.sumScores = scoreDoc.score;
            readerParentDocs.v().put(parentDocId, parentDoc);
          } else {
            parentDoc.count++;
            parentDoc.sumScores += scoreDoc.score;
            if (scoreDoc.score > parentDoc.maxScore) {
              parentDoc.maxScore = scoreDoc.score;
            }
          }
        }
      }
    }
    // walk the open-addressed map directly: "allocated" marks the occupied slots
    boolean[] states = parentDocsPerReader.v().allocated;
    Object[] keys = parentDocsPerReader.v().keys;
    Object[] values = parentDocsPerReader.v().values;
    for (int i = 0; i < states.length; i++) {
      if (states[i]) {
        @SuppressWarnings("unchecked")
        Recycler.V<IntObjectOpenHashMap<ParentDoc>> value =
            (Recycler.V<IntObjectOpenHashMap<ParentDoc>>) values[i];
        ParentDoc[] sortedParentDocs = value.v().values().toArray(ParentDoc.class);
        Arrays.sort(sortedParentDocs, PARENT_DOC_COMP);
        parentDocs.v().put(keys[i], sortedParentDocs);
        Releasables.release(value);
      }
    }
    Releasables.release(parentDocsPerReader);
    return parentHitsResolved;
  }
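Neither ParentDoc nor PARENT_DOC_COMP appears in the excerpt. From the fields touched above, a plausible shape is the following sketch (the ascending-doc-id ordering is an assumption, chosen so the scorers in Code Example #3 can iterate parents in order):

import java.util.Comparator;

// Field set inferred from resolveParentDocuments() above.
class ParentDoc {
  int docId;       // segment-local parent doc id
  int count;       // number of child hits aggregated into this parent
  float maxScore;  // highest child score seen so far
  float sumScores; // running sum of child scores (used for AVG and SUM)
}

// Assumed ordering: ascending segment-local doc id.
static final Comparator<ParentDoc> PARENT_DOC_COMP = new Comparator<ParentDoc>() {
  @Override
  public int compare(ParentDoc left, ParentDoc right) {
    return left.docId - right.docId; // both non-negative, so no overflow
  }
};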