 // Counts each of the document's segment-level ordinals into `current`;
 // when the segment's ordinal space differs from the global one, these
 // counts are apparently remapped to global ordinals later (see the
 // setNextReader and doPostCollection snippets further down).
 @Override
 public void collect(int doc, long owningBucketOrdinal) throws IOException {
   final int numOrds = segmentOrdinals.setDocument(doc);
   for (int i = 0; i < numOrds; i++) {
     final long segmentOrd = segmentOrdinals.nextOrd();
     current.increment(segmentOrd, 1);
   }
 }
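 // A second collect variant that walks the document's global ordinals directly
 // and collects each one into its already-existing bucket.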
 @Override
 public void collect(int doc, long owningBucketOrdinal) throws IOException {
   final int numOrds = globalOrdinals.setDocument(doc);
   for (int i = 0; i < numOrds; i++) {
     final long globalOrd = globalOrdinals.nextOrd();
     collectExistingBucket(doc, globalOrd);
   }
 }
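     // Filtered view over another set of per-document ordinals: keeps only the
     // ordinals present in the `accepted` bit set and reports how many survive
     // for the current document.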
     @Override
     public int setDocument(int docId) {
       final int numOrds = inner.setDocument(docId); // ordinals for this doc, before filtering
       buffer = ArrayUtil.grow(buffer, numOrds);
       bufferSlot = 0; // reset position into the accepted-ordinals buffer

       int numAcceptedOrds = 0;
       for (int slot = 0; slot < numOrds; slot++) {
         long ord = inner.nextOrd();
         if (accepted.get(ord)) {
           buffer[numAcceptedOrds] = ord;
           numAcceptedOrds++;
         }
       }
       return numAcceptedOrds;
     }
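     // Segment transition: if the previous segment used its own ordinal space,
     // fold its per-segment counts into the global counts first; then load the
     // new segment's values and decide whether counting can target the global
     // doc counts directly (segment maxOrd == global maxOrd) or must go through
     // `segmentDocCounts` and be remapped afterwards.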
    @Override
    public void setNextReader(AtomicReaderContext reader) {
      if (segmentOrdinals != null && segmentOrdinals.getMaxOrd() != globalOrdinals.getMaxOrd()) {
        mapSegmentCountsToGlobalCounts();
      }

      globalValues = valuesSource.globalBytesValues();
      globalOrdinals = globalValues.ordinals();

      BytesValues.WithOrdinals bytesValues = valuesSource.bytesValues();
      segmentOrdinals = bytesValues.ordinals();
      if (segmentOrdinals.getMaxOrd() != globalOrdinals.getMaxOrd()) {
        current = segmentDocCounts;
      } else {
        current = getDocCounts();
      }
    }
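 // Single-valued lookup through the same filter: the wrapped ordinal is returned
 // only if it is in the `accepted` set, otherwise the value is reported missing.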
 @Override
 public long getOrd(int docId) {
   long ord = inner.getOrd(docId);
   if (accepted.get(ord)) {
     return currentOrd = ord;
   } else {
     return currentOrd = Ordinals.MISSING_ORDINAL;
   }
 }
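 // Per-segment setup for what is evidently a parent/child collector (used by the
 // collect method shown next): loads the parent-id values for the new reader and
 // (re)sizes `parentIdsIndex`, a per-ordinal lookup table, filling it with -1 to
 // mark ordinals whose parent has not been collected yet.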
 @Override
 public void setNextReader(AtomicReaderContext context) throws IOException {
   values = indexFieldData.load(context).getBytesValues(parentType);
   if (values != null) {
     ordinals = values.ordinals();
     final long maxOrd = ordinals.getMaxOrd();
     if (parentIdsIndex == null) {
       parentIdsIndex = bigArrays.newLongArray(BigArrays.overSize(maxOrd), false);
     } else if (parentIdsIndex.size() < maxOrd) {
       parentIdsIndex = bigArrays.grow(parentIdsIndex, maxOrd);
     }
     parentIdsIndex.fill(0, maxOrd, -1L);
   }
 }
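 // Scores each distinct parent id at most once per segment, caching the resolved
 // hash slot per ordinal in `parentIdsIndex`.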
 @Override
 public void collect(int doc) throws IOException {
   if (values != null) {
     long ord = ordinals.getOrd(doc);
     long parentIdx = parentIdsIndex.get(ord);
     if (parentIdx < 0) {
       // first time this ordinal is seen in the segment: resolve it in the hash
       final BytesRef bytes = values.getValueByOrd(ord);
       final int hash = values.currentValueHash();
       parentIdx = parentIds.add(bytes, hash);
       if (parentIdx < 0) {
         // already present in the hash; the existing slot is encoded as -(slot + 1)
         parentIdx = -parentIdx - 1;
         doScore(parentIdx);
       } else {
         // newly added parent: remember its score
         scores = bigArrays.grow(scores, parentIdx + 1);
         scores.set(parentIdx, scorer.score());
       }
       parentIdsIndex.set(ord, parentIdx);
     } else {
       // ordinal already resolved for this segment: score the cached slot
       doScore(parentIdx);
     }
   }
 }
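 // Straight delegations: filtering does not change multi-valuedness or the size
 // of the underlying ordinal space.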
 @Override
 public boolean isMultiValued() {
   return inner.isMultiValued();
 }
 @Override
 public long getMaxOrd() {
   return inner.getMaxOrd();
 }
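 // After the last segment, fold any remaining segment-ordinal counts into the
 // global-ordinal counts.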
 @Override
 protected void doPostCollection() {
   if (segmentOrdinals.getMaxOrd() != globalOrdinals.getMaxOrd()) {
     mapSegmentCountsToGlobalCounts();
   }
 }
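  // Builds the shard-level terms result: every global ordinal is considered,
  // buckets passing the include/exclude filter and the doc-count thresholds are
  // kept in a size-bounded priority queue, then the surviving buckets are
  // materialized, deferred collections are replayed for them, and sub-aggregations
  // are attached.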
  @Override
  public InternalAggregation buildAggregation(long owningBucketOrdinal) {
    if (globalOrdinals == null) { // no context in this reader
      return buildEmptyAggregation();
    }

    final int size;
    if (bucketCountThresholds.getMinDocCount() == 0) {
       // if minDocCount == 0 then we can end up with more buckets than maxBucketOrd() returns
      size = (int) Math.min(globalOrdinals.getMaxOrd(), bucketCountThresholds.getShardSize());
    } else {
      size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
    }
    BucketPriorityQueue ordered = new BucketPriorityQueue(size, order.comparator(this));
    OrdBucket spare = new OrdBucket(-1, 0, null);
    for (long globalTermOrd = Ordinals.MIN_ORDINAL;
        globalTermOrd < globalOrdinals.getMaxOrd();
        ++globalTermOrd) {
      if (includeExclude != null && !acceptedGlobalOrdinals.get(globalTermOrd)) {
        continue;
      }
      final long bucketOrd = getBucketOrd(globalTermOrd);
      final long bucketDocCount = bucketOrd < 0 ? 0 : bucketDocCount(bucketOrd);
      if (bucketCountThresholds.getMinDocCount() > 0 && bucketDocCount == 0) {
        continue;
      }
      spare.globalOrd = globalTermOrd;
      spare.bucketOrd = bucketOrd;
      spare.docCount = bucketDocCount;
      if (bucketCountThresholds.getShardMinDocCount() <= spare.docCount) {
        spare = (OrdBucket) ordered.insertWithOverflow(spare);
        if (spare == null) {
          spare = new OrdBucket(-1, 0, null);
        }
      }
    }

    // Get the top buckets
    final InternalTerms.Bucket[] list = new InternalTerms.Bucket[ordered.size()];
     final long[] survivingBucketOrds = new long[ordered.size()];
    for (int i = ordered.size() - 1; i >= 0; --i) {
      final OrdBucket bucket = (OrdBucket) ordered.pop();
      survivingBucketOrds[i] = bucket.bucketOrd;
      BytesRef scratch = new BytesRef();
      copy(globalValues.getValueByOrd(bucket.globalOrd), scratch);
      list[i] = new StringTerms.Bucket(scratch, bucket.docCount, null);
      list[i].bucketOrd = bucket.bucketOrd;
    }
    // replay any deferred collections
    runDeferredCollections(survivingBucketOrds);
    // Now build the aggs
    for (int i = 0; i < list.length; i++) {
      Bucket bucket = list[i];
      bucket.aggregations =
          bucket.docCount == 0 ? bucketEmptyAggregations() : bucketAggregations(bucket.bucketOrd);
    }

    return new StringTerms(
        name,
        order,
        bucketCountThresholds.getRequiredSize(),
        bucketCountThresholds.getMinDocCount(),
        Arrays.asList(list));
  }