public TermsLongFacetExecutor(
      IndexNumericFieldData indexFieldData,
      int size,
      int shardSize,
      TermsFacet.ComparatorType comparatorType,
      boolean allTerms,
      SearchContext context,
      ImmutableSet<BytesRef> excluded,
      SearchScript script,
      CacheRecycler cacheRecycler) {
    this.indexFieldData = indexFieldData;
    this.size = size;
    this.shardSize = shardSize;
    this.comparatorType = comparatorType;
    this.script = script;
    this.excluded = excluded;
    // Recycled long -> int map accumulating per-term counts.
    this.facets = cacheRecycler.longIntMap(-1);

    if (allTerms) {
      // Pre-seed the map with every term present in the index (count 0) so that
      // terms with no matching documents still appear in the facet response.
      // Hoisted out of the loops: facets.v() is invariant across segments and docs.
      final LongIntOpenHashMap map = facets.v();
      for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
        int maxDoc = readerContext.reader().maxDoc();
        LongValues values = indexFieldData.load(readerContext).getLongValues();
        for (int docId = 0; docId < maxDoc; docId++) {
          final int numValues = values.setDocument(docId);
          for (int i = 0; i < numValues; i++) {
            // putIfAbsent keeps any count already recorded for the term.
            map.putIfAbsent(values.nextValue(), 0);
          }
        }
      }
    }
  }
  /**
   * Creates the executor: captures its collaborators and allocates the recycled
   * long-keyed entries map used to accumulate histogram buckets.
   */
  public ValueScriptDateHistogramFacetExecutor(
      IndexNumericFieldData keyIndexFieldData,
      SearchScript valueScript,
      TimeZoneRounding tzRounding,
      DateHistogramFacet.ComparatorType comparatorType,
      CacheRecycler cacheRecycler) {
    this.keyIndexFieldData = keyIndexFieldData;
    this.valueScript = valueScript;
    this.tzRounding = tzRounding;
    this.comparatorType = comparatorType;

    // Recycled map; released when the facet is done with it.
    this.entries = cacheRecycler.longObjectMap(-1);
  }
  /**
   * Builds the parent weight by repeatedly searching for child hits and resolving
   * them to parents, widening the child search until enough parents are found or
   * every child hit has been consumed.
   */
  @Override
  public Weight createWeight(IndexSearcher searcher) throws IOException {
    // Parent docs per segment core key; handed to (and released by) ParentWeight.
    Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
    SearchContext searchContext = SearchContext.current();
    searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());

    // Never request fewer than one document, even when from + size is zero.
    int requestedDocs = Math.max(1, searchContext.from() + searchContext.size());
    int numChildDocs = requestedDocs * factor;

    // Rewrite the child query once and cache the result for later invocations.
    Query childQuery = rewrittenChildQuery;
    if (childQuery == null) {
      childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
      assert rewriteIndexReader == searcher.getIndexReader();
    }

    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    int parentHitsResolved;
    for (;;) {
      parentDocs.v().clear();
      TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
      parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);

      if (parentHitsResolved >= requestedDocs) {
        break; // enough parent hits resolved
      }
      if (topChildDocs.totalHits <= numChildDocs) {
        break; // all child hits already consumed; searching further cannot help
      }
      // Not enough parents yet: widen the child search, capped at the total hit count.
      numChildDocs = Math.min(numChildDocs * incrementalFactor, topChildDocs.totalHits);
    }

    ParentWeight parentWeight =
        new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
    searchContext.addReleasable(parentWeight);
    return parentWeight;
  }
  /**
   * Resolves the child hits in {@code topDocs} to their live parent documents,
   * aggregating per-parent hit count and scores, and fills {@code parentDocs}
   * with a score-sorted {@code ParentDoc[]} per segment core cache key.
   *
   * @return the number of distinct parent documents resolved
   */
  int resolveParentDocuments(
      TopDocs topDocs,
      SearchContext context,
      Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs) {
    int parentHitsResolved = 0;
    // Hoisted: the leaf list is invariant for the duration of this call.
    java.util.List<AtomicReaderContext> leaves = context.searcher().getIndexReader().leaves();
    Recycler.V<ObjectObjectOpenHashMap<Object, Recycler.V<IntObjectOpenHashMap<ParentDoc>>>>
        parentDocsPerReader = cacheRecycler.hashMap(leaves.size());
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
      int readerIndex = ReaderUtil.subIndex(scoreDoc.doc, leaves);
      AtomicReaderContext subContext = leaves.get(readerIndex);
      int subDoc = scoreDoc.doc - subContext.docBase;

      // Find the parent id of this child hit.
      HashedBytesArray parentId =
          context.idCache().reader(subContext.reader()).parentIdByDoc(parentType, subDoc);
      if (parentId == null) {
        // No parent found for this child; skip it.
        continue;
      }
      // Now go over the leaves and find the (reader, docId) of the live parent.
      for (AtomicReaderContext atomicReaderContext : leaves) {
        AtomicReader indexReader = atomicReaderContext.reader();
        int parentDocId = context.idCache().reader(indexReader).docById(parentType, parentId);
        Bits liveDocs = indexReader.getLiveDocs();
        if (parentDocId != -1 && (liveDocs == null || liveDocs.get(parentDocId))) {
          // We found a match: record it and stop scanning further segments.
          Recycler.V<IntObjectOpenHashMap<ParentDoc>> readerParentDocs =
              parentDocsPerReader.v().get(indexReader.getCoreCacheKey());
          if (readerParentDocs == null) {
            readerParentDocs = cacheRecycler.intObjectMap(indexReader.maxDoc());
            parentDocsPerReader.v().put(indexReader.getCoreCacheKey(), readerParentDocs);
          }

          ParentDoc parentDoc = readerParentDocs.v().get(parentDocId);
          if (parentDoc == null) {
            parentHitsResolved++; // first child hit for this parent
            parentDoc = new ParentDoc();
            parentDoc.docId = parentDocId;
            parentDoc.count = 1;
            parentDoc.maxScore = scoreDoc.score;
            parentDoc.sumScores = scoreDoc.score;
            readerParentDocs.v().put(parentDocId, parentDoc);
          } else {
            parentDoc.count++;
            parentDoc.sumScores += scoreDoc.score;
            if (scoreDoc.score > parentDoc.maxScore) {
              parentDoc.maxScore = scoreDoc.score;
            }
          }
          // At most one live copy of a parent exists (liveDocs filters deleted
          // duplicates), so the "break" promised by the comment above is safe and
          // avoids scanning every remaining segment per hit.
          break;
        }
      }
    }
    // Flatten each per-reader map into a score-sorted array and release the
    // recycled intermediate maps.
    boolean[] states = parentDocsPerReader.v().allocated;
    Object[] keys = parentDocsPerReader.v().keys;
    Object[] values = parentDocsPerReader.v().values;
    for (int i = 0; i < states.length; i++) {
      if (states[i]) {
        @SuppressWarnings("unchecked")
        Recycler.V<IntObjectOpenHashMap<ParentDoc>> value =
            (Recycler.V<IntObjectOpenHashMap<ParentDoc>>) values[i];
        ParentDoc[] readerDocs = value.v().values().toArray(ParentDoc.class);
        Arrays.sort(readerDocs, PARENT_DOC_COMP);
        parentDocs.v().put(keys[i], readerDocs);
        Releasables.release(value);
      }
    }
    Releasables.release(parentDocsPerReader);
    return parentHitsResolved;
  }