        @Override
        public PercolateShardResponse doPercolate(
            PercolateShardRequest request, PercolateContext context) {
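          // "Match"-style percolation: run every registered percolator query against the
          // in-memory document searcher and collect the ids of the queries that match.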
          long count = 0;
          List<BytesRef> matches = new ArrayList<>();
          List<Map<String, HighlightField>> hls = new ArrayList<>();
          Lucene.ExistsCollector collector = new Lucene.ExistsCollector();

          for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
            collector.reset();
            if (context.highlight() != null) {
              context.parsedQuery(
                  new ParsedQuery(entry.getValue(), ImmutableMap.<String, Filter>of()));
              context.hitContext().cache().clear();
            }
            try {
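              // Execute this percolator query against the single in-memory document;
              // the ExistsCollector only records whether anything matched at all.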
              context.docSearcher().search(entry.getValue(), collector);
            } catch (Throwable e) {
              logger.debug("[" + entry.getKey() + "] failed to execute query", e);
              throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
            }

            if (collector.exists()) {
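              // Record the matching query id (and its highlights) up to the requested size,
              // but keep counting so the reported total stays accurate.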
              if (!context.limit || count < context.size()) {
                matches.add(entry.getKey().bytes);
                if (context.highlight() != null) {
                  highlightPhase.hitExecute(context, context.hitContext());
                  hls.add(context.hitContext().hit().getHighlightFields());
                }
              }
              count++;
            }
          }

          BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
          return new PercolateShardResponse(
              finalMatches, hls, count, context, request.index(), request.shardId());
        }
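
        // Query-based, scored percolation: the percolator-query documents on this shard are
        // searched with a top-docs (match-and-sort) collector, so only the best-scoring
        // matches are returned. Note: this variant presumably belongs to a different
        // percolator implementation than the method above, since the signatures are identical.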
        @Override
        public PercolateShardResponse doPercolate(
            PercolateShardRequest request, PercolateContext context) {
          Engine.Searcher percolatorSearcher = context.indexShard().acquireSearcher("percolate");
          try {
            MatchAndSort matchAndSort = QueryCollector.matchAndSort(logger, context);
            queryBasedPercolating(percolatorSearcher, context, matchAndSort);
            TopDocs topDocs = matchAndSort.topDocs();
            long count = topDocs.totalHits;
            List<BytesRef> matches = new ArrayList<>(topDocs.scoreDocs.length);
            float[] scores = new float[topDocs.scoreDocs.length];
            List<Map<String, HighlightField>> hls = null;
            if (context.highlight() != null) {
              hls = new ArrayList<>(topDocs.scoreDocs.length);
            }

            final FieldMapper<?> idMapper =
                context.mapperService().smartNameFieldMapper(IdFieldMapper.NAME);
            final IndexFieldData<?> idFieldData = context.fieldData().getForField(idMapper);
            int i = 0;
            final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
            for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
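              // Resolve the segment containing this hit and read its _id via field data to
              // recover the id of the matching percolator query.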
              int segmentIdx =
                  ReaderUtil.subIndex(scoreDoc.doc, percolatorSearcher.reader().leaves());
              AtomicReaderContext atomicReaderContext =
                  percolatorSearcher.reader().leaves().get(segmentIdx);
              BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues(true);
              final int localDocId = scoreDoc.doc - atomicReaderContext.docBase;
              final int numValues = values.setDocument(localDocId);
              assert numValues == 1;
              spare.bytes = values.nextValue();
              spare.hash = values.currentValueHash();
              matches.add(values.copyShared());
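              // If highlighting was requested, look up the original query by its id and
              // re-run the highlight phase for this match.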
              if (hls != null) {
                Query query = context.percolateQueries().get(spare);
                context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
                context.hitContext().cache().clear();
                highlightPhase.hitExecute(context, context.hitContext());
                hls.add(i, context.hitContext().hit().getHighlightFields());
              }
              scores[i++] = scoreDoc.score;
            }
            if (hls != null) {
              return new PercolateShardResponse(
                  matches.toArray(new BytesRef[matches.size()]),
                  hls,
                  count,
                  scores,
                  context,
                  request.index(),
                  request.shardId());
            } else {
              return new PercolateShardResponse(
                  matches.toArray(new BytesRef[matches.size()]),
                  count,
                  scores,
                  context,
                  request.index(),
                  request.shardId());
            }
          } catch (Throwable e) {
            logger.debug("failed to execute", e);
            throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
          } finally {
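            // Always release the searcher acquired at the start of this method.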
            percolatorSearcher.release();
          }
        }