/**
  * Generic test that creates a new AggregatorFactory from the test AggregatorFactory by
  * round-tripping it through XContent, and asserts equality on the two aggregations.
  */
 public void testFromXContent() throws IOException {
   AF testAgg = createTestAggregatorFactory();
   AggregatorFactories.Builder factoriesBuilder =
       AggregatorFactories.builder().skipResolveOrder().addPipelineAggregator(testAgg);
   logger.info("Content string: {}", factoriesBuilder);
   XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
   if (randomBoolean()) {
     builder.prettyPrint();
   }
   factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
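    // Shuffle the field order so that parsing is verified to be independent of key ordering.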
   XContentBuilder shuffled = shuffleXContent(builder);
   XContentParser parser =
       XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes());
   QueryParseContext parseContext =
       new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
   String contentString = factoriesBuilder.toString();
   logger.info("Content string: {}", contentString);
   assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
   assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
   assertEquals(testAgg.getName(), parser.currentName());
   assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
   assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
   assertEquals(testAgg.type(), parser.currentName());
   assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
   PipelineAggregationBuilder newAgg =
       aggParsers
           .pipelineParser(testAgg.getWriteableName(), ParseFieldMatcher.STRICT)
           .parse(testAgg.getName(), parseContext);
   assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
   assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
   assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
   assertNull(parser.nextToken());
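    // The parsed aggregation must be a new instance that is equal to the original.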
   assertNotNull(newAgg);
   assertNotSame(newAgg, testAgg);
   assertEquals(testAgg, newAgg);
   assertEquals(testAgg.hashCode(), newAgg.hashCode());
 }
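
  // A minimal sketch of what a concrete subclass's createTestAggregatorFactory() might look
  // like; the MaxBucketPipelineAggregatorBuilder name and its (name, bucketsPath) constructor
  // are assumptions about the surrounding codebase, not guaranteed API:
  //
  // @Override
  // protected MaxBucketPipelineAggregatorBuilder createTestAggregatorFactory() {
  //   String name = randomAsciiOfLengthBetween(3, 20);
  //   String bucketsPath = randomAsciiOfLengthBetween(3, 20);
  //   return new MaxBucketPipelineAggregatorBuilder(name, bucketsPath);
  // }
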
  private AggregatorFactories parseAggregators(
      XContentParser parser, SearchContext context, int level) throws IOException {
    Matcher validAggMatcher = VALID_AGG_NAME.matcher("");
    AggregatorFactories.Builder factories = new AggregatorFactories.Builder();

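    // Each field at this level names one aggregation; its value must be an object holding the
    // aggregation type and, optionally, sub-aggregations under "aggregations"/"aggs".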
    XContentParser.Token token = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token != XContentParser.Token.FIELD_NAME) {
        throw new SearchParseException(
            context,
            "Unexpected token "
                + token
                + " in [aggs]: aggregations definitions must start with the name of the aggregation.");
      }
      final String aggregationName = parser.currentName();
      if (!validAggMatcher.reset(aggregationName).matches()) {
        throw new SearchParseException(
            context,
            "Invalid aggregation name ["
                + aggregationName
                + "]. Aggregation names must be alpha-numeric and can only contain '_' and '-'");
      }

      token = parser.nextToken();
      if (token != XContentParser.Token.START_OBJECT) {
        throw new SearchParseException(
            context,
            "Aggregation definition for ["
                + aggregationName
                + " starts with a ["
                + token
                + "], expected a ["
                + XContentParser.Token.START_OBJECT
                + "].");
      }

      AggregatorFactory factory = null;
      AggregatorFactories subFactories = null;

      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token != XContentParser.Token.FIELD_NAME) {
          throw new SearchParseException(
              context,
              "Expected ["
                  + XContentParser.Token.FIELD_NAME
                  + "] under a ["
                  + XContentParser.Token.START_OBJECT
                  + "], but got a ["
                  + token
                  + "] in ["
                  + aggregationName
                  + "]");
        }
        final String fieldName = parser.currentName();

        token = parser.nextToken();
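        // "aggregations_binary" embeds sub-aggregations as a binary/string XContent blob, which
        // may use a different XContent type and therefore needs its own parser.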
        if ("aggregations_binary".equals(fieldName)) {
          if (subFactories != null) {
            throw new SearchParseException(
                context, "Found two sub aggregation definitions under [" + aggregationName + "]");
          }
          XContentParser binaryParser = null;
          if (token == XContentParser.Token.VALUE_STRING
              || token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
            byte[] source = parser.binaryValue();
            binaryParser = XContentFactory.xContent(source).createParser(source);
          } else {
            throw new SearchParseException(
                context,
                "Expected ["
                    + XContentParser.Token.VALUE_STRING
                    + " or "
                    + XContentParser.Token.VALUE_EMBEDDED_OBJECT
                    + "] for ["
                    + fieldName
                    + "], but got a ["
                    + token
                    + "] in ["
                    + aggregationName
                    + "]");
          }
          XContentParser.Token binaryToken = binaryParser.nextToken();
          if (binaryToken != XContentParser.Token.START_OBJECT) {
            throw new SearchParseException(
                context,
                "Expected ["
                    + XContentParser.Token.START_OBJECT
                    + "] as first token when parsing ["
                    + fieldName
                    + "], but got a ["
                    + binaryToken
                    + "] in ["
                    + aggregationName
                    + "]");
          }
          subFactories = parseAggregators(binaryParser, context, level + 1);
        } else if (token == XContentParser.Token.START_OBJECT) {
          switch (fieldName) {
            case "aggregations":
            case "aggs":
              if (subFactories != null) {
                throw new SearchParseException(
                    context,
                    "Found two sub aggregation definitions under [" + aggregationName + "]");
              }
              subFactories = parseAggregators(parser, context, level + 1);
              break;
            default:
              if (factory != null) {
                throw new SearchParseException(
                    context,
                    "Found two aggregation type definitions in ["
                        + aggregationName
                        + "]: ["
                        + factory.type
                        + "] and ["
                        + fieldName
                        + "]");
              }
              Aggregator.Parser aggregatorParser = parser(fieldName);
              if (aggregatorParser == null) {
                throw new SearchParseException(
                    context,
                    "Could not find aggregator type ["
                        + fieldName
                        + "] in ["
                        + aggregationName
                        + "]");
              }
              factory = aggregatorParser.parse(aggregationName, parser, context);
          }
        } else {
          throw new SearchParseException(
              context,
              "Expected ["
                  + XContentParser.Token.START_OBJECT
                  + "] under ["
                  + fieldName
                  + "], but got a ["
                  + token
                  + "] in ["
                  + aggregationName
                  + "]");
        }
      }

      if (factory == null) {
        throw new SearchParseException(
            context, "Missing definition for aggregation [" + aggregationName + "]");
      }

      if (subFactories != null) {
        factory.subFactories(subFactories);
      }

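      // Only top-level factories (level 0) trigger validation; validate() is expected to cover
      // sub-factories recursively.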
      if (level == 0) {
        factory.validate();
      }

      factories.add(factory);
    }

    return factories.build();
  }
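
  // A minimal sketch of the VALID_AGG_NAME pattern referenced above, assuming exactly the rule
  // that the error message states (alpha-numerics plus '_' and '-'); the real constant is
  // defined elsewhere in this class and may differ:
  //
  // private static final Pattern VALID_AGG_NAME = Pattern.compile("[a-zA-Z0-9_-]+");
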
  public void testResetRootDocId() throws Exception {
    Directory directory = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(null);
    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, iwc);

    List<Document> documents = new ArrayList<>();

    // 1 segment with 1 root document, with 3 nested sub docs
    Document document = new Document();
    document.add(
        new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
    document.add(
        new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    document = new Document();
    document.add(
        new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
    document.add(
        new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    document = new Document();
    document.add(
        new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
    document.add(
        new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    document = new Document();
    document.add(new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.FIELD_TYPE));
    document.add(new Field(TypeFieldMapper.NAME, "test", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    indexWriter.addDocuments(documents);
    indexWriter.commit();

    documents.clear();
    // 1 segment with:
    // 1 document, with 1 nested subdoc
    document = new Document();
    document.add(
        new Field(UidFieldMapper.NAME, "type#2", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
    document.add(
        new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    document = new Document();
    document.add(new Field(UidFieldMapper.NAME, "type#2", UidFieldMapper.Defaults.FIELD_TYPE));
    document.add(new Field(TypeFieldMapper.NAME, "test", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    indexWriter.addDocuments(documents);
    documents.clear();
    // and 1 document, with 1 nested subdoc
    document = new Document();
    document.add(
        new Field(UidFieldMapper.NAME, "type#3", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
    document.add(
        new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    document = new Document();
    document.add(new Field(UidFieldMapper.NAME, "type#3", UidFieldMapper.Defaults.FIELD_TYPE));
    document.add(new Field(TypeFieldMapper.NAME, "test", TypeFieldMapper.Defaults.FIELD_TYPE));
    documents.add(document);
    indexWriter.addDocuments(documents);

    indexWriter.commit();
    indexWriter.close();

    IndexService indexService = createIndex("test");
    DirectoryReader directoryReader = DirectoryReader.open(directory);
    directoryReader =
        ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
    IndexSearcher searcher = new IndexSearcher(directoryReader);

    indexService
        .mapperService()
        .merge(
            "test",
            new CompressedXContent(
                PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested")
                    .string()),
            MapperService.MergeReason.MAPPING_UPDATE,
            false);
    SearchContext searchContext = createSearchContext(indexService);
    AggregationContext context = new AggregationContext(searchContext);

    AggregatorFactories.Builder builder = AggregatorFactories.builder();
    NestedAggregatorBuilder factory = new NestedAggregatorBuilder("test", "nested_field");
    builder.addAggregator(factory);
    AggregatorFactories factories = builder.build(context, null);
    searchContext.aggregations(new SearchContextAggregations(factories));
    Aggregator[] aggs = factories.createTopLevelAggregators();
    BucketCollector collector = BucketCollector.wrap(Arrays.asList(aggs));
    collector.preCollection();
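    // preCollection() must run before the search and postCollection() after it, so the
    // aggregators can initialize and then flush any deferred documents.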
    // A regular search always excludes nested docs, so we use NonNestedDocsFilter.INSTANCE here
    // (otherwise MatchAllDocsQuery would be sufficient). We exclude the root doc with uid type#2;
    // this triggers the bug if the root doc isn't reset when we process a new segment, because
    // root doc type#3 and root doc type#1 have the same segment docid.
    BooleanQuery.Builder bq = new BooleanQuery.Builder();
    bq.add(Queries.newNonNestedFilter(), Occur.MUST);
    bq.add(new TermQuery(new Term(UidFieldMapper.NAME, "type#2")), Occur.MUST_NOT);
    searcher.search(new ConstantScoreQuery(bq.build()), collector);
    collector.postCollection();

    Nested nested = (Nested) aggs[0].buildAggregation(0);
    // The bug manifests as 6 docs being returned: when currentRootDoc isn't reset, the previous
    // child docs from the first segment are emitted as hits too. We expect 4: the 3 nested docs
    // under type#1 plus the 1 under type#3 (type#2 is excluded by the query).
    assertThat(nested.getDocCount(), equalTo(4L));

    directoryReader.close();
    directory.close();
  }