public void testUnmapped() throws Exception {
    SearchResponse response =
        client()
            .prepareSearch("idx_unmapped")
            .addAggregation(
                histogram("histo")
                    .field(FIELD_1_NAME)
                    .interval(interval)
                    .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                    .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                    .subAggregation(
                        bucketSelector(
                            "bucketSelector",
                            new Script(
                                "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                                ScriptType.INLINE,
                                null,
                                null),
                            "field2Sum",
                            "field3Sum")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    assertThat(histo.getBuckets().size(), equalTo(0));
  }
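  /**
   * Doc-count derivative over an index that contains empty buckets. For each bucket the derivative
   * is expected to match the corresponding entry in {@code firstDerivValueCounts_empty} (populated
   * by the test setup) and to be absent where that entry is {@code null}.
   */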
  @Test
  public void docCountDerivativeWithGaps() throws Exception {
    SearchResponse searchResponse =
        client()
            .prepareSearch("empty_bucket_idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(1)
                    .subAggregation(derivative("deriv").setBucketsPaths("_count")))
            .execute()
            .actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocsEmptyIdx));

    InternalHistogram<Bucket> deriv = searchResponse.getAggregations().get("histo");
    assertThat(deriv, Matchers.notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(valueCounts_empty.length));

    for (int i = 0; i < valueCounts_empty.length; i++) {
      Histogram.Bucket bucket = buckets.get(i);
      checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty[i]);
      SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
      if (firstDerivValueCounts_empty[i] == null) {
        assertThat(docCountDeriv, nullValue());
      } else {
        assertThat(docCountDeriv.value(), equalTo(firstDerivValueCounts_empty[i]));
      }
    }
  }
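  /**
   * bucket_selector whose inline script ({@code _value0 > 10000}) is never expected to be true for
   * the fixture data, so the resulting histogram should contain no buckets at all.
   */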
  public void testInlineScriptNoBucketsLeft() {
    SearchResponse response =
        client()
            .prepareSearch("idx")
            .addAggregation(
                histogram("histo")
                    .field(FIELD_1_NAME)
                    .interval(interval)
                    .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                    .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                    .subAggregation(
                        bucketSelector(
                            "bucketSelector",
                            new Script(
                                "Double.isNaN(_value0) ? false : (_value0 > 10000)",
                                ScriptType.INLINE,
                                null,
                                null),
                            "field2Sum",
                            "field3Sum")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(0));
  }
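  /**
   * Doc-count derivative across a mapped and an unmapped index. The unmapped index contributes no
   * documents, so the bucket counts and derivatives should match the mapped-only expectations.
   */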
  @Test
  public void partiallyUnmapped() throws Exception {
    SearchResponse response =
        client()
            .prepareSearch("idx", "idx_unmapped")
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(interval)
                    .subAggregation(derivative("deriv").setBucketsPaths("_count")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(numValueBuckets));

    for (int i = 0; i < numValueBuckets; ++i) {
      Histogram.Bucket bucket = buckets.get(i);
      checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);
      SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
      if (i > 0) {
        assertThat(docCountDeriv, notNullValue());
        assertThat(docCountDeriv.value(), equalTo((double) firstDerivValueCounts[i - 1]));
      } else {
        assertThat(docCountDeriv, nullValue());
      }
    }
  }
  /** Tests first and second derivative on the single-valued field, including normalized values. */
  @Test
  public void singleValuedField_normalised() {
    SearchResponse response =
        client()
            .prepareSearch("idx")
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(interval)
                    .minDocCount(0)
                    .subAggregation(derivative("deriv").setBucketsPaths("_count").unit("1"))
                    .subAggregation(derivative("2nd_deriv").setBucketsPaths("deriv").unit("10")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(numValueBuckets));

    for (int i = 0; i < numValueBuckets; ++i) {
      Histogram.Bucket bucket = buckets.get(i);
      checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);
      Derivative docCountDeriv = bucket.getAggregations().get("deriv");
      if (i > 0) {
        assertThat(docCountDeriv, notNullValue());
        assertThat(
            docCountDeriv.value(), closeTo((double) (firstDerivValueCounts[i - 1]), 0.00001));
        assertThat(
            docCountDeriv.normalizedValue(),
            closeTo((double) (firstDerivValueCounts[i - 1]) / 5, 0.00001));
      } else {
        assertThat(docCountDeriv, nullValue());
      }
      Derivative docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv");
      if (i > 1) {
        assertThat(docCount2ndDeriv, notNullValue());
        assertThat(
            docCount2ndDeriv.value(), closeTo((double) (secondDerivValueCounts[i - 2]), 0.00001));
        assertThat(
            docCount2ndDeriv.normalizedValue(),
            closeTo((double) (secondDerivValueCounts[i - 2]) * 2, 0.00001));
      } else {
        assertThat(docCount2ndDeriv, nullValue());
      }
    }
  }
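  /**
   * Derivative of a sum sub-aggregation over an index with randomly placed empty buckets, using a
   * randomly chosen gap policy. When computing the expected derivative, an empty bucket counts as
   * 0 under {@code INSERT_ZEROS} and as {@code NaN} otherwise.
   */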
  @Test
  public void singleValueAggDerivativeWithGaps_random() throws Exception {
    GapPolicy gapPolicy = randomFrom(GapPolicy.values());
    SearchResponse searchResponse =
        client()
            .prepareSearch("empty_bucket_idx_rnd")
            .setQuery(matchAllQuery())
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(1)
                    .extendedBounds(0L, (long) numBuckets_empty_rnd - 1)
                    .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
                    .subAggregation(
                        derivative("deriv").setBucketsPaths("sum").gapPolicy(gapPolicy)))
            .execute()
            .actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocsEmptyIdx_rnd));

    InternalHistogram<Bucket> deriv = searchResponse.getAggregations().get("histo");
    assertThat(deriv, Matchers.notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(numBuckets_empty_rnd));

    double lastSumValue = Double.NaN;
    for (int i = 0; i < valueCounts_empty_rnd.length; i++) {
      Histogram.Bucket bucket = buckets.get(i);
      checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]);
      Sum sum = bucket.getAggregations().get("sum");
      double thisSumValue = sum.value();
      if (bucket.getDocCount() == 0) {
        thisSumValue = gapPolicy == GapPolicy.INSERT_ZEROS ? 0 : Double.NaN;
      }
      SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
      if (i == 0) {
        assertThat(sumDeriv, nullValue());
      } else {
        double expectedDerivative = thisSumValue - lastSumValue;
        if (Double.isNaN(expectedDerivative)) {
          assertThat(sumDeriv.value(), equalTo(expectedDerivative));
        } else {
          assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001));
        }
      }
      lastSumValue = thisSumValue;
    }
  }
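  /**
   * SQL-plugin test: groups by a nested field with a filter and a reverse_nested histogram, then
   * walks the expected aggregation tree (nested -> filter -> terms -> reverse_nested -> histogram)
   * and checks the bucket counts.
   */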
  @Test
  public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram()
      throws Exception {
    Aggregations result =
        query(
            String.format(
                "SELECT COUNT(*) FROM %s/nestedType GROUP BY  nested(message.info),filter('myFilter',message.info = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2' , 'alias' = 'someAlias' )",
                TEST_INDEX));
    InternalNested nested = result.get("message.info@NESTED");
    InternalFilter filter = nested.getAggregations().get("myFilter@FILTER");
    Terms infos = filter.getAggregations().get("message.info");
    Assert.assertEquals(1, infos.getBuckets().size());
    for (Terms.Bucket bucket : infos.getBuckets()) {
      InternalReverseNested reverseNested = bucket.getAggregations().get("someAlias@NESTED");
      InternalHistogram histogram = reverseNested.getAggregations().get("someAlias");
      Assert.assertEquals(2, histogram.getBuckets().size());
    }
  }
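  /**
   * Same bucket_selector condition as the positional {@code _value0}/{@code _value1} tests, but
   * the buckets paths are bound to named script variables through {@code bucketPathsMap}.
   */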
  public void testInlineScriptNamedVars() {
    Map<String, String> bucketPathsMap = new HashMap<>();
    bucketPathsMap.put("my_value1", "field2Sum");
    bucketPathsMap.put("my_value2", "field3Sum");

    SearchResponse response =
        client()
            .prepareSearch("idx")
            .addAggregation(
                histogram("histo")
                    .field(FIELD_1_NAME)
                    .interval(interval)
                    .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                    .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                    .subAggregation(
                        bucketSelector(
                            "bucketSelector",
                            bucketPathsMap,
                            new Script(
                                "Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)",
                                ScriptType.INLINE,
                                null,
                                null))))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();

    for (int i = 0; i < buckets.size(); ++i) {
      Histogram.Bucket bucket = buckets.get(i);
      Sum field2Sum = bucket.getAggregations().get("field2Sum");
      assertThat(field2Sum, notNullValue());
      double field2SumValue = field2Sum.getValue();
      Sum field3Sum = bucket.getAggregations().get("field3Sum");
      assertThat(field3Sum, notNullValue());
      double field3SumValue = field3Sum.getValue();
      assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
    }
  }
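  /**
   * Derivative on a completely unmapped index: the histogram aggregation is still returned, but it
   * should contain no buckets.
   */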
  @Test
  public void unmapped() throws Exception {
    SearchResponse response =
        client()
            .prepareSearch("idx_unmapped")
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(interval)
                    .subAggregation(derivative("deriv").setBucketsPaths("_count")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    assertThat(deriv.getBuckets().size(), equalTo(0));
  }
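  /**
   * bucket_selector across a mapped and an unmapped index. Only the mapped index produces buckets,
   * and every bucket that survives the selector must satisfy {@code field2Sum + field3Sum > 100}.
   */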
  public void testPartiallyUnmapped() throws Exception {
    SearchResponse response =
        client()
            .prepareSearch("idx", "idx_unmapped")
            .addAggregation(
                histogram("histo")
                    .field(FIELD_1_NAME)
                    .interval(interval)
                    .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                    .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                    .subAggregation(
                        bucketSelector(
                            "bucketSelector",
                            new Script(
                                "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                                ScriptType.INLINE,
                                null,
                                null),
                            "field2Sum",
                            "field3Sum")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();

    for (int i = 0; i < buckets.size(); ++i) {
      Histogram.Bucket bucket = buckets.get(i);
      Sum field2Sum = bucket.getAggregations().get("field2Sum");
      assertThat(field2Sum, notNullValue());
      double field2SumValue = field2Sum.getValue();
      Sum field3Sum = bucket.getAggregations().get("field3Sum");
      assertThat(field3Sum, notNullValue());
      double field3SumValue = field3Sum.getValue();
      assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
    }
  }
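  /**
   * bucket_selector with {@code GapPolicy.INSERT_ZEROS}: missing bucket values are replaced with 0
   * before the script runs, so no {@code Double.isNaN} guard is needed in the script.
   */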
  public void testInlineScriptInsertZeros() {
    SearchResponse response =
        client()
            .prepareSearch("idx")
            .addAggregation(
                histogram("histo")
                    .field(FIELD_1_NAME)
                    .interval(interval)
                    .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                    .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                    .subAggregation(
                        bucketSelector(
                                "bucketSelector",
                                new Script(
                                    "_value0 + _value1 > 100", ScriptType.INLINE, null, null),
                                "field2Sum",
                                "field3Sum")
                            .gapPolicy(GapPolicy.INSERT_ZEROS)))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();

    for (int i = 0; i < buckets.size(); ++i) {
      Histogram.Bucket bucket = buckets.get(i);
      Sum field2Sum = bucket.getAggregations().get("field2Sum");
      assertThat(field2Sum, notNullValue());
      double field2SumValue = field2Sum.getValue();
      Sum field3Sum = bucket.getAggregations().get("field3Sum");
      assertThat(field3Sum, notNullValue());
      double field3SumValue = field3Sum.getValue();
      assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
    }
  }
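  /**
   * Derivative pointing at a multi-value metric path ({@code stats.sum}). Also verifies that the
   * same values are reachable via {@code getProperty} on the histogram and via
   * {@code bucket.getProperty} with an aggregation path.
   */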
  @Test
  public void multiValueAggDerivative() throws Exception {
    SearchResponse response =
        client()
            .prepareSearch("idx")
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(interval)
                    .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
                    .subAggregation(derivative("deriv").setBucketsPaths("stats.sum")))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    assertThat(deriv.getBuckets().size(), equalTo(numValueBuckets));
    Object[] propertiesKeys = (Object[]) deriv.getProperty("_key");
    Object[] propertiesDocCounts = (Object[]) deriv.getProperty("_count");
    Object[] propertiesSumCounts = (Object[]) deriv.getProperty("stats.sum");

    List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
    Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets overwritten
    for (int i = 0; i < numValueBuckets; ++i) {
      Histogram.Bucket bucket = buckets.get(i);
      checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);
      Stats stats = bucket.getAggregations().get("stats");
      assertThat(stats, notNullValue());
      long expectedSum = valueCounts[i] * (i * interval);
      assertThat(stats.getSum(), equalTo((double) expectedSum));
      SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
      if (i > 0) {
        assertThat(sumDeriv, notNullValue());
        long sumDerivValue = expectedSum - expectedSumPreviousBucket;
        assertThat(sumDeriv.value(), equalTo((double) sumDerivValue));
        assertThat(
            (double)
                bucket.getProperty(
                    "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
            equalTo((double) sumDerivValue));
      } else {
        assertThat(sumDeriv, nullValue());
      }
      expectedSumPreviousBucket = expectedSum;
      assertThat((long) propertiesKeys[i], equalTo((long) i * interval));
      assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i]));
      assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum));
    }
  }
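  /**
   * Derivative inside a nested histogram that uses extended bounds and {@code minDocCount(0)}:
   * every inner bucket except the first should carry a derivative value, with
   * {@code INSERT_ZEROS} keeping the derivative defined across the empty buckets.
   */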
  public void testEmptyBuckets() {
    SearchResponse response =
        client()
            .prepareSearch("idx_with_gaps")
            .addAggregation(
                histogram("histo")
                    .field(FIELD_1_NAME)
                    .interval(1)
                    .subAggregation(
                        histogram("inner_histo")
                            .field(FIELD_1_NAME)
                            .interval(1)
                            .extendedBounds(new ExtendedBounds(1L, 4L))
                            .minDocCount(0)
                            .subAggregation(
                                derivative("derivative", "_count")
                                    .gapPolicy(GapPolicy.INSERT_ZEROS))))
            .execute()
            .actionGet();

    assertSearchResponse(response);

    InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(3));

    Histogram.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo("1"));
    Histogram innerHisto = bucket.getAggregations().get("inner_histo");
    assertThat(innerHisto, notNullValue());
    List<? extends Histogram.Bucket> innerBuckets = innerHisto.getBuckets();
    assertThat(innerBuckets, notNullValue());
    assertThat(innerBuckets.size(), equalTo(4));
    for (int i = 0; i < innerBuckets.size(); i++) {
      Histogram.Bucket innerBucket = innerBuckets.get(i);
      if (i == 0) {
        assertThat(innerBucket.getAggregations().get("derivative"), nullValue());
      } else {
        assertThat(innerBucket.getAggregations().get("derivative"), notNullValue());
      }
    }

    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo("2"));
    innerHisto = bucket.getAggregations().get("inner_histo");
    assertThat(innerHisto, notNullValue());
    innerBuckets = innerHisto.getBuckets();
    assertThat(innerBuckets, notNullValue());
    assertThat(innerBuckets.size(), equalTo(4));
    for (int i = 0; i < innerBuckets.size(); i++) {
      Histogram.Bucket innerBucket = innerBuckets.get(i);
      if (i == 0) {
        assertThat(innerBucket.getAggregations().get("derivative"), nullValue());
      } else {
        assertThat(innerBucket.getAggregations().get("derivative"), notNullValue());
      }
    }
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo("3"));
    innerHisto = bucket.getAggregations().get("inner_histo");
    assertThat(innerHisto, notNullValue());
    innerBuckets = innerHisto.getBuckets();
    assertThat(innerBuckets, notNullValue());
    assertThat(innerBuckets.size(), equalTo(4));
    for (int i = 0; i < innerBuckets.size(); i++) {
      Histogram.Bucket innerBucket = innerBuckets.get(i);
      if (i == 0) {
        assertThat(innerBucket.getAggregations().get("derivative"), nullValue());
      } else {
        assertThat(innerBucket.getAggregations().get("derivative"), notNullValue());
      }
    }
  }
  // Tests that expression scripts can be used for the reduce phase of pipeline aggregations.
  public void testPipelineAggregationScript() throws Exception {
    createIndex("agg_index");
    ensureGreen("agg_index");
    indexRandom(
        true,
        client()
            .prepareIndex("agg_index", "doc", "1")
            .setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0),
        client()
            .prepareIndex("agg_index", "doc", "2")
            .setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0),
        client()
            .prepareIndex("agg_index", "doc", "3")
            .setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0),
        client()
            .prepareIndex("agg_index", "doc", "4")
            .setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0),
        client()
            .prepareIndex("agg_index", "doc", "5")
            .setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0));
    SearchResponse response =
        client()
            .prepareSearch("agg_index")
            .addAggregation(
                histogram("histogram")
                    .field("one")
                    .interval(2)
                    .subAggregation(sum("twoSum").field("two"))
                    .subAggregation(sum("threeSum").field("three"))
                    .subAggregation(sum("fourSum").field("four"))
                    .subAggregation(
                        bucketScript("totalSum")
                            .setBucketsPaths("twoSum", "threeSum", "fourSum")
                            .script(
                                new Script(
                                    "_value0 + _value1 + _value2",
                                    ScriptType.INLINE,
                                    ExpressionScriptEngineService.NAME,
                                    null))))
            .execute()
            .actionGet();

    InternalHistogram<Bucket> histogram = response.getAggregations().get("histogram");
    assertThat(histogram, notNullValue());
    assertThat(histogram.getName(), equalTo("histogram"));
    List<Bucket> buckets = histogram.getBuckets();

    for (int bucketCount = 0; bucketCount < buckets.size(); ++bucketCount) {
      Histogram.Bucket bucket = buckets.get(bucketCount);
      if (bucket.getDocCount() == 1) {
        SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
        assertThat(seriesArithmetic, notNullValue());
        double seriesArithmeticValue = seriesArithmetic.value();
        assertEquals(9.0, seriesArithmeticValue, 0.001);
      } else if (bucket.getDocCount() == 2) {
        SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
        assertThat(seriesArithmetic, notNullValue());
        double seriesArithmeticValue = seriesArithmetic.value();
        assertEquals(18.0, seriesArithmeticValue, 0.001);
      } else {
        fail("Incorrect number of documents in a bucket in the histogram.");
      }
    }
  }