@Test
public void sumOnNestedField() throws Exception {
  Aggregations result =
      query(
          String.format(
              "SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s/nestedType", TEST_INDEX));
  InternalNested nested = result.get("message.dayOfWeek@NESTED");
  Sum sum = nested.getAggregations().get("sumDays");
  Assert.assertEquals(13.0, sum.getValue(), 0.0001);
}
@Test
public void testMetric_asSubAggOfSingleBucketAgg() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              filter("filter")
                  .filter(termQuery("tag", "tag0"))
                  .subAggregation(
                      histogram("histo")
                          .field(SINGLE_VALUED_FIELD_NAME)
                          .interval(interval)
                          .extendedBounds((long) minRandomValue, (long) maxRandomValue)
                          .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                  .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Filter filter = response.getAggregations().get("filter");
  assertThat(filter, notNullValue());
  assertThat(filter.getName(), equalTo("filter"));

  Histogram histo = filter.getAggregations().get("histo");
  assertThat(histo, notNullValue());
  assertThat(histo.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = histo.getBuckets();

  List<String> maxKeys = new ArrayList<>();
  double maxValue = Double.NEGATIVE_INFINITY;
  for (int j = 0; j < numValueBuckets; ++j) {
    Histogram.Bucket bucket = buckets.get(j);
    assertThat(bucket, notNullValue());
    assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
    if (bucket.getDocCount() != 0) {
      Sum sum = bucket.getAggregations().get("sum");
      assertThat(sum, notNullValue());
      if (sum.value() > maxValue) {
        maxValue = sum.value();
        maxKeys = new ArrayList<>();
        maxKeys.add(bucket.getKeyAsString());
      } else if (sum.value() == maxValue) {
        maxKeys.add(bucket.getKeyAsString());
      }
    }
  }

  InternalBucketMetricValue maxBucketValue = filter.getAggregations().get("max_bucket");
  assertThat(maxBucketValue, notNullValue());
  assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
  assertThat(maxBucketValue.value(), equalTo(maxValue));
  assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
}
@Test
public void singleValueAggDerivative() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              histogram("histo")
                  .field(SINGLE_VALUED_FIELD_NAME)
                  .interval(interval)
                  .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
                  .subAggregation(derivative("deriv").setBucketsPaths("sum")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
  assertThat(deriv, notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  assertThat(deriv.getBuckets().size(), equalTo(numValueBuckets));
  Object[] propertiesKeys = (Object[]) deriv.getProperty("_key");
  Object[] propertiesDocCounts = (Object[]) deriv.getProperty("_count");
  Object[] propertiesSumCounts = (Object[]) deriv.getProperty("sum.value");

  List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
  Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets overwritten
  for (int i = 0; i < numValueBuckets; ++i) {
    Histogram.Bucket bucket = buckets.get(i);
    checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);
    Sum sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    long expectedSum = valueCounts[i] * (i * interval);
    assertThat(sum.getValue(), equalTo((double) expectedSum));
    SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
    if (i > 0) {
      assertThat(sumDeriv, notNullValue());
      long sumDerivValue = expectedSum - expectedSumPreviousBucket;
      assertThat(sumDeriv.value(), equalTo((double) sumDerivValue));
      assertThat(
          (double)
              bucket.getProperty(
                  "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
          equalTo((double) sumDerivValue));
    } else {
      assertThat(sumDeriv, nullValue());
    }
    expectedSumPreviousBucket = expectedSum;
    assertThat((long) propertiesKeys[i], equalTo((long) i * interval));
    assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i]));
    assertThat((double) propertiesSumCounts[i], equalTo((double) expectedSum));
  }
}
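// Note: singleValueAggDerivative above and singleValueAggDerivativeWithGaps_random further below
// both call a checkBucketKeyAndDocCount(...) helper that is defined elsewhere in the test class
// and is not part of this excerpt. The following is only a sketch of what such a helper plausibly
// looks like, inferred from how it is called (message, bucket, expected key, expected doc count);
// it is an assumption, not the original implementation.
private void checkBucketKeyAndDocCount(
    String msg, Histogram.Bucket bucket, long expectedKey, long expectedDocCount) {
  assertThat(msg, bucket, notNullValue());
  assertThat(msg, ((Number) bucket.getKey()).longValue(), equalTo(expectedKey));
  assertThat(msg, bucket.getDocCount(), equalTo(expectedDocCount));
}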
// https://github.com/elasticsearch/elasticsearch/issues/6435
public void testReduce() throws Exception {
  createIndex("idx");
  final int value = randomIntBetween(0, 10);
  indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value));
  ensureYellow("idx"); // only one document, so make sure all shards have an active primary
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              filter("filter")
                  .filter(FilterBuilders.matchAllFilter())
                  .subAggregation(
                      range("range")
                          .field("f")
                          .addUnboundedTo(6)
                          .addUnboundedFrom(6)
                          .subAggregation(sum("sum").field("f"))))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Filter filter = response.getAggregations().get("filter");
  assertNotNull(filter);
  assertEquals(1, filter.getDocCount());

  Range range = filter.getAggregations().get("range");
  assertThat(range, notNullValue());
  assertThat(range.getName(), equalTo("range"));
  assertThat(range.getBuckets().size(), equalTo(2));

  Range.Bucket bucket = range.getBucketByKey("*-6.0");
  assertThat(bucket, notNullValue());
  assertThat(bucket.getKey(), equalTo("*-6.0"));
  assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
  assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
  assertThat(bucket.getDocCount(), equalTo(value < 6 ? 1L : 0L));
  Sum sum = bucket.getAggregations().get("sum");
  assertEquals(value < 6 ? value : 0, sum.getValue(), 0d);

  bucket = range.getBucketByKey("6.0-*");
  assertThat(bucket, notNullValue());
  assertThat(bucket.getKey(), equalTo("6.0-*"));
  assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
  assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
  assertThat(bucket.getDocCount(), equalTo(value >= 6 ? 1L : 0L));
  sum = bucket.getAggregations().get("sum");
  assertEquals(value >= 6 ? value : 0, sum.getValue(), 0d);
}
@Test
public void singleValueAggDerivativeWithGaps_random() throws Exception {
  GapPolicy gapPolicy = randomFrom(GapPolicy.values());
  SearchResponse searchResponse =
      client()
          .prepareSearch("empty_bucket_idx_rnd")
          .setQuery(matchAllQuery())
          .addAggregation(
              histogram("histo")
                  .field(SINGLE_VALUED_FIELD_NAME)
                  .interval(1)
                  .extendedBounds(0L, (long) numBuckets_empty_rnd - 1)
                  .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
                  .subAggregation(derivative("deriv").setBucketsPaths("sum").gapPolicy(gapPolicy)))
          .execute()
          .actionGet();

  assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocsEmptyIdx_rnd));

  InternalHistogram<Bucket> deriv = searchResponse.getAggregations().get("histo");
  assertThat(deriv, Matchers.notNullValue());
  assertThat(deriv.getName(), equalTo("histo"));
  List<Bucket> buckets = deriv.getBuckets();
  assertThat(buckets.size(), equalTo(numBuckets_empty_rnd));

  double lastSumValue = Double.NaN;
  for (int i = 0; i < valueCounts_empty_rnd.length; i++) {
    Histogram.Bucket bucket = buckets.get(i);
    checkBucketKeyAndDocCount("Bucket " + i, bucket, i, valueCounts_empty_rnd[i]);
    Sum sum = bucket.getAggregations().get("sum");
    double thisSumValue = sum.value();
    if (bucket.getDocCount() == 0) {
      // empty bucket: INSERT_ZEROS treats the missing sum as 0, any other gap policy as NaN
      thisSumValue = gapPolicy == GapPolicy.INSERT_ZEROS ? 0 : Double.NaN;
    }
    SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
    if (i == 0) {
      assertThat(sumDeriv, nullValue());
    } else {
      double expectedDerivative = thisSumValue - lastSumValue;
      if (Double.isNaN(expectedDerivative)) {
        assertThat(sumDeriv.value(), equalTo(expectedDerivative));
      } else {
        assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001));
      }
    }
    lastSumValue = thisSumValue;
  }
}
@Test
public void testWithDeletes() throws Exception {
  String indexName = "xyz";
  assertAcked(
      prepareCreate(indexName)
          .addMapping("parent")
          .addMapping("child", "_parent", "type=parent", "count", "type=long"));

  List<IndexRequestBuilder> requests = new ArrayList<>();
  requests.add(client().prepareIndex(indexName, "parent", "1").setSource("{}"));
  requests.add(client().prepareIndex(indexName, "child", "0").setParent("1").setSource("count", 1));
  requests.add(client().prepareIndex(indexName, "child", "1").setParent("1").setSource("count", 1));
  requests.add(client().prepareIndex(indexName, "child", "2").setParent("1").setSource("count", 1));
  requests.add(client().prepareIndex(indexName, "child", "3").setParent("1").setSource("count", 1));
  indexRandom(true, requests);

  for (int i = 0; i < 10; i++) {
    SearchResponse searchResponse =
        client()
            .prepareSearch(indexName)
            .addAggregation(
                children("children")
                    .childType("child")
                    .subAggregation(sum("counts").field("count")))
            .get();

    assertNoFailures(searchResponse);
    Children children = searchResponse.getAggregations().get("children");
    assertThat(children.getDocCount(), equalTo(4L));

    Sum count = children.getAggregations().get("counts");
    assertThat(count.getValue(), equalTo(4.0));

    String idToUpdate = Integer.toString(randomInt(3));
    UpdateResponse updateResponse =
        client()
            .prepareUpdate(indexName, "child", idToUpdate)
            .setParent("1")
            .setDoc("count", 1)
            .get();
    assertThat(updateResponse.getVersion(), greaterThan(1L));
    refresh();
  }
}
@Test
public void testMetric_topLevel() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              terms("terms")
                  .field("tag")
                  .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
          .addAggregation(maxBucket("max_bucket").setBucketsPaths("terms>sum"))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Terms terms = response.getAggregations().get("terms");
  assertThat(terms, notNullValue());
  assertThat(terms.getName(), equalTo("terms"));
  List<Terms.Bucket> buckets = terms.getBuckets();
  assertThat(buckets.size(), equalTo(interval));

  List<String> maxKeys = new ArrayList<>();
  double maxValue = Double.NEGATIVE_INFINITY;
  for (int i = 0; i < interval; ++i) {
    Terms.Bucket bucket = buckets.get(i);
    assertThat(bucket, notNullValue());
    assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
    assertThat(bucket.getDocCount(), greaterThan(0L));
    Sum sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    if (sum.value() > maxValue) {
      maxValue = sum.value();
      maxKeys = new ArrayList<>();
      maxKeys.add(bucket.getKeyAsString());
    } else if (sum.value() == maxValue) {
      maxKeys.add(bucket.getKeyAsString());
    }
  }

  InternalBucketMetricValue maxBucketValue = response.getAggregations().get("max_bucket");
  assertThat(maxBucketValue, notNullValue());
  assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
  assertThat(maxBucketValue.value(), equalTo(maxValue));
  assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
}
public void testInlineScriptNamedVars() {
  // the buckets_path map names the inputs, so the script refers to them as my_value1 / my_value2
  Map<String, String> bucketPathsMap = new HashMap<>();
  bucketPathsMap.put("my_value1", "field2Sum");
  bucketPathsMap.put("my_value2", "field3Sum");

  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              histogram("histo")
                  .field(FIELD_1_NAME)
                  .interval(interval)
                  .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                  .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                  .subAggregation(
                      bucketSelector(
                          "bucketSelector",
                          bucketPathsMap,
                          new Script(
                              "Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)",
                              ScriptType.INLINE,
                              null,
                              null))))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
  assertThat(histo, notNullValue());
  assertThat(histo.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = histo.getBuckets();

  for (int i = 0; i < buckets.size(); ++i) {
    Histogram.Bucket bucket = buckets.get(i);
    Sum field2Sum = bucket.getAggregations().get("field2Sum");
    assertThat(field2Sum, notNullValue());
    double field2SumValue = field2Sum.getValue();
    Sum field3Sum = bucket.getAggregations().get("field3Sum");
    assertThat(field3Sum, notNullValue());
    double field3SumValue = field3Sum.getValue();
    assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
  }
}
public void testPartiallyUnmapped() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx", "idx_unmapped")
          .addAggregation(
              histogram("histo")
                  .field(FIELD_1_NAME)
                  .interval(interval)
                  .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                  .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                  .subAggregation(
                      // buckets paths passed positionally are exposed to the script as _value0, _value1
                      bucketSelector(
                          "bucketSelector",
                          new Script(
                              "Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                              ScriptType.INLINE,
                              null,
                              null),
                          "field2Sum",
                          "field3Sum")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
  assertThat(histo, notNullValue());
  assertThat(histo.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = histo.getBuckets();

  for (int i = 0; i < buckets.size(); ++i) {
    Histogram.Bucket bucket = buckets.get(i);
    Sum field2Sum = bucket.getAggregations().get("field2Sum");
    assertThat(field2Sum, notNullValue());
    double field2SumValue = field2Sum.getValue();
    Sum field3Sum = bucket.getAggregations().get("field3Sum");
    assertThat(field3Sum, notNullValue());
    double field3SumValue = field3Sum.getValue();
    assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
  }
}
public void testNestNestedAggs() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx_nested_nested_aggs")
          .addAggregation(
              nested("level1", "nested1")
                  .subAggregation(
                      terms("a")
                          .field("nested1.a")
                          .collectMode(aggCollectionMode)
                          .subAggregation(
                              nested("level2", "nested1.nested2")
                                  .subAggregation(sum("sum").field("nested1.nested2.b")))))
          .get();
  assertSearchResponse(response);

  Nested level1 = response.getAggregations().get("level1");
  assertThat(level1, notNullValue());
  assertThat(level1.getName(), equalTo("level1"));
  assertThat(level1.getDocCount(), equalTo(2L));

  StringTerms a = level1.getAggregations().get("a");
  Terms.Bucket bBucket = a.getBucketByKey("a");
  assertThat(bBucket.getDocCount(), equalTo(1L));
  Nested level2 = bBucket.getAggregations().get("level2");
  assertThat(level2.getDocCount(), equalTo(1L));
  Sum sum = level2.getAggregations().get("sum");
  assertThat(sum.getValue(), equalTo(2d));

  a = level1.getAggregations().get("a");
  bBucket = a.getBucketByKey("b");
  assertThat(bBucket.getDocCount(), equalTo(1L));
  level2 = bBucket.getAggregations().get("level2");
  assertThat(level2.getDocCount(), equalTo(1L));
  sum = level2.getAggregations().get("sum");
  assertThat(sum.getValue(), equalTo(2d));
}
public void testInlineScriptInsertZeros() {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              histogram("histo")
                  .field(FIELD_1_NAME)
                  .interval(interval)
                  .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                  .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                  .subAggregation(
                      bucketSelector(
                              "bucketSelector",
                              new Script("_value0 + _value1 > 100", ScriptType.INLINE, null, null),
                              "field2Sum",
                              "field3Sum")
                          .gapPolicy(GapPolicy.INSERT_ZEROS)))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
  assertThat(histo, notNullValue());
  assertThat(histo.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = histo.getBuckets();

  for (int i = 0; i < buckets.size(); ++i) {
    Histogram.Bucket bucket = buckets.get(i);
    Sum field2Sum = bucket.getAggregations().get("field2Sum");
    assertThat(field2Sum, notNullValue());
    double field2SumValue = field2Sum.getValue();
    Sum field3Sum = bucket.getAggregations().get("field3Sum");
    assertThat(field3Sum, notNullValue());
    double field3SumValue = field3Sum.getValue();
    assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
  }
}
public void testSingleValuedFieldWithSubAggregation() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              dateHistogram("histo")
                  .field("date")
                  .dateHistogramInterval(DateHistogramInterval.MONTH)
                  .minDocCount(0)
                  .subAggregation(sum("sum").field("value"))
                  .subAggregation(derivative("deriv", "sum")))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Histogram histo = response.getAggregations().get("histo");
  assertThat(histo, notNullValue());
  assertThat(histo.getName(), equalTo("histo"));
  List<? extends Bucket> buckets = histo.getBuckets();
  assertThat(buckets.size(), equalTo(3));
  Object[] propertiesKeys = (Object[]) histo.getProperty("_key");
  Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count");
  Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value");

  // Monthly sums are 1.0, 5.0 and 15.0, so the derivative is null for the first bucket,
  // then 4.0 and 10.0 for the second and third.
  DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
  Histogram.Bucket bucket = buckets.get(0);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(1L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  Sum sum = bucket.getAggregations().get("sum");
  assertThat(sum, notNullValue());
  assertThat(sum.getValue(), equalTo(1.0));
  SimpleValue deriv = bucket.getAggregations().get("deriv");
  assertThat(deriv, nullValue());
  assertThat((DateTime) propertiesKeys[0], equalTo(key));
  assertThat((long) propertiesDocCounts[0], equalTo(1L));
  assertThat((double) propertiesCounts[0], equalTo(1.0));

  key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(1);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(2L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  sum = bucket.getAggregations().get("sum");
  assertThat(sum, notNullValue());
  assertThat(sum.getValue(), equalTo(5.0));
  deriv = bucket.getAggregations().get("deriv");
  assertThat(deriv, notNullValue());
  assertThat(deriv.value(), equalTo(4.0));
  assertThat(
      (double)
          bucket.getProperty(
              "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
      equalTo(4.0));
  assertThat((DateTime) propertiesKeys[1], equalTo(key));
  assertThat((long) propertiesDocCounts[1], equalTo(2L));
  assertThat((double) propertiesCounts[1], equalTo(5.0));

  key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
  bucket = buckets.get(2);
  assertThat(bucket, notNullValue());
  assertThat((DateTime) bucket.getKey(), equalTo(key));
  assertThat(bucket.getDocCount(), equalTo(3L));
  assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
  sum = bucket.getAggregations().get("sum");
  assertThat(sum, notNullValue());
  assertThat(sum.getValue(), equalTo(15.0));
  deriv = bucket.getAggregations().get("deriv");
  assertThat(deriv, notNullValue());
  assertThat(deriv.value(), equalTo(10.0));
  assertThat(
      (double)
          bucket.getProperty(
              "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()),
      equalTo(10.0));
  assertThat((DateTime) propertiesKeys[2], equalTo(key));
  assertThat((long) propertiesDocCounts[2], equalTo(3L));
  assertThat((double) propertiesCounts[2], equalTo(15.0));
}
@Test
public void testMetric_asSubAggWithInsertZeros() throws Exception {
  SearchResponse response =
      client()
          .prepareSearch("idx")
          .addAggregation(
              terms("terms")
                  .field("tag")
                  .order(Order.term(true))
                  .subAggregation(
                      histogram("histo")
                          .field(SINGLE_VALUED_FIELD_NAME)
                          .interval(interval)
                          .extendedBounds((long) minRandomValue, (long) maxRandomValue)
                          .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                  .subAggregation(
                      maxBucket("max_bucket")
                          .setBucketsPaths("histo>sum")
                          .gapPolicy(GapPolicy.INSERT_ZEROS)))
          .execute()
          .actionGet();

  assertSearchResponse(response);

  Terms terms = response.getAggregations().get("terms");
  assertThat(terms, notNullValue());
  assertThat(terms.getName(), equalTo("terms"));
  List<Terms.Bucket> termsBuckets = terms.getBuckets();
  assertThat(termsBuckets.size(), equalTo(interval));

  for (int i = 0; i < interval; ++i) {
    Terms.Bucket termsBucket = termsBuckets.get(i);
    assertThat(termsBucket, notNullValue());
    assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));

    Histogram histo = termsBucket.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();

    List<String> maxKeys = new ArrayList<>();
    double maxValue = Double.NEGATIVE_INFINITY;
    for (int j = 0; j < numValueBuckets; ++j) {
      Histogram.Bucket bucket = buckets.get(j);
      assertThat(bucket, notNullValue());
      assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
      Sum sum = bucket.getAggregations().get("sum");
      assertThat(sum, notNullValue());
      if (sum.value() > maxValue) {
        maxValue = sum.value();
        maxKeys = new ArrayList<>();
        maxKeys.add(bucket.getKeyAsString());
      } else if (sum.value() == maxValue) {
        maxKeys.add(bucket.getKeyAsString());
      }
    }

    InternalBucketMetricValue maxBucketValue = termsBucket.getAggregations().get("max_bucket");
    assertThat(maxBucketValue, notNullValue());
    assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
    assertThat(maxBucketValue.value(), equalTo(maxValue));
    assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
  }
}
@Test
public void sumTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException {
  Aggregations result = query(String.format("SELECT SUM(balance) FROM %s/account", TEST_INDEX));
  Sum sum = result.get("SUM(balance)");
  assertThat(sum.getValue(), equalTo(25714837.0));
}
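// Note: both SQL-based tests in this section (sumOnNestedField and sumTest) go through a
// query(String) helper that lives elsewhere in the test class and is not shown here. The sketch
// below is only an assumption about its shape, inferred from how it is used: the SQL is turned
// into a search request via the SQL plugin (that translation step is left as a hypothetical
// translateSqlToSearchRequest placeholder), the request is executed, and the aggregations from
// the response are returned. It is not the original helper.
private Aggregations query(String sql) throws Exception {
  // Hypothetical: delegate the SQL-to-request translation to the plugin.
  SearchRequestBuilder request = translateSqlToSearchRequest(sql);
  SearchResponse response = request.execute().actionGet();
  return response.getAggregations();
}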