@Test
public void stringTermsNestedIntoPerBucketAggregator() throws Exception {
    // no execution hint so that the logic that decides whether or not to use ordinals is executed
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(filter("filter")
                    .filter(termFilter(MULTI_VALUED_FIELD_NAME, "val3"))
                    .subAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME)))
            .execute().actionGet();

    assertThat(response.getFailedShards(), equalTo(0));

    Filter filter = response.getAggregations().get("filter");

    Terms terms = filter.getAggregations().get("terms");
    assertThat(terms, notNullValue());
    assertThat(terms.getName(), equalTo("terms"));
    assertThat(terms.getBuckets().size(), equalTo(3));

    for (int i = 2; i <= 4; i++) {
        Terms.Bucket bucket = terms.getBucketByKey("val" + i);
        assertThat(bucket, notNullValue());
        assertThat(key(bucket), equalTo("val" + i));
        assertThat(bucket.getDocCount(), equalTo(i == 3 ? 2L : 1L));
    }
}
@Test
public void testMetric_asSubAggOfSingleBucketAgg() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(filter("filter")
                    .filter(termQuery("tag", "tag0"))
                    .subAggregation(histogram("histo")
                            .field(SINGLE_VALUED_FIELD_NAME)
                            .interval(interval)
                            .extendedBounds((long) minRandomValue, (long) maxRandomValue)
                            .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                    .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum")))
            .execute().actionGet();

    assertSearchResponse(response);

    Filter filter = response.getAggregations().get("filter");
    assertThat(filter, notNullValue());
    assertThat(filter.getName(), equalTo("filter"));

    Histogram histo = filter.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();

    // track which histogram bucket key(s) carry the maximum sum, to verify max_bucket below
    List<String> maxKeys = new ArrayList<>();
    double maxValue = Double.NEGATIVE_INFINITY;
    for (int j = 0; j < numValueBuckets; ++j) {
        Histogram.Bucket bucket = buckets.get(j);
        assertThat(bucket, notNullValue());
        assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
        if (bucket.getDocCount() != 0) {
            Sum sum = bucket.getAggregations().get("sum");
            assertThat(sum, notNullValue());
            if (sum.value() > maxValue) {
                maxValue = sum.value();
                maxKeys = new ArrayList<>();
                maxKeys.add(bucket.getKeyAsString());
            } else if (sum.value() == maxValue) {
                maxKeys.add(bucket.getKeyAsString());
            }
        }
    }

    InternalBucketMetricValue maxBucketValue = filter.getAggregations().get("max_bucket");
    assertThat(maxBucketValue, notNullValue());
    assertThat(maxBucketValue.getName(), equalTo("max_bucket"));
    assertThat(maxBucketValue.value(), equalTo(maxValue));
    assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()])));
}
// https://github.com/elasticsearch/elasticsearch/issues/6435
public void testReduce() throws Exception {
    createIndex("idx");
    final int value = randomIntBetween(0, 10);
    indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value));
    ensureYellow("idx"); // only one document, so make sure all shards have an active primary

    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(filter("filter")
                    .filter(FilterBuilders.matchAllFilter())
                    .subAggregation(range("range")
                            .field("f")
                            .addUnboundedTo(6)
                            .addUnboundedFrom(6)
                            .subAggregation(sum("sum").field("f"))))
            .execute().actionGet();

    assertSearchResponse(response);

    Filter filter = response.getAggregations().get("filter");
    assertNotNull(filter);
    assertEquals(1, filter.getDocCount());

    Range range = filter.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    assertThat(range.getBuckets().size(), equalTo(2));

    Range.Bucket bucket = range.getBucketByKey("*-6.0");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-6.0"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
    assertThat(bucket.getDocCount(), equalTo(value < 6 ? 1L : 0L));
    Sum sum = bucket.getAggregations().get("sum");
    assertEquals(value < 6 ? value : 0, sum.getValue(), 0d);

    bucket = range.getBucketByKey("6.0-*");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
    assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getDocCount(), equalTo(value >= 6 ? 1L : 0L));
    sum = bucket.getAggregations().get("sum");
    assertEquals(value >= 6 ? value : 0, sum.getValue(), 0d);
}
@Override
public void testOrderByEmptyAggregation() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("terms")
                    .field("value")
                    .order(Terms.Order.compound(Terms.Order.aggregation("filter>percentiles.99", true)))
                    .subAggregation(filter("filter", termQuery("value", 100))
                            .subAggregation(percentiles("percentiles")
                                    .method(PercentilesMethod.TDIGEST)
                                    .field("value"))))
            .get();

    assertHitCount(searchResponse, 10);

    Terms terms = searchResponse.getAggregations().get("terms");
    assertThat(terms, notNullValue());
    List<Terms.Bucket> buckets = terms.getBuckets();
    assertThat(buckets, notNullValue());
    assertThat(buckets.size(), equalTo(10));

    for (int i = 0; i < 10; i++) {
        Terms.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
        assertThat(bucket.getDocCount(), equalTo(1L));
        Filter filter = bucket.getAggregations().get("filter");
        assertThat(filter, notNullValue());
        assertThat(filter.getDocCount(), equalTo(0L));
        Percentiles percentiles = filter.getAggregations().get("percentiles");
        assertThat(percentiles, notNullValue());
        assertThat(percentiles.percentile(99), equalTo(Double.NaN));
    }
}
@Test
public void singleValuedField_OrderedBySingleBucketSubAggregationAsc() throws Exception {
    boolean asc = randomBoolean();
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(terms("tags")
                    .executionHint(randomExecutionHint())
                    .field("tag")
                    .order(Terms.Order.aggregation("filter", asc))
                    .subAggregation(filter("filter").filter(FilterBuilders.matchAllFilter())))
            .execute().actionGet();

    assertSearchResponse(response);

    Terms tags = response.getAggregations().get("tags");
    assertThat(tags, notNullValue());
    assertThat(tags.getName(), equalTo("tags"));
    assertThat(tags.getBuckets().size(), equalTo(2));

    Iterator<Terms.Bucket> iters = tags.getBuckets().iterator();

    Terms.Bucket tag = iters.next();
    assertThat(tag, notNullValue());
    assertThat(key(tag), equalTo(asc ? "less" : "more"));
    assertThat(tag.getDocCount(), equalTo(asc ? 2L : 3L));
    Filter filter = tag.getAggregations().get("filter");
    assertThat(filter, notNullValue());
    assertThat(filter.getDocCount(), equalTo(asc ? 2L : 3L));

    tag = iters.next();
    assertThat(tag, notNullValue());
    assertThat(key(tag), equalTo(asc ? "more" : "less"));
    assertThat(tag.getDocCount(), equalTo(asc ? 3L : 2L));
    filter = tag.getAggregations().get("filter");
    assertThat(filter, notNullValue());
    assertThat(filter.getDocCount(), equalTo(asc ? 3L : 2L));
}
@Override
public void testOrderByEmptyAggregation() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("terms")
                    .field("value")
                    .order(Order.compound(Order.aggregation("filter>stats.avg", true)))
                    .subAggregation(filter("filter", termQuery("value", 100))
                            .subAggregation(stats("stats").field("value"))))
            .get();

    assertHitCount(searchResponse, 10);

    Terms terms = searchResponse.getAggregations().get("terms");
    assertThat(terms, notNullValue());
    List<Terms.Bucket> buckets = terms.getBuckets();
    assertThat(buckets, notNullValue());
    assertThat(buckets.size(), equalTo(10));

    for (int i = 0; i < 10; i++) {
        Terms.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat(bucket.getKeyAsNumber(), equalTo((long) i + 1));
        assertThat(bucket.getDocCount(), equalTo(1L));
        Filter filter = bucket.getAggregations().get("filter");
        assertThat(filter, notNullValue());
        assertThat(filter.getDocCount(), equalTo(0L));
        Stats stats = filter.getAggregations().get("stats");
        assertThat(stats, notNullValue());
        assertThat(stats.getMin(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(stats.getMax(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(stats.getAvg(), equalTo(Double.NaN));
        assertThat(stats.getSum(), equalTo(0.0));
        assertThat(stats.getCount(), equalTo(0L));
    }
}
// Test based on: https://github.com/elastic/elasticsearch/issues/9280
public void testParentFilterResolvedCorrectly() throws Exception {
    XContentBuilder mapping = jsonBuilder().startObject().startObject("provider").startObject("properties")
                .startObject("comments")
                    .field("type", "nested")
                    .startObject("properties")
                        .startObject("cid").field("type", "long").endObject()
                        .startObject("identifier").field("type", "keyword").endObject()
                        .startObject("tags")
                            .field("type", "nested")
                            .startObject("properties")
                                .startObject("tid").field("type", "long").endObject()
                                .startObject("name").field("type", "keyword").endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .startObject("dates")
                    .field("type", "object")
                    .startObject("properties")
                        .startObject("day").field("type", "date").field("format", "dateOptionalTime").endObject()
                        .startObject("month")
                            .field("type", "object")
                            .startObject("properties")
                                .startObject("end").field("type", "date").field("format", "dateOptionalTime").endObject()
                                .startObject("start").field("type", "date").field("format", "dateOptionalTime").endObject()
                                .startObject("label").field("type", "keyword").endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
            .endObject().endObject().endObject();
    assertAcked(prepareCreate("idx2")
            .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0))
            .addMapping("provider", mapping));
    ensureGreen("idx2");

    List<IndexRequestBuilder> indexRequests = new ArrayList<>(2);
    indexRequests.add(client().prepareIndex("idx2", "provider", "1").setSource(
            "{\"dates\": {\"month\": {\"label\": \"2014-11\", \"end\": \"2014-11-30\", \"start\": \"2014-11-01\"}, \"day\": \"2014-11-30\"}, \"comments\": [{\"cid\": 3,\"identifier\": \"29111\"}, {\"cid\": 4,\"tags\": [{\"tid\" :44,\"name\": \"Roles\"}], \"identifier\": \"29101\"}]}"));
    indexRequests.add(client().prepareIndex("idx2", "provider", "2").setSource(
            "{\"dates\": {\"month\": {\"label\": \"2014-12\", \"end\": \"2014-12-31\", \"start\": \"2014-12-01\"}, \"day\": \"2014-12-03\"}, \"comments\": [{\"cid\": 1, \"identifier\": \"29111\"}, {\"cid\": 2,\"tags\": [{\"tid\" : 22, \"name\": \"DataChannels\"}], \"identifier\": \"29101\"}]}"));
    indexRandom(true, indexRequests);

    SearchResponse response = client().prepareSearch("idx2").setTypes("provider")
            .addAggregation(terms("startDate").field("dates.month.start")
                    .subAggregation(terms("endDate").field("dates.month.end")
                            .subAggregation(terms("period").field("dates.month.label")
                                    .subAggregation(nested("ctxt_idfier_nested", "comments")
                                            .subAggregation(filter("comment_filter", termQuery("comments.identifier", "29111"))
                                                    .subAggregation(nested("nested_tags", "comments.tags")
                                                            .subAggregation(terms("tag").field("comments.tags.name"))))))))
            .get();
    assertNoFailures(response);
    assertHitCount(response, 2);

    // in both documents the comment matching identifier "29111" carries no tags, so the nested
    // "nested_tags" aggregation must stay empty if the parent filter is resolved correctly
    Terms startDate = response.getAggregations().get("startDate");
    assertThat(startDate.getBuckets().size(), equalTo(2));
    Terms.Bucket bucket = startDate.getBucketByKey("2014-11-01T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    Terms endDate = bucket.getAggregations().get("endDate");
    bucket = endDate.getBucketByKey("2014-11-30T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    Terms period = bucket.getAggregations().get("period");
    bucket = period.getBucketByKey("2014-11");
    assertThat(bucket.getDocCount(), equalTo(1L));
    Nested comments = bucket.getAggregations().get("ctxt_idfier_nested");
    assertThat(comments.getDocCount(), equalTo(2L));
    Filter filter = comments.getAggregations().get("comment_filter");
    assertThat(filter.getDocCount(), equalTo(1L));
    Nested nestedTags = filter.getAggregations().get("nested_tags");
    assertThat(nestedTags.getDocCount(), equalTo(0L)); // This must be 0
    Terms tags = nestedTags.getAggregations().get("tag");
    assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty

    bucket = startDate.getBucketByKey("2014-12-01T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    endDate = bucket.getAggregations().get("endDate");
    bucket = endDate.getBucketByKey("2014-12-31T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    period = bucket.getAggregations().get("period");
    bucket = period.getBucketByKey("2014-12");
    assertThat(bucket.getDocCount(), equalTo(1L));
    comments = bucket.getAggregations().get("ctxt_idfier_nested");
    assertThat(comments.getDocCount(), equalTo(2L));
    filter = comments.getAggregations().get("comment_filter");
    assertThat(filter.getDocCount(), equalTo(1L));
    nestedTags = filter.getAggregations().get("nested_tags");
    assertThat(nestedTags.getDocCount(), equalTo(0L)); // This must be 0
    tags = nestedTags.getAggregations().get("tag");
    assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty
}
@Test
public void singleValuedField_OrderedBySubAggregationAsc_MultiHierarchyLevels() throws Exception {
    boolean asc = randomBoolean();
    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .addAggregation(terms("tags")
                    .executionHint(randomExecutionHint())
                    .field("tag")
                    .order(Terms.Order.aggregation("filter1>filter2>stats.max", asc))
                    .subAggregation(filter("filter1").filter(FilterBuilders.matchAllFilter())
                            .subAggregation(filter("filter2").filter(FilterBuilders.matchAllFilter())
                                    .subAggregation(stats("stats").field("i")))))
            .execute().actionGet();

    assertSearchResponse(response);

    Terms tags = response.getAggregations().get("tags");
    assertThat(tags, notNullValue());
    assertThat(tags.getName(), equalTo("tags"));
    assertThat(tags.getBuckets().size(), equalTo(2));

    Iterator<Terms.Bucket> iters = tags.getBuckets().iterator();

    // the max for "more" is 2
    // the max for "less" is 4

    Terms.Bucket tag = iters.next();
    assertThat(tag, notNullValue());
    assertThat(key(tag), equalTo(asc ? "more" : "less"));
    assertThat(tag.getDocCount(), equalTo(asc ? 3L : 2L));
    Filter filter1 = tag.getAggregations().get("filter1");
    assertThat(filter1, notNullValue());
    assertThat(filter1.getDocCount(), equalTo(asc ? 3L : 2L));
    Filter filter2 = filter1.getAggregations().get("filter2");
    assertThat(filter2, notNullValue());
    assertThat(filter2.getDocCount(), equalTo(asc ? 3L : 2L));
    Stats stats = filter2.getAggregations().get("stats");
    assertThat(stats, notNullValue());
    assertThat(stats.getMax(), equalTo(asc ? 2.0 : 4.0));

    tag = iters.next();
    assertThat(tag, notNullValue());
    assertThat(key(tag), equalTo(asc ? "less" : "more"));
    assertThat(tag.getDocCount(), equalTo(asc ? 2L : 3L));
    filter1 = tag.getAggregations().get("filter1");
    assertThat(filter1, notNullValue());
    assertThat(filter1.getDocCount(), equalTo(asc ? 2L : 3L));
    filter2 = filter1.getAggregations().get("filter2");
    assertThat(filter2, notNullValue());
    assertThat(filter2.getDocCount(), equalTo(asc ? 2L : 3L));
    stats = filter2.getAggregations().get("stats");
    assertThat(stats, notNullValue());
    assertThat(stats.getMax(), equalTo(asc ? 4.0 : 2.0));
}
// Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported
// Duel with filters
public void testRandomRanges() throws Exception {
    final int numDocs = scaledRandomIntBetween(500, 5000);
    final double[][] docs = new double[numDocs][];
    for (int i = 0; i < numDocs; ++i) {
        final int numValues = randomInt(5);
        docs[i] = new double[numValues];
        for (int j = 0; j < numValues; ++j) {
            docs[i][j] = randomDouble() * 100;
        }
    }

    createIndex("idx");
    for (int i = 0; i < docs.length; ++i) {
        XContentBuilder source = jsonBuilder().startObject().startArray("values");
        for (int j = 0; j < docs[i].length; ++j) {
            source = source.value(docs[i][j]);
        }
        source = source.endArray().endObject();
        client().prepareIndex("idx", "type").setSource(source).execute().actionGet();
    }
    assertNoFailures(client().admin().indices().prepareRefresh("idx")
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .execute().get());

    // generate random ranges: unbounded below, unbounded above, or bounded on both sides
    final int numRanges = randomIntBetween(1, 20);
    final double[][] ranges = new double[numRanges][];
    for (int i = 0; i < ranges.length; ++i) {
        switch (randomInt(2)) {
            case 0:
                ranges[i] = new double[] { Double.NEGATIVE_INFINITY, randomInt(100) };
                break;
            case 1:
                ranges[i] = new double[] { randomInt(100), Double.POSITIVE_INFINITY };
                break;
            case 2:
                ranges[i] = new double[] { randomInt(100), randomInt(100) };
                break;
            default:
                throw new AssertionError();
        }
    }

    RangeBuilder query = range("range").field("values");
    for (int i = 0; i < ranges.length; ++i) {
        String key = Integer.toString(i);
        if (ranges[i][0] == Double.NEGATIVE_INFINITY) {
            query.addUnboundedTo(key, ranges[i][1]);
        } else if (ranges[i][1] == Double.POSITIVE_INFINITY) {
            query.addUnboundedFrom(key, ranges[i][0]);
        } else {
            query.addRange(key, ranges[i][0], ranges[i][1]);
        }
    }

    // add one filter aggregation per range so the counts can be dueled against the range aggregation
    SearchRequestBuilder reqBuilder = client().prepareSearch("idx").addAggregation(query);
    for (int i = 0; i < ranges.length; ++i) {
        RangeFilterBuilder filter = FilterBuilders.rangeFilter("values");
        if (ranges[i][0] != Double.NEGATIVE_INFINITY) {
            filter = filter.from(ranges[i][0]);
        }
        if (ranges[i][1] != Double.POSITIVE_INFINITY) {
            filter = filter.to(ranges[i][1]);
        }
        reqBuilder = reqBuilder.addAggregation(filter("filter" + i).filter(filter));
    }

    SearchResponse resp = reqBuilder.execute().actionGet();
    Range range = resp.getAggregations().get("range");

    for (int i = 0; i < ranges.length; ++i) {
        // compute the expected count for this range by brute force over the generated docs
        long count = 0;
        for (double[] values : docs) {
            for (double value : values) {
                if (value >= ranges[i][0] && value < ranges[i][1]) {
                    ++count;
                    break;
                }
            }
        }

        final Range.Bucket bucket = range.getBucketByKey(Integer.toString(i));
        assertEquals(bucket.getKey(), count, bucket.getDocCount());

        final Filter filter = resp.getAggregations().get("filter" + i);
        assertThat(filter.getDocCount(), equalTo(count));
    }
}