@Test public void singleValueField_OrderedByTermDesc() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) .order(Terms.Order.term(false))) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); int i = 4; for (Terms.Bucket bucket : terms.getBuckets()) { assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(1l)); i--; } }
@Test public void testSimpleSubAggregations() throws Exception { final String query = String.format( "SELECT /*! DOCS_WITH_AGGREGATION(10) */ * FROM %s/account GROUP BY (gender), (state) ", TEST_INDEX); SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); SearchResponse response = (SearchResponse) select.get(); Aggregations result = response.getAggregations(); Terms gender = result.get("gender"); for (Terms.Bucket genderBucket : gender.getBuckets()) { String genderKey = genderBucket.getKey(); Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || genderKey.equals("f")); } Assert.assertEquals(2, gender.getBuckets().size()); Terms state = result.get("state"); for (Terms.Bucket stateBucket : state.getBuckets()) { if (stateBucket.getKey().equalsIgnoreCase("ak")) { Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); } } Assert.assertEquals(response.getHits().totalHits(), 1000); Assert.assertEquals(response.getHits().hits().length, 10); }
@Test public void singleValuedField_OrderedBySingleValueSubAggregationAsc() throws Exception { boolean asc = true; SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) .order(Terms.Order.aggregation("avg_i", asc)) .subAggregation(avg("avg_i").field("i"))) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); int i = 0; for (Terms.Bucket bucket : terms.getBuckets()) { assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(1l)); Avg avg = bucket.getAggregations().get("avg_i"); assertThat(avg, notNullValue()); assertThat(avg.getValue(), equalTo((double) i)); i++; } }
@Test public void testFromSizeWithAggregations() throws Exception { final String query1 = String.format( "SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + " account_number FROM %s/account GROUP BY gender", TEST_INDEX); SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); Assert.assertEquals(1, response1.getHits().getHits().length); Terms gender1 = response1.getAggregations().get("gender"); Assert.assertEquals(2, gender1.getBuckets().size()); Object account1 = response1.getHits().getHits()[0].getSource().get("account_number"); final String query2 = String.format( "SELECT /*! DOCS_WITH_AGGREGATION(1,1) */" + " account_number FROM %s/account GROUP BY gender", TEST_INDEX); SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); Assert.assertEquals(1, response2.getHits().getHits().length); Terms gender2 = response2.getAggregations().get("gender"); Assert.assertEquals(2, gender2.getBuckets().size()); Object account2 = response2.getHits().getHits()[0].getSource().get("account_number"); Assert.assertEquals(response1.getHits().getTotalHits(), response2.getHits().getTotalHits()); Assert.assertNotEquals(account1, account2); }
@Test public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { Aggregations result = query( String.format( "SELECT COUNT(*) FROM %s/nestedType GROUP BY nested(message.info),filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", TEST_INDEX)); InternalNested nested = result.get("message.info@NESTED"); InternalFilter filter = nested.getAggregations().get("myFilter@FILTER"); Terms infos = filter.getAggregations().get("message.info"); Assert.assertEquals(1, infos.getBuckets().size()); for (Terms.Bucket bucket : infos.getBuckets()) { InternalReverseNested reverseNested = bucket.getAggregations().get("comment.data@NESTED_REVERSED"); InternalNested innerNested = reverseNested.getAggregations().get("comment.data@NESTED"); Terms terms = innerNested.getAggregations().get("comment.data"); Terms.Bucket internalBucket = terms.getBuckets().get(0); long count = ((ValueCount) internalBucket.getAggregations().get("COUNT(*)")).getValue(); String key = internalBucket.getKey(); if (key.equalsIgnoreCase("ab")) { Assert.assertEquals(2, count); } else { throw new Exception(String.format("Unexpected key. expected: only ab . found: %s", key)); } } }
/* Duel between histograms and scripted terms */ public void testDuelTermsHistogram() throws Exception { createIndex("idx"); final int numDocs = scaledRandomIntBetween(500, 5000); final int maxNumTerms = randomIntBetween(10, 2000); final int interval = randomIntBetween(1, 100); final Integer[] values = new Integer[maxNumTerms]; for (int i = 0; i < values.length; ++i) { values[i] = randomInt(maxNumTerms * 3) - maxNumTerms; } for (int i = 0; i < numDocs; ++i) { XContentBuilder source = jsonBuilder().startObject().field("num", randomDouble()).startArray("values"); final int numValues = randomInt(4); for (int j = 0; j < numValues; ++j) { source = source.value(randomFrom(values)); } source = source.endArray().endObject(); client().prepareIndex("idx", "type").setSource(source).execute().actionGet(); } assertNoFailures( client() .admin() .indices() .prepareRefresh("idx") .setIndicesOptions(IndicesOptions.lenientExpandOpen()) .execute() .get()); SearchResponse resp = client() .prepareSearch("idx") .addAggregation( terms("terms") .field("values") .collectMode(randomFrom(SubAggCollectionMode.values())) .script("floor(_value / interval)") .param("interval", interval) .size(maxNumTerms)) .addAggregation(histogram("histo").field("values").interval(interval)) .execute() .actionGet(); assertSearchResponse(resp); Terms terms = resp.getAggregations().get("terms"); assertThat(terms, notNullValue()); Histogram histo = resp.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(terms.getBuckets().size(), equalTo(histo.getBuckets().size())); for (Terms.Bucket bucket : terms.getBuckets()) { final long key = bucket.getKeyAsNumber().longValue() * interval; final Histogram.Bucket histoBucket = histo.getBucketByKey(key); assertEquals(bucket.getDocCount(), histoBucket.getDocCount()); } }
@Test public void testChildrenAggs() throws Exception { SearchResponse searchResponse = client() .prepareSearch("test") .setQuery(matchQuery("randomized", true)) .addAggregation( terms("category") .field("category") .size(0) .subAggregation( children("to_comment") .childType("comment") .subAggregation( terms("commenters") .field("commenter") .size(0) .subAggregation(topHits("top_comments"))))) .get(); assertSearchResponse(searchResponse); Terms categoryTerms = searchResponse.getAggregations().get("category"); assertThat(categoryTerms.getBuckets().size(), equalTo(categoryToControl.size())); for (Map.Entry<String, Control> entry1 : categoryToControl.entrySet()) { Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry1.getKey()); assertThat(categoryBucket.getKey(), equalTo(entry1.getKey())); assertThat(categoryBucket.getDocCount(), equalTo((long) entry1.getValue().articleIds.size())); Children childrenBucket = categoryBucket.getAggregations().get("to_comment"); assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size())); assertThat( (long) childrenBucket.getProperty("_count"), equalTo((long) entry1.getValue().commentIds.size())); Terms commentersTerms = childrenBucket.getAggregations().get("commenters"); assertThat((Terms) childrenBucket.getProperty("commenters"), sameInstance(commentersTerms)); assertThat( commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size())); for (Map.Entry<String, Set<String>> entry2 : entry1.getValue().commenterToCommentId.entrySet()) { Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey()); assertThat(commentBucket.getKey(), equalTo(entry2.getKey())); assertThat(commentBucket.getDocCount(), equalTo((long) entry2.getValue().size())); TopHits topHits = commentBucket.getAggregations().get("top_comments"); for (SearchHit searchHit : topHits.getHits().getHits()) { assertThat(entry2.getValue().contains(searchHit.getId()), is(true)); } } } }
public void run() { Client client = null; TransportClient transportClient = null; try { transportClient = new TransportClient(); client = transportClient.addTransportAddress(new InetSocketTransportAddress("192.168.1.40", 9300)); SearchResponse response = client .prepareSearch("tms-allflat") .setTypes("personal") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation( AggregationBuilders.terms("aggs1") .field("skill_1") .size(20) .order(Terms.Order.count(false))) .execute() .actionGet(); Terms terms = response.getAggregations().get("aggs1"); terms .getBuckets() .stream() .forEach(s -> System.out.println(s.getKeyAsText() + "(" + s.getDocCount() + ")")); } finally { /* client is the same instance returned by addTransportAddress, so a single null-safe close is enough */ if (transportClient != null) { transportClient.close(); } } }
@Test public void partiallyUnmapped() throws Exception { SearchResponse response = client() .prepareSearch("idx", "idx_unmapped") .setTypes("type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(1l)); } }
@Test public void singleValueField_WithMaxSize() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("high_card_type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) .size(20) .order(Terms.Order.term(true))) /* we need to sort by terms because we're checking the first 20 values */ .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(20)); for (int i = 0; i < 20; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + Strings.padStart(i + "", 3, '0')); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + Strings.padStart(i + "", 3, '0'))); assertThat(bucket.getDocCount(), equalTo(1l)); } }
@Test public void stringTermsNestedIntoPerBucketAggregator() throws Exception { /* no execution hint so that the logic that decides whether or not to use ordinals is executed */ SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( filter("filter") .filter(termFilter(MULTI_VALUED_FIELD_NAME, "val3")) .subAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME))) .execute() .actionGet(); assertThat(response.getFailedShards(), equalTo(0)); Filter filter = response.getAggregations().get("filter"); Terms terms = filter.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(3)); for (int i = 2; i <= 4; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(i == 3 ? 2L : 1L)); } }
public Map<String, Long> countByField(IndexField indexField, FilterBuilder filter) { Map<String, Long> counts = new HashMap<>(); SearchRequestBuilder request = client .prepareSearch(this.getIndexName()) .setTypes(this.getIndexType()) .setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filter)) .setSize(0) .addAggregation( AggregationBuilders.terms(indexField.field()) .field(indexField.field()) .order(Terms.Order.count(false)) .size(Integer.MAX_VALUE) .minDocCount(0)); SearchResponse response = request.get(); Terms values = response.getAggregations().get(indexField.field()); for (Terms.Bucket value : values.getBuckets()) { counts.put(value.getKey(), value.getDocCount()); } return counts; }
@Test public void multiValuedField_WithValueScript_NotUnique() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .field(MULTI_VALUED_FIELD_NAME) .script("_value.substring(0,3)")) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(1)); Terms.Bucket bucket = terms.getBucketByKey("val"); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val")); assertThat(bucket.getDocCount(), equalTo(5l)); }
/* Compute the average age of employees for each interest; aggregations also support hierarchical (nested) roll-ups. */ @Test public void subAggregationTest() { Client client = ClientTemplate.getClient(); SearchResponse response = client .prepareSearch("fccs") .setTypes("employee") .addAggregation( AggregationBuilders.terms("by_interests") .field("interests") .subAggregation(AggregationBuilders.avg("avg_age").field("age"))) .get(); Terms terms = response.getAggregations().get("by_interests"); List<Bucket> buckets = terms.getBuckets(); for (Bucket bucket : buckets) { Avg avg = bucket.getAggregations().get("avg_age"); System.out.println( "key:" + bucket.getKey() + " doc_count:" + bucket.getDocCount() + " avg_age:" + avg.getValue()); } client.close(); }
@Test public void script_SingleValue_WithSubAggregator_Inherited() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value") .subAggregation(count("count"))) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(5)); for (int i = 0; i < 5; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); assertThat(bucket.getDocCount(), equalTo(1l)); ValueCount valueCount = bucket.getAggregations().get("count"); assertThat(valueCount, notNullValue()); assertThat(valueCount.getValue(), equalTo(1l)); } }
void assertBasicAggregationWorks(String indexName) { /* histogram on a long */ SearchResponse searchRsp = client() .prepareSearch(indexName) .addAggregation(AggregationBuilders.histogram("histo").field("long_sort").interval(10)) .get(); ElasticsearchAssertions.assertSearchResponse(searchRsp); Histogram histo = searchRsp.getAggregations().get("histo"); assertNotNull(histo); long totalCount = 0; for (Histogram.Bucket bucket : histo.getBuckets()) { totalCount += bucket.getDocCount(); } assertEquals(totalCount, searchRsp.getHits().getTotalHits()); /* terms on a boolean */ searchRsp = client() .prepareSearch(indexName) .addAggregation(AggregationBuilders.terms("bool_terms").field("bool")) .get(); Terms terms = searchRsp.getAggregations().get("bool_terms"); totalCount = 0; for (Terms.Bucket bucket : terms.getBuckets()) { totalCount += bucket.getDocCount(); } assertEquals(totalCount, searchRsp.getHits().getTotalHits()); }
@Test public void testNoBuckets() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( terms("terms") .field("tag") .exclude("tag.*") .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(maxBucket("max_bucket").setBucketsPaths("terms>sum")) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<Terms.Bucket> buckets = terms.getBuckets(); assertThat(buckets.size(), equalTo(0)); InternalBucketMetricValue maxBucketValue = response.getAggregations().get("max_bucket"); assertThat(maxBucketValue, notNullValue()); assertThat(maxBucketValue.getName(), equalTo("max_bucket")); assertThat(maxBucketValue.value(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(maxBucketValue.keys(), equalTo(new String[0])); }
@Test /* the main purpose of this test is to make sure we're not allocating 2GB of memory per shard */ public void sizeIsZero() { final int minDocCount = randomInt(1); SearchResponse response = client() .prepareSearch("idx") .setTypes("high_card_type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .field(SINGLE_VALUED_FIELD_NAME) .minDocCount(minDocCount) .size(0)) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat( terms.getBuckets().size(), equalTo(minDocCount == 0 ? 105 : 100)); /* 105 because of the other type */ }
@Test public void script_MultiValued() throws Exception { SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( terms("terms") .executionHint(randomExecutionHint()) .script("doc['" + MULTI_VALUED_FIELD_NAME + "'].values")) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); assertThat(terms.getBuckets().size(), equalTo(6)); for (int i = 0; i < 6; i++) { Terms.Bucket bucket = terms.getBucketByKey("val" + i); assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo("val" + i)); if (i == 0 || i == 5) { assertThat(bucket.getDocCount(), equalTo(1l)); } else { assertThat(bucket.getDocCount(), equalTo(2l)); } } }
public static LinkedHashMap<String, Long> termsToMap(Terms terms) { LinkedHashMap<String, Long> map = new LinkedHashMap<>(); List<Terms.Bucket> buckets = terms.getBuckets(); for (Terms.Bucket bucket : buckets) { map.put(bucket.getKeyAsString(), bucket.getDocCount()); } return map; }
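A minimal usage sketch for the termsToMap helper above, assuming an already built Client; the index name "idx", field "tag", and aggregation name "tags" are illustrative assumptions, not taken from the snippets in this set.
// Hypothetical usage of termsToMap; names are placeholders.
SearchResponse response = client.prepareSearch("idx").addAggregation(AggregationBuilders.terms("tags").field("tag")).get();
Terms tags = response.getAggregations().get("tags");
Map<String, Long> counts = termsToMap(tags); // LinkedHashMap, so the aggregation's bucket order is preserved
counts.forEach((term, docCount) -> System.out.println(term + " -> " + docCount));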
@Test public void testDocCount_asSubAgg() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( terms("terms") .field("tag") .order(Order.term(true)) .subAggregation( histogram("histo") .field(SINGLE_VALUED_FIELD_NAME) .interval(interval) .extendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>_count"))) .execute() .actionGet(); assertSearchResponse(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); List<Terms.Bucket> termsBuckets = terms.getBuckets(); assertThat(termsBuckets.size(), equalTo(interval)); for (int i = 0; i < interval; ++i) { Terms.Bucket termsBucket = termsBuckets.get(i); assertThat(termsBucket, notNullValue()); assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); Histogram histo = termsBucket.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); List<String> maxKeys = new ArrayList<>(); double maxValue = Double.NEGATIVE_INFINITY; for (int j = 0; j < numValueBuckets; ++j) { Histogram.Bucket bucket = buckets.get(j); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); if (bucket.getDocCount() > maxValue) { maxValue = bucket.getDocCount(); maxKeys = new ArrayList<>(); maxKeys.add(bucket.getKeyAsString()); } else if (bucket.getDocCount() == maxValue) { maxKeys.add(bucket.getKeyAsString()); } } InternalBucketMetricValue maxBucketValue = termsBucket.getAggregations().get("max_bucket"); assertThat(maxBucketValue, notNullValue()); assertThat(maxBucketValue.getName(), equalTo("max_bucket")); assertThat(maxBucketValue.value(), equalTo(maxValue)); assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()]))); } }
@Test public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedField() throws Exception { Aggregations result = query( String.format( "SELECT sum(reverse_nested(myNum)) bla FROM %s/nestedType GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX)); InternalNested nested = result.get("message.info@NESTED"); InternalFilter filter = nested.getAggregations().get("myFilter@FILTER"); Terms infos = filter.getAggregations().get("message.info"); Assert.assertEquals(1, infos.getBuckets().size()); for (Terms.Bucket bucket : infos.getBuckets()) { InternalReverseNested reverseNested = bucket.getAggregations().get("myNum@NESTED"); InternalSum sum = reverseNested.getAggregations().get("bla"); Assert.assertEquals(5.0, sum.getValue(), 0.000001); } }
@Test public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { Aggregations result = query( String.format( "SELECT COUNT(*) FROM %s/nestedType GROUP BY nested(message.info),filter('myFilter',message.info = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2' , 'alias' = 'someAlias' )", TEST_INDEX)); InternalNested nested = result.get("message.info@NESTED"); InternalFilter filter = nested.getAggregations().get("myFilter@FILTER"); Terms infos = filter.getAggregations().get("message.info"); Assert.assertEquals(1, infos.getBuckets().size()); for (Terms.Bucket bucket : infos.getBuckets()) { InternalReverseNested reverseNested = bucket.getAggregations().get("someAlias@NESTED"); InternalHistogram histogram = reverseNested.getAggregations().get("someAlias"); Assert.assertEquals(2, histogram.getBuckets().size()); } }
public static List<String> termsKeys(Terms terms) { return Lists.transform( terms.getBuckets(), new Function<Terms.Bucket, String>() { @Override public String apply(Terms.Bucket bucket) { return bucket.getKeyAsString(); } }); }
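For projects that do not use Guava, a roughly equivalent sketch using Java 8 streams (same Terms API as the snippets above); the method name termsKeysEager is mine, and unlike Lists.transform this builds the list eagerly rather than as a lazy view.
// Eager, Guava-free variant of the termsKeys helper above.
public static List<String> termsKeysEager(Terms terms) {
  return terms.getBuckets().stream()
      .map(Terms.Bucket::getKeyAsString) // each bucket's key as a string, in returned order
      .collect(java.util.stream.Collectors.toList());
}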
public void testUnmappedTerms() { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("my_terms").field("non_existing_field").missing("bar")) .get(); assertSearchResponse(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(1, terms.getBuckets().size()); assertEquals(2, terms.getBucketByKey("bar").getDocCount()); }
@Test public void limitTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { Aggregations result = query( String.format( "SELECT COUNT(*) FROM %s/account GROUP BY age ORDER BY COUNT(*) LIMIT 5", TEST_INDEX)); Terms age = result.get("age"); assertThat(age.getBuckets().size(), equalTo(5)); }
@Test public void singleValuedField_OrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client() .prepareSearch("idx") .setTypes("type") .addAggregation( terms("tags") .executionHint(randomExecutionHint()) .field("tag") .order(Terms.Order.aggregation("filter", asc)) .subAggregation(filter("filter").filter(FilterBuilders.matchAllFilter()))) .execute() .actionGet(); assertSearchResponse(response); Terms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); assertThat(tags.getName(), equalTo("tags")); assertThat(tags.getBuckets().size(), equalTo(2)); Iterator<Terms.Bucket> iters = tags.getBuckets().iterator(); Terms.Bucket tag = iters.next(); assertThat(tag, notNullValue()); assertThat(key(tag), equalTo(asc ? "less" : "more")); assertThat(tag.getDocCount(), equalTo(asc ? 2l : 3l)); Filter filter = tag.getAggregations().get("filter"); assertThat(filter, notNullValue()); assertThat(filter.getDocCount(), equalTo(asc ? 2l : 3l)); tag = iters.next(); assertThat(tag, notNullValue()); assertThat(key(tag), equalTo(asc ? "more" : "less")); assertThat(tag.getDocCount(), equalTo(asc ? 3l : 2l)); filter = tag.getAggregations().get("filter"); assertThat(filter, notNullValue()); assertThat(filter.getDocCount(), equalTo(asc ? 3l : 2l)); }
@Test public void termsWithSize() throws Exception { Aggregations result = query( String.format( "SELECT COUNT(*) FROM %s/account GROUP BY terms('alias'='ageAgg','field'='age','size'=3)", TEST_INDEX)); Terms ageAgg = result.get("ageAgg"); Assert.assertEquals(3, ageAgg.getBuckets().size()); }
@Test public void groupByOnNestedFieldWithFilterTest() throws Exception { Aggregations result = query( String.format( "SELECT COUNT(*) FROM %s/nestedType GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX)); InternalNested nested = result.get("message.info@NESTED"); InternalFilter filter = nested.getAggregations().get("myFilter@FILTER"); Terms infos = filter.getAggregations().get("message.info"); Assert.assertEquals(1, infos.getBuckets().size()); for (Terms.Bucket bucket : infos.getBuckets()) { String key = bucket.getKey(); long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); if (key.equalsIgnoreCase("a")) { Assert.assertEquals(2, count); } else { throw new Exception(String.format("Unexpected key. expected: only a . found: %s", key)); } } }
@Test public void testSubAggregations() throws Exception { Set<Integer> expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), DiscreteDomain.integers())); final String query = String.format( "SELECT /*! DOCS_WITH_AGGREGATION(10) */" + " * FROM %s/account GROUP BY (gender, age), (state) LIMIT 0,10", TEST_INDEX); Map<String, Set<Integer>> buckets = new HashMap<>(); SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); SearchResponse response = (SearchResponse) select.get(); Aggregations result = response.getAggregations(); Terms gender = result.get("gender"); for (Terms.Bucket genderBucket : gender.getBuckets()) { String genderKey = genderBucket.getKey(); buckets.put(genderKey, new HashSet<Integer>()); Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); for (Terms.Bucket ageBucket : ageBuckets.getBuckets()) { buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey())); } } Assert.assertEquals(2, buckets.keySet().size()); Assert.assertEquals(expectedAges, buckets.get("m")); Assert.assertEquals(expectedAges, buckets.get("f")); Terms state = result.get("state"); for (Terms.Bucket stateBucket : state.getBuckets()) { if (stateBucket.getKey().equalsIgnoreCase("ak")) { Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); } } Assert.assertEquals(response.getHits().totalHits(), 1000); Assert.assertEquals(response.getHits().hits().length, 10); }