public void testLargeNumbersOfPercentileBuckets() throws Exception {
    // test high numbers of percentile buckets to make sure paging and release work correctly
    createIndex("idx");
    final int numDocs = scaledRandomIntBetween(2500, 5000);
    logger.info("Indexing [" + numDocs + "] docs");
    List<IndexRequestBuilder> indexingRequests = Lists.newArrayList();
    for (int i = 0; i < numDocs; ++i) {
        indexingRequests.add(client().prepareIndex("idx", "type", Integer.toString(i))
                .setSource("double_value", randomDouble()));
    }
    indexRandom(true, indexingRequests);

    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(terms("terms")
                    .field("double_value")
                    .collectMode(randomFrom(SubAggCollectionMode.values()))
                    .subAggregation(percentiles("pcts").field("double_value")))
            .execute().actionGet();
    assertAllSuccessful(response);
    assertEquals(numDocs, response.getHits().getTotalHits());
}
// Duel between histograms and scripted terms
public void testDuelTermsHistogram() throws Exception {
    createIndex("idx");

    final int numDocs = scaledRandomIntBetween(500, 5000);
    final int maxNumTerms = randomIntBetween(10, 2000);
    final int interval = randomIntBetween(1, 100);

    final Integer[] values = new Integer[maxNumTerms];
    for (int i = 0; i < values.length; ++i) {
        values[i] = randomInt(maxNumTerms * 3) - maxNumTerms;
    }

    for (int i = 0; i < numDocs; ++i) {
        XContentBuilder source = jsonBuilder()
                .startObject()
                .field("num", randomDouble())
                .startArray("values");
        final int numValues = randomInt(4);
        for (int j = 0; j < numValues; ++j) {
            source = source.value(randomFrom(values));
        }
        source = source.endArray().endObject();
        client().prepareIndex("idx", "type").setSource(source).execute().actionGet();
    }
    assertNoFailures(client().admin().indices().prepareRefresh("idx")
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .execute().get());

    SearchResponse resp = client().prepareSearch("idx")
            .addAggregation(terms("terms")
                    .field("values")
                    .collectMode(randomFrom(SubAggCollectionMode.values()))
                    .script("floor(_value / interval)")
                    .param("interval", interval)
                    .size(maxNumTerms))
            .addAggregation(histogram("histo").field("values").interval(interval))
            .execute().actionGet();
    assertSearchResponse(resp);

    Terms terms = resp.getAggregations().get("terms");
    assertThat(terms, notNullValue());
    Histogram histo = resp.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(terms.getBuckets().size(), equalTo(histo.getBuckets().size()));
    for (Terms.Bucket bucket : terms.getBuckets()) {
        // the scripted terms key is floor(value / interval); scaling it back up by
        // the interval must land on the matching histogram bucket key
        final long key = bucket.getKeyAsNumber().longValue() * interval;
        final Histogram.Bucket histoBucket = histo.getBucketByKey(key);
        assertEquals(bucket.getDocCount(), histoBucket.getDocCount());
    }
}
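// Sketch, not part of the original suite: the duel above relies on the identity that
// scaling the scripted terms key floor(v / interval) back up by the interval lands on
// the histogram's bucket key, assuming the histogram agg rounds keys toward negative
// infinity (floor semantics). A standalone check of that arithmetic, with no
// Elasticsearch dependencies:
private static void checkTermsKeyMatchesHistoKey(final int interval) {
    assert interval > 0;
    for (long v = -300; v <= 300; ++v) {
        // key produced by the "floor(_value / interval)" script, scaled back up
        final long termsKey = (long) Math.floor((double) v / interval) * interval;
        // floor-rounded histogram key: the greatest multiple of interval <= v
        final long histoKey = Math.floorDiv(v, interval) * interval;
        assert termsKey == histoKey : v + ": " + termsKey + " != " + histoKey;
    }
}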
@Before
public void init() throws Exception {
    createIndex("idx");
    IndexRequestBuilder[] lowCardBuilders = new IndexRequestBuilder[5]; // TODO randomize the size?
    for (int i = 0; i < lowCardBuilders.length; i++) {
        lowCardBuilders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
                .startObject()
                .field(SINGLE_VALUED_FIELD_NAME, "val" + i)
                .field("i", i)
                .field("tag", i < lowCardBuilders.length / 2 + 1 ? "more" : "less")
                .startArray(MULTI_VALUED_FIELD_NAME)
                .value("val" + i)
                .value("val" + (i + 1))
                .endArray()
                .endObject());
    }
    indexRandom(true, lowCardBuilders);

    IndexRequestBuilder[] highCardBuilders = new IndexRequestBuilder[100]; // TODO randomize the size?
    for (int i = 0; i < highCardBuilders.length; i++) {
        highCardBuilders[i] = client().prepareIndex("idx", "high_card_type").setSource(jsonBuilder()
                .startObject()
                .field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0'))
                .startArray(MULTI_VALUED_FIELD_NAME)
                .value("val" + Strings.padStart(i + "", 3, '0'))
                .value("val" + Strings.padStart((i + 1) + "", 3, '0'))
                .endArray()
                .endObject());
    }
    indexRandom(true, highCardBuilders);
    createIndex("idx_unmapped");
    ensureSearchable();
}
// https://github.com/elasticsearch/elasticsearch/issues/6435
public void testReduce() throws Exception {
    createIndex("idx");
    final int value = randomIntBetween(0, 10);
    indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value));
    ensureYellow("idx"); // only one document; make sure all shards have an active primary
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(filter("filter")
                    .filter(FilterBuilders.matchAllFilter())
                    .subAggregation(range("range")
                            .field("f")
                            .addUnboundedTo(6)
                            .addUnboundedFrom(6)
                            .subAggregation(sum("sum").field("f"))))
            .execute().actionGet();

    assertSearchResponse(response);

    Filter filter = response.getAggregations().get("filter");
    assertNotNull(filter);
    assertEquals(1, filter.getDocCount());

    Range range = filter.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    assertThat(range.getBuckets().size(), equalTo(2));

    // range buckets are half-open [from, to), so a value of exactly 6 falls into "6.0-*"
    Range.Bucket bucket = range.getBucketByKey("*-6.0");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("*-6.0"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
    assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
    assertThat(bucket.getDocCount(), equalTo(value < 6 ? 1L : 0L));
    Sum sum = bucket.getAggregations().get("sum");
    assertEquals(value < 6 ? value : 0, sum.getValue(), 0d);

    bucket = range.getBucketByKey("6.0-*");
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKey(), equalTo("6.0-*"));
    assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
    assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
    assertThat(bucket.getDocCount(), equalTo(value >= 6 ? 1L : 0L));
    sum = bucket.getAggregations().get("sum");
    assertEquals(value >= 6 ? value : 0, sum.getValue(), 0d);
}
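// Sketch (not in the original test): the two half-open buckets above partition the
// real line, so an extra invariant could be asserted at the end of testReduce(), namely
// that the bucket doc counts sum to the enclosing filter's doc count:
//
//     assertEquals(filter.getDocCount(),
//             range.getBucketByKey("*-6.0").getDocCount()
//                     + range.getBucketByKey("6.0-*").getDocCount());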
// Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported
// Duel with filters
public void testRandomRanges() throws Exception {
    final int numDocs = scaledRandomIntBetween(500, 5000);
    final double[][] docs = new double[numDocs][];
    for (int i = 0; i < numDocs; ++i) {
        final int numValues = randomInt(5);
        docs[i] = new double[numValues];
        for (int j = 0; j < numValues; ++j) {
            docs[i][j] = randomDouble() * 100;
        }
    }

    createIndex("idx");
    for (int i = 0; i < docs.length; ++i) {
        XContentBuilder source = jsonBuilder().startObject().startArray("values");
        for (int j = 0; j < docs[i].length; ++j) {
            source = source.value(docs[i][j]);
        }
        source = source.endArray().endObject();
        client().prepareIndex("idx", "type").setSource(source).execute().actionGet();
    }
    assertNoFailures(client().admin().indices().prepareRefresh("idx")
            .setIndicesOptions(IndicesOptions.lenientExpandOpen())
            .execute().get());

    final int numRanges = randomIntBetween(1, 20);
    final double[][] ranges = new double[numRanges][];
    for (int i = 0; i < ranges.length; ++i) {
        switch (randomInt(2)) {
        case 0:
            ranges[i] = new double[] { Double.NEGATIVE_INFINITY, randomInt(100) };
            break;
        case 1:
            ranges[i] = new double[] { randomInt(100), Double.POSITIVE_INFINITY };
            break;
        case 2:
            ranges[i] = new double[] { randomInt(100), randomInt(100) };
            break;
        default:
            throw new AssertionError();
        }
    }

    RangeBuilder query = range("range").field("values");
    for (int i = 0; i < ranges.length; ++i) {
        String key = Integer.toString(i);
        if (ranges[i][0] == Double.NEGATIVE_INFINITY) {
            query.addUnboundedTo(key, ranges[i][1]);
        } else if (ranges[i][1] == Double.POSITIVE_INFINITY) {
            query.addUnboundedFrom(key, ranges[i][0]);
        } else {
            query.addRange(key, ranges[i][0], ranges[i][1]);
        }
    }

    SearchRequestBuilder reqBuilder = client().prepareSearch("idx").addAggregation(query);
    for (int i = 0; i < ranges.length; ++i) {
        RangeFilterBuilder filter = FilterBuilders.rangeFilter("values");
        if (ranges[i][0] != Double.NEGATIVE_INFINITY) {
            filter = filter.from(ranges[i][0]);
        }
        if (ranges[i][1] != Double.POSITIVE_INFINITY) {
            filter = filter.to(ranges[i][1]);
        }
        reqBuilder = reqBuilder.addAggregation(filter("filter" + i).filter(filter));
    }

    SearchResponse resp = reqBuilder.execute().actionGet();
    Range range = resp.getAggregations().get("range");

    for (int i = 0; i < ranges.length; ++i) {
        long count = 0;
        for (double[] values : docs) {
            for (double value : values) {
                // range buckets are half-open [from, to); count each doc at most once
                if (value >= ranges[i][0] && value < ranges[i][1]) {
                    ++count;
                    break;
                }
            }
        }

        final Range.Bucket bucket = range.getBucketByKey(Integer.toString(i));
        assertEquals(bucket.getKey(), count, bucket.getDocCount());

        final Filter filter = resp.getAggregations().get("filter" + i);
        assertThat(filter.getDocCount(), equalTo(count));
    }
}
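// Note on the duel above (an observation, not a fix from the original source): the
// range agg buckets values into half-open intervals [from, to), while the from/to
// bounds of FilterBuilders.rangeFilter(...) are inclusive by default. The two only
// agree here because the indexed values are continuous doubles (randomDouble() * 100)
// and the bounds are integers, so a value landing exactly on a bound is practically
// impossible. A stricter variant could pin the filter to the agg's semantics, e.g.
// (assuming the include* setters on RangeFilterBuilder in this client version):
//
//     filter = filter.includeLower(true).includeUpper(false);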