public void testNestedAsSubAggregation() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(terms("top_values")
                    .field("value")
                    .size(100)
                    .collectMode(aggCollectionMode)
                    .subAggregation(nested("nested", "nested")
                            .subAggregation(max("max_value").field("nested.value"))))
            .execute().actionGet();

    assertSearchResponse(response);

    LongTerms values = response.getAggregations().get("top_values");
    assertThat(values, notNullValue());
    assertThat(values.getName(), equalTo("top_values"));
    assertThat(values.getBuckets(), notNullValue());
    assertThat(values.getBuckets().size(), equalTo(numParents));

    for (int i = 0; i < numParents; i++) {
        String topValue = "" + (i + 1);
        assertThat(values.getBucketByKey(topValue), notNullValue());
        Nested nested = values.getBucketByKey(topValue).getAggregations().get("nested");
        assertThat(nested, notNullValue());
        Max max = nested.getAggregations().get("max_value");
        assertThat(max, notNullValue());
        assertThat(max.getValue(),
                equalTo(numChildren[i] == 0 ? Double.NEGATIVE_INFINITY : (double) i + numChildren[i]));
    }
}
public void testEmptyAggregation() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
            .setQuery(matchAllQuery())
            .addAggregation(histogram("histo")
                    .field("value")
                    .interval(1L)
                    .minDocCount(0)
                    .subAggregation(nested("nested", "nested")))
            .execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
    Histogram histo = searchResponse.getAggregations().get("histo");
    assertThat(histo, Matchers.notNullValue());
    Histogram.Bucket bucket = histo.getBuckets().get(1);
    assertThat(bucket, Matchers.notNullValue());

    Nested nested = bucket.getAggregations().get("nested");
    assertThat(nested, Matchers.notNullValue());
    assertThat(nested.getName(), equalTo("nested"));
    assertThat(nested.getDocCount(), is(0L));
}
public void testNonExistingNestedField() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx")
            .addAggregation(nested("nested", "value")
                    .subAggregation(stats("nested_value_stats").field("nested.value")))
            .execute().actionGet();

    Nested nested = searchResponse.getAggregations().get("nested");
    assertThat(nested, Matchers.notNullValue());
    assertThat(nested.getName(), equalTo("nested"));
    assertThat(nested.getDocCount(), is(0L));
}
public void testNestedWithSubTermsAgg() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(nested("nested", "nested")
                    .subAggregation(terms("values")
                            .field("nested.value")
                            .size(100)
                            .collectMode(aggCollectionMode)))
            .execute().actionGet();

    assertSearchResponse(response);

    long docCount = 0;
    long[] counts = new long[numParents + 6];
    for (int i = 0; i < numParents; ++i) {
        for (int j = 0; j < numChildren[i]; ++j) {
            final int value = i + 1 + j;
            ++counts[value];
            ++docCount;
        }
    }
    int uniqueValues = 0;
    for (long count : counts) {
        if (count > 0) {
            ++uniqueValues;
        }
    }

    Nested nested = response.getAggregations().get("nested");
    assertThat(nested, notNullValue());
    assertThat(nested.getName(), equalTo("nested"));
    assertThat(nested.getDocCount(), equalTo(docCount));
    assertThat((long) nested.getProperty("_count"), equalTo(docCount));
    assertThat(nested.getAggregations().asList().isEmpty(), is(false));

    LongTerms values = nested.getAggregations().get("values");
    assertThat(values, notNullValue());
    assertThat(values.getName(), equalTo("values"));
    assertThat(values.getBuckets(), notNullValue());
    assertThat(values.getBuckets().size(), equalTo(uniqueValues));
    for (int i = 0; i < counts.length; ++i) {
        final String key = Long.toString(i);
        if (counts[i] == 0) {
            assertNull(values.getBucketByKey(key));
        } else {
            Bucket bucket = values.getBucketByKey(key);
            assertNotNull(bucket);
            assertEquals(counts[i], bucket.getDocCount());
        }
    }
    assertThat((LongTerms) nested.getProperty("values"), sameInstance(values));
}
public void testSimple() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(nested("nested", "nested")
                    .subAggregation(stats("nested_value_stats").field("nested.value")))
            .execute().actionGet();

    assertSearchResponse(response);

    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    long sum = 0;
    long count = 0;
    for (int i = 0; i < numParents; ++i) {
        for (int j = 0; j < numChildren[i]; ++j) {
            final long value = i + 1 + j;
            min = Math.min(min, value);
            max = Math.max(max, value);
            sum += value;
            ++count;
        }
    }

    Nested nested = response.getAggregations().get("nested");
    assertThat(nested, notNullValue());
    assertThat(nested.getName(), equalTo("nested"));
    assertThat(nested.getDocCount(), equalTo(count));
    assertThat(nested.getAggregations().asList().isEmpty(), is(false));

    Stats stats = nested.getAggregations().get("nested_value_stats");
    assertThat(stats, notNullValue());
    assertThat(stats.getMin(), equalTo(min));
    assertThat(stats.getMax(), equalTo(max));
    assertThat(stats.getCount(), equalTo(count));
    assertThat(stats.getSum(), equalTo((double) sum));
    assertThat(stats.getAvg(), equalTo((double) sum / count));
}
public void testNestNestedAggs() throws Exception {
    SearchResponse response = client().prepareSearch("idx_nested_nested_aggs")
            .addAggregation(nested("level1", "nested1")
                    .subAggregation(terms("a")
                            .field("nested1.a")
                            .collectMode(aggCollectionMode)
                            .subAggregation(nested("level2", "nested1.nested2")
                                    .subAggregation(sum("sum").field("nested1.nested2.b")))))
            .get();
    assertSearchResponse(response);

    Nested level1 = response.getAggregations().get("level1");
    assertThat(level1, notNullValue());
    assertThat(level1.getName(), equalTo("level1"));
    assertThat(level1.getDocCount(), equalTo(2L));

    StringTerms a = level1.getAggregations().get("a");
    Terms.Bucket bBucket = a.getBucketByKey("a");
    assertThat(bBucket.getDocCount(), equalTo(1L));

    Nested level2 = bBucket.getAggregations().get("level2");
    assertThat(level2.getDocCount(), equalTo(1L));
    Sum sum = level2.getAggregations().get("sum");
    assertThat(sum.getValue(), equalTo(2d));

    a = level1.getAggregations().get("a");
    bBucket = a.getBucketByKey("b");
    assertThat(bBucket.getDocCount(), equalTo(1L));

    level2 = bBucket.getAggregations().get("level2");
    assertThat(level2.getDocCount(), equalTo(1L));
    sum = level2.getAggregations().get("sum");
    assertThat(sum.getValue(), equalTo(2d));
}
public void testNestedSameDocIdProcessedMultipleTime() throws Exception {
    assertAcked(prepareCreate("idx4")
            .setSettings(Settings.builder()
                    .put(SETTING_NUMBER_OF_SHARDS, 1)
                    .put(SETTING_NUMBER_OF_REPLICAS, 0))
            .addMapping("product", "categories", "type=text", "name", "type=text", "property", "type=nested"));
    ensureGreen("idx4");

    client().prepareIndex("idx4", "product", "1")
            .setSource(jsonBuilder().startObject()
                    .field("name", "product1")
                    .field("categories", "1", "2", "3", "4")
                    .startArray("property")
                        .startObject().field("id", 1).endObject()
                        .startObject().field("id", 2).endObject()
                        .startObject().field("id", 3).endObject()
                    .endArray()
                    .endObject())
            .get();
    client().prepareIndex("idx4", "product", "2")
            .setSource(jsonBuilder().startObject()
                    .field("name", "product2")
                    .field("categories", "1", "2")
                    .startArray("property")
                        .startObject().field("id", 1).endObject()
                        .startObject().field("id", 5).endObject()
                        .startObject().field("id", 4).endObject()
                    .endArray()
                    .endObject())
            .get();
    refresh();

    SearchResponse response = client().prepareSearch("idx4")
            .setTypes("product")
            .addAggregation(terms("category")
                    .field("categories")
                    .subAggregation(nested("property", "property")
                            .subAggregation(terms("property_id").field("property.id"))))
            .get();
    assertNoFailures(response);
    assertHitCount(response, 2);

    Terms category = response.getAggregations().get("category");
    assertThat(category.getBuckets().size(), equalTo(4));

    Terms.Bucket bucket = category.getBucketByKey("1");
    assertThat(bucket.getDocCount(), equalTo(2L));
    Nested property = bucket.getAggregations().get("property");
    assertThat(property.getDocCount(), equalTo(6L));
    Terms propertyId = property.getAggregations().get("property_id");
    assertThat(propertyId.getBuckets().size(), equalTo(5));
    assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(2L));
    assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("4").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("5").getDocCount(), equalTo(1L));

    bucket = category.getBucketByKey("2");
    assertThat(bucket.getDocCount(), equalTo(2L));
    property = bucket.getAggregations().get("property");
    assertThat(property.getDocCount(), equalTo(6L));
    propertyId = property.getAggregations().get("property_id");
    assertThat(propertyId.getBuckets().size(), equalTo(5));
    assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(2L));
    assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("4").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("5").getDocCount(), equalTo(1L));

    bucket = category.getBucketByKey("3");
    assertThat(bucket.getDocCount(), equalTo(1L));
    property = bucket.getAggregations().get("property");
    assertThat(property.getDocCount(), equalTo(3L));
    propertyId = property.getAggregations().get("property_id");
    assertThat(propertyId.getBuckets().size(), equalTo(3));
    assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));

    bucket = category.getBucketByKey("4");
    assertThat(bucket.getDocCount(), equalTo(1L));
    property = bucket.getAggregations().get("property");
    assertThat(property.getDocCount(), equalTo(3L));
    propertyId = property.getAggregations().get("property_id");
    assertThat(propertyId.getBuckets().size(), equalTo(3));
    assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
    assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
}
// Test based on: https://github.com/elastic/elasticsearch/issues/9280
public void testParentFilterResolvedCorrectly() throws Exception {
    XContentBuilder mapping = jsonBuilder().startObject()
            .startObject("provider")
                .startObject("properties")
                    .startObject("comments")
                        .field("type", "nested")
                        .startObject("properties")
                            .startObject("cid")
                                .field("type", "long")
                            .endObject()
                            .startObject("identifier")
                                .field("type", "keyword")
                            .endObject()
                            .startObject("tags")
                                .field("type", "nested")
                                .startObject("properties")
                                    .startObject("tid")
                                        .field("type", "long")
                                    .endObject()
                                    .startObject("name")
                                        .field("type", "keyword")
                                    .endObject()
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                    .startObject("dates")
                        .field("type", "object")
                        .startObject("properties")
                            .startObject("day")
                                .field("type", "date")
                                .field("format", "dateOptionalTime")
                            .endObject()
                            .startObject("month")
                                .field("type", "object")
                                .startObject("properties")
                                    .startObject("end")
                                        .field("type", "date")
                                        .field("format", "dateOptionalTime")
                                    .endObject()
                                    .startObject("start")
                                        .field("type", "date")
                                        .field("format", "dateOptionalTime")
                                    .endObject()
                                    .startObject("label")
                                        .field("type", "keyword")
                                    .endObject()
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
            .endObject()
        .endObject();

    assertAcked(prepareCreate("idx2")
            .setSettings(Settings.builder()
                    .put(SETTING_NUMBER_OF_SHARDS, 1)
                    .put(SETTING_NUMBER_OF_REPLICAS, 0))
            .addMapping("provider", mapping));
    ensureGreen("idx2");

    List<IndexRequestBuilder> indexRequests = new ArrayList<>(2);
    indexRequests.add(client().prepareIndex("idx2", "provider", "1")
            .setSource("{\"dates\": {\"month\": {\"label\": \"2014-11\", \"end\": \"2014-11-30\", \"start\": \"2014-11-01\"}, \"day\": \"2014-11-30\"}, \"comments\": [{\"cid\": 3,\"identifier\": \"29111\"}, {\"cid\": 4,\"tags\": [{\"tid\" :44,\"name\": \"Roles\"}], \"identifier\": \"29101\"}]}"));
    indexRequests.add(client().prepareIndex("idx2", "provider", "2")
            .setSource("{\"dates\": {\"month\": {\"label\": \"2014-12\", \"end\": \"2014-12-31\", \"start\": \"2014-12-01\"}, \"day\": \"2014-12-03\"}, \"comments\": [{\"cid\": 1, \"identifier\": \"29111\"}, {\"cid\": 2,\"tags\": [{\"tid\" : 22, \"name\": \"DataChannels\"}], \"identifier\": \"29101\"}]}"));
    indexRandom(true, indexRequests);

    SearchResponse response = client().prepareSearch("idx2")
            .setTypes("provider")
            .addAggregation(terms("startDate").field("dates.month.start")
                    .subAggregation(terms("endDate").field("dates.month.end")
                            .subAggregation(terms("period").field("dates.month.label")
                                    .subAggregation(nested("ctxt_idfier_nested", "comments")
                                            .subAggregation(filter("comment_filter", termQuery("comments.identifier", "29111"))
                                                    .subAggregation(nested("nested_tags", "comments.tags")
                                                            .subAggregation(terms("tag").field("comments.tags.name"))))))))
            .get();
    assertNoFailures(response);
    assertHitCount(response, 2);

    Terms startDate = response.getAggregations().get("startDate");
    assertThat(startDate.getBuckets().size(), equalTo(2));
    Terms.Bucket bucket = startDate.getBucketByKey("2014-11-01T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    Terms endDate = bucket.getAggregations().get("endDate");
    bucket = endDate.getBucketByKey("2014-11-30T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    Terms period = bucket.getAggregations().get("period");
    bucket = period.getBucketByKey("2014-11");
    assertThat(bucket.getDocCount(), equalTo(1L));
    Nested comments = bucket.getAggregations().get("ctxt_idfier_nested");
    assertThat(comments.getDocCount(), equalTo(2L));
    Filter filter = comments.getAggregations().get("comment_filter");
    assertThat(filter.getDocCount(), equalTo(1L));
    Nested nestedTags = filter.getAggregations().get("nested_tags");
    assertThat(nestedTags.getDocCount(), equalTo(0L)); // This must be 0
    Terms tags = nestedTags.getAggregations().get("tag");
    assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty

    bucket = startDate.getBucketByKey("2014-12-01T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    endDate = bucket.getAggregations().get("endDate");
    bucket = endDate.getBucketByKey("2014-12-31T00:00:00.000Z");
    assertThat(bucket.getDocCount(), equalTo(1L));
    period = bucket.getAggregations().get("period");
    bucket = period.getBucketByKey("2014-12");
    assertThat(bucket.getDocCount(), equalTo(1L));
    comments = bucket.getAggregations().get("ctxt_idfier_nested");
    assertThat(comments.getDocCount(), equalTo(2L));
    filter = comments.getAggregations().get("comment_filter");
    assertThat(filter.getDocCount(), equalTo(1L));
    nestedTags = filter.getAggregations().get("nested_tags");
    assertThat(nestedTags.getDocCount(), equalTo(0L)); // This must be 0
    tags = nestedTags.getAggregations().get("tag");
    assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty
}
public static void loadAggregationResults(
        ResponsePojo rp,
        Facets facets,
        Aggregations aggs,
        AggregationOutputPojo aggOutParams,
        ScoringUtils scoreStats,
        AliasLookupTable aliasLookup,
        String[] entityTypeFilterStrings,
        String[] assocVerbFilterStrings,
        AggregationUtils.GeoContainer extraAliasAggregatedGeo) {
    HashMap<String, List<? extends Object>> moments = null;
    if ((null != facets) && (null != facets.getFacets()))
        for (Map.Entry<String, Facet> facet : facets.getFacets().entrySet()) {
            // Geo
            if (facet.getKey().equals("geo")) {
                TermsFacet geoFacet = (TermsFacet) facet.getValue();
                Set<GeoAggregationPojo> geoCounts = null;
                int nHighestCount = -1;
                int nLowestCount = Integer.MAX_VALUE;
                // If we've got some geotags from the alias masters then start with them:
                if ((null != extraAliasAggregatedGeo) && (null != extraAliasAggregatedGeo.geotags)) {
                    geoCounts = extraAliasAggregatedGeo.geotags;
                    nHighestCount = (int) extraAliasAggregatedGeo.minCount;
                    nLowestCount = (int) extraAliasAggregatedGeo.maxCount;
                } else {
                    geoCounts = new TreeSet<GeoAggregationPojo>();
                }
                for (TermsFacet.Entry geo : geoFacet.getEntries()) {
                    String geohash = FacetUtils.getTerm(geo).substring(2);
                    double[] loc = GeoHashUtils.decode(geohash);
                    GeoAggregationPojo geoObj = new GeoAggregationPojo(loc[0], loc[1]);
                    geoObj.count = geo.getCount();
                    geoObj.type = GeoOntologyMapping.decodeOntologyCode(FacetUtils.getTerm(geo).charAt(0));
                    geoCounts.add(geoObj);
                    // (note this aggregates geo points whose decoded lat/longs are the same, which can
                    //  result in slightly fewer records than requested)
                    // (note the aggregation writes the aggregated count into geoObj.count)
                    if (geoObj.count > nHighestCount) { // (the counts can be modified by the add command above)
                        nHighestCount = geo.getCount();
                    }
                    if (geoObj.count < nLowestCount) {
                        nLowestCount = geo.getCount();
                    }
                }
                rp.setGeo(geoCounts, nHighestCount, nLowestCount);
            } // (TESTED)
            if (facet.getKey().equals("time")) {
                DateHistogramFacet timeFacet = (DateHistogramFacet) facet.getValue();
                rp.setTimes(timeFacet.getEntries(), QueryHandler.getInterval(aggOutParams.timesInterval, 'm'));
            } // (TESTED)
            if (facet.getKey().equals("events")) {
                TermsFacet eventsFacet = (TermsFacet) facet.getValue();
                rp.setEvents(parseEventAggregationOutput("Event", eventsFacet, scoreStats, aliasLookup,
                        entityTypeFilterStrings, assocVerbFilterStrings));
            }
            if (facet.getKey().equals("facts")) {
                TermsFacet factsFacet = (TermsFacet) facet.getValue();
                rp.setFacts(parseEventAggregationOutput("Fact", factsFacet, scoreStats, aliasLookup,
                        entityTypeFilterStrings, assocVerbFilterStrings));
            } // TESTED x2
            if (facet.getKey().equals("sourceTags")) {
                TermsFacet tagsFacet = (TermsFacet) facet.getValue();
                rp.setSourceMetaTags(tagsFacet.getEntries());
            }
            if (facet.getKey().equals("sourceTypes")) {
                TermsFacet typesFacet = (TermsFacet) facet.getValue();
                rp.setSourceMetaTypes(typesFacet.getEntries());
            }
            if (facet.getKey().equals("sourceKeys")) {
                TermsFacet keysFacet = (TermsFacet) facet.getValue();
                rp.setSources(keysFacet.getEntries());
            } // TESTED x3
            // Moments (basic functionality)
            if (facet.getKey().startsWith("moments.")) {
                DateHistogramFacet momentFacet = (DateHistogramFacet) facet.getValue();
                if (null == moments) {
                    moments = new HashMap<String, List<? extends Object>>();
                }
                moments.put(facet.getKey().substring(8), momentFacet.getEntries());
            } // TESTED
        } // (end loop over generated facets)

    if ((null != aggs) && (null != aggs.asMap()))
        for (Map.Entry<String, Aggregation> agg : aggs.asMap().entrySet()) {
            if (agg.getKey().equals("moments")) {
                if (null == moments) {
                    moments = new HashMap<String, List<? extends Object>>();
                }
                DateHistogram val = (DateHistogram) agg.getValue();
                // TODO (INF-2688): Finalize format
                BasicDBList dbl = new BasicDBList();
                for (DateHistogram.Bucket dateBucket : val.getBuckets()) {
                    if (dateBucket.getKeyAsNumber().longValue() > 0) {
                        BasicDBObject dataBucketDbo = new BasicDBObject();
                        dataBucketDbo.put("time", dateBucket.getKeyAsNumber().longValue());
                        dataBucketDbo.put("count", dateBucket.getDocCount());
                        for (Map.Entry<String, Aggregation> dateAggs : dateBucket.getAggregations().asMap().entrySet()) {
                            if (dateAggs.getKey().equals("geo")) {
                                BasicDBList dbl_geo = new BasicDBList();
                                MultiBucketsAggregation geoVal = (MultiBucketsAggregation) dateAggs.getValue();
                                long nHighestCount = Long.MIN_VALUE;
                                for (MultiBucketsAggregation.Bucket geoBucket : geoVal.getBuckets()) {
                                    String geohash = geoBucket.getKey().substring(2);
                                    double[] loc = GeoHashUtils.decode(geohash);
                                    GeoAggregationPojo geoObj = new GeoAggregationPojo(loc[0], loc[1]);
                                    BasicDBObject geoDbo = new BasicDBObject(4);
                                    geoDbo.put("lat", geoObj.lat);
                                    geoDbo.put("lon", geoObj.lon);
                                    geoDbo.put("count", geoBucket.getDocCount());
                                    geoDbo.put("type", GeoOntologyMapping.decodeOntologyCode(geoBucket.getKey().charAt(0)));
                                    dbl_geo.add(geoDbo);
                                    if (geoBucket.getDocCount() > nHighestCount) { // (the counts can be modified by the add command above)
                                        nHighestCount = geoBucket.getDocCount();
                                    }
                                }
                                dataBucketDbo.put("maxGeoCount", nHighestCount);
                                dataBucketDbo.put("geo", dbl_geo);
                            }
                        }
                        dbl.add(dataBucketDbo);
                    }
                }
                moments.put("times", dbl);
            } else {
                if (null == moments) {
                    moments = new HashMap<String, List<? extends Object>>();
                }
                DateHistogram val = (DateHistogram) agg.getValue();
                BasicDBList dbl = new BasicDBList();
                for (DateHistogram.Bucket dateBucket : val.getBuckets()) {
                    if (dateBucket.getKeyAsNumber().longValue() > 0) {
                        BasicDBObject dataBucketDbo = new BasicDBObject();
                        dataBucketDbo.put("time", dateBucket.getKeyAsNumber().longValue());
                        dataBucketDbo.put("count", dateBucket.getDocCount());
                        for (Map.Entry<String, Aggregation> dateAggs : dateBucket.getAggregations().asMap().entrySet()) {
                            if (dateAggs.getKey().equals("moments.assoc.nested")) {
                                BasicDBList dbl_assoc = new BasicDBList();
                                Nested nestedVal = (Nested) dateAggs.getValue();
                                MultiBucketsAggregation assocVal = (MultiBucketsAggregation) nestedVal.getAggregations().asList().get(0);
                                long nHighestCount = Long.MIN_VALUE;
                                for (MultiBucketsAggregation.Bucket assocBucket : assocVal.getBuckets()) {
                                    BasicDBObject assocDbo = new BasicDBObject(2);
                                    assocDbo.put("key", assocBucket.getKey());
                                    assocDbo.put("docCount", assocBucket.getDocCount());
                                    dbl_assoc.add(assocDbo);
                                    if (assocBucket.getDocCount() > nHighestCount) { // (the counts can be modified by the add command above)
                                        nHighestCount = assocBucket.getDocCount();
                                    }
                                }
                                dataBucketDbo.put("maxAssocCount", nHighestCount);
                                dataBucketDbo.put("assoc", dbl_assoc);
                            }
                        }
                        dbl.add(dataBucketDbo);
                    }
                    moments.put("assocs", dbl);
                }
            }
        } // (end loop over generated aggregations)

    if ((null != moments) && !moments.isEmpty()) {
        rp.setMoments(moments, QueryHandler.getInterval(aggOutParams.moments.timesInterval, 'm'));
    }
} // TESTED