public void testMissingType() throws Exception { String source = JsonXContent.contentBuilder() .startObject() .startObject("by_date") .startObject("date_histogram") .field("field", "timestamp") .field("interval", "month") .endObject() .startObject("aggs") .startObject("tag_count") // the aggregation type is missing // .startObject("cardinality") .field("field", "tag") // .endObject() .endObject() .endObject() .endObject() .endObject() .string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); } catch (ParsingException e) { // All Good } }
public void testTwoTypes() throws Exception {
  // "in_stock" declares two aggregation types ("filter" and "terms") side by
  // side; an aggregation may define exactly one type, so parsing must fail.
  String source =
      JsonXContent.contentBuilder()
          .startObject()
          .startObject("in_stock")
          .startObject("filter")
          .startObject("range")
          .startObject("stock")
          .field("gt", 0)
          .endObject()
          .endObject()
          .endObject()
          .startObject("terms")
          .field("field", "stock")
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    XContentParser parser = XContentFactory.xContent(source).createParser(source);
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    aggParsers.parseAggregators(parseContext);
    // Give the failure a message so a green-path fall-through is diagnosable.
    fail("expected a ParsingException for two aggregation type definitions");
  } catch (ParsingException e) {
    assertThat(
        e.toString(),
        containsString(
            "Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
  }
}
public void testSameAggregationName() throws Exception {
  // Two sibling aggregations share the same (randomly generated) name,
  // which must be rejected with an IllegalArgumentException.
  final String name = randomAsciiOfLengthBetween(1, 10);
  String source =
      JsonXContent.contentBuilder()
          .startObject()
          .startObject(name)
          .startObject("terms")
          .field("field", "a")
          .endObject()
          .endObject()
          .startObject(name)
          .startObject("terms")
          .field("field", "b")
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    XContentParser parser = XContentFactory.xContent(source).createParser(source);
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    aggParsers.parseAggregators(parseContext);
    // Give the failure a message so a green-path fall-through is diagnosable.
    fail("expected an IllegalArgumentException for duplicate sibling aggregation name");
  } catch (IllegalArgumentException e) {
    assertThat(
        e.toString(),
        containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
  }
}
public void testXContentRepresentationOfUnfinishedSlices() throws IOException {
  // A finished slice (id 2) among two still-unfinished (null) slots must be
  // rendered as [null,null,{...}] inside the "slices" array.
  XContentBuilder out = JsonXContent.contentBuilder();
  BulkByScrollTask.Status finishedSlice =
      new BulkByScrollTask.Status(
          2,
          0,
          0,
          0,
          0,
          0,
          0,
          0,
          0,
          0,
          timeValueMillis(0),
          Float.POSITIVE_INFINITY,
          null,
          timeValueMillis(0));
  BulkByScrollTask.Status parent =
      new BulkByScrollTask.Status(
          Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(finishedSlice)), null);
  parent.toXContent(out, ToXContent.EMPTY_PARAMS);
  assertThat(out.string(), containsString("\"slices\":[null,null,{\"slice_id\":2"));
}
public void run() { ToXContent.Params params = new ToXContent.MapParams(new HashMap<String, String>()); try { XContentBuilder xContentBuilder = JsonXContent.contentBuilder(); xContentBuilder.startObject(); xContentBuilder.field("node", client.settings().get("node.name")); xContentBuilder.field("timestamp", new Date().getTime()); // processService.stats().toXContent(xContentBuilder, null); osService.stats().toXContent(xContentBuilder, params); networkService.stats().toXContent(xContentBuilder, params); jvmService.stats().toXContent(xContentBuilder, params); fsService.stats().toXContent(xContentBuilder, params); xContentBuilder.endObject(); IndexRequest indexRequest = new IndexRequest("$system", "monitor").ttl(3600L); indexRequest.listenerThreaded(false); indexRequest.operationThreaded(true); indexRequest.source(xContentBuilder); indexRequest.timeout(IndexRequest.DEFAULT_TIMEOUT); indexRequest.refresh(indexRequest.refresh()); client.index(indexRequest); } catch (IOException e) { e.printStackTrace(); } }
public void testToXContent() throws IOException {
  // Mixed-type sort values (int, String, double) must serialize to a
  // JSON "sort" array preserving each value's native representation.
  SearchSortValues values = new SearchSortValues(new Object[] {1, "foo", 3.0});
  XContentBuilder out = JsonXContent.contentBuilder();
  out.startObject();
  values.toXContent(out, ToXContent.EMPTY_PARAMS);
  out.endObject();
  assertEquals("{\"sort\":[1,\"foo\",3.0]}", out.string());
}
@Override
public String toString() {
  // Render this object as its XContent (JSON) representation; if building
  // the JSON fails, fall back to the default Object identity string.
  try {
    XContentBuilder builder = JsonXContent.contentBuilder();
    return toXContent(builder, ToXContent.EMPTY_PARAMS).string();
  } catch (IOException ioe) {
    return super.toString();
  }
}
public void testXContentRepresentationOfSliceFailures() throws IOException {
  // A failed slice (wrapped exception) among two unfinished (null) slots
  // must serialize as an exception object inside the "slices" array.
  XContentBuilder out = JsonXContent.contentBuilder();
  Exception failure = new Exception();
  BulkByScrollTask.Status parent =
      new BulkByScrollTask.Status(
          Arrays.asList(null, null, new BulkByScrollTask.StatusOrException(failure)), null);
  parent.toXContent(out, ToXContent.EMPTY_PARAMS);
  assertThat(out.string(), containsString("\"slices\":[null,null,{\"type\":\"exception\""));
}
public SearchResponse query(JetwickQuery query, boolean log, boolean explain) {
  // Builds and executes a search request; optionally logs the serialized
  // request body first.
  // NOTE(review): the 'explain' parameter is unused — explain is taken from
  // query.isExplain() instead; confirm whether the parameter is vestigial.
  SearchRequestBuilder srb = createSearchBuilder();
  srb.setExplain(query.isExplain());
  query.initRequestBuilder(srb);
  if (log) {
    try {
      logger.info(
          srb.internalBuilder().toXContent(JsonXContent.unCachedContentBuilder(), null).string());
    } catch (Exception ex) {
      // Logging the query is best-effort, but a serialization failure should
      // at least be visible rather than silently swallowed.
      logger.warn("Could not serialize search request for logging", ex);
    }
  }
  return srb.execute().actionGet();
}
public XContentBuilder toXContentBuilder() throws IOException {
  // Serializes this map's entries into a JSON object builder.
  // The original guarded against "this == null", which is always false in
  // Java (a method cannot be invoked on a null receiver) — dead code removed.
  XContentBuilder builder = JsonXContent.contentBuilder();
  builder.startObject();
  // Enhanced-for replaces the explicit Iterator loop; same iteration order.
  for (Entry<String, Object> entry : this.entrySet()) {
    builder.field(entry.getKey()).value(entry.getValue());
  }
  builder.endObject();
  return builder;
}
public void testInvalidAggregationName() throws Exception {
  // An aggregation name containing '[', ']' or '>' is invalid; generate a
  // random name that is guaranteed to violate the valid-name pattern and
  // verify the parser rejects it.
  // Pattern of VALID names: one or more chars that are none of '[', ']', '>'.
  Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher("");
  String name;
  Random rand = random();
  int len = randomIntBetween(1, 5);
  char[] word = new char[len];
  // Keep drawing random ASCII chars until the resulting string fails the
  // valid-name pattern (i.e. contains at least one forbidden character).
  // NOTE: the exact sequence of rand calls is preserved for seed reproducibility.
  while (true) {
    for (int i = 0; i < word.length; i++) {
      word[i] = (char) rand.nextInt(127);
    }
    name = String.valueOf(word);
    if (!matcher.reset(name).matches()) {
      break;
    }
  }
  String source =
      JsonXContent.contentBuilder()
          .startObject()
          .startObject(name)
          .startObject("filter")
          .startObject("range")
          .startObject("stock")
          .field("gt", 0)
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    XContentParser parser = XContentFactory.xContent(source).createParser(source);
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    aggParsers.parseAggregators(parseContext);
    fail();
  } catch (ParsingException e) {
    assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]"));
  }
}
public void testXContentRepresentationOfUnlimitedRequestsPerSecond() throws IOException {
  // An unthrottled status (Float.POSITIVE_INFINITY) must serialize its
  // requests_per_second as the sentinel value -1.
  XContentBuilder out = JsonXContent.contentBuilder();
  BulkByScrollTask.Status unthrottled =
      new BulkByScrollTask.Status(
          null,
          0,
          0,
          0,
          0,
          0,
          0,
          0,
          0,
          0,
          timeValueMillis(0),
          Float.POSITIVE_INFINITY,
          null,
          timeValueMillis(0));
  unthrottled.toXContent(out, ToXContent.EMPTY_PARAMS);
  assertThat(out.string(), containsString("\"requests_per_second\":-1"));
}
public void testTwoAggs() throws Exception { String source = JsonXContent.contentBuilder() .startObject() .startObject("by_date") .startObject("date_histogram") .field("field", "timestamp") .field("interval", "month") .endObject() .startObject("aggs") .startObject("tag_count") .startObject("cardinality") .field("field", "tag") .endObject() .endObject() .endObject() .startObject("aggs") // 2nd "aggs": illegal .startObject("tag_count2") .startObject("cardinality") .field("field", "tag") .endObject() .endObject() .endObject() .endObject() .endObject() .string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); } catch (ParsingException e) { assertThat( e.toString(), containsString("Found two sub aggregation definitions under [by_date]")); } }
@SuppressWarnings("unchecked")
@Test
public void updateDefaultMappingSettings() throws Exception {
  // Verifies the _default_ mapping lifecycle: it can be created, emptied
  // (removing earlier settings), and — unlike concrete type mappings — have
  // normally-immutable field settings changed, while invalid field types are
  // still rejected by validation.
  logger.info("Creating index with _default_ mappings");
  client()
      .admin()
      .indices()
      .prepareCreate("test")
      .addMapping(
          MapperService.DEFAULT_MAPPING,
          JsonXContent.contentBuilder()
              .startObject()
              .startObject(MapperService.DEFAULT_MAPPING)
              .field("date_detection", false)
              .endObject()
              .endObject())
      .get();
  // The freshly created _default_ mapping must expose the setting we set.
  GetMappingsResponse getResponse =
      client()
          .admin()
          .indices()
          .prepareGetMappings("test")
          .addTypes(MapperService.DEFAULT_MAPPING)
          .get();
  Map<String, Object> defaultMapping =
      getResponse.getMappings().get("test").get(MapperService.DEFAULT_MAPPING).sourceAsMap();
  assertThat(defaultMapping, hasKey("date_detection"));
  logger.info("Emptying _default_ mappings");
  // now remove it
  PutMappingResponse putResponse =
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType(MapperService.DEFAULT_MAPPING)
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject(MapperService.DEFAULT_MAPPING)
                  .endObject()
                  .endObject())
          .get();
  assertThat(putResponse.isAcknowledged(), equalTo(true));
  logger.info("Done Emptying _default_ mappings");
  // Putting an empty _default_ mapping must have dropped "date_detection".
  getResponse =
      client()
          .admin()
          .indices()
          .prepareGetMappings("test")
          .addTypes(MapperService.DEFAULT_MAPPING)
          .get();
  defaultMapping =
      getResponse.getMappings().get("test").get(MapperService.DEFAULT_MAPPING).sourceAsMap();
  assertThat(defaultMapping, not(hasKey("date_detection")));
  // now test you can change stuff that are normally unchangeable
  logger.info("Creating _default_ mappings with an analyzed field");
  putResponse =
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType(MapperService.DEFAULT_MAPPING)
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject(MapperService.DEFAULT_MAPPING)
                  .startObject("properties")
                  .startObject("f")
                  .field("type", "string")
                  .field("index", "analyzed")
                  .endObject()
                  .endObject()
                  .endObject()
                  .endObject())
          .get();
  assertThat(putResponse.isAcknowledged(), equalTo(true));
  // Flipping "index" from analyzed to not_analyzed would be rejected on a
  // concrete type, but is allowed on the _default_ mapping.
  logger.info("Changing _default_ mappings field from analyzed to non-analyzed");
  putResponse =
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType(MapperService.DEFAULT_MAPPING)
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject(MapperService.DEFAULT_MAPPING)
                  .startObject("properties")
                  .startObject("f")
                  .field("type", "string")
                  .field("index", "not_analyzed")
                  .endObject()
                  .endObject()
                  .endObject()
                  .endObject())
          .get();
  assertThat(putResponse.isAcknowledged(), equalTo(true));
  logger.info("Done changing _default_ mappings field from analyzed to non-analyzed");
  getResponse =
      client()
          .admin()
          .indices()
          .prepareGetMappings("test")
          .addTypes(MapperService.DEFAULT_MAPPING)
          .get();
  defaultMapping =
      getResponse.getMappings().get("test").get(MapperService.DEFAULT_MAPPING).sourceAsMap();
  Map<String, Object> fieldSettings =
      (Map<String, Object>) ((Map) defaultMapping.get("properties")).get("f");
  assertThat(fieldSettings, hasEntry("index", (Object) "not_analyzed"));
  // but we still validate the _default_ type
  logger.info("Confirming _default_ mappings validation");
  assertThrows(
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType(MapperService.DEFAULT_MAPPING)
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject(MapperService.DEFAULT_MAPPING)
                  .startObject("properties")
                  .startObject("f")
                  .field("type", "DOESNT_EXIST")
                  .endObject()
                  .endObject()
                  .endObject()
                  .endObject()),
      MapperParsingException.class);
}
@SuppressWarnings("unchecked")
@Test
public void updateIncludeExclude() throws Exception {
  // Verifies that updating _source includes/excludes takes effect for newly
  // indexed documents only (previously indexed docs keep their stored source),
  // and that includes and excludes can be combined and updated incrementally.
  assertAcked(
      prepareCreate("test")
          .addMapping(
              "type",
              jsonBuilder()
                  .startObject()
                  .startObject("type")
                  .startObject("properties")
                  .startObject("normal")
                  .field("type", "long")
                  .endObject()
                  .startObject("exclude")
                  .field("type", "long")
                  .endObject()
                  .startObject("include")
                  .field("type", "long")
                  .endObject()
                  .endObject()
                  .endObject()
                  .endObject()));
  ensureGreen(); // make sure that replicas are initialized so the refresh command will work them
  // too
  logger.info("Index doc");
  index(
      "test",
      "type",
      "1",
      JsonXContent.contentBuilder()
          .startObject()
          .field("normal", 1)
          .field("exclude", 1)
          .field("include", 1)
          .endObject());
  refresh(); // commit it for later testing.
  logger.info("Adding exclude settings");
  PutMappingResponse putResponse =
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType("type")
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject("type")
                  .startObject("_source")
                  .startArray("excludes")
                  .value("exclude")
                  .endArray()
                  .endObject()
                  .endObject())
          .get();
  assertTrue(putResponse.isAcknowledged());
  // changed mapping doesn't affect indexed documents (checking backward compatibility)
  GetResponse getResponse = client().prepareGet("test", "type", "1").setRealtime(false).get();
  assertThat(getResponse.getSource(), hasKey("normal"));
  assertThat(getResponse.getSource(), hasKey("exclude"));
  assertThat(getResponse.getSource(), hasKey("include"));
  logger.info("Index doc again");
  index(
      "test",
      "type",
      "1",
      JsonXContent.contentBuilder()
          .startObject()
          .field("normal", 2)
          .field("exclude", 1)
          .field("include", 2)
          .endObject());
  // but do affect newly indexed docs
  getResponse = get("test", "type", "1");
  assertThat(getResponse.getSource(), hasKey("normal"));
  assertThat(getResponse.getSource(), not(hasKey("exclude")));
  assertThat(getResponse.getSource(), hasKey("include"));
  logger.info("Changing mapping to includes");
  // Switch to an includes-based filter (with an explicit empty excludes list).
  putResponse =
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType("type")
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject("type")
                  .startObject("_source")
                  .startArray("excludes")
                  .endArray()
                  .startArray("includes")
                  .value("include")
                  .endArray()
                  .endObject()
                  .endObject())
          .get();
  assertTrue(putResponse.isAcknowledged());
  // The mapping on the cluster must reflect both the new includes and the
  // now-empty excludes.
  GetMappingsResponse getMappingsResponse =
      client().admin().indices().prepareGetMappings("test").get();
  MappingMetaData typeMapping = getMappingsResponse.getMappings().get("test").get("type");
  assertThat(
      (Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("includes"));
  ArrayList<String> includes =
      (ArrayList<String>)
          ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("includes");
  assertThat(includes, contains("include"));
  assertThat(
      (Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("excludes"));
  assertThat(
      (ArrayList<String>)
          ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("excludes"),
      emptyIterable());
  logger.info("Indexing doc yet again");
  index(
      "test",
      "type",
      "1",
      JsonXContent.contentBuilder()
          .startObject()
          .field("normal", 3)
          .field("exclude", 3)
          .field("include", 3)
          .endObject());
  // With only "include" whitelisted, every other field is dropped from _source.
  getResponse = get("test", "type", "1");
  assertThat(getResponse.getSource(), not(hasKey("normal")));
  assertThat(getResponse.getSource(), not(hasKey("exclude")));
  assertThat(getResponse.getSource(), hasKey("include"));
  logger.info("Adding excludes, but keep includes");
  // Adding an excludes pattern must merge with (not replace) the existing includes.
  putResponse =
      client()
          .admin()
          .indices()
          .preparePutMapping("test")
          .setType("type")
          .setSource(
              JsonXContent.contentBuilder()
                  .startObject()
                  .startObject("type")
                  .startObject("_source")
                  .startArray("excludes")
                  .value("*.excludes")
                  .endArray()
                  .endObject()
                  .endObject())
          .get();
  assertTrue(putResponse.isAcknowledged());
  getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
  typeMapping = getMappingsResponse.getMappings().get("test").get("type");
  assertThat(
      (Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("includes"));
  includes =
      (ArrayList<String>)
          ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("includes");
  assertThat(includes, contains("include"));
  assertThat(
      (Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("excludes"));
  ArrayList<String> excludes =
      (ArrayList<String>)
          ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("excludes");
  assertThat(excludes, contains("*.excludes"));
}
@Test // see #3878 public void testThatXContentSerializationInsideOfArrayWorks() throws Exception { EnvelopeBuilder envelopeBuilder = ShapeBuilder.newEnvelope().topLeft(0, 0).bottomRight(10, 10); GeoShapeQueryBuilder geoQuery = QueryBuilders.geoShapeQuery("searchGeometry", envelopeBuilder); JsonXContent.contentBuilder().startArray().value(geoQuery).endArray(); }