// Writes an identical token stream (object -> string field -> array of [1, null]) through both a
// SMILE and a JSON generator, then re-parses both outputs and verifies the two parsers produce the
// same token sequence. NOTE(review): relies on verifySameTokens (defined elsewhere in this class)
// to do the token-by-token comparison.
@Test public void compareParsingTokens() throws IOException { BytesStreamOutput xsonOs = new BytesStreamOutput(); XContentGenerator xsonGen = XContentFactory.xContent(XContentType.SMILE).createGenerator(xsonOs); BytesStreamOutput jsonOs = new BytesStreamOutput(); XContentGenerator jsonGen = XContentFactory.xContent(XContentType.JSON).createGenerator(jsonOs); xsonGen.writeStartObject(); jsonGen.writeStartObject(); xsonGen.writeStringField("test", "value"); jsonGen.writeStringField("test", "value"); xsonGen.writeArrayFieldStart("arr"); jsonGen.writeArrayFieldStart("arr"); xsonGen.writeNumber(1); jsonGen.writeNumber(1); xsonGen.writeNull(); jsonGen.writeNull(); xsonGen.writeEndArray(); jsonGen.writeEndArray(); xsonGen.writeEndObject(); jsonGen.writeEndObject(); xsonGen.close(); jsonGen.close(); verifySameTokens( XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes().toBytes())); }
// Verifies XContentMapValues.extractRawValues against four map shapes: a flat key, a flat key
// that itself contains a dot ("test.me"), a nested object path ("path1.path2.test"), and nested
// object keys that contain dots ("path1.xxx.path2.yyy.test"). Each round trips through a JSON
// builder and parser before extraction.
public void testExtractRawValue() throws Exception { XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("test", "value").endObject(); Map<String, Object> map; try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) { map = parser.map(); } assertThat(XContentMapValues.extractRawValues("test", map).get(0).toString(), equalTo("value")); builder = XContentFactory.jsonBuilder().startObject().field("test.me", "value").endObject(); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) { map = parser.map(); } assertThat( XContentMapValues.extractRawValues("test.me", map).get(0).toString(), equalTo("value")); builder = XContentFactory.jsonBuilder() .startObject() .startObject("path1") .startObject("path2") .field("test", "value") .endObject() .endObject() .endObject(); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) { map = parser.map(); } assertThat( XContentMapValues.extractRawValues("path1.path2.test", map).get(0).toString(), equalTo("value")); builder = XContentFactory.jsonBuilder() .startObject() .startObject("path1.xxx") .startObject("path2.yyy") .field("test", "value") .endObject() .endObject() .endObject(); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) { map = parser.map(); } assertThat( XContentMapValues.extractRawValues("path1.xxx.path2.yyy.test", map).get(0).toString(), equalTo("value")); }
/**
 * Checks that a moving-average pipeline aggregation defined with only the required
 * {@code buckets_path} parses into a builder equal to the default-configured one, and that the
 * parser is left positioned exactly at the end of the aggregation object.
 */
public void testDefaultParsing() throws Exception {
  MovAvgPipelineAggregationBuilder expected =
      new MovAvgPipelineAggregationBuilder("commits_moving_avg", "commits");
  String json =
      "{"
          + " \"commits_moving_avg\": {"
          + " \"moving_avg\": {"
          + " \"buckets_path\": \"commits\""
          + " }"
          + " }"
          + "}";
  // try-with-resources so the parser is released even when an assertion fails
  // (it was previously never closed)
  try (XContentParser parser = XContentFactory.xContent(json).createParser(json)) {
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
    assertEquals(expected.getName(), parser.currentName());
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
    assertEquals(expected.type(), parser.currentName());
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    PipelineAggregationBuilder newAgg =
        aggParsers
            .pipelineParser(expected.getWriteableName(), ParseFieldMatcher.STRICT)
            .parse(expected.getName(), parseContext);
    // the aggregation parser must consume its object and stop on its END_OBJECT
    assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
    assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
    assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
    assertNull(parser.nextToken());
    assertNotNull(newAgg);
    assertNotSame(newAgg, expected);
    assertEquals(expected, newAgg);
    assertEquals(expected.hashCode(), newAgg.hashCode());
  }
}
// Executes a suggest request on a single shard: acquires the shard's searcher, parses the suggest
// source (which must start with START_OBJECT) into a SuggestionSearchContext, runs the suggest
// phase, and returns the per-shard response. An absent/empty suggest source yields an empty
// Suggest result. Any failure is wrapped in ElasticSearchException; the searcher and parser are
// always released/closed in the finally block.
@Override protected ShardSuggestResponse shardOperation(ShardSuggestRequest request) throws ElasticSearchException { IndexService indexService = indicesService.indexServiceSafe(request.index()); IndexShard indexShard = indexService.shardSafe(request.shardId()); final Engine.Searcher searcher = indexShard.searcher(); XContentParser parser = null; try { BytesReference suggest = request.suggest(); if (suggest != null && suggest.length() > 0) { parser = XContentFactory.xContent(suggest).createParser(suggest); if (parser.nextToken() != XContentParser.Token.START_OBJECT) { throw new ElasticSearchIllegalArgumentException("suggest content missing"); } final SuggestionSearchContext context = suggestPhase .parseElement() .parseInternal( parser, indexService.mapperService(), request.index(), request.shardId()); final Suggest result = suggestPhase.execute(context, searcher.reader()); return new ShardSuggestResponse(request.index(), request.shardId(), result); } return new ShardSuggestResponse(request.index(), request.shardId(), new Suggest()); } catch (Throwable ex) { throw new ElasticSearchException("failed to execute suggest", ex); } finally { searcher.release(); if (parser != null) { parser.close(); } } }
/**
 * Sets the template source definition from a raw {@link BytesReference}.
 *
 * @throws ElasticSearchIllegalArgumentException if the bytes cannot be parsed
 */
public PutIndexTemplateRequest source(BytesReference source) {
  try {
    // mapOrderedAndClose() both reads the full map and closes the parser
    return source(
        XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
  } catch (IOException e) {
    throw new ElasticSearchIllegalArgumentException("failed to parse template source", e);
  }
}
/**
 * Serializes an {@link IndexTemplateMetaData} (order, template pattern, settings and mappings)
 * into the given builder under an object named after the template.
 *
 * <p>Mappings are stored compressed, so each one is uncompressed, parsed back into a map and
 * emitted as structured content.
 */
public static void toXContent(
    IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params)
    throws IOException {
  builder.startObject(indexTemplateMetaData.name(), XContentBuilder.FieldCaseConversion.NONE);
  builder.field("order", indexTemplateMetaData.order());
  builder.field("template", indexTemplateMetaData.template());
  builder.startObject("settings");
  for (Map.Entry<String, String> entry : indexTemplateMetaData.settings().getAsMap().entrySet()) {
    builder.field(entry.getKey(), entry.getValue());
  }
  builder.endObject();
  builder.startArray("mappings");
  for (Map.Entry<String, CompressedString> entry : indexTemplateMetaData.mappings().entrySet()) {
    byte[] data = entry.getValue().uncompressed();
    Map<String, Object> mapping;
    // try-with-resources: the parser was previously leaked if map() threw
    try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
      mapping = parser.map();
    }
    builder.map(mapping);
  }
  builder.endArray();
  builder.endObject();
}
// Resolves login credentials for the request: starts from the username/password request
// parameters, then lets a structured request body (JSON/SMILE/etc., sniffed from the content)
// override either value. Parse failures are reported through the listener and abort the login.
// A missing username short-circuits to an empty roles response; otherwise delegates to
// processLogin. The body parser is always closed in the finally block.
@Override public void login(final RestRequest request, final ActionListener<String[]> listener) { String username = request.param(usernameKey); String password = request.param(passwordKey); final BytesReference content = request.content(); final XContentType xContentType = XContentFactory.xContentType(content); XContentParser parser = null; try { parser = XContentFactory.xContent(xContentType).createParser(content); final XContentParser.Token t = parser.nextToken(); if (t != null) { final Map<String, Object> contentMap = parser.map(); username = MapUtil.getAsString(contentMap, usernameKey, username); password = MapUtil.getAsString(contentMap, passwordKey, password); } } catch (final Exception e) { listener.onFailure(e); return; } finally { if (parser != null) { parser.close(); } } if (username == null) { listener.onResponse(new String[0]); return; } processLogin(username, password, listener); }
// An aggregation named "in_stock" defining two aggregation types ("filter" and "terms") at the
// same level must be rejected with a ParsingException naming both types.
public void testTwoTypes() throws Exception { String source = JsonXContent.contentBuilder() .startObject() .startObject("in_stock") .startObject("filter") .startObject("range") .startObject("stock") .field("gt", 0) .endObject() .endObject() .endObject() .startObject("terms") .field("field", "stock") .endObject() .endObject() .endObject() .string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); } catch (ParsingException e) { assertThat( e.toString(), containsString( "Found two aggregation type definitions in [in_stock]: [filter] and [terms]")); } }
// Parses a document fetched for percolation into a ParsedDocument using the index's mapper for
// the given type (auto-created if needed). The document is parsed as a flyweight; when
// highlighting is requested the raw source is re-attached so highlighters can access it. Parse
// failures become ElasticsearchParseException, a null result is rejected, and the parser is
// always closed in the finally block.
private ParsedDocument parseFetchedDoc( PercolateContext context, BytesReference fetchedDoc, IndexService documentIndexService, String type) { ParsedDocument doc = null; XContentParser parser = null; try { parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc); MapperService mapperService = documentIndexService.mapperService(); DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(type); doc = docMapper.parse(source(parser).type(type).flyweight(true)); if (context.highlight() != null) { doc.setSource(fetchedDoc); } } catch (Throwable e) { throw new ElasticsearchParseException("failed to parse request", e); } finally { if (parser != null) { parser.close(); } } if (doc == null) { throw new ElasticsearchParseException("No doc to percolate in the request"); } return doc; }
// parser.list() on "some_array" must return the nested lists [[], [1, 3], [2]]. Randomly
// consumes (or not) the START_ARRAY token first to verify list() works from either position.
public void testNestedList() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder() .startObject() .startArray("some_array") .startArray() .endArray() .startArray() .value(1) .value(3) .endArray() .startArray() .value(2) .endArray() .endArray() .endObject(); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); if (random().nextBoolean()) { // sometimes read the start array token, sometimes not assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); } assertEquals( Arrays.asList(Collections.<Integer>emptyList(), Arrays.asList(1, 3), Arrays.asList(2)), parser.list()); } }
// parser.list() on an array of objects must return [{"foo": "bar"}, {}]. Randomly consumes
// (or not) the START_ARRAY token first to verify list() works from either position.
public void testNestedMapInList() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder() .startObject() .startArray("some_array") .startObject() .field("foo", "bar") .endObject() .startObject() .endObject() .endArray() .endObject(); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); if (random().nextBoolean()) { // sometimes read the start array token, sometimes not assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); } assertEquals(Arrays.asList(singletonMap("foo", "bar"), emptyMap()), parser.list()); } }
// Two sibling aggregations sharing the same (random) name must be rejected with an
// IllegalArgumentException that names the duplicate.
public void testSameAggregationName() throws Exception { final String name = randomAsciiOfLengthBetween(1, 10); String source = JsonXContent.contentBuilder() .startObject() .startObject(name) .startObject("terms") .field("field", "a") .endObject() .endObject() .startObject(name) .startObject("terms") .field("field", "b") .endObject() .endObject() .endObject() .string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); } catch (IllegalArgumentException e) { assertThat( e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]")); } }
@Override public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource, boolean useFiltering) throws IOException { XContentType contentType = XContentType.fromMediaTypeOrFormat(format); if (contentType == null) { // try and guess it from the auto detect source if (autoDetectSource != null) { contentType = XContentFactory.xContentType(autoDetectSource); } } if (contentType == null) { // default to JSON contentType = XContentType.JSON; } Set<String> includes = Collections.emptySet(); Set<String> excludes = Collections.emptySet(); if (useFiltering) { Set<String> filters = Strings.splitStringByCommaToSet(filterPath); includes = filters.stream().filter(INCLUDE_FILTER).collect(toSet()); excludes = filters.stream().filter(EXCLUDE_FILTER).map(f -> f.substring(1)).collect(toSet()); } XContentBuilder builder = new XContentBuilder( XContentFactory.xContent(contentType), bytesOutput(), includes, excludes); if (pretty) { builder.prettyPrint().lfAtEnd(); } builder.humanReadable(human); return builder; }
/**
 * Serializes an {@link AliasMetaData} entry (filter plus index/search routing) into the given
 * builder under an object named after the alias.
 *
 * <p>With the "binary" param set, the filter is emitted in its compressed form; otherwise it is
 * uncompressed, parsed back into an ordered map and emitted as structured content.
 */
public static void toXContent(
    AliasMetaData aliasMetaData, XContentBuilder builder, ToXContent.Params params)
    throws IOException {
  builder.startObject(aliasMetaData.alias(), XContentBuilder.FieldCaseConversion.NONE);
  boolean binary = params.paramAsBoolean("binary", false);
  if (aliasMetaData.filter() != null) {
    if (binary) {
      builder.field("filter", aliasMetaData.filter.compressed());
    } else {
      byte[] data = aliasMetaData.filter().uncompressed();
      Map<String, Object> filter;
      // try-with-resources: the parser was previously leaked if mapOrdered() threw
      try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
        filter = parser.mapOrdered();
      }
      builder.field("filter", filter);
    }
  }
  if (aliasMetaData.indexRouting() != null) {
    builder.field("index_routing", aliasMetaData.indexRouting());
  }
  if (aliasMetaData.searchRouting() != null) {
    builder.field("search_routing", aliasMetaData.searchRouting());
  }
  builder.endObject();
}
// A sub-aggregation ("tag_count") whose aggregation type object is missing (only a bare "field"
// property remains) must be rejected with a ParsingException.
public void testMissingType() throws Exception { String source = JsonXContent.contentBuilder() .startObject() .startObject("by_date") .startObject("date_histogram") .field("field", "timestamp") .field("interval", "month") .endObject() .startObject("aggs") .startObject("tag_count") // the aggregation type is missing // .startObject("cardinality") .field("field", "tag") // .endObject() .endObject() .endObject() .endObject() .endObject() .string(); try { XContentParser parser = XContentFactory.xContent(source).createParser(source); QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); } catch (ParsingException e) { // All Good } }
/**
 * A parser for the contents of this request if it has contents, otherwise a parser for the
 * {@code source} parameter if there is one, otherwise throws an
 * {@link ElasticsearchParseException}. Use
 * {@link #withContentOrSourceParamParserOrNull(CheckedConsumer)} instead if you need to handle
 * the absence request content gracefully.
 */
public final XContentParser contentOrSourceParamParser() throws IOException {
  BytesReference body = contentOrSourceParam();
  // an empty body means neither request content nor a source param was provided
  if (body.length() == 0) {
    throw new ElasticsearchParseException("Body required");
  }
  return XContentFactory.xContent(body).createParser(body);
}
/**
 * Parses the JSON body of the given REST response into a map.
 *
 * <p>The parser is closed via try-with-resources; it was previously leaked.
 */
private Map<String, Object> parseBody(RestResponse response) throws IOException {
  try (XContentParser parser =
      XContentFactory.xContent(XContentType.JSON)
          .createParser(
              response.bufferForBody().array(),
              response.bufferForBody().arrayOffset(),
              response.bufferForBody().remaining())) {
    return parser.map();
  }
}
/**
 * Renders this decay function as XContent: the field name, the stored function definition
 * (copied structurally from {@code functionBytes}) and the multi-value mode.
 */
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
  builder.startObject(getName());
  builder.field(fieldName);
  // try-with-resources: the parser was previously never closed
  try (XContentParser parser =
      XContentFactory.xContent(functionBytes).createParser(functionBytes)) {
    builder.copyCurrentStructure(parser);
  }
  builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name());
  builder.endObject();
}
/**
 * Parses the given mapping source and registers it under {@code type}.
 *
 * @param type the mapping type name
 * @param source the mapping definition as a string (JSON/SMILE/YAML, sniffed from content)
 */
public Builder putMapping(String type, String source) throws IOException {
  XContentParser mappingParser = XContentFactory.xContent(source).createParser(source);
  try {
    putMapping(new MappingMetaData(type, mappingParser.mapOrdered()));
  } finally {
    // always release the parser, even when parsing fails
    mappingParser.close();
  }
  return this;
}
/**
 * Parses repository definition. JSON, Smile and YAML formats are supported
 *
 * @param repositoryDefinition repository definition
 */
public PutRepositoryRequest source(BytesReference repositoryDefinition) {
  try {
    return source(
        XContentFactory.xContent(repositoryDefinition)
            .createParser(repositoryDefinition)
            .mapOrderedAndClose());
  } catch (IOException e) {
    // message previously said "template source" — copy/paste from PutIndexTemplateRequest;
    // the byte[] overload of this method already uses the correct wording
    throw new ElasticsearchIllegalArgumentException("failed to parse repository source", e);
  }
}
/**
 * Parses repository definition. JSON, Smile and YAML formats are supported
 *
 * @param repositoryDefinition repository definition
 * @param offset offset into the array where the definition starts
 * @param length number of bytes making up the definition
 */
public PutRepositoryRequest source(byte[] repositoryDefinition, int offset, int length) {
  try {
    // parse the sub-range of the array; mapOrderedAndClose() also closes the parser
    return source(
        XContentFactory.xContent(repositoryDefinition, offset, length)
            .createParser(repositoryDefinition, offset, length)
            .mapOrderedAndClose());
  } catch (IOException e) {
    throw new ElasticsearchIllegalArgumentException("failed to parse repository source", e);
  }
}
/**
 * Serializes an {@link IndexMetaData} instance (version, state, settings, mappings, customs,
 * aliases and active allocation ids) into the given builder.
 *
 * <p>With the "binary" param set, mappings are emitted in their compressed form; otherwise each
 * one is uncompressed, parsed back into an ordered map and emitted as structured content.
 */
public static void toXContent(
    IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params)
    throws IOException {
  builder.startObject(
      indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE);
  builder.field("version", indexMetaData.getVersion());
  builder.field("state", indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH));
  boolean binary = params.paramAsBoolean("binary", false);
  builder.startObject("settings");
  for (Map.Entry<String, String> entry : indexMetaData.getSettings().getAsMap().entrySet()) {
    builder.field(entry.getKey(), entry.getValue());
  }
  builder.endObject();
  builder.startArray("mappings");
  for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
    if (binary) {
      builder.value(cursor.value.source().compressed());
    } else {
      byte[] data = cursor.value.source().uncompressed();
      Map<String, Object> mapping;
      // try-with-resources: the parser was previously leaked if mapOrdered() threw
      try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
        mapping = parser.mapOrdered();
      }
      builder.map(mapping);
    }
  }
  builder.endArray();
  for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.getCustoms()) {
    builder.startObject(cursor.key, XContentBuilder.FieldCaseConversion.NONE);
    cursor.value.toXContent(builder, params);
    builder.endObject();
  }
  builder.startObject("aliases");
  for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
    AliasMetaData.Builder.toXContent(cursor.value, builder, params);
  }
  builder.endObject();
  builder.startObject(KEY_ACTIVE_ALLOCATIONS);
  for (IntObjectCursor<Set<String>> cursor : indexMetaData.activeAllocationIds) {
    builder.startArray(String.valueOf(cursor.key));
    for (String allocationId : cursor.value) {
      builder.value(allocationId);
    }
    builder.endArray();
  }
  builder.endObject();
  builder.endObject();
}
private void parseSource(SearchContext context, BytesReference source) throws SearchParseException { // nothing to parse... if (source == null || source.length() == 0) { return; } XContentParser parser = null; try { parser = XContentFactory.xContent(source).createParser(source); XContentParser.Token token; token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException( "Expected START_OBJECT but got " + token.name() + " " + parser.currentName()); } while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { String fieldName = parser.currentName(); parser.nextToken(); SearchParseElement element = elementParsers.get(fieldName); if (element == null) { throw new SearchParseException( context, "No parser for element [" + fieldName + "]", parser.getTokenLocation()); } element.parse(parser, context); } else { if (token == null) { throw new ElasticsearchParseException( "End of query source reached but query is not complete."); } else { throw new ElasticsearchParseException( "Expected field name but got " + token.name() + " \"" + parser.currentName() + "\""); } } } } catch (Throwable e) { String sSource = "_na_"; try { sSource = XContentHelper.convertToJson(source, false); } catch (Throwable e1) { // ignore } throw new SearchParseException( context, "Failed to parse source [" + sSource + "]", parser.getTokenLocation(), e); } finally { if (parser != null) { parser.close(); } } }
/**
 * Sets the template source definition from a string (JSON/SMILE/YAML, sniffed from content).
 *
 * @throws ElasticSearchIllegalArgumentException if the string cannot be parsed; the failing
 *     source is included in the message
 */
public PutIndexTemplateRequest source(String templateSource) {
  try {
    // mapOrderedAndClose() both reads the full map and closes the parser
    return source(
        XContentFactory.xContent(templateSource)
            .createParser(templateSource)
            .mapOrderedAndClose());
  } catch (Exception e) {
    throw new ElasticSearchIllegalArgumentException(
        "failed to parse template source [" + templateSource + "]", e);
  }
}
/**
 * Parses restore definition
 *
 * <p>JSON, YAML and properties formats are supported
 *
 * @param source restore definition
 * @return this request
 */
public RestoreSnapshotRequest source(String source) {
  if (hasLength(source)) {
    try {
      return source(XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
    } catch (Exception e) {
      // message previously said "repository source" — copy/paste from PutRepositoryRequest;
      // this method parses a restore definition
      throw new ElasticSearchIllegalArgumentException(
          "failed to parse restore source [" + source + "]", e);
    }
  }
  return this;
}
// Asserts that a WrapperQueryBuilder produces the same Lucene query as directly parsing and
// building its wrapped source: re-parses queryBuilder.source() in a copied shard context and
// compares the resulting query to the one under test. The source parser is closed via
// try-with-resources.
@Override protected void doAssertLuceneQuery( WrapperQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { try (XContentParser qSourceParser = XContentFactory.xContent(queryBuilder.source()).createParser(queryBuilder.source())) { final QueryShardContext contextCopy = new QueryShardContext(context); contextCopy.reset(qSourceParser); QueryBuilder<?> innerQuery = contextCopy.parseContext().parseInnerQueryBuilder(); Query expected = innerQuery.toQuery(context); assertThat(query, equalTo(expected)); } }
// Loads fieldstats-index-constraints-request.json from the classpath into a FieldStatsRequest
// and verifies the five parsed field names plus all eight index constraints (field, value,
// min/max property, comparison operator, and — for the date constraints — the optional format).
public void testFieldsParsing() throws Exception { BytesArray data = new BytesArray( StreamsUtils.copyToBytesFromClasspath( "/org/elasticsearch/action/fieldstats/" + "fieldstats-index-constraints-request.json")); FieldStatsRequest request = new FieldStatsRequest(); request.source(XContentFactory.xContent(data).createParser(data)); assertThat(request.getFields().length, equalTo(5)); assertThat(request.getFields()[0], equalTo("field1")); assertThat(request.getFields()[1], equalTo("field2")); assertThat(request.getFields()[2], equalTo("field3")); assertThat(request.getFields()[3], equalTo("field4")); assertThat(request.getFields()[4], equalTo("field5")); assertThat(request.getIndexConstraints().length, equalTo(8)); assertThat(request.getIndexConstraints()[0].getField(), equalTo("field2")); assertThat(request.getIndexConstraints()[0].getValue(), equalTo("9")); assertThat(request.getIndexConstraints()[0].getProperty(), equalTo(MAX)); assertThat(request.getIndexConstraints()[0].getComparison(), equalTo(GTE)); assertThat(request.getIndexConstraints()[1].getField(), equalTo("field3")); assertThat(request.getIndexConstraints()[1].getValue(), equalTo("5")); assertThat(request.getIndexConstraints()[1].getProperty(), equalTo(MIN)); assertThat(request.getIndexConstraints()[1].getComparison(), equalTo(GT)); assertThat(request.getIndexConstraints()[2].getField(), equalTo("field4")); assertThat(request.getIndexConstraints()[2].getValue(), equalTo("a")); assertThat(request.getIndexConstraints()[2].getProperty(), equalTo(MIN)); assertThat(request.getIndexConstraints()[2].getComparison(), equalTo(GTE)); assertThat(request.getIndexConstraints()[3].getField(), equalTo("field4")); assertThat(request.getIndexConstraints()[3].getValue(), equalTo("g")); assertThat(request.getIndexConstraints()[3].getProperty(), equalTo(MAX)); assertThat(request.getIndexConstraints()[3].getComparison(), equalTo(LTE)); assertThat(request.getIndexConstraints()[4].getField(), equalTo("field5")); 
assertThat(request.getIndexConstraints()[4].getValue(), equalTo("2")); assertThat(request.getIndexConstraints()[4].getProperty(), equalTo(MAX)); assertThat(request.getIndexConstraints()[4].getComparison(), equalTo(GT)); assertThat(request.getIndexConstraints()[5].getField(), equalTo("field5")); assertThat(request.getIndexConstraints()[5].getValue(), equalTo("9")); assertThat(request.getIndexConstraints()[5].getProperty(), equalTo(MAX)); assertThat(request.getIndexConstraints()[5].getComparison(), equalTo(LT)); assertThat(request.getIndexConstraints()[6].getField(), equalTo("field1")); assertThat(request.getIndexConstraints()[6].getValue(), equalTo("2014-01-01")); assertThat(request.getIndexConstraints()[6].getProperty(), equalTo(MIN)); assertThat(request.getIndexConstraints()[6].getComparison(), equalTo(GTE)); assertThat(request.getIndexConstraints()[6].getOptionalFormat(), equalTo("date_optional_time")); assertThat(request.getIndexConstraints()[7].getField(), equalTo("field1")); assertThat(request.getIndexConstraints()[7].getValue(), equalTo("2015-01-01")); assertThat(request.getIndexConstraints()[7].getProperty(), equalTo(MAX)); assertThat(request.getIndexConstraints()[7].getComparison(), equalTo(LT)); assertThat(request.getIndexConstraints()[7].getOptionalFormat(), equalTo("date_optional_time")); }
/** Sets the settings and mappings as a single source. */
public CreateIndexRequest source(BytesReference source) {
  XContentType xContentType = XContentFactory.xContentType(source);
  if (xContentType == null) {
    // not a recognized structured format: treat the bytes as flat settings text
    settings(source.utf8ToString());
    return this;
  }
  try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
    source(parser.map());
  } catch (IOException e) {
    throw new ElasticsearchParseException("failed to parse source for create index", e);
  }
  return this;
}
// An empty inner "query" : { } clause inside has_parent must parse to an absent Optional under
// lenient matching (with a deprecation warning header), and must throw an
// IllegalArgumentException pointing at the clause location under STRICT matching.
/** we resolve empty inner clauses by representing this whole query as empty optional upstream */ public void testFromJsonEmptyQueryBody() throws IOException { String query = "{\n" + " \"has_parent\" : {\n" + " \"query\" : { },\n" + " \"parent_type\" : \"blog\"" + " }" + "}"; XContentParser parser = XContentFactory.xContent(query).createParser(query); QueryParseContext context = createParseContext(parser, ParseFieldMatcher.EMPTY); Optional<QueryBuilder> innerQueryBuilder = context.parseInnerQueryBuilder(); assertTrue(innerQueryBuilder.isPresent() == false); checkWarningHeaders("query malformed, empty clause found at [3:17]"); parser = XContentFactory.xContent(query).createParser(query); QueryParseContext otherContext = createParseContext(parser, ParseFieldMatcher.STRICT); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> otherContext.parseInnerQueryBuilder()); assertThat(ex.getMessage(), equalTo("query malformed, empty clause found at [3:17]")); checkWarningHeaders("query malformed, empty clause found at [3:17]"); }
/**
 * Parses restore definition
 *
 * <p>JSON, YAML and properties formats are supported
 *
 * @param source restore definition
 * @param offset offset
 * @param length length
 * @return this request
 */
public RestoreSnapshotRequest source(byte[] source, int offset, int length) {
  if (length > 0) {
    try {
      return source(
          XContentFactory.xContent(source, offset, length)
              .createParser(source, offset, length)
              .mapOrderedAndClose());
    } catch (IOException e) {
      // message previously said "repository source" — copy/paste from PutRepositoryRequest;
      // this method parses a restore definition
      throw new ElasticSearchIllegalArgumentException("failed to parse restore source", e);
    }
  }
  return this;
}