public void testNestedMapInList() throws IOException {
  XContentBuilder builder = XContentFactory.jsonBuilder()
      .startObject()
      .startArray("some_array")
      .startObject().field("foo", "bar").endObject()
      .startObject().endObject()
      .endArray()
      .endObject();

  try (XContentParser parser =
      XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) {
    assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
    assertEquals("some_array", parser.currentName());
    if (random().nextBoolean()) {
      // sometimes read the start array token, sometimes not
      assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
    }
    assertEquals(Arrays.asList(singletonMap("foo", "bar"), emptyMap()), parser.list());
  }
}
/** Populates a request object (pre-populated with defaults) based on a parser. */
public static void parseRequest(TermVectorRequest termVectorRequest, XContentParser parser)
    throws IOException {
  XContentParser.Token token;
  String currentFieldName = null;
  List<String> fields = new ArrayList<>();
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (currentFieldName != null) {
      if (currentFieldName.equals("fields")) {
        if (token == XContentParser.Token.START_ARRAY) {
          while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
            fields.add(parser.text());
          }
        } else {
          throw new ElasticsearchParseException(
              "The parameter fields must be given as an array! Use syntax : \"fields\" : [\"field1\", \"field2\",...]");
        }
      } else if (currentFieldName.equals("offsets")) {
        termVectorRequest.offsets(parser.booleanValue());
      } else if (currentFieldName.equals("positions")) {
        termVectorRequest.positions(parser.booleanValue());
      } else if (currentFieldName.equals("payloads")) {
        termVectorRequest.payloads(parser.booleanValue());
      } else if (currentFieldName.equals("term_statistics")
          || currentFieldName.equals("termStatistics")) {
        termVectorRequest.termStatistics(parser.booleanValue());
      } else if (currentFieldName.equals("field_statistics")
          || currentFieldName.equals("fieldStatistics")) {
        termVectorRequest.fieldStatistics(parser.booleanValue());
      } else if ("_index".equals(currentFieldName)) {
        // the following is important for multi request parsing.
        termVectorRequest.index = parser.text();
      } else if ("_type".equals(currentFieldName)) {
        termVectorRequest.type = parser.text();
      } else if ("_id".equals(currentFieldName)) {
        if (termVectorRequest.doc != null) {
          throw new ElasticsearchParseException(
              "Either \"id\" or \"doc\" can be specified, but not both!");
        }
        termVectorRequest.id = parser.text();
      } else if ("doc".equals(currentFieldName)) {
        if (termVectorRequest.id != null) {
          throw new ElasticsearchParseException(
              "Either \"id\" or \"doc\" can be specified, but not both!");
        }
        termVectorRequest.doc(jsonBuilder().copyCurrentStructure(parser));
      } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
        termVectorRequest.routing = parser.text();
      } else {
        throw new ElasticsearchParseException(
            "The parameter " + currentFieldName + " is not valid for term vector request!");
      }
    }
  }
  if (fields.size() > 0) {
    String[] fieldsAsArray = new String[fields.size()];
    termVectorRequest.selectedFields(fields.toArray(fieldsAsArray));
  }
}
public void testNestedList() throws IOException {
  XContentBuilder builder = XContentFactory.jsonBuilder()
      .startObject()
      .startArray("some_array")
      .startArray().endArray()
      .startArray().value(1).value(3).endArray()
      .startArray().value(2).endArray()
      .endArray()
      .endObject();

  try (XContentParser parser =
      XContentFactory.xContent(XContentType.JSON).createParser(builder.string())) {
    assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
    assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
    assertEquals("some_array", parser.currentName());
    if (random().nextBoolean()) {
      // sometimes read the start array token, sometimes not
      assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
    }
    assertEquals(
        Arrays.asList(Collections.<Integer>emptyList(), Arrays.asList(1, 3), Arrays.asList(2)),
        parser.list());
  }
}
public static WrapperQueryBuilder fromXContent(QueryParseContext parseContext)
    throws IOException {
  XContentParser parser = parseContext.parser();

  XContentParser.Token token = parser.nextToken();
  if (token != XContentParser.Token.FIELD_NAME) {
    throw new ParsingException(parser.getTokenLocation(), "[wrapper] query malformed");
  }
  String fieldName = parser.currentName();
  if (!QUERY_FIELD.match(fieldName)) {
    throw new ParsingException(
        parser.getTokenLocation(),
        "[wrapper] query malformed, expected `query` but was " + fieldName);
  }
  parser.nextToken();

  byte[] source = parser.binaryValue();

  parser.nextToken();

  if (source == null) {
    throw new ParsingException(
        parser.getTokenLocation(), "wrapper query has no [query] specified");
  }
  return new WrapperQueryBuilder(source);
}
private List<KnapsackState> get(String name) throws IOException {
  ImmutableList.Builder<KnapsackState> builder = ImmutableList.builder();
  try {
    logger.debug("get knapsack states: {}", name);
    final Client client = injector.getInstance(Client.class);
    createIndexIfNotExist(client);
    GetResponse getResponse =
        client.prepareGet(INDEX_NAME, MAPPING_NAME, name).execute().actionGet();
    if (!getResponse.isExists()) {
      return builder.build();
    }
    XContentParser parser = xContent(JSON).createParser(getResponse.getSourceAsBytes());
    while (parser.nextToken() != START_ARRAY) {
      // forward
    }
    while (parser.nextToken() != END_ARRAY) {
      KnapsackState state = new KnapsackState();
      builder.add(state.fromXContent(parser));
    }
    return builder.build();
  } catch (Throwable t) {
    logger.error("get settings failed", t);
    return null;
  }
}
@Override
public SetSection parse(RestTestSuiteParseContext parseContext)
    throws IOException, RestTestParseException {
  XContentParser parser = parseContext.parser();

  String currentFieldName = null;
  XContentParser.Token token;

  SetSection setSection = new SetSection();

  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token.isValue()) {
      setSection.addSet(currentFieldName, parser.text());
    }
  }

  parser.nextToken();

  if (setSection.getStash().isEmpty()) {
    throw new RestTestParseException("set section must set at least a value");
  }

  return setSection;
}
private final void parseMissingAndAdd(
    final String aggregationName,
    final String currentFieldName,
    XContentParser parser,
    final Map<String, Object> missing)
    throws IOException {
  XContentParser.Token token = parser.currentToken();
  if (token == null) {
    token = parser.nextToken();
  }

  if (token == XContentParser.Token.FIELD_NAME) {
    final String fieldName = parser.currentName();
    if (missing.containsKey(fieldName)) {
      throw new ParsingException(
          parser.getTokenLocation(),
          "Missing field ["
              + fieldName
              + "] already defined as ["
              + missing.get(fieldName)
              + "] in ["
              + aggregationName
              + "].");
    }
    parser.nextToken();
    missing.put(fieldName, parser.objectText());
  } else {
    throw new ParsingException(
        parser.getTokenLocation(),
        "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]");
  }
}
public static AliasMetaData fromXContent(XContentParser parser) throws IOException {
  Builder builder = new Builder(parser.currentName());

  String currentFieldName = null;
  XContentParser.Token token = parser.nextToken();
  if (token == null) {
    // no data...
    return builder.build();
  }
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token == XContentParser.Token.START_OBJECT) {
      if ("filter".equals(currentFieldName)) {
        Map<String, Object> filter = parser.mapOrdered();
        builder.filter(filter);
      }
    } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
      if ("filter".equals(currentFieldName)) {
        builder.filter(new CompressedString(parser.binaryValue()));
      }
    } else if (token == XContentParser.Token.VALUE_STRING) {
      if ("routing".equals(currentFieldName)) {
        builder.routing(parser.text());
      } else if ("index_routing".equals(currentFieldName)
          || "indexRouting".equals(currentFieldName)) {
        builder.indexRouting(parser.text());
      } else if ("search_routing".equals(currentFieldName)
          || "searchRouting".equals(currentFieldName)) {
        builder.searchRouting(parser.text());
      }
    }
  }
  return builder.build();
}
public void testFromXContent() throws IOException {
  SearchSortValues sortValues = createTestItem();
  XContentType xcontentType = randomFrom(XContentType.values());
  XContentBuilder builder = XContentFactory.contentBuilder(xcontentType);
  if (randomBoolean()) {
    builder.prettyPrint();
  }
  // we need to wrap the xContent output in a proper object to create a parser for it
  builder.startObject();
  builder = sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS);
  builder.endObject();
  XContentParser parser = createParser(builder);
  // skip to the elements field name token, fromXContent advances from there if called from outside
  parser.nextToken();
  parser.nextToken();
  if (sortValues.sortValues().length > 0) {
    SearchSortValues parsed = SearchSortValues.fromXContent(parser);
    assertToXContentEquivalent(
        builder.bytes(), toXContent(parsed, xcontentType, true), xcontentType);
    parser.nextToken();
  }
  assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
  assertNull(parser.nextToken());
}
private void parseSource(SearchContext context, BytesReference source)
    throws SearchParseException {
  // nothing to parse...
  if (source == null || source.length() == 0) {
    return;
  }
  XContentParser parser = null;
  try {
    parser = XContentFactory.xContent(source).createParser(source);
    XContentParser.Token token;
    token = parser.nextToken();
    if (token != XContentParser.Token.START_OBJECT) {
      throw new ElasticsearchParseException(
          "Expected START_OBJECT but got " + token.name() + " " + parser.currentName());
    }
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.FIELD_NAME) {
        String fieldName = parser.currentName();
        parser.nextToken();
        SearchParseElement element = elementParsers.get(fieldName);
        if (element == null) {
          throw new SearchParseException(
              context, "No parser for element [" + fieldName + "]", parser.getTokenLocation());
        }
        element.parse(parser, context);
      } else {
        if (token == null) {
          throw new ElasticsearchParseException(
              "End of query source reached but query is not complete.");
        } else {
          throw new ElasticsearchParseException(
              "Expected field name but got " + token.name() + " \"" + parser.currentName() + "\"");
        }
      }
    }
  } catch (Throwable e) {
    String sSource = "_na_";
    try {
      sSource = XContentHelper.convertToJson(source, false);
    } catch (Throwable e1) {
      // ignore
    }
    throw new SearchParseException(
        context, "Failed to parse source [" + sSource + "]", parser.getTokenLocation(), e);
  } finally {
    if (parser != null) {
      parser.close();
    }
  }
}
/** Sets the aliases that will be associated with the index when it gets created */
public CreateIndexRequest aliases(BytesReference source) {
  try (XContentParser parser = XContentHelper.createParser(source)) {
    // move to the first alias
    parser.nextToken();
    while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      alias(Alias.fromXContent(parser));
    }
    return this;
  } catch (IOException e) {
    throw new ElasticsearchParseException("Failed to parse aliases", e);
  }
}
@Override
public PhraseTooLargeActionModule parse(QueryParseContext parseContext) throws IOException {
  PhraseTooLargeActionModule module = new PhraseTooLargeActionModule();
  XContentParser parser = parseContext.parser();
  String currentFieldName = null;
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token.isValue()) {
      switch (currentFieldName) {
        case "max_terms_per_query":
        case "maxTermsPerQuery":
          module.maxTermsPerQuery(parser.intValue());
          break;
        case "max_terms_in_all_queries":
        case "maxTermsInAllQueries":
          module.maxTermsInAllQueries(parser.intValue());
          break;
        case "phrase_too_large_action":
        case "phraseTooLargeAction":
          module.phraseTooLargeAction(PhraseTooLargeAction.parse(parser.text()));
          break;
        default:
          throw new QueryParsingException(
              parseContext.index(),
              "[safer][phrase] query does not support the field [" + currentFieldName + "]");
      }
    } else {
      throw new QueryParsingException(
          parseContext.index(), "[safer][phrase] only supports values, not objects.");
    }
  }
  return module;
}
private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
  String arrayFieldName = lastFieldName;
  Mapper mapper = mappers.get(lastFieldName);
  if (mapper != null && mapper instanceof ArrayValueMapperParser) {
    mapper.parse(context);
  } else {
    XContentParser parser = context.parser();
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
      if (token == XContentParser.Token.START_OBJECT) {
        serializeObject(context, lastFieldName);
      } else if (token == XContentParser.Token.START_ARRAY) {
        serializeArray(context, lastFieldName);
      } else if (token == XContentParser.Token.FIELD_NAME) {
        lastFieldName = parser.currentName();
      } else if (token == XContentParser.Token.VALUE_NULL) {
        serializeNullValue(context, lastFieldName);
      } else if (token == null) {
        throw new MapperParsingException(
            "object mapping for ["
                + name
                + "] with array for ["
                + arrayFieldName
                + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
      } else {
        serializeValue(context, lastFieldName, token);
      }
    }
  }
}
public static TemplateContext parse(
    XContentParser parser, String templateFieldname, String paramsFieldname) throws IOException {
  Map<String, Object> params = null;
  String templateNameOrTemplateContent = null;

  String currentFieldName = null;
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (templateFieldname.equals(currentFieldName)) {
      if (token == XContentParser.Token.START_OBJECT && !parser.hasTextCharacters()) {
        XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent());
        builder.copyCurrentStructure(parser);
        templateNameOrTemplateContent = builder.string();
      } else {
        templateNameOrTemplateContent = parser.text();
      }
    } else if (paramsFieldname.equals(currentFieldName)) {
      params = parser.map();
    }
  }

  return new TemplateContext(templateNameOrTemplateContent, params);
}
private AbstractDistanceScoreFunction parseVariable(
    String fieldName, XContentParser parser, QueryShardContext context, MultiValueMode mode)
    throws IOException {
  // the field must exist, else we cannot read the value for the doc later
  MappedFieldType fieldType = context.fieldMapper(fieldName);
  if (fieldType == null) {
    throw new ParsingException(parser.getTokenLocation(), "unknown field [{}]", fieldName);
  }

  // dates and time need special handling
  parser.nextToken();
  if (fieldType instanceof DateFieldMapper.DateFieldType) {
    return parseDateVariable(parser, context, (DateFieldMapper.DateFieldType) fieldType, mode);
  } else if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) {
    return parseGeoVariable(
        parser, context, (BaseGeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
  } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
    return parseNumberVariable(
        parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode);
  } else {
    throw new ParsingException(
        parser.getTokenLocation(),
        "field [{}] is of type [{}], but only numeric types are supported.",
        fieldName,
        fieldType);
  }
}
public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException {
  String json =
      "{\n"
          + " \"VDcvDuFjE\" : [ \"7umzzv8eychg\", \"dmdgmt5z13uw\", "
          + " \"ezu09wxw6v4c\", \"kc7s3515p6k6\", \"jgeuvjwrmfzn\", \"kcpcfj7ruyf8\" ],\n"
          + " \"unit\" : \"m\",\n"
          + " \"distance_type\" : \"sloppy_arc\",\n"
          + " \"mode\" : \"MAX\",\n"
          + " \"nested_filter\" : {\n"
          + " \"ids\" : {\n"
          + " \"type\" : [ ],\n"
          + " \"values\" : [ ],\n"
          + " \"boost\" : 5.711116\n"
          + " }\n"
          + " },\n"
          + " \"validation_method\" : \"STRICT\"\n"
          + " }";
  XContentParser itemParser = XContentHelper.createParser(new BytesArray(json));
  itemParser.nextToken();

  QueryParseContext context =
      new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT);

  GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(context, json);
  assertEquals(
      "[-19.700583312660456, -2.8225036337971687, "
          + "31.537466906011105, -74.63590376079082, "
          + "43.71844606474042, -5.548660643398762, "
          + "-37.20467280596495, 38.71751043945551, "
          + "-69.44606635719538, 84.25200328230858, "
          + "-39.03717711567879, 44.74099852144718]",
      Arrays.toString(result.points()));
}
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
  XContentParser.Token token = parser.currentToken();
  List<SortField> sortFields = Lists.newArrayListWithCapacity(2);
  if (token == XContentParser.Token.START_ARRAY) {
    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
      if (token == XContentParser.Token.START_OBJECT) {
        addCompoundSortField(parser, context, sortFields);
      } else if (token == XContentParser.Token.VALUE_STRING) {
        addSortField(context, sortFields, parser.text(), false, false, null, null, null, null);
      }
    }
  } else {
    addCompoundSortField(parser, context, sortFields);
  }
  if (!sortFields.isEmpty()) {
    // optimize if we just sort on score non reversed, we don't really need sorting
    boolean sort;
    if (sortFields.size() > 1) {
      sort = true;
    } else {
      SortField sortField = sortFields.get(0);
      if (sortField.getType() == SortField.Type.SCORE && !sortField.getReverse()) {
        sort = false;
      } else {
        sort = true;
      }
    }
    if (sort) {
      context.sort(new Sort(sortFields.toArray(new SortField[sortFields.size()])));
    }
  }
}
public static FieldMaskingSpanQueryBuilder fromXContent(QueryParseContext parseContext)
    throws IOException {
  XContentParser parser = parseContext.parser();

  float boost = AbstractQueryBuilder.DEFAULT_BOOST;

  SpanQueryBuilder inner = null;
  String field = null;
  String queryName = null;
  String currentFieldName = null;
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token == XContentParser.Token.START_OBJECT) {
      if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
        QueryBuilder query = parseContext.parseInnerQueryBuilder();
        if (!(query instanceof SpanQueryBuilder)) {
          throw new ParsingException(
              parser.getTokenLocation(),
              "[field_masking_span] query must be of type span query");
        }
        inner = (SpanQueryBuilder) query;
      } else {
        throw new ParsingException(
            parser.getTokenLocation(),
            "[field_masking_span] query does not support [" + currentFieldName + "]");
      }
    } else {
      if (parseContext
          .getParseFieldMatcher()
          .match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
        boost = parser.floatValue();
      } else if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
        field = parser.text();
      } else if (parseContext
          .getParseFieldMatcher()
          .match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
        queryName = parser.text();
      } else {
        throw new ParsingException(
            parser.getTokenLocation(),
            "[field_masking_span] query does not support [" + currentFieldName + "]");
      }
    }
  }
  if (inner == null) {
    throw new ParsingException(
        parser.getTokenLocation(), "field_masking_span must have [query] span query clause");
  }
  if (field == null) {
    throw new ParsingException(
        parser.getTokenLocation(), "field_masking_span must have [field] set for it");
  }
  FieldMaskingSpanQueryBuilder queryBuilder = new FieldMaskingSpanQueryBuilder(inner, field);
  queryBuilder.boost(boost);
  queryBuilder.queryName(queryName);
  return queryBuilder;
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
  XContentParser parser = parseContext.parser();

  String fieldPattern = null;
  String filterName = null;

  XContentParser.Token token;
  String currentFieldName = null;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token.isValue()) {
      if ("field".equals(currentFieldName)) {
        fieldPattern = parser.text();
      } else if ("_name".equals(currentFieldName)) {
        filterName = parser.text();
      } else {
        throw new QueryParsingException(
            parseContext.index(),
            "[exists] filter does not support [" + currentFieldName + "]");
      }
    }
  }

  if (fieldPattern == null) {
    throw new QueryParsingException(
        parseContext.index(), "exists must be provided with a [field]");
  }

  return newFilter(parseContext, fieldPattern, filterName);
}
@Override
public SignificanceHeuristic parse(
    XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
    throws IOException, QueryShardException {
  parser.nextToken();
  return new SimpleHeuristic();
}
@Override
public void login(final RestRequest request, final ActionListener<String[]> listener) {
  String username = request.param(usernameKey);
  String password = request.param(passwordKey);
  final BytesReference content = request.content();
  final XContentType xContentType = XContentFactory.xContentType(content);
  XContentParser parser = null;
  try {
    parser = XContentFactory.xContent(xContentType).createParser(content);
    final XContentParser.Token t = parser.nextToken();
    if (t != null) {
      final Map<String, Object> contentMap = parser.map();
      username = MapUtil.getAsString(contentMap, usernameKey, username);
      password = MapUtil.getAsString(contentMap, passwordKey, password);
    }
  } catch (final Exception e) {
    listener.onFailure(e);
    return;
  } finally {
    if (parser != null) {
      parser.close();
    }
  }

  if (username == null) {
    listener.onResponse(new String[0]);
    return;
  }

  processLogin(username, password, listener);
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
  XContentParser parser = parseContext.parser();

  Filter filter = null;
  boolean cache = true;

  String currentFieldName = null;
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token == XContentParser.Token.START_OBJECT) {
      if ("filter".equals(currentFieldName)) {
        filter = parseContext.parseInnerFilter();
      }
    } else if (token.isValue()) {
      if ("cache".equals(currentFieldName)) {
        cache = parser.booleanValue();
      }
    }
  }

  if (filter == null) {
    throw new QueryParsingException(index, "filter is required when using `not` filter");
  }

  if (cache) {
    filter = parseContext.cacheFilterIfPossible(filter);
  }
  // no need to cache this one
  return new NotFilter(filter);
}
private static GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception {
  XContentParser parser = XContentHelper.createParser(sortBuilder.bytes());
  QueryParseContext parseContext =
      new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT);
  parser.nextToken();
  return GeoDistanceSortBuilder.fromXContent(parseContext, null);
}
public void testTwoTypes() throws Exception {
  String source =
      JsonXContent.contentBuilder()
          .startObject()
          .startObject("in_stock")
          .startObject("filter")
          .startObject("range")
          .startObject("stock")
          .field("gt", 0)
          .endObject()
          .endObject()
          .endObject()
          .startObject("terms")
          .field("field", "stock")
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    XContentParser parser = XContentFactory.xContent(source).createParser(source);
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    aggParsers.parseAggregators(parseContext);
    fail();
  } catch (ParsingException e) {
    assertThat(
        e.toString(),
        containsString(
            "Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
  }
}
public void testMissingType() throws Exception {
  String source =
      JsonXContent.contentBuilder()
          .startObject()
          .startObject("by_date")
          .startObject("date_histogram")
          .field("field", "timestamp")
          .field("interval", "month")
          .endObject()
          .startObject("aggs")
          .startObject("tag_count")
          // the aggregation type is missing
          // .startObject("cardinality")
          .field("field", "tag")
          // .endObject()
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    XContentParser parser = XContentFactory.xContent(source).createParser(source);
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    aggParsers.parseAggregators(parseContext);
    fail();
  } catch (ParsingException e) {
    // All Good
  }
}
public static Optional<SpanWithinQueryBuilder> fromXContent(QueryParseContext parseContext)
    throws IOException {
  XContentParser parser = parseContext.parser();

  float boost = AbstractQueryBuilder.DEFAULT_BOOST;
  String queryName = null;

  SpanQueryBuilder big = null;
  SpanQueryBuilder little = null;

  String currentFieldName = null;
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token == XContentParser.Token.START_OBJECT) {
      if (parseContext.getParseFieldMatcher().match(currentFieldName, BIG_FIELD)) {
        Optional<QueryBuilder> query = parseContext.parseInnerQueryBuilder();
        if (query.isPresent() == false || query.get() instanceof SpanQueryBuilder == false) {
          throw new ParsingException(
              parser.getTokenLocation(), "span_within [big] must be of type span query");
        }
        big = (SpanQueryBuilder) query.get();
      } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LITTLE_FIELD)) {
        Optional<QueryBuilder> query = parseContext.parseInnerQueryBuilder();
        if (query.isPresent() == false || query.get() instanceof SpanQueryBuilder == false) {
          throw new ParsingException(
              parser.getTokenLocation(), "span_within [little] must be of type span query");
        }
        little = (SpanQueryBuilder) query.get();
      } else {
        throw new ParsingException(
            parser.getTokenLocation(),
            "[span_within] query does not support [" + currentFieldName + "]");
      }
    } else if (parseContext
        .getParseFieldMatcher()
        .match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
      boost = parser.floatValue();
    } else if (parseContext
        .getParseFieldMatcher()
        .match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
      queryName = parser.text();
    } else {
      throw new ParsingException(
          parser.getTokenLocation(),
          "[span_within] query does not support [" + currentFieldName + "]");
    }
  }

  if (big == null) {
    throw new ParsingException(parser.getTokenLocation(), "span_within must include [big]");
  }
  if (little == null) {
    throw new ParsingException(parser.getTokenLocation(), "span_within must include [little]");
  }

  SpanWithinQueryBuilder query = new SpanWithinQueryBuilder(big, little);
  query.boost(boost).queryName(queryName);
  return Optional.of(query);
}
public void testSameAggregationName() throws Exception {
  final String name = randomAsciiOfLengthBetween(1, 10);
  String source =
      JsonXContent.contentBuilder()
          .startObject()
          .startObject(name)
          .startObject("terms")
          .field("field", "a")
          .endObject()
          .endObject()
          .startObject(name)
          .startObject("terms")
          .field("field", "b")
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    XContentParser parser = XContentFactory.xContent(source).createParser(source);
    QueryParseContext parseContext =
        new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    aggParsers.parseAggregators(parseContext);
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(
        e.toString(),
        containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
  }
}
/**
 * Parse a field as though it were a string.
 *
 * @param context parse context used during parsing
 * @param nullValue value to use for null
 * @param defaultBoost default boost value returned unless overwritten in the field
 * @return the parsed field and the boost either parsed or defaulted
 * @throws IOException if thrown while parsing
 */
public static ValueAndBoost parseCreateFieldForString(
    ParseContext context, String nullValue, float defaultBoost) throws IOException {
  if (context.externalValueSet()) {
    return new ValueAndBoost(context.externalValue().toString(), defaultBoost);
  }
  XContentParser parser = context.parser();
  if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
    return new ValueAndBoost(nullValue, defaultBoost);
  }
  if (parser.currentToken() == XContentParser.Token.START_OBJECT
      && Version.indexCreated(context.indexSettings()).before(Version.V_3_0_0)) {
    XContentParser.Token token;
    String currentFieldName = null;
    String value = nullValue;
    float boost = defaultBoost;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.FIELD_NAME) {
        currentFieldName = parser.currentName();
      } else {
        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
          value = parser.textOrNull();
        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
          boost = parser.floatValue();
        } else {
          throw new IllegalArgumentException("unknown property [" + currentFieldName + "]");
        }
      }
    }
    return new ValueAndBoost(value, boost);
  }
  return new ValueAndBoost(parser.textOrNull(), defaultBoost);
}
@Override
protected ShardSuggestResponse shardOperation(ShardSuggestRequest request)
    throws ElasticSearchException {
  IndexService indexService = indicesService.indexServiceSafe(request.index());
  IndexShard indexShard = indexService.shardSafe(request.shardId());
  final Engine.Searcher searcher = indexShard.searcher();
  XContentParser parser = null;
  try {
    BytesReference suggest = request.suggest();
    if (suggest != null && suggest.length() > 0) {
      parser = XContentFactory.xContent(suggest).createParser(suggest);
      if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
        throw new ElasticSearchIllegalArgumentException("suggest content missing");
      }
      final SuggestionSearchContext context =
          suggestPhase
              .parseElement()
              .parseInternal(
                  parser, indexService.mapperService(), request.index(), request.shardId());
      final Suggest result = suggestPhase.execute(context, searcher.reader());
      return new ShardSuggestResponse(request.index(), request.shardId(), result);
    }
    return new ShardSuggestResponse(request.index(), request.shardId(), new Suggest());
  } catch (Throwable ex) {
    throw new ElasticSearchException("failed to execute suggest", ex);
  } finally {
    searcher.release();
    if (parser != null) {
      parser.close();
    }
  }
}
private static ObjectMapper parseNonDynamicArray(
    ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName)
    throws IOException {
  XContentParser parser = context.parser();
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
    if (token == XContentParser.Token.START_OBJECT) {
      return parseObject(context, mapper, lastFieldName);
    } else if (token == XContentParser.Token.START_ARRAY) {
      return parseArray(context, mapper, lastFieldName);
    } else if (token == XContentParser.Token.FIELD_NAME) {
      lastFieldName = parser.currentName();
    } else if (token == XContentParser.Token.VALUE_NULL) {
      parseNullValue(context, mapper, lastFieldName);
    } else if (token == null) {
      throw new MapperParsingException(
          "object mapping for ["
              + mapper.name()
              + "] with array for ["
              + arrayFieldName
              + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
    } else {
      return parseValue(context, mapper, lastFieldName, token);
    }
  }
  return null;
}