static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
  QueryParseContext context =
      new QueryParseContext(new Index("test"), SearchContext.current().queryParserService());
  XContentParser parser = XContentHelper.createParser(queryBuilder.buildAsBytes());
  context.reset(parser);
  return context.parseInnerQuery();
}
@Override
public PhraseTooLargeActionModule parse(QueryParseContext parseContext) throws IOException {
  PhraseTooLargeActionModule module = new PhraseTooLargeActionModule();
  XContentParser parser = parseContext.parser();
  String currentFieldName = null;
  XContentParser.Token token;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token.isValue()) {
      switch (currentFieldName) {
        case "max_terms_per_query":
        case "maxTermsPerQuery":
          module.maxTermsPerQuery(parser.intValue());
          break;
        case "max_terms_in_all_queries":
        case "maxTermsInAllQueries":
          module.maxTermsInAllQueries(parser.intValue());
          break;
        case "phrase_too_large_action":
        case "phraseTooLargeAction":
          module.phraseTooLargeAction(PhraseTooLargeAction.parse(parser.text()));
          break;
        default:
          throw new QueryParsingException(
              parseContext.index(),
              "[safer][phrase] query does not support the field [" + currentFieldName + "]");
      }
    } else {
      throw new QueryParsingException(
          parseContext.index(), "[safer][phrase] only supports values, not objects.");
    }
  }
  return module;
}
private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException {
  XContentParser sourceParser =
      PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE
          .xContent()
          .createParser(actual.bytes, actual.offset, actual.length);
  QueryParseContext qsc = indexService.newQueryShardContext().newParseContext(sourceParser);
  assertThat(qsc.parseInnerQueryBuilder().get(), equalTo(expected));
}
private void setPathLevel() {
  ObjectMapper objectMapper = parseContext.nestedScope().getObjectMapper();
  if (objectMapper == null) {
    parentFilter = parseContext.fixedBitSetFilter(NonNestedDocsFilter.INSTANCE);
  } else {
    parentFilter = parseContext.fixedBitSetFilter(objectMapper.nestedTypeFilter());
  }
  childFilter = parseContext.fixedBitSetFilter(nestedObjectMapper.nestedTypeFilter());
  parentObjectMapper = parseContext.nestedScope().nextLevel(nestedObjectMapper);
}
public void filter() throws IOException {
  if (path != null) {
    setPathLevel();
    try {
      innerFilter = parseContext.parseInnerFilter();
    } finally {
      resetPathLevel();
    }
    filterParsed = true;
  } else {
    source = XContentFactory.smileBuilder().copyCurrentStructure(parseContext.parser()).bytes();
  }
  filterFound = true;
}
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
  if (context == null) {
    return super.termFilter(value, context);
  }
  BytesRef bValue = BytesRefs.toBytesRef(value);
  // We use all types because we don't know whether the value is exact or not.
  BytesRef[] typesValues = new BytesRef[context.mapperService().types().size()];
  int i = 0;
  for (String type : context.mapperService().types()) {
    typesValues[i++] = Uid.createUidAsBytes(type, bValue);
  }
  return new TermsFilter(names.indexName(), typesValues);
}
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
  if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
    return super.termsFilter(values, context);
  }
  return new TermsFilter(UidFieldMapper.NAME, Uid.createTypeUids(context.queryTypes(), values));
}
/*
 * The `timeZone` parameter is only applied when:
 * - it is not null
 * - the value to parse is a String (it does not apply to ms-since-epoch values, which are UTC based)
 * - the String to parse does not already have a time zone defined (e.g. `2014-01-01T00:00:00+03:00`)
 */
public Filter rangeFilter(
    QueryParseContext parseContext,
    Object lowerTerm,
    Object upperTerm,
    boolean includeLower,
    boolean includeUpper,
    @Nullable DateTimeZone timeZone,
    @Nullable DateMathParser forcedDateParser,
    @Nullable QueryParseContext context,
    @Nullable Boolean explicitCaching) {
  IndexNumericFieldData fieldData =
      parseContext != null ? (IndexNumericFieldData) parseContext.getForField(this) : null;
  // If the current search context is null, we're parsing a percolator query or an index alias filter.
  if (SearchContext.current() == null) {
    return new LateParsingFilter(
        fieldData,
        lowerTerm,
        upperTerm,
        includeLower,
        includeUpper,
        timeZone,
        forcedDateParser,
        explicitCaching);
  } else {
    return innerRangeFilter(
        fieldData,
        lowerTerm,
        upperTerm,
        includeLower,
        includeUpper,
        timeZone,
        forcedDateParser,
        explicitCaching);
  }
}
@Override
public Query prefixQuery(
    Object value,
    @Nullable MultiTermQuery.RewriteMethod method,
    @Nullable QueryParseContext context) {
  if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
    return super.prefixQuery(value, method, context);
  }
  Collection<String> queryTypes = context.queryTypes();
  if (queryTypes.size() == 1) {
    PrefixQuery prefixQuery =
        new PrefixQuery(
            new Term(
                UidFieldMapper.NAME,
                Uid.createUidAsBytes(
                    Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))));
    if (method != null) {
      prefixQuery.setRewriteMethod(method);
    }
    return prefixQuery;
  }
  BooleanQuery query = new BooleanQuery();
  for (String queryType : queryTypes) {
    PrefixQuery prefixQuery =
        new PrefixQuery(
            new Term(
                UidFieldMapper.NAME,
                Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
    if (method != null) {
      prefixQuery.setRewriteMethod(method);
    }
    query.add(prefixQuery, BooleanClause.Occur.SHOULD);
  }
  return query;
}
@Override
public Filter regexpFilter(
    Object value, int flags, int maxDeterminizedStates, @Nullable QueryParseContext context) {
  if (fieldType.indexOptions() != IndexOptions.NONE || context == null) {
    return super.regexpFilter(value, flags, maxDeterminizedStates, context);
  }
  Collection<String> queryTypes = context.queryTypes();
  if (queryTypes.size() == 1) {
    return new RegexpFilter(
        new Term(
            UidFieldMapper.NAME,
            Uid.createUidAsBytes(
                Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))),
        flags,
        maxDeterminizedStates);
  }
  XBooleanFilter filter = new XBooleanFilter();
  for (String queryType : queryTypes) {
    filter.add(
        new RegexpFilter(
            new Term(
                UidFieldMapper.NAME,
                Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))),
            flags,
            maxDeterminizedStates),
        BooleanClause.Occur.SHOULD);
  }
  return filter;
}
/**
 * Parses a string into the corresponding GapPolicy enum value.
 *
 * @param context the QueryParseContext this parsing takes place in
 * @param text the GapPolicy in string format (e.g. "ignore")
 * @param tokenLocation the location of the token being parsed, used for error reporting
 * @return the matching GapPolicy enum value
 */
public static GapPolicy parse(
    QueryParseContext context, String text, XContentLocation tokenLocation) {
  GapPolicy result = null;
  for (GapPolicy policy : values()) {
    if (context.getParseFieldMatcher().match(text, policy.parseField)) {
      if (result == null) {
        result = policy;
      } else {
        throw new IllegalStateException(
            "Text can be parsed to 2 different gap policies: text=["
                + text
                + "], policies="
                + Arrays.asList(result, policy));
      }
    }
  }
  if (result == null) {
    final List<String> validNames = new ArrayList<>();
    for (GapPolicy policy : values()) {
      validNames.add(policy.getName());
    }
    throw new ParsingException(
        tokenLocation, "Invalid gap policy: [" + text + "], accepted values: " + validNames);
  }
  return result;
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
  if (indexOptions() != IndexOptions.NONE || context == null) {
    return super.termQuery(value, context);
  }
  final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value);
  return new TermsQuery(UidFieldMapper.NAME, uids);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
  if (indexOptions() != IndexOptions.NONE || context == null) {
    return super.termsQuery(values, context);
  }
  return new TermsQuery(
      UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values));
}
public void setPath(String path) {
  this.path = path;
  MapperService.SmartNameObjectMapper smart = parseContext.smartObjectMapper(path);
  if (smart == null) {
    throw new QueryParsingException(
        parseContext.index(),
        "[nested] failed to find nested object under path [" + path + "]");
  }
  childDocumentMapper = smart.docMapper();
  nestedObjectMapper = smart.mapper();
  if (nestedObjectMapper == null) {
    throw new QueryParsingException(
        parseContext.index(),
        "[nested] failed to find nested object under path [" + path + "]");
  }
  if (!nestedObjectMapper.nested().isNested()) {
    throw new QueryParsingException(
        parseContext.index(),
        "[nested] nested object under path [" + path + "] is not of nested type");
  }
}
private static QueryBuilder parseQueryBuilder(
    QueryParseContext context, XContentLocation location) {
  try {
    return context
        .parseInnerQueryBuilder()
        .orElseThrow(
            () -> new ParsingException(location, "Failed to parse inner query, was empty"));
  } catch (IOException e) {
    throw new ParsingException(location, "Failed to parse", e);
  }
}
private Query parseQuery(String type, BytesReference querySource, XContentParser parser) {
  if (type == null) {
    if (parser != null) {
      return queryParserService.parse(parser).query();
    } else {
      return queryParserService.parse(querySource).query();
    }
  }
  String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[] {type});
  try {
    if (parser != null) {
      return queryParserService.parse(parser).query();
    } else {
      return queryParserService.parse(querySource).query();
    }
  } finally {
    QueryParseContext.setTypes(previousTypes);
  }
}
/**
 * Creates a random suggestion builder, renders it to xContent, and parses it back into a new
 * instance that should be equal to the original.
 */
public void testFromXContent() throws IOException {
  Suggesters suggesters = new Suggesters(Collections.emptyMap());
  QueryParseContext context = new QueryParseContext(null);
  context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
  for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
    SuggestBuilder suggestBuilder = createTestModel();
    XContentBuilder xContentBuilder =
        XContentFactory.contentBuilder(randomFrom(XContentType.values()));
    if (randomBoolean()) {
      xContentBuilder.prettyPrint();
    }
    suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
    XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes());
    context.reset(parser);
    SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
    assertNotSame(suggestBuilder, secondSuggestBuilder);
    assertEquals(suggestBuilder, secondSuggestBuilder);
    assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode());
  }
}
public Filter getInnerFilter() throws IOException {
  if (filterParsed) {
    return innerFilter;
  } else {
    if (path == null) {
      throw new QueryParsingException(parseContext.index(), "[nested] requires 'path' field");
    }
    if (!filterFound) {
      throw new QueryParsingException(
          parseContext.index(), "[nested] requires either 'query' or 'filter' field");
    }
    setPathLevel();
    XContentParser old = parseContext.parser();
    try {
      XContentParser innerParser = XContentHelper.createParser(source);
      parseContext.parser(innerParser);
      innerFilter = parseContext.parseInnerFilter();
      filterParsed = true;
      return innerFilter;
    } finally {
      resetPathLevel();
      parseContext.parser(old);
    }
  }
}
public static NestedAggregationBuilder parse(String aggregationName, QueryParseContext context)
    throws IOException {
  String path = null;
  XContentParser.Token token;
  String currentFieldName = null;
  XContentParser parser = context.parser();
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token == XContentParser.Token.VALUE_STRING) {
      if (context.getParseFieldMatcher().match(currentFieldName, NestedAggregator.PATH_FIELD)) {
        path = parser.text();
      } else {
        throw new ParsingException(
            parser.getTokenLocation(),
            "Unknown key for a "
                + token
                + " in ["
                + aggregationName
                + "]: ["
                + currentFieldName
                + "].");
      }
    } else {
      throw new ParsingException(
          parser.getTokenLocation(),
          "Unexpected token " + token + " in [" + aggregationName + "].");
    }
  }
  if (path == null) {
    // the "path" field was not provided, but it is required for a nested aggregation
    throw new ParsingException(
        parser.getTokenLocation(),
        "Missing [path] field for nested aggregation [" + aggregationName + "]");
  }
  return new NestedAggregationBuilder(aggregationName, path);
}
public static Query wrapSmartNameQuery(
    Query query,
    @Nullable MapperService.SmartNameFieldMappers smartFieldMappers,
    QueryParseContext parseContext) {
  if (smartFieldMappers == null) {
    return query;
  }
  if (!smartFieldMappers.hasDocMapper()) {
    return query;
  }
  DocumentMapper docMapper = smartFieldMappers.docMapper();
  return new FilteredQuery(query, parseContext.cacheFilter(docMapper.typeFilter(), null));
}
public static Filter wrapSmartNameFilter(
    Filter filter,
    @Nullable MapperService.SmartNameFieldMappers smartFieldMappers,
    QueryParseContext parseContext) {
  if (smartFieldMappers == null) {
    return filter;
  }
  if (!smartFieldMappers.hasDocMapper()) {
    return filter;
  }
  DocumentMapper docMapper = smartFieldMappers.docMapper();
  return new AndFilter(
      ImmutableList.of(parseContext.cacheFilter(docMapper.typeFilter(), null), filter));
}
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
  if (context == null) {
    return super.termsFilter(values, context);
  }
  List<BytesRef> bValues = new ArrayList<BytesRef>(values.size());
  for (Object value : values) {
    BytesRef bValue = BytesRefs.toBytesRef(value);
    // We use all types because we don't know whether the value is exact or not.
    for (String type : context.mapperService().types()) {
      bValues.add(Uid.createUidAsBytes(type, bValue));
    }
  }
  return new TermsFilter(names.indexName(), bValues);
}
@Override
public Filter rangeFilter(
    QueryParseContext parseContext,
    Object lowerTerm,
    Object upperTerm,
    boolean includeLower,
    boolean includeUpper,
    @Nullable QueryParseContext context) {
  return NumericRangeFieldDataFilter.newLongRange(
      (IndexNumericFieldData) parseContext.getForField(this),
      lowerTerm == null ? null : parseValue(lowerTerm),
      upperTerm == null ? null : parseValue(upperTerm),
      includeLower,
      includeUpper);
}
public static IncludeExclude parseInclude(XContentParser parser, QueryParseContext context)
    throws IOException {
  XContentParser.Token token = parser.currentToken();
  if (token == XContentParser.Token.VALUE_STRING) {
    return new IncludeExclude(parser.text(), null);
  } else if (token == XContentParser.Token.START_ARRAY) {
    return new IncludeExclude(new TreeSet<>(parseArrayToSet(parser)), null);
  } else if (token == XContentParser.Token.START_OBJECT) {
    ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
    String currentFieldName = null;
    Integer partition = null, numPartitions = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
      if (token == XContentParser.Token.FIELD_NAME) {
        currentFieldName = parser.currentName();
      } else if (parseFieldMatcher.match(currentFieldName, PATTERN_FIELD)) {
        // This "include":{"pattern":"foo.*"} syntax is undocumented since 2.0
        // Regexes should be "include":"foo.*"
        return new IncludeExclude(parser.text(), null);
      } else if (parseFieldMatcher.match(currentFieldName, NUM_PARTITIONS_FIELD)) {
        numPartitions = parser.intValue();
      } else if (parseFieldMatcher.match(currentFieldName, PARTITION_FIELD)) {
        partition = parser.intValue();
      } else {
        throw new ElasticsearchParseException(
            "Unknown parameter in Include/Exclude clause: " + currentFieldName);
      }
    }
    if (partition == null) {
      throw new IllegalArgumentException(
          "Missing ["
              + PARTITION_FIELD.getPreferredName()
              + "] parameter for partition-based include");
    }
    if (numPartitions == null) {
      throw new IllegalArgumentException(
          "Missing ["
              + NUM_PARTITIONS_FIELD.getPreferredName()
              + "] parameter for partition-based include");
    }
    return new IncludeExclude(partition, numPartitions);
  } else {
    throw new IllegalArgumentException("Unrecognized token for an include [" + token + "]");
  }
}
@Override
public Query regexpQuery(
    String value,
    int flags,
    int maxDeterminizedStates,
    @Nullable MultiTermQuery.RewriteMethod method,
    @Nullable QueryParseContext context) {
  if (indexOptions() != IndexOptions.NONE || context == null) {
    return super.regexpQuery(value, flags, maxDeterminizedStates, method, context);
  }
  Collection<String> queryTypes = context.queryTypes();
  if (queryTypes.size() == 1) {
    RegexpQuery regexpQuery =
        new RegexpQuery(
            new Term(
                UidFieldMapper.NAME,
                Uid.createUidAsBytes(
                    Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))),
            flags,
            maxDeterminizedStates);
    if (method != null) {
      regexpQuery.setRewriteMethod(method);
    }
    return regexpQuery;
  }
  BooleanQuery.Builder query = new BooleanQuery.Builder();
  for (String queryType : queryTypes) {
    RegexpQuery regexpQuery =
        new RegexpQuery(
            new Term(
                UidFieldMapper.NAME,
                Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))),
            flags,
            maxDeterminizedStates);
    if (method != null) {
      regexpQuery.setRewriteMethod(method);
    }
    query.add(regexpQuery, BooleanClause.Occur.SHOULD);
  }
  return query.build();
}
@Override
public Query prefixQuery(
    String value,
    @Nullable MultiTermQuery.RewriteMethod method,
    @Nullable QueryParseContext context) {
  if (indexOptions() != IndexOptions.NONE || context == null) {
    return super.prefixQuery(value, method, context);
  }
  Collection<String> queryTypes = context.queryTypes();
  BooleanQuery.Builder query = new BooleanQuery.Builder();
  for (String queryType : queryTypes) {
    PrefixQuery prefixQuery =
        new PrefixQuery(
            new Term(
                UidFieldMapper.NAME,
                Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
    if (method != null) {
      prefixQuery.setRewriteMethod(method);
    }
    query.add(prefixQuery, BooleanClause.Occur.SHOULD);
  }
  return query.build();
}
@Override
public void handleRequest(
    final RestRequest request, final RestChannel channel, final Client client) throws IOException {
  DeleteByQueryRequest delete =
      new DeleteByQueryRequest(Strings.splitStringByCommaToArray(request.param("index")));
  delete.indicesOptions(IndicesOptions.fromRequest(request, delete.indicesOptions()));
  delete.routing(request.param("routing"));
  if (request.hasParam("timeout")) {
    delete.timeout(request.paramAsTime("timeout", null));
  }
  if (request.hasContent()) {
    XContentParser requestParser =
        XContentFactory.xContent(request.content()).createParser(request.content());
    QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
    context.reset(requestParser);
    context.parseFieldMatcher(parseFieldMatcher);
    final QueryBuilder<?> builder = context.parseInnerQueryBuilder();
    delete.query(builder);
  } else {
    String source = request.param("source");
    if (source != null) {
      XContentParser requestParser = XContentFactory.xContent(source).createParser(source);
      QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
      context.reset(requestParser);
      final QueryBuilder<?> builder = context.parseInnerQueryBuilder();
      delete.query(builder);
    } else {
      QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
      if (queryBuilder != null) {
        delete.query(queryBuilder);
      }
    }
  }
  delete.types(Strings.splitStringByCommaToArray(request.param("type")));
  client.execute(INSTANCE, delete, new RestToXContentListener<DeleteByQueryResponse>(channel));
}
public static ScriptedMetricAggregationBuilder parse(
    String aggregationName, QueryParseContext context) throws IOException {
  Script initScript = null;
  Script mapScript = null;
  Script combineScript = null;
  Script reduceScript = null;
  Map<String, Object> params = null;
  XContentParser.Token token;
  String currentFieldName = null;
  Set<String> scriptParameters = new HashSet<>();
  scriptParameters.add(INIT_SCRIPT_FIELD.getPreferredName());
  scriptParameters.add(MAP_SCRIPT_FIELD.getPreferredName());
  scriptParameters.add(COMBINE_SCRIPT_FIELD.getPreferredName());
  scriptParameters.add(REDUCE_SCRIPT_FIELD.getPreferredName());
  XContentParser parser = context.parser();
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      currentFieldName = parser.currentName();
    } else if (token == XContentParser.Token.START_OBJECT
        || token == XContentParser.Token.VALUE_STRING) {
      if (context.getParseFieldMatcher().match(currentFieldName, INIT_SCRIPT_FIELD)) {
        initScript =
            Script.parse(
                parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
      } else if (context.getParseFieldMatcher().match(currentFieldName, MAP_SCRIPT_FIELD)) {
        mapScript =
            Script.parse(
                parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
      } else if (context.getParseFieldMatcher().match(currentFieldName, COMBINE_SCRIPT_FIELD)) {
        combineScript =
            Script.parse(
                parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
      } else if (context.getParseFieldMatcher().match(currentFieldName, REDUCE_SCRIPT_FIELD)) {
        reduceScript =
            Script.parse(
                parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
      } else if (token == XContentParser.Token.START_OBJECT
          && context.getParseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
        params = parser.map();
      } else {
        throw new ParsingException(
            parser.getTokenLocation(),
            "Unknown key for a "
                + token
                + " in ["
                + aggregationName
                + "]: ["
                + currentFieldName
                + "].");
      }
    } else {
      throw new ParsingException(
          parser.getTokenLocation(),
          "Unexpected token " + token + " in [" + aggregationName + "].");
    }
  }
  if (mapScript == null) {
    throw new ParsingException(
        parser.getTokenLocation(),
        "map_script field is required in [" + aggregationName + "].");
  }
  ScriptedMetricAggregationBuilder factory =
      new ScriptedMetricAggregationBuilder(aggregationName);
  if (initScript != null) {
    factory.initScript(initScript);
  }
  if (mapScript != null) {
    factory.mapScript(mapScript);
  }
  if (combineScript != null) {
    factory.combineScript(combineScript);
  }
  if (reduceScript != null) {
    factory.reduceScript(reduceScript);
  }
  if (params != null) {
    factory.params(params);
  }
  return factory;
}
public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) {
  parseContext = searchContext.queryParserService().getParseContext();
  parseContext.reset(parser);
}
@Override
protected ShardCountResponse shardOperation(ShardCountRequest request)
    throws ElasticsearchException {
  IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
  IndexShard indexShard = indexService.shardSafe(request.shardId().id());
  SearchShardTarget shardTarget =
      new SearchShardTarget(
          clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id());
  SearchContext context =
      new DefaultSearchContext(
          0,
          new ShardSearchLocalRequest(
              request.types(), request.nowInMillis(), request.filteringAliases()),
          shardTarget,
          indexShard.acquireSearcher("count"),
          indexService,
          indexShard,
          scriptService,
          cacheRecycler,
          pageCacheRecycler,
          bigArrays,
          threadPool.estimatedTimeInMillisCounter());
  SearchContext.setCurrent(context);
  try {
    // TODO: min score should move to be "null" as a value that is not initialized...
    if (request.minScore() != -1) {
      context.minimumScore(request.minScore());
    }
    BytesReference source = request.querySource();
    if (source != null && source.length() > 0) {
      try {
        QueryParseContext.setTypes(request.types());
        context.parsedQuery(indexService.queryParserService().parseQuery(source));
      } finally {
        QueryParseContext.removeTypes();
      }
    }
    final boolean hasTerminateAfterCount = request.terminateAfter() != DEFAULT_TERMINATE_AFTER;
    boolean terminatedEarly = false;
    context.preProcess();
    try {
      long count;
      if (hasTerminateAfterCount) {
        final Lucene.EarlyTerminatingCollector countCollector =
            Lucene.createCountBasedEarlyTerminatingCollector(request.terminateAfter());
        terminatedEarly =
            Lucene.countWithEarlyTermination(context.searcher(), context.query(), countCollector);
        count = countCollector.count();
      } else {
        count = Lucene.count(context.searcher(), context.query());
      }
      return new ShardCountResponse(request.shardId(), count, terminatedEarly);
    } catch (Exception e) {
      throw new QueryPhaseExecutionException(context, "failed to execute count", e);
    }
  } finally {
    // this will also release the index searcher
    context.close();
    SearchContext.removeCurrent();
  }
}