@Override
public Mapper parse(ParseContext context) throws IOException {
  QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
  if (context.doc().getField(queryBuilderField.name()) != null) {
    // If a percolator query has been defined in an array object then multiple percolator queries
    // could be provided. In order to prevent this we fail if we try to parse more than one query
    // for the current document.
    throw new IllegalArgumentException("a document can only contain one percolator query");
  }

  XContentParser parser = context.parser();
  QueryBuilder queryBuilder =
      parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation());
  verifyQuery(queryBuilder);
  // Fetching of terms, shapes and indexed scripts happens during this rewrite:
  queryBuilder = queryBuilder.rewrite(queryShardContext);

  try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
    queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
    builder.flush();
    byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
    context
        .doc()
        .add(
            new Field(
                queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType()));
  }

  Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
  processQuery(query, context);
  return null;
}
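For context, a percolator query is registered by indexing a document whose query field carries the query DSL that parse(...) above consumes. A minimal sketch, assuming the standard transport client; the index name, type, and query are hypothetical:

// Hedged sketch: register a single percolator query. parse(...) above rejects
// documents that carry more than one query for the percolator field.
client
    .prepareIndex("my-index", "doc", "1")
    .setSource("{ \"query\": { \"term\": { \"message\": \"bonsai tree\" } } }")
    .get();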
static Query toQuery(
    QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder)
    throws IOException {
  // This means that fields in the query need to exist in the mapping prior to registering this
  // query.
  // The reason this is required is that if a field doesn't exist then the query assumes
  // defaults, which may be undesired.
  //
  // Worse, if fields mentioned in percolator queries are added to the mapping after the queries
  // have been registered, the percolator queries no longer work as expected.
  //
  // Query parsing can't introduce new fields in mappings (which happens when registering a
  // percolator query), because a field type can't be inferred from a query (as it can be from a
  // document), so the best option here is to disallow the usage of unmapped fields in percolator
  // queries to avoid unexpected behaviour.
  //
  // If index.percolator.map_unmapped_fields_as_string is set to true, the query can contain
  // unmapped fields, which will be mapped as an analyzed string.
  context.setAllowUnmappedFields(false);
  context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
  return queryBuilder.toQuery(context);
}
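The map_unmapped_fields_as_string escape hatch referenced in the comment is an index-level setting. A minimal sketch of enabling it at index-creation time; the index name is hypothetical:

// Hedged sketch: with this setting, toQuery(...) maps unmapped fields in percolator
// queries as analyzed strings instead of rejecting the query outright.
client
    .admin()
    .indices()
    .prepareCreate("my-index")
    .setSettings(Settings.builder().put("index.percolator.map_unmapped_fields_as_string", true))
    .get();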
private AbstractDistanceScoreFunction parseDateVariable(
    XContentParser parser,
    QueryShardContext context,
    DateFieldMapper.DateFieldType dateFieldType,
    MultiValueMode mode)
    throws IOException {
  XContentParser.Token token;
  String parameterName = null;
  String scaleString = null;
  String originString = null;
  String offsetString = "0d";
  double decay = 0.5;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      parameterName = parser.currentName();
    } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
      scaleString = parser.text();
    } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
      originString = parser.text();
    } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
      decay = parser.doubleValue();
    } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
      offsetString = parser.text();
    } else {
      throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
    }
  }
  long origin;
  if (originString == null) {
    origin = context.nowInMillis();
  } else {
    origin = dateFieldType.parseToMilliseconds(originString, false, null, null);
  }

  if (scaleString == null) {
    throw new ElasticsearchParseException(
        "[{}] must be set for date fields.", DecayFunctionBuilder.SCALE);
  }
  TimeValue val =
      TimeValue.parseTimeValue(
          scaleString,
          TimeValue.timeValueHours(24),
          DecayFunctionParser.class.getSimpleName() + ".scale");
  double scale = val.getMillis();
  val =
      TimeValue.parseTimeValue(
          offsetString,
          TimeValue.timeValueHours(24),
          DecayFunctionParser.class.getSimpleName() + ".offset");
  double offset = val.getMillis();
  IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
  return new NumericFieldDataScoreFunction(
      origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}
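The clause shape this method consumes is easiest to see in DSL form. A hedged sketch with a hypothetical field name and values, written in the concatenated-string style the tests in this section use:

// Hedged sketch of a date decay clause as read by parseDateVariable: "scale" is
// required, "origin" defaults to now, "offset" to "0d", and "decay" to 0.5.
String dateDecay =
    "{"
        + " \"timestamp\": {"
        + " \"origin\": \"2016-01-01\","
        + " \"scale\": \"10d\","
        + " \"offset\": \"2d\","
        + " \"decay\": 0.5"
        + " }"
        + "}";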
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
  try {
    SearchScript searchScript =
        context
            .getScriptService()
            .search(context.lookup(), script, ScriptContext.Standard.SEARCH);
    return new ScriptScoreFunction(script, searchScript);
  } catch (Exception e) {
    throw new QueryShardException(context, "script_score: the script could not be loaded", e);
  }
}
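A hedged sketch of the kind of query that reaches doToFunction(...) above, built with the Java query builders; the script body is a hypothetical example:

// Hedged sketch: a function_score query whose script_score function is compiled by
// doToFunction(...). A script that fails to load surfaces as the QueryShardException above.
QueryBuilder q =
    QueryBuilders.functionScoreQuery(
        ScoreFunctionBuilders.scriptFunction(new Script("_score * doc['boost'].value")));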
@Override
protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
  IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
  IndexShard indexShard = indexService.getShard(request.shardId().id());
  final QueryShardContext queryShardContext = indexService.newQueryShardContext();
  queryShardContext.setTypes(request.types());

  boolean valid;
  String explanation = null;
  String error = null;
  Engine.Searcher searcher = indexShard.acquireSearcher("validate_query");

  DefaultSearchContext searchContext =
      new DefaultSearchContext(
          0,
          new ShardSearchLocalRequest(
              request.types(), request.nowInMillis(), request.filteringAliases()),
          null,
          searcher,
          indexService,
          indexShard,
          scriptService,
          pageCacheRecycler,
          bigArrays,
          threadPool.estimatedTimeInMillisCounter(),
          parseFieldMatcher,
          SearchService.NO_TIMEOUT);
  SearchContext.setCurrent(searchContext);
  try {
    searchContext.parsedQuery(queryShardContext.toQuery(request.query()));
    searchContext.preProcess();

    valid = true;
    if (request.rewrite()) {
      explanation = getRewrittenQuery(searcher.searcher(), searchContext.query());
    } else if (request.explain()) {
      explanation = searchContext.filteredQuery().query().toString();
    }
  } catch (QueryShardException | ParsingException e) {
    valid = false;
    error = e.getDetailedMessage();
  } catch (AssertionError | IOException e) {
    valid = false;
    error = e.getMessage();
  } finally {
    searchContext.close();
    SearchContext.removeCurrent();
  }

  return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
}
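A hedged sketch of exercising this shard operation through the client-side validate API; the index name and the deliberately malformed query are hypothetical:

// Hedged sketch: isValid() reflects the valid flag computed in shardOperation above,
// and setExplain(true) requests the explanation string.
ValidateQueryResponse response =
    client
        .admin()
        .indices()
        .prepareValidateQuery("my-index")
        .setQuery(QueryBuilders.queryStringQuery("date:[2016 TO"))
        .setExplain(true)
        .get();
assert response.isValid() == false;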
protected Analyzer getAnalyzer(MappedFieldType fieldType) {
  if (this.analyzer == null) {
    if (fieldType != null) {
      return context.getSearchAnalyzer(fieldType);
    }
    return context.getMapperService().searchAnalyzer();
  } else {
    Analyzer analyzer = context.getMapperService().analysisService().analyzer(this.analyzer);
    if (analyzer == null) {
      throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
    }
    return analyzer;
  }
}
public void testMustRewrite() throws Exception {
  String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } ";

  XContentParser templateSourceParser =
      XContentFactory.xContent(templateString).createParser(templateString);
  context.reset();
  templateSourceParser.nextToken();
  try {
    TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser))
        .toQuery(context);
    fail();
  } catch (UnsupportedOperationException ex) {
    assertEquals("this query must be rewritten first", ex.getMessage());
  }
}
public void testParserCanExtractTemplateNames() throws Exception {
  String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } ";

  XContentParser templateSourceParser =
      XContentFactory.xContent(templateString).createParser(templateString);
  context.reset();
  templateSourceParser.nextToken();

  Query query =
      QueryBuilder.rewriteQuery(
              TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),
              context)
          .toQuery(context);
  assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
private AbstractDistanceScoreFunction parseVariable(
    String fieldName, XContentParser parser, QueryShardContext context, MultiValueMode mode)
    throws IOException {
  // the field must exist, else we cannot read the value for the doc later
  MappedFieldType fieldType = context.fieldMapper(fieldName);
  if (fieldType == null) {
    throw new ParsingException(parser.getTokenLocation(), "unknown field [{}]", fieldName);
  }

  // dates and time need special handling
  parser.nextToken();
  if (fieldType instanceof DateFieldMapper.DateFieldType) {
    return parseDateVariable(parser, context, (DateFieldMapper.DateFieldType) fieldType, mode);
  } else if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) {
    return parseGeoVariable(
        parser, context, (BaseGeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
  } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
    return parseNumberVariable(
        parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode);
  } else {
    throw new ParsingException(
        parser.getTokenLocation(),
        "field [{}] is of type [{}], but only numeric types are supported.",
        fieldName,
        fieldType);
  }
}
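The three branches above correspond to three clause shapes in the decay-function DSL. Hedged sketches with hypothetical field names, in the string style the tests elsewhere in this section use:

// Hedged sketches of the clause shapes parseVariable dispatches on:
String dateClause = "{ \"timestamp\": { \"origin\": \"now\", \"scale\": \"10d\" } }";
String geoClause = "{ \"location\": { \"origin\": \"40.7,-74.0\", \"scale\": \"2km\" } }";
String numberClause = "{ \"price\": { \"origin\": 100, \"scale\": 50 } }";
// Any other mapped type ends up in the final ParsingException branch.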
public void testParser() throws IOException {
  String templateString =
      "{"
          + "\"query\":{\"match_{{template}}\": {}},"
          + "\"params\":{\"template\":\"all\"}"
          + "}";

  XContentParser templateSourceParser =
      XContentFactory.xContent(templateString).createParser(templateString);
  context.reset();
  templateSourceParser.nextToken();

  Query query =
      QueryBuilder.rewriteQuery(
              TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),
              context)
          .toQuery(context);
  assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
private Query parse(AliasMetaData alias, QueryShardContext parseContext) {
  if (alias.filter() == null) {
    return null;
  }
  try {
    byte[] filterSource = alias.filter().uncompressed();
    try (XContentParser parser =
        XContentFactory.xContent(filterSource).createParser(filterSource)) {
      ParsedQuery parsedFilter = parseContext.parseInnerFilter(parser);
      return parsedFilter == null ? null : parsedFilter.query();
    }
  } catch (IOException ex) {
    throw new AliasFilterParsingException(
        parseContext.index(), alias.getAlias(), "Invalid alias filter", ex);
  }
}
@Override
public Query termsQuery(List values, @Nullable QueryShardContext context) {
  if (context == null) {
    return super.termsQuery(values, context);
  }

  List<String> types = new ArrayList<>(context.getMapperService().types().size());
  for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
    if (!documentMapper.parentFieldMapper().active()) {
      types.add(documentMapper.type());
    }
  }

  List<BytesRef> bValues = new ArrayList<>(values.size());
  for (Object value : values) {
    BytesRef bValue = BytesRefs.toBytesRef(value);
    if (Uid.hasDelimiter(bValue)) {
      bValues.add(bValue);
    } else {
      // we use all non-child types, because we don't know if it's exact or not...
      for (String type : types) {
        bValues.add(Uid.createUidAsBytes(type, bValue));
      }
    }
  }
  return new TermsQuery(name(), bValues);
}
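The delimiter check above distinguishes full _uid values (type#id) from bare ids. A hedged sketch with a hypothetical type and ids:

// Hedged sketch: "my_type#1" already contains the # delimiter and is used verbatim,
// while "2" is expanded to one uid per non-child type, as in the loop above.
QueryBuilder q = QueryBuilders.termsQuery("_uid", "my_type#1", "2");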
private Query parse(AliasMetaData alias, QueryShardContext shardContext) {
  if (alias.filter() == null) {
    return null;
  }
  try {
    byte[] filterSource = alias.filter().uncompressed();
    try (XContentParser parser =
        XContentFactory.xContent(filterSource).createParser(filterSource)) {
      Optional<QueryBuilder> innerQueryBuilder =
          shardContext.newParseContext(parser).parseInnerQueryBuilder();
      if (innerQueryBuilder.isPresent()) {
        return shardContext.toFilter(innerQueryBuilder.get()).query();
      }
      return null;
    }
  } catch (IOException ex) {
    throw new AliasFilterParsingException(
        shardContext.index(), alias.getAlias(), "Invalid alias filter", ex);
  }
}
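A hedged sketch of registering the kind of filtered alias whose filter this method later parses back into a Lucene query; the index, alias, and field names are hypothetical:

// Hedged sketch: the filter is stored with the alias metadata and re-parsed per
// shard by parse(...) above.
client
    .admin()
    .indices()
    .prepareAliases()
    .addAlias("logs", "error-logs", QueryBuilders.termQuery("level", "error"))
    .get();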
public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException {
  String templateString =
      "{"
          + " \"inline\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\","
          + " \"params\":{"
          + " \"template\":\"all\","
          + " \"use_it\": true"
          + " }"
          + "}";
  XContentParser templateSourceParser =
      XContentFactory.xContent(templateString).createParser(templateString);
  context.reset();

  Query query =
      QueryBuilder.rewriteQuery(
              TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),
              context)
          .toQuery(context);
  assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
public Query parse(Type type, String fieldName, Object value) throws IOException {
  final String field;
  MappedFieldType fieldType = context.fieldMapper(fieldName);
  if (fieldType != null) {
    field = fieldType.name();
  } else {
    field = fieldName;
  }

  /*
   * If the user forced an analyzer we really don't care if they are
   * searching a type that wants term queries to be used with query string
   * because the QueryBuilder will take care of it. If they haven't forced
   * an analyzer then types like NumberFieldType that want terms with
   * query string will blow up because their analyzer isn't capable of
   * passing through QueryBuilder.
   */
  boolean noForcedAnalyzer = this.analyzer == null;
  if (fieldType != null && fieldType.tokenized() == false && noForcedAnalyzer) {
    return termQuery(fieldType, value);
  }

  Analyzer analyzer = getAnalyzer(fieldType);
  assert analyzer != null;
  MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
  builder.setEnablePositionIncrements(this.enablePositionIncrements);

  Query query = null;
  switch (type) {
    case BOOLEAN:
      if (commonTermsCutoff == null) {
        query = builder.createBooleanQuery(field, value.toString(), occur);
      } else {
        query =
            builder.createCommonTermsQuery(
                field, value.toString(), occur, occur, commonTermsCutoff, fieldType);
      }
      break;
    case PHRASE:
      query = builder.createPhraseQuery(field, value.toString(), phraseSlop);
      break;
    case PHRASE_PREFIX:
      query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions);
      break;
    default:
      throw new IllegalStateException("No type found for [" + type + "]");
  }

  if (query == null) {
    return zeroTermsQuery();
  } else {
    return query;
  }
}
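A hedged sketch of the three match flavours that route into parse(Type, ...), using the 5.x-era QueryBuilders helpers; the field name and text are hypothetical:

// Hedged sketch: each builder corresponds to one branch of the switch above.
QueryBuilders.matchQuery("title", "quick brown fox"); // Type.BOOLEAN
QueryBuilders.matchPhraseQuery("title", "quick brown fox"); // Type.PHRASE
QueryBuilders.matchPhrasePrefixQuery("title", "quick brown f"); // Type.PHRASE_PREFIX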
/**
 * Test that the template query parser can parse and evaluate a template expressed as a single
 * string, while still expecting only the query specification (thus this test should fail with a
 * specific exception).
 */
public void testParseTemplateFailsToParseCompleteQueryAsSingleString() throws IOException {
  String templateString =
      "{"
          + " \"inline\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\","
          + " \"params\":{"
          + " \"size\":2"
          + " }\n"
          + "}";

  XContentParser templateSourceParser =
      XContentFactory.xContent(templateString).createParser(templateString);
  context.reset();

  try {
    TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser))
        .rewrite(context);
    fail("Expected ParsingException");
  } catch (ParsingException e) {
    assertThat(e.getMessage(), containsString("query malformed, no field after start_object"));
  }
}
@Override
protected ScriptedMetricAggregatorFactory doBuild(
    AggregationContext context, AggregatorFactory<?> parent, Builder subfactoriesBuilder)
    throws IOException {
  QueryShardContext queryShardContext = context.searchContext().getQueryShardContext();
  Function<Map<String, Object>, ExecutableScript> executableInitScript;
  if (initScript != null) {
    executableInitScript =
        queryShardContext.getLazyExecutableScript(initScript, ScriptContext.Standard.AGGS);
  } else {
    executableInitScript = (p) -> null;
  }
  Function<Map<String, Object>, SearchScript> searchMapScript =
      queryShardContext.getLazySearchScript(mapScript, ScriptContext.Standard.AGGS);
  Function<Map<String, Object>, ExecutableScript> executableCombineScript;
  if (combineScript != null) {
    executableCombineScript =
        queryShardContext.getLazyExecutableScript(combineScript, ScriptContext.Standard.AGGS);
  } else {
    executableCombineScript = (p) -> null;
  }
  return new ScriptedMetricAggregatorFactory(
      name,
      type,
      searchMapScript,
      executableInitScript,
      executableCombineScript,
      reduceScript,
      params,
      context,
      parent,
      subfactoriesBuilder,
      metaData);
}
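A hedged sketch of building the scripted_metric aggregation this factory backs; the aggregation name and script bodies are hypothetical, and only the map script is mandatory, matching the null-tolerant init/combine handling above:

// Hedged sketch: omitted init/combine scripts become the (p) -> null functions above.
ScriptedMetricAggregationBuilder agg =
    AggregationBuilders.scriptedMetric("profit")
        .initScript(new Script("_agg.sum = 0"))
        .mapScript(new Script("_agg.sum += doc['amount'].value"))
        .combineScript(new Script("return _agg.sum"));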
@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
  TermSuggestionContext suggestionContext = new TermSuggestionContext(context);
  // copy over common settings to each suggestion builder
  populateCommonFields(context.getMapperService(), suggestionContext);
  // Transfers the builder settings to the target TermSuggestionContext
  DirectSpellcheckerSettings settings = suggestionContext.getDirectSpellCheckerSettings();
  settings.accuracy(accuracy);
  settings.maxEdits(maxEdits);
  settings.maxInspections(maxInspections);
  settings.maxTermFreq(maxTermFreq);
  settings.minDocFreq(minDocFreq);
  settings.minWordLength(minWordLength);
  settings.prefixLength(prefixLength);
  settings.sort(sort);
  settings.stringDistance(stringDistance.toLucene());
  settings.suggestMode(suggestMode.toLucene());
  settings.exactMatch(exactMatch);
  return suggestionContext;
}
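A hedged sketch of the term suggestion whose settings build(...) copies into the DirectSpellcheckerSettings; the field name and misspelled text are hypothetical:

// Hedged sketch: each builder setter lands in one settings.xxx(...) call above.
TermSuggestionBuilder suggestion =
    SuggestBuilders.termSuggestion("body").text("serach").maxEdits(2).prefixLength(1);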
private AbstractDistanceScoreFunction parseNumberVariable(
    XContentParser parser,
    QueryShardContext context,
    NumberFieldMapper.NumberFieldType fieldType,
    MultiValueMode mode)
    throws IOException {
  XContentParser.Token token;
  String parameterName = null;
  double scale = 0;
  double origin = 0;
  double decay = 0.5;
  double offset = 0.0d;
  boolean scaleFound = false;
  boolean refFound = false;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      parameterName = parser.currentName();
    } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
      scale = parser.doubleValue();
      scaleFound = true;
    } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
      decay = parser.doubleValue();
    } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
      origin = parser.doubleValue();
      refFound = true;
    } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
      offset = parser.doubleValue();
    } else {
      throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
    }
  }
  if (!scaleFound || !refFound) {
    throw new ElasticsearchParseException(
        "both [{}] and [{}] must be set for numeric fields.",
        DecayFunctionBuilder.SCALE,
        DecayFunctionBuilder.ORIGIN);
  }
  IndexNumericFieldData numericFieldData = context.getForField(fieldType);
  return new NumericFieldDataScoreFunction(
      origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}
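A hedged sketch of the numeric clause this method consumes, with a hypothetical field name; unlike the date variant, both "origin" and "scale" are mandatory here, as enforced by the scaleFound/refFound check:

// Hedged sketch of a numeric decay clause as read by parseNumberVariable.
String numericDecay =
    "{"
        + " \"price\": {"
        + " \"origin\": 100,"
        + " \"scale\": 50,"
        + " \"offset\": 10,"
        + " \"decay\": 0.5"
        + " }"
        + "}";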
private AbstractDistanceScoreFunction parseGeoVariable(
    XContentParser parser,
    QueryShardContext context,
    BaseGeoPointFieldMapper.GeoPointFieldType fieldType,
    MultiValueMode mode)
    throws IOException {
  XContentParser.Token token;
  String parameterName = null;
  GeoPoint origin = new GeoPoint();
  String scaleString = null;
  String offsetString = "0km";
  double decay = 0.5;
  while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
    if (token == XContentParser.Token.FIELD_NAME) {
      parameterName = parser.currentName();
    } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) {
      scaleString = parser.text();
    } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) {
      origin = GeoUtils.parseGeoPoint(parser);
    } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) {
      decay = parser.doubleValue();
    } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) {
      offsetString = parser.text();
    } else {
      throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
    }
  }

  if (origin == null || scaleString == null) {
    throw new ElasticsearchParseException(
        "[{}] and [{}] must be set for geo fields.",
        DecayFunctionBuilder.ORIGIN,
        DecayFunctionBuilder.SCALE);
  }

  double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
  double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
  IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
  return new GeoFieldDataScoreFunction(
      origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);
}
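A hedged sketch of the geo clause this method consumes, with a hypothetical field name; "origin" is a geo point and "scale"/"offset" are distances, parsed with DistanceUnit.DEFAULT above:

// Hedged sketch of a geo decay clause as read by parseGeoVariable.
String geoDecay =
    "{"
        + " \"location\": {"
        + " \"origin\": \"40.7,-74.0\","
        + " \"scale\": \"2km\","
        + " \"offset\": \"500m\","
        + " \"decay\": 0.5"
        + " }"
        + "}";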
public void testToFilter() throws IOException {
  Directory dir = new RAMDirectory();
  try (IndexWriter writer =
      new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
    writer.commit();
  }
  QueryShardContext context = mock(QueryShardContext.class);
  try (IndexReader reader = DirectoryReader.open(dir)) {
    MappedFieldType fieldType =
        new MappedFieldType() {
          @Override
          public MappedFieldType clone() {
            return null;
          }

          @Override
          public String typeName() {
            return null;
          }

          @Override
          public Query termQuery(Object value, @Nullable QueryShardContext context) {
            return null;
          }
        };
    fieldType.setName(UidFieldMapper.NAME);
    fieldType.setHasDocValues(false);
    when(context.fieldMapper(UidFieldMapper.NAME)).thenReturn(fieldType);
    when(context.getIndexReader()).thenReturn(reader);
    SliceBuilder builder = new SliceBuilder(5, 10);
    Query query = builder.toFilter(context, 0, 1);
    assertThat(query, instanceOf(TermsSliceQuery.class));

    assertThat(builder.toFilter(context, 0, 1), equalTo(query));
    try (IndexReader newReader = DirectoryReader.open(dir)) {
      when(context.getIndexReader()).thenReturn(newReader);
      assertThat(builder.toFilter(context, 0, 1), equalTo(query));
    }
  }

  try (IndexReader reader = DirectoryReader.open(dir)) {
    MappedFieldType fieldType =
        new MappedFieldType() {
          @Override
          public MappedFieldType clone() {
            return null;
          }

          @Override
          public String typeName() {
            return null;
          }

          @Override
          public Query termQuery(Object value, @Nullable QueryShardContext context) {
            return null;
          }
        };
    fieldType.setName("field_doc_values");
    fieldType.setHasDocValues(true);
    fieldType.setDocValuesType(DocValuesType.SORTED_NUMERIC);
    when(context.fieldMapper("field_doc_values")).thenReturn(fieldType);
    when(context.getIndexReader()).thenReturn(reader);
    IndexNumericFieldData fd = mock(IndexNumericFieldData.class);
    when(context.getForField(fieldType)).thenReturn(fd);
    SliceBuilder builder = new SliceBuilder("field_doc_values", 5, 10);
    Query query = builder.toFilter(context, 0, 1);
    assertThat(query, instanceOf(DocValuesSliceQuery.class));

    assertThat(builder.toFilter(context, 0, 1), equalTo(query));
    try (IndexReader newReader = DirectoryReader.open(dir)) {
      when(context.getIndexReader()).thenReturn(newReader);
      assertThat(builder.toFilter(context, 0, 1), equalTo(query));
    }

    // numSlices > numShards
    int numSlices = randomIntBetween(10, 100);
    int numShards = randomIntBetween(1, 9);
    Map<Integer, AtomicInteger> numSliceMap = new HashMap<>();
    for (int i = 0; i < numSlices; i++) {
      for (int j = 0; j < numShards; j++) {
        SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
        Query q = slice.toFilter(context, j, numShards);
        if (q instanceof TermsSliceQuery || q instanceof MatchAllDocsQuery) {
          AtomicInteger count = numSliceMap.get(j);
          if (count == null) {
            count = new AtomicInteger(0);
            numSliceMap.put(j, count);
          }
          count.incrementAndGet();
          if (q instanceof MatchAllDocsQuery) {
            assertThat(count.get(), equalTo(1));
          }
        } else {
          assertThat(q, instanceOf(MatchNoDocsQuery.class));
        }
      }
    }
    int total = 0;
    for (Map.Entry<Integer, AtomicInteger> e : numSliceMap.entrySet()) {
      total += e.getValue().get();
    }
    assertThat(total, equalTo(numSlices));

    // numShards > numSlices
    numShards = randomIntBetween(4, 100);
    numSlices = randomIntBetween(2, numShards - 1);
    List<Integer> targetShards = new ArrayList<>();
    for (int i = 0; i < numSlices; i++) {
      for (int j = 0; j < numShards; j++) {
        SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
        Query q = slice.toFilter(context, j, numShards);
        if (q instanceof MatchNoDocsQuery == false) {
          assertThat(q, instanceOf(MatchAllDocsQuery.class));
          targetShards.add(j);
        }
      }
    }
    assertThat(targetShards.size(), equalTo(numShards));
    assertThat(new HashSet<>(targetShards).size(), equalTo(numShards));

    // numShards == numSlices
    numShards = randomIntBetween(2, 10);
    numSlices = numShards;
    for (int i = 0; i < numSlices; i++) {
      for (int j = 0; j < numShards; j++) {
        SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
        Query q = slice.toFilter(context, j, numShards);
        if (i == j) {
          assertThat(q, instanceOf(MatchAllDocsQuery.class));
        } else {
          assertThat(q, instanceOf(MatchNoDocsQuery.class));
        }
      }
    }
  }

  try (IndexReader reader = DirectoryReader.open(dir)) {
    MappedFieldType fieldType =
        new MappedFieldType() {
          @Override
          public MappedFieldType clone() {
            return null;
          }

          @Override
          public String typeName() {
            return null;
          }

          @Override
          public Query termQuery(Object value, @Nullable QueryShardContext context) {
            return null;
          }
        };
    fieldType.setName("field_without_doc_values");
    when(context.fieldMapper("field_without_doc_values")).thenReturn(fieldType);
    when(context.getIndexReader()).thenReturn(reader);
    SliceBuilder builder = new SliceBuilder("field_without_doc_values", 5, 10);
    IllegalArgumentException exc =
        expectThrows(IllegalArgumentException.class, () -> builder.toFilter(context, 0, 1));
    assertThat(exc.getMessage(), containsString("cannot load numeric doc values"));
  }
}
public static Query parseQuery(
    QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser)
    throws IOException {
  return parseQuery(context, mapUnmappedFieldsAsString, context.newParseContext(parser), parser);
}
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
  final SearchScript searchScript =
      context.getSearchScript(script, ScriptContext.Standard.SEARCH, Collections.emptyMap());

  MultiValueMode valueMode = null;
  if (sortMode != null) {
    valueMode = MultiValueMode.fromString(sortMode.toString());
  }
  boolean reverse = (order == SortOrder.DESC);
  if (valueMode == null) {
    valueMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
  }

  final Nested nested = resolveNested(context, nestedPath, nestedFilter);
  final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
  switch (type) {
    case STRING:
      fieldComparatorSource =
          new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
            LeafSearchScript leafScript;

            @Override
            protected SortedBinaryDocValues getValues(LeafReaderContext context)
                throws IOException {
              leafScript = searchScript.getLeafSearchScript(context);
              final BinaryDocValues values =
                  new BinaryDocValues() {
                    final BytesRefBuilder spare = new BytesRefBuilder();

                    @Override
                    public BytesRef get(int docID) {
                      leafScript.setDocument(docID);
                      spare.copyChars(leafScript.run().toString());
                      return spare.get();
                    }
                  };
              return FieldData.singleton(values, null);
            }

            @Override
            protected void setScorer(Scorer scorer) {
              leafScript.setScorer(scorer);
            }
          };
      break;
    case NUMBER:
      fieldComparatorSource =
          new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
            LeafSearchScript leafScript;

            @Override
            protected SortedNumericDoubleValues getValues(LeafReaderContext context)
                throws IOException {
              leafScript = searchScript.getLeafSearchScript(context);
              final NumericDoubleValues values =
                  new NumericDoubleValues() {
                    @Override
                    public double get(int docID) {
                      leafScript.setDocument(docID);
                      return leafScript.runAsDouble();
                    }
                  };
              return FieldData.singleton(values, null);
            }

            @Override
            protected void setScorer(Scorer scorer) {
              leafScript.setScorer(scorer);
            }
          };
      break;
    default:
      throw new QueryShardException(
          context, "custom script sort type [" + type + "] not supported");
  }

  return new SortFieldAndFormat(
      new SortField("_script", fieldComparatorSource, reverse), DocValueFormat.RAW);
}
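A hedged sketch of the _script sort this builder compiles; the script body is hypothetical, and the sort type selects between the STRING and NUMBER comparator sources above:

// Hedged sketch: NUMBER sorts on runAsDouble(), STRING on the script's string output.
SortBuilders.scriptSort(new Script("doc['price'].value * 0.9"), ScriptSortType.NUMBER)
    .order(SortOrder.DESC);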