@Override
public Query apply(Function input, Context context) throws IOException {
    List<Symbol> arguments = input.arguments();
    assert arguments.size() == 4 : "invalid number of arguments";
    assert Symbol.isLiteral(arguments.get(0), DataTypes.OBJECT);
    assert Symbol.isLiteral(arguments.get(1), DataTypes.STRING);
    assert Symbol.isLiteral(arguments.get(2), DataTypes.STRING);
    assert Symbol.isLiteral(arguments.get(3), DataTypes.OBJECT);

    // arguments: fields (with optional boosts), query term, match type, options
    @SuppressWarnings("unchecked")
    Map<String, Object> fields = (Map) ((Literal) arguments.get(0)).value();
    BytesRef queryString = (BytesRef) ((Literal) arguments.get(1)).value();
    BytesRef matchType = (BytesRef) ((Literal) arguments.get(2)).value();
    Map options = (Map) ((Literal) arguments.get(3)).value();

    checkArgument(queryString != null, "cannot use NULL as query term in match predicate");

    // a single column maps to a plain match query, multiple columns to a multi_match query
    MatchQueryBuilder queryBuilder;
    if (fields.size() == 1) {
        queryBuilder = new MatchQueryBuilder(context.mapperService, context.indexCache, matchType, options);
    } else {
        queryBuilder = new MultiMatchQueryBuilder(context.mapperService, context.indexCache, matchType, options);
    }
    return queryBuilder.query(fields, queryString);
}
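
// Illustrative only (not part of the original source): the kind of argument values the
// assertions above expect at runtime. The "column name -> optional boost" layout of the
// fields map and the option name "operator" are assumptions based on the casts above,
// not confirmed API.
private static Object[] exampleMatchArguments() {
    Map<String, Object> fields = new HashMap<>();
    fields.put("title", 2.0f);                      // e.g. match((title boost 2.0, body), ...)
    fields.put("body", null);                       // no explicit boost
    BytesRef queryString = new BytesRef("arthur dent");
    BytesRef matchType = new BytesRef("best_fields");
    Map<String, Object> options = new HashMap<>();
    options.put("operator", "and");                 // assumed option, passed through to the builder
    // one entry in `fields` -> MatchQueryBuilder branch; several entries -> MultiMatchQueryBuilder
    return new Object[]{fields, queryString, matchType, options};
}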
private static Filter genericFunctionFilter(Function function, Context context) {
    if (function.valueType() != DataTypes.BOOLEAN) {
        raiseUnsupported(function);
    }
    // avoid the field cache:
    //  reason 1: analyzed columns or columns with the index turned off wouldn't work;
    //            e.g. substr(n, 1, 1) with n analyzed would throw because n would be an array
    //  reason 2: every value would have to be loaded into the field cache
    function = (Function) DocReferenceConverter.convertIf(function);

    final CollectInputSymbolVisitor.Context ctx =
            context.inputSymbolVisitor.extractImplementations(function);
    assert ctx.topLevelInputs().size() == 1;
    @SuppressWarnings("unchecked")
    final Input<Boolean> condition = (Input<Boolean>) ctx.topLevelInputs().get(0);
    @SuppressWarnings("unchecked")
    final List<LuceneCollectorExpression> expressions = ctx.docLevelExpressions();
    final CollectorContext collectorContext = new CollectorContext(
            context.mapperService,
            context.fieldDataService,
            new CollectorFieldsVisitor(expressions.size()));
    for (LuceneCollectorExpression expression : expressions) {
        expression.startCollect(collectorContext);
    }

    return new Filter() {
        @Override
        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
            for (LuceneCollectorExpression expression : expressions) {
                expression.setNextReader(context.reader().getContext());
            }
            return BitsFilteredDocIdSet.wrap(
                    new FunctionDocSet(
                            context.reader(),
                            collectorContext.visitor(),
                            condition,
                            expressions,
                            context.reader().maxDoc(),
                            acceptDocs),
                    acceptDocs);
        }
    };
}
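
// Sketch only, assuming internals not shown in this excerpt: FunctionDocSet is expected to
// perform a per-document check along these lines. `setNextDocId` on LuceneCollectorExpression
// and `reset()` on CollectorFieldsVisitor are assumed hooks, not confirmed API. SQL
// three-valued logic applies: both NULL and FALSE reject the document.
private static boolean matchesDoc(AtomicReader reader,
                                  CollectorFieldsVisitor fieldsVisitor,
                                  List<LuceneCollectorExpression> expressions,
                                  Input<Boolean> condition,
                                  int doc) throws IOException {
    if (fieldsVisitor != null) {
        fieldsVisitor.reset();                 // assumed: clear fields loaded for the previous doc
        reader.document(doc, fieldsVisitor);   // load stored fields needed for source lookups
    }
    for (LuceneCollectorExpression expression : expressions) {
        expression.setNextDocId(doc);          // assumed: position the expression on this doc
    }
    Boolean value = condition.value();         // evaluate the compiled boolean function
    return value != null && value;
}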