@Override
public Mapper parse(ParseContext context) throws IOException {
    QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
    if (context.doc().getField(queryBuilderField.name()) != null) {
        // If a percolator query has been defined in an array object then multiple percolator queries
        // could be provided. In order to prevent this we fail if we try to parse more than one query
        // for the current document.
        throw new IllegalArgumentException("a document can only contain one percolator query");
    }

    XContentParser parser = context.parser();
    QueryBuilder queryBuilder =
        parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation());
    verifyQuery(queryBuilder);
    // Fetching of terms, shapes and indexed scripts happens during this rewrite:
    queryBuilder = queryBuilder.rewrite(queryShardContext);

    try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
        queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
        builder.flush();
        byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes());
        context.doc().add(
            new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType()));
    }

    Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder);
    processQuery(query, context);
    return null;
}
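// A minimal sketch (not from the original source) of how a percolator query reaches the parse()
// method above: indexing a document whose mapped query field holds a single QueryBuilder. The
// index, type, and field names here are hypothetical.
static void registerPercolatorQuery(Client client) throws IOException {
    client.prepareIndex("queries-index", "doc", "1")
        .setSource(XContentFactory.jsonBuilder()
            .startObject()
            .field("query", QueryBuilders.termQuery("message", "error"))
            .endObject())
        .get();
}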
@Test
public void testThatPercolationNotificationWorks() throws IOException, InterruptedException {
    SimpleNotifier notifier = new SimpleNotifier();

    MetricFilter percolationFilter = new MetricFilter() {
        @Override
        public boolean matches(String name, Metric metric) {
            return name.startsWith(prefix + ".foo");
        }
    };
    elasticsearchReporter = createElasticsearchReporterBuilder()
        .percolationFilter(percolationFilter)
        .percolationNotifier(notifier)
        .build();

    final Counter evictions = registry.counter("foo");
    evictions.inc(18);
    reportAndRefresh();

    QueryBuilder queryBuilder = QueryBuilders.boolQuery()
        .must(QueryBuilders.matchAllQuery())
        .filter(QueryBuilders.boolQuery()
            .must(QueryBuilders.rangeQuery("count").gte(20))
            .must(QueryBuilders.termQuery("name", prefix + ".foo")));
    String json = String.format("{ \"query\" : %s }", queryBuilder.buildAsBytes().toUtf8());
    client().prepareIndex(indexWithDate, ".percolator", "myName")
        .setRefresh(true)
        .setSource(json)
        .execute()
        .actionGet();

    evictions.inc(1);
    reportAndRefresh();
    assertThat(notifier.metrics.size(), is(0));

    evictions.inc(2);
    reportAndRefresh();
    org.assertj.core.api.Assertions.assertThat(notifier.metrics.size()).isEqualTo(1);
    org.assertj.core.api.Assertions.assertThat(notifier.metrics).containsKey("myName");
    org.assertj.core.api.Assertions.assertThat(notifier.metrics.get("myName").name())
        .isEqualTo(prefix + ".foo");

    notifier.metrics.clear();
    evictions.dec(2);
    reportAndRefresh();
    org.assertj.core.api.Assertions.assertThat(notifier.metrics.size()).isEqualTo(0);
}
static Query toQuery(
        QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder)
        throws IOException {
    // Fields referenced in the query need to exist in the mapping before this query is registered.
    // If a field doesn't exist, the query assumes defaults, which may be undesired.
    //
    // Worse, if fields mentioned in percolator queries get added to the mapping after the queries
    // have been registered, the percolator queries no longer work as expected.
    //
    // Query parsing can't introduce new fields into the mapping (which would happen when
    // registering a percolator query), because a field type can't be inferred from a query the way
    // it can from a document, so the best option here is to disallow unmapped fields in percolator
    // queries to avoid unexpected behaviour.
    //
    // If index.percolator.map_unmapped_fields_as_string is set to true, the query may contain
    // unmapped fields, which will be mapped as an analyzed string.
    context.setAllowUnmappedFields(false);
    context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
    return queryBuilder.toQuery(context);
}
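// A minimal sketch (not from the original source) of enabling the
// index.percolator.map_unmapped_fields_as_string setting mentioned in the comment above when
// creating an index; the index name and the surrounding client setup are hypothetical.
static void createIndexAllowingUnmappedPercolatorFields(Client client) {
    client.admin().indices().prepareCreate("queries-index")
        .setSettings(Settings.builder()
            .put("index.percolator.map_unmapped_fields_as_string", true))
        .get();
}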
static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
    QueryParseContext context =
        new QueryParseContext(new Index("test"), SearchContext.current().queryParserService());
    XContentParser parser = XContentHelper.createParser(queryBuilder.buildAsBytes());
    context.reset(parser);
    return context.parseInnerQuery();
}
@Override
public String explain() {
    String baseExplain = super.explain();
    Where where = this.connectedWhere;
    QueryBuilder explan = null;
    try {
        if (where != null) {
            explan = QueryMaker.explan(where, false);
        }
    } catch (SqlParseException e) {
        // Ignore parse failures; fall back to the placeholder text below.
    }
    String conditions = explan == null ? "Could not parse conditions" : explan.toString();
    String nestedExplain =
        "Nested Loops \n run first query, and for each result run second query with additional conditions:\n"
            + conditions
            + "\n"
            + baseExplain;
    return nestedExplain;
}
/**
 * The query source to execute.
 *
 * @see org.elasticsearch.index.query.xcontent.QueryBuilders
 */
@Required
public DeleteByQueryRequest query(QueryBuilder queryBuilder) {
    FastByteArrayOutputStream bos = queryBuilder.buildAsUnsafeBytes();
    this.querySource = bos.unsafeByteArray();
    this.querySourceOffset = 0;
    this.querySourceLength = bos.size();
    this.querySourceUnsafe = true;
    return this;
}
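// A minimal usage sketch (not from the original source): building a delete-by-query request with a
// QueryBuilder via the setter above. The index name, field, and value are hypothetical.
static DeleteByQueryRequest deleteInactiveUsers() {
    return new DeleteByQueryRequest("users-index")
        .query(QueryBuilders.termQuery("status", "inactive"));
}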
public List<String> autocomplete(String prefix, int limit) {
    SearchRequestBuilder builder = new SearchRequestBuilder(factory.client());
    builder.setIndices(Constants.INDEX).setTypes(Constants.TYPE);
    builder.setSearchType(SearchType.DEFAULT);
    builder.setFrom(0).setSize(limit).setExplain(IS_EXPLAIN);

    // Pass the QueryBuilder itself rather than its string form.
    QueryBuilder queryBuilder = QueryBuilders.matchPhraseQuery("title", prefix);
    builder.setQuery(queryBuilder);

    logger.info("Search query");
    logger.info(builder.toString());
    SearchResponse sr = builder.execute().actionGet();
    logger.info("Search response");
    logger.info(sr.toString());

    // Collect the matching titles instead of discarding the hits.
    List<String> titles = new ArrayList<>();
    for (SearchHit hit : sr.getHits()) {
        Object title = hit.getSource().get("title");
        if (title != null) {
            titles.add(title.toString());
        }
    }
    return titles;
}
public void testQueryWithRewrite() throws Exception {
    addQueryMapping();
    client().prepareIndex("remote", "type", "1").setSource("field", "value").get();
    QueryBuilder queryBuilder =
        termsLookupQuery("field", new TermsLookup("remote", "type", "1", "field"));
    ParsedDocument doc = mapperService.documentMapper(typeName).parse(
        "test",
        typeName,
        "1",
        XContentFactory.jsonBuilder()
            .startObject()
            .field(fieldName, queryBuilder)
            .endObject()
            .bytes());
    BytesRef qbSource =
        doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue();
    assertQueryBuilder(qbSource, queryBuilder.rewrite(indexService.newQueryShardContext()));
}
public void testParserCanExtractTemplateNames() throws Exception {
    String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } ";

    XContentParser templateSourceParser =
        XContentFactory.xContent(templateString).createParser(templateString);
    context.reset();
    templateSourceParser.nextToken();

    Query query =
        QueryBuilder.rewriteQuery(
                TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),
                context)
            .toQuery(context);
    assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
public void testParser() throws IOException {
    String templateString =
        "{"
            + "\"query\":{\"match_{{template}}\": {}},"
            + "\"params\":{\"template\":\"all\"}"
            + "}";

    XContentParser templateSourceParser =
        XContentFactory.xContent(templateString).createParser(templateString);
    context.reset();
    templateSourceParser.nextToken();

    Query query =
        QueryBuilder.rewriteQuery(
                TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),
                context)
            .toQuery(context);
    assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject();
    if (docBuilder != null) {
        docBuilder.toXContent(builder, params);
    }
    if (queryBuilder != null) {
        builder.field("query");
        queryBuilder.toXContent(builder, params);
    }
    if (size != null) {
        builder.field("size", size);
    }
    if (sorts != null) {
        builder.startArray("sort");
        for (SortBuilder<?> sort : sorts) {
            sort.toXContent(builder, params);
        }
        builder.endArray();
    }
    if (trackScores != null) {
        builder.field("track_scores", trackScores);
    }
    if (highlightBuilder != null) {
        builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
    }
    if (aggregationBuilders != null || pipelineAggregationBuilders != null) {
        builder.field("aggregations");
        builder.startObject();
        if (aggregationBuilders != null) {
            for (AggregatorBuilder<?> aggregation : aggregationBuilders) {
                aggregation.toXContent(builder, params);
            }
        }
        if (pipelineAggregationBuilders != null) {
            for (PipelineAggregatorBuilder<?> aggregation : pipelineAggregationBuilders) {
                aggregation.toXContent(builder, params);
            }
        }
        builder.endObject();
    }
    builder.endObject();
    return builder;
}
public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException {
    String templateString =
        "{"
            + "  \"inline\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\","
            + "  \"params\":{"
            + "    \"template\":\"all\","
            + "    \"use_it\": true"
            + "  }"
            + "}";
    XContentParser templateSourceParser =
        XContentFactory.xContent(templateString).createParser(templateString);
    context.reset();

    Query query =
        QueryBuilder.rewriteQuery(
                TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),
                context)
            .toQuery(context);
    assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}