Example #1
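Parses the sort element of a percolate request (these snippets are evidently from the percolator service of Elasticsearch 1.x). Only the default sort, by relevance (_score descending), is accepted: if sortParseElement resolves the sort to null, sorting is simply enabled; anything else is rejected.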
 private void parseSort(XContentParser parser, PercolateContext context) throws Exception {
   sortParseElement.parse(parser, context);
   // null means default sorting by relevancy
   if (context.sort() == null) {
     context.doSort = true;
   } else {
     throw new ElasticsearchParseException("Only _score desc is supported");
   }
 }
Example #2
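Parses a previously fetched document (e.g. one retrieved via get) into a ParsedDocument using the mapper for the given type. When highlighting is requested, the raw source is kept on the document so highlight snippets can later be built from it.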
  private ParsedDocument parseFetchedDoc(
      PercolateContext context,
      BytesReference fetchedDoc,
      IndexService documentIndexService,
      String type) {
    ParsedDocument doc = null;
    XContentParser parser = null;
    try {
      parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc);
      MapperService mapperService = documentIndexService.mapperService();
      DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(type);
      doc = docMapper.parse(source(parser).type(type).flyweight(true));

      if (context.highlight() != null) {
        doc.setSource(fetchedDoc);
      }
    } catch (Throwable e) {
      throw new ElasticsearchParseException("failed to parse request", e);
    } finally {
      if (parser != null) {
        parser.close();
      }
    }

    if (doc == null) {
      throw new ElasticsearchParseException("No doc to percolate in the request");
    }

    return doc;
  }
Example #3
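The count-only percolator used when no percolate query is given: every registered query is run against the in-memory document, and an ExistsCollector merely records whether anything matched, so only the number of matching queries is returned.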
        @Override
        public PercolateShardResponse doPercolate(
            PercolateShardRequest request, PercolateContext context) {
          long count = 0;
          Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
          for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
            collector.reset();
            try {
              context.docSearcher().search(entry.getValue(), collector);
            } catch (IOException e) {
              logger.warn("[" + entry.getKey() + "] failed to execute query", e);
            }

            if (collector.exists()) {
              count++;
            }
          }
          return new PercolateShardResponse(count, context, request.index(), request.shardId());
        }
Example #4
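The query-based match percolator: the shared queryBasedPercolating helper (Example #6) restricts the candidate queries with the request's percolate query, while the Match collector gathers matching query ids, optional highlight fields, and the total count.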
        @Override
        public PercolateShardResponse doPercolate(
            PercolateShardRequest request, PercolateContext context) {
          Engine.Searcher percolatorSearcher = context.indexShard().acquireSearcher("percolate");
          try {
            Match match = match(logger, context, highlightPhase);
            queryBasedPercolating(percolatorSearcher, context, match);
            List<BytesRef> matches = match.matches();
            List<Map<String, HighlightField>> hls = match.hls();
            long count = match.counter();

            BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
            return new PercolateShardResponse(
                finalMatches, hls, count, context, request.index(), request.shardId());
          } catch (Throwable e) {
            logger.debug("failed to execute", e);
            throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
          } finally {
            percolatorSearcher.release();
          }
        }
Example #5
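The match percolator without a percolate query: it loops over all registered queries, records the ids of matching ones up to the requested size, and runs the highlight phase per match when highlighting is enabled. Note that count keeps incrementing past the limit so the reported total stays accurate.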
        @Override
        public PercolateShardResponse doPercolate(
            PercolateShardRequest request, PercolateContext context) {
          long count = 0;
          List<BytesRef> matches = new ArrayList<>();
          List<Map<String, HighlightField>> hls = new ArrayList<>();
          Lucene.ExistsCollector collector = new Lucene.ExistsCollector();

          for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
            collector.reset();
            if (context.highlight() != null) {
              context.parsedQuery(
                  new ParsedQuery(entry.getValue(), ImmutableMap.<String, Filter>of()));
              context.hitContext().cache().clear();
            }
            try {
              context.docSearcher().search(entry.getValue(), collector);
            } catch (Throwable e) {
              logger.debug("[" + entry.getKey() + "] failed to execute query", e);
              throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
            }

            if (collector.exists()) {
              if (!context.limit || count < context.size()) {
                matches.add(entry.getKey().bytes);
                if (context.highlight() != null) {
                  highlightPhase.hitExecute(context, context.hitContext());
                  hls.add(context.hitContext().hit().getHighlightFields());
                }
              }
              count++;
            }
          }

          BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
          return new PercolateShardResponse(
              finalMatches, hls, count, context, request.index(), request.shardId());
        }
Example #6
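The shared helper behind the query-based percolators: it wraps the percolate query in a FilteredQuery restricted (via a cached filter) to documents of the percolator type, runs it with the given collector, and then finalizes any facet and aggregation collectors.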
  private void queryBasedPercolating(
      Engine.Searcher percolatorSearcher,
      PercolateContext context,
      QueryCollector percolateCollector)
      throws IOException {
    Filter percolatorTypeFilter =
        context.indexService().mapperService().documentMapper(TYPE_NAME).typeFilter();
    percolatorTypeFilter = context.indexService().cache().filter().cache(percolatorTypeFilter);
    FilteredQuery query = new FilteredQuery(context.percolateQuery(), percolatorTypeFilter);
    percolatorSearcher.searcher().search(query, percolateCollector);

    for (Collector queryCollector : percolateCollector.facetAndAggregatorCollector) {
      if (queryCollector instanceof XCollector) {
        ((XCollector) queryCollector).postCollection();
      }
    }
    if (context.facets() != null) {
      facetPhase.execute(context);
    }
    if (context.aggregations() != null) {
      aggregationPhase.execute(context);
    }
  }
Example #7
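The query-based count percolator: the same flow as the helper above, but with a Count collector, so only the number of matching queries is reported. Search failures are logged rather than rethrown, mirroring Example #3.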
 @Override
 public PercolateShardResponse doPercolate(
     PercolateShardRequest request, PercolateContext context) {
   long count = 0;
   Engine.Searcher percolatorSearcher = context.indexShard().acquireSearcher("percolate");
   try {
     Count countCollector = count(logger, context);
     queryBasedPercolating(percolatorSearcher, context, countCollector);
     count = countCollector.counter();
   } catch (Throwable e) {
     logger.warn("failed to execute", e);
   } finally {
     percolatorSearcher.release();
   }
   return new PercolateShardResponse(count, context, request.index(), request.shardId());
 }
Example #8
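The top-matching (sorted) percolator: a MatchAndSort collector produces TopDocs of the best-scoring matching queries, whose ids are then resolved from the _id field data of the percolator index, with optional per-match highlighting and scores.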
        @Override
        public PercolateShardResponse doPercolate(
            PercolateShardRequest request, PercolateContext context) {
          Engine.Searcher percolatorSearcher = context.indexShard().acquireSearcher("percolate");
          try {
            MatchAndSort matchAndSort = QueryCollector.matchAndSort(logger, context);
            queryBasedPercolating(percolatorSearcher, context, matchAndSort);
            TopDocs topDocs = matchAndSort.topDocs();
            long count = topDocs.totalHits;
            List<BytesRef> matches = new ArrayList<BytesRef>(topDocs.scoreDocs.length);
            float[] scores = new float[topDocs.scoreDocs.length];
            List<Map<String, HighlightField>> hls = null;
            if (context.highlight() != null) {
              hls = new ArrayList<Map<String, HighlightField>>(topDocs.scoreDocs.length);
            }

            final FieldMapper<?> idMapper =
                context.mapperService().smartNameFieldMapper(IdFieldMapper.NAME);
            final IndexFieldData<?> idFieldData = context.fieldData().getForField(idMapper);
            int i = 0;
            final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
            for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
              int segmentIdx =
                  ReaderUtil.subIndex(scoreDoc.doc, percolatorSearcher.reader().leaves());
              AtomicReaderContext atomicReaderContext =
                  percolatorSearcher.reader().leaves().get(segmentIdx);
              BytesValues values = idFieldData.load(atomicReaderContext).getBytesValues(true);
              final int localDocId = scoreDoc.doc - atomicReaderContext.docBase;
              final int numValues = values.setDocument(localDocId);
              assert numValues == 1;
              spare.bytes = values.nextValue();
              spare.hash = values.currentValueHash();
              matches.add(values.copyShared());
              if (hls != null) {
                Query query = context.percolateQueries().get(spare);
                context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
                context.hitContext().cache().clear();
                highlightPhase.hitExecute(context, context.hitContext());
                hls.add(i, context.hitContext().hit().getHighlightFields());
              }
              scores[i++] = scoreDoc.score;
            }
            if (hls != null) {
              return new PercolateShardResponse(
                  matches.toArray(new BytesRef[matches.size()]),
                  hls,
                  count,
                  scores,
                  context,
                  request.index(),
                  request.shardId());
            } else {
              return new PercolateShardResponse(
                  matches.toArray(new BytesRef[matches.size()]),
                  count,
                  scores,
                  context,
                  request.index(),
                  request.shardId());
            }
          } catch (Throwable e) {
            logger.debug("failed to execute", e);
            throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
          } finally {
            percolatorSearcher.release();
          }
        }
Example #9
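Parses the percolate request body itself: the doc to percolate, an optional query or filter, plus sort, size, and track_scores, along with the highlight, facet, and aggregation elements, from a body shaped roughly like {"doc": {...}, "query": {...}, "size": 10}. When highlighting is requested, the body is parsed a second time just to capture the raw doc source.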
  private ParsedDocument parseRequest(
      IndexService documentIndexService, PercolateShardRequest request, PercolateContext context)
      throws ElasticsearchException {
    BytesReference source = request.source();
    if (source == null || source.length() == 0) {
      return null;
    }

    // TODO: combine all feature parse elements into one map
    Map<String, ? extends SearchParseElement> hlElements = highlightPhase.parseElements();
    Map<String, ? extends SearchParseElement> facetElements = facetPhase.parseElements();
    Map<String, ? extends SearchParseElement> aggregationElements =
        aggregationPhase.parseElements();

    ParsedDocument doc = null;
    XContentParser parser = null;

    // Some queries (e.g. the function_score query with decay functions) rely on a
    // SearchContext being set. We switch types because this context needs to be in the
    // context of the percolate queries in the shard, not the in-memory percolate doc.
    String[] previousTypes = context.types();
    context.types(new String[] {TYPE_NAME});
    SearchContext.setCurrent(context);
    try {
      parser = XContentFactory.xContent(source).createParser(source);
      String currentFieldName = null;
      XContentParser.Token token;
      while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
          currentFieldName = parser.currentName();
          // we need to check for "doc" here, so that the next token will be START_OBJECT,
          // which is where the actual document starts
          if ("doc".equals(currentFieldName)) {
            if (doc != null) {
              throw new ElasticsearchParseException("Either specify doc or get, not both");
            }

            MapperService mapperService = documentIndexService.mapperService();
            DocumentMapper docMapper =
                mapperService.documentMapperWithAutoCreate(request.documentType());
            doc = docMapper.parse(source(parser).type(request.documentType()).flyweight(true));
            // the document parsing exits the "doc" object, so we need to set the new current
            // field.
            currentFieldName = parser.currentName();
          }
        } else if (token == XContentParser.Token.START_OBJECT) {
          SearchParseElement element = hlElements.get(currentFieldName);
          if (element == null) {
            element = facetElements.get(currentFieldName);
            if (element == null) {
              element = aggregationElements.get(currentFieldName);
            }
          }

          if ("query".equals(currentFieldName)) {
            if (context.percolateQuery() != null) {
              throw new ElasticsearchParseException("Either specify query or filter, not both");
            }
            context.percolateQuery(documentIndexService.queryParserService().parse(parser).query());
          } else if ("filter".equals(currentFieldName)) {
            if (context.percolateQuery() != null) {
              throw new ElasticsearchParseException("Either specify query or filter, not both");
            }
            Filter filter =
                documentIndexService.queryParserService().parseInnerFilter(parser).filter();
            context.percolateQuery(new XConstantScoreQuery(filter));
          } else if ("sort".equals(currentFieldName)) {
            parseSort(parser, context);
          } else if (element != null) {
            element.parse(parser, context);
          }
        } else if (token == XContentParser.Token.START_ARRAY) {
          if ("sort".equals(currentFieldName)) {
            parseSort(parser, context);
          }
        } else if (token == null) {
          break;
        } else if (token.isValue()) {
          if ("size".equals(currentFieldName)) {
            context.size(parser.intValue());
            if (context.size() < 0) {
              throw new ElasticsearchParseException(
                  "size is set to ["
                      + context.size()
                      + "] and is expected to be higher or equal to 0");
            }
          } else if ("sort".equals(currentFieldName)) {
            parseSort(parser, context);
          } else if ("track_scores".equals(currentFieldName)
              || "trackScores".equals(currentFieldName)) {
            context.trackScores(parser.booleanValue());
          }
        }
      }

      // We need to get the actual source from the request body for highlighting, so parse the
      // request body again
      // and only get the doc source.
      if (context.highlight() != null) {
        parser.close();
        currentFieldName = null;
        parser = XContentFactory.xContent(source).createParser(source);
        token = parser.nextToken();
        assert token == XContentParser.Token.START_OBJECT;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
          if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
          } else if (token == XContentParser.Token.START_OBJECT) {
            if ("doc".equals(currentFieldName)) {
              BytesStreamOutput bStream = new BytesStreamOutput();
              XContentBuilder builder = XContentFactory.contentBuilder(XContentType.SMILE, bStream);
              builder.copyCurrentStructure(parser);
              builder.close();
              doc.setSource(bStream.bytes());
              break;
            } else {
              parser.skipChildren();
            }
          } else if (token == null) {
            break;
          }
        }
      }

    } catch (Throwable e) {
      throw new ElasticsearchParseException("failed to parse request", e);
    } finally {
      context.types(previousTypes);
      SearchContext.removeCurrent();
      if (parser != null) {
        parser.close();
      }
    }

    return doc;
  }
Example #10
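The shard-level entry point that ties the pieces together: it parses the request, resolves the document to percolate, chooses the in-memory index implementation (single document vs. nested docs), picks one of the percolator strategies above based on the count/sort/score/query flags, prepares the context, and delegates to doPercolate.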
  public PercolateShardResponse percolate(PercolateShardRequest request) {
    IndexService percolateIndexService = indicesService.indexServiceSafe(request.index());
    IndexShard indexShard = percolateIndexService.shardSafe(request.shardId());

    ShardPercolateService shardPercolateService = indexShard.shardPercolateService();
    shardPercolateService.prePercolate();
    long startTime = System.nanoTime();

    SearchShardTarget searchShardTarget =
        new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
    final PercolateContext context =
        new PercolateContext(
            request,
            searchShardTarget,
            indexShard,
            percolateIndexService,
            cacheRecycler,
            pageCacheRecycler,
            bigArrays,
            scriptService);
    try {
      ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context);
      if (context.percolateQueries().isEmpty()) {
        return new PercolateShardResponse(context, request.index(), request.shardId());
      }

      if (request.docSource() != null && request.docSource().length() != 0) {
        parsedDocument =
            parseFetchedDoc(
                context, request.docSource(), percolateIndexService, request.documentType());
      } else if (parsedDocument == null) {
        throw new ElasticsearchIllegalArgumentException("Nothing to percolate");
      }

      if (context.percolateQuery() == null
          && (context.trackScores()
              || context.doSort
              || context.facets() != null
              || context.aggregations() != null)) {
        context.percolateQuery(new MatchAllDocsQuery());
      }

      if (context.doSort && !context.limit) {
        throw new ElasticsearchIllegalArgumentException("Can't sort if size isn't specified");
      }

      if (context.highlight() != null && !context.limit) {
        throw new ElasticsearchIllegalArgumentException("Can't highlight if size isn't specified");
      }

      if (context.size() < 0) {
        context.size(0);
      }

      // Parse the source either into one MemoryIndex if it is a single document, or index
      // multiple docs if nested
      PercolatorIndex percolatorIndex;
      if (indexShard.mapperService().documentMapper(request.documentType()).hasNestedObjects()) {
        percolatorIndex = multi;
      } else {
        percolatorIndex = single;
      }

      PercolatorType action;
      if (request.onlyCount()) {
        action = context.percolateQuery() != null ? queryCountPercolator : countPercolator;
      } else {
        if (context.doSort) {
          action = topMatchingPercolator;
        } else if (context.percolateQuery() != null) {
          action = context.trackScores() ? scoringPercolator : queryPercolator;
        } else {
          action = matchPercolator;
        }
      }
      context.percolatorTypeId = action.id();

      percolatorIndex.prepare(context, parsedDocument);

      indexShard.readAllowed();
      return action.doPercolate(request, context);
    } finally {
      context.release();
      shardPercolateService.postPercolate(System.nanoTime() - startTime);
    }
  }
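The trick common to all of these strategies is Lucene's MemoryIndex: the document being percolated is indexed into a one-document, in-memory index, and every registered query is executed against it. Below is a minimal standalone sketch of that idea, using plain modern Lucene; the query ids and field values are made up for illustration, and this is not Elasticsearch's actual API.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class PercolateSketch {
  public static void main(String[] args) {
    // Registered queries keyed by id. In Elasticsearch these are stored as
    // documents of the percolator type; a plain map stands in for them here.
    Map<String, Query> queries = new LinkedHashMap<>();
    queries.put("alerts-on-error", new TermQuery(new Term("body", "error")));
    queries.put("alerts-on-timeout", new TermQuery(new Term("body", "timeout")));

    // The document to percolate is indexed into a single-document, in-memory index.
    MemoryIndex memoryIndex = new MemoryIndex();
    memoryIndex.addField(
        "body", "connection error while fetching shard data", new StandardAnalyzer());

    // Run every registered query against that one document; a non-zero score
    // means the query matches, which is all the per-query exists check needs.
    for (Map.Entry<String, Query> entry : queries.entrySet()) {
      if (memoryIndex.search(entry.getValue()) > 0.0f) {
        System.out.println(entry.getKey() + " matches");
      }
    }
  }
}

Run against the sample text above, only the first query matches; that per-query match test is exactly what Examples #3 and #5 perform with an ExistsCollector.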