Example no. 1
0
      @Override
      public Query apply(Function input, Context context) throws IOException {
        List<Symbol> arguments = input.arguments();
        assert arguments.size() == 4 : "invalid number of arguments";
        assert Symbol.isLiteral(arguments.get(0), DataTypes.OBJECT);
        assert Symbol.isLiteral(arguments.get(1), DataTypes.STRING);
        assert Symbol.isLiteral(arguments.get(2), DataTypes.STRING);
        assert Symbol.isLiteral(arguments.get(3), DataTypes.OBJECT);

        // unpack the four literal arguments: fields map, query term, match type and options
        @SuppressWarnings("unchecked")
        Map<String, Object> fields = (Map) ((Literal) arguments.get(0)).value();
        BytesRef queryString = (BytesRef) ((Literal) arguments.get(1)).value();
        BytesRef matchType = (BytesRef) ((Literal) arguments.get(2)).value();
        Map options = (Map) ((Literal) arguments.get(3)).value();

        checkArgument(queryString != null, "cannot use NULL as query term in match predicate");

        MatchQueryBuilder queryBuilder;
        if (fields.size() == 1) {
          queryBuilder =
              new MatchQueryBuilder(context.mapperService, context.indexCache, matchType, options);
        } else {
          queryBuilder =
              new MultiMatchQueryBuilder(
                  context.mapperService, context.indexCache, matchType, options);
        }
        return queryBuilder.query(fields, queryString);
      }
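
A minimal sketch of what the four literal values might look like once unwrapped, using plain Java collections instead of the Literal/Symbol wrappers (assumes java.util.HashMap and Lucene's BytesRef are imported); the column names, boost, match type and options below are invented for illustration.

      // Illustration only: plausible contents of the four unwrapped literals.
      Map<String, Object> fields = new HashMap<>();
      fields.put("title", 2.0f);                        // column -> boost (assumed semantics), invented names
      fields.put("body", null);
      BytesRef queryString = new BytesRef("distributed search");
      BytesRef matchType = new BytesRef("best_fields"); // invented match type
      Map<String, Object> options = new HashMap<>();
      options.put("operator", "and");                   // invented option
      // A single entry in `fields` selects MatchQueryBuilder above,
      // several entries select MultiMatchQueryBuilder.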
Example no. 2
0
    private static Filter genericFunctionFilter(Function function, Context context) {
      if (function.valueType() != DataTypes.BOOLEAN) {
        raiseUnsupported(function);
      }
      // avoid field-cache
      // reason 1: analyzed columns or columns with index off wouldn't work;
      //   e.g. substr(n, 1, 1) with n analyzed would throw an error because n would be an array
      // reason 2: each value would have to be loaded into the field cache
      function = (Function) DocReferenceConverter.convertIf(function);

      final CollectInputSymbolVisitor.Context ctx =
          context.inputSymbolVisitor.extractImplementations(function);
      assert ctx.topLevelInputs().size() == 1;
      @SuppressWarnings("unchecked")
      final Input<Boolean> condition = (Input<Boolean>) ctx.topLevelInputs().get(0);
      @SuppressWarnings("unchecked")
      final List<LuceneCollectorExpression> expressions = ctx.docLevelExpressions();
      final CollectorContext collectorContext =
          new CollectorContext(
              context.mapperService,
              context.fieldDataService,
              new CollectorFieldsVisitor(expressions.size()));

      for (LuceneCollectorExpression expression : expressions) {
        expression.startCollect(collectorContext);
      }
      return new Filter() {
        @Override
        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs)
            throws IOException {
          for (LuceneCollectorExpression expression : expressions) {
            expression.setNextReader(context.reader().getContext());
          }
          return BitsFilteredDocIdSet.wrap(
              new FunctionDocSet(
                  context.reader(),
                  collectorContext.visitor(),
                  condition,
                  expressions,
                  context.reader().maxDoc(),
                  acceptDocs),
              acceptDocs);
        }
      };
    }
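
The per-document evaluation lives in FunctionDocSet, which is not part of the snippet; the following is only a guess at its matching step, and setNextDocId is an assumed method on LuceneCollectorExpression.

      // Hypothetical sketch of the per-document check inside FunctionDocSet
      // (not shown above; method names are assumptions).
      protected boolean matchDoc(int doc) {
        for (LuceneCollectorExpression expression : expressions) {
          expression.setNextDocId(doc);      // position every doc-level expression on this doc
        }
        Boolean match = condition.value();   // evaluate the boolean function
        return match != null && match;       // treat SQL NULL as "no match"
      }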
Example no. 3
0
 public Sort toSort(List<? extends OrderSpecifier<?>> orderBys) {
   List<SortField> sorts = new ArrayList<SortField>(orderBys.size());
   for (OrderSpecifier<?> order : orderBys) {
     if (!(order.getTarget() instanceof Path<?>)) {
       throw new IllegalArgumentException("argument was not of type Path.");
     }
     Class<?> type = order.getTarget().getType();
     boolean reverse = !order.isAscending();
     Path<?> path = getPath(order.getTarget());
     if (Number.class.isAssignableFrom(type)) {
       sorts.add(new SortField(toField(path), sortFields.get(type), reverse));
     } else {
       sorts.add(new SortField(toField(path), sortLocale, reverse));
     }
   }
   Sort sort = new Sort();
   sort.setSort(sorts.toArray(new SortField[sorts.size()]));
   return sort;
 }
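
A usage sketch, assuming the enclosing object is available here as `serializer` and that Querydsl's Expressions/path helpers and java.util.Arrays are imported; the field names are invented.

 // Usage sketch (names invented): two order specifiers turned into a Lucene Sort.
 StringPath title = Expressions.stringPath("title");
 NumberPath<Integer> year = Expressions.numberPath(Integer.class, "year");

 Sort sort = serializer.toSort(Arrays.asList(title.asc(), year.desc()));
 // the resulting Sort can then be passed to IndexSearcher.search(query, n, sort)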
Example no. 4
0
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {

    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrQueryResponse rsp = rb.rsp;

    // Set field flags
    String fl = params.get(CommonParams.FL);
    int fieldFlags = 0;
    if (fl != null) {
      fieldFlags |= SolrPluginUtils.setReturnFields(fl, rsp);
    }
    rb.setFieldFlags(fieldFlags);

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);

    // get it from the response builder to give a different component a chance
    // to set it.
    String queryString = rb.getQueryString();
    if (queryString == null) {
      // this is the normal way it's set.
      queryString = params.get(CommonParams.Q);
      rb.setQueryString(queryString);
    }

    try {
      QParser parser = QParser.getParser(rb.getQueryString(), defType, req);
      Query q = parser.getQuery();
      if (q == null) {
        // normalize a null query to a query that matches nothing
        q = new BooleanQuery();
      }
      rb.setQuery(q);
      rb.setSortSpec(parser.getSort(true));
      rb.setQparser(parser);

      String[] fqs = req.getParams().getParams(CommonParams.FQ);
      if (fqs != null && fqs.length != 0) {
        List<Query> filters = rb.getFilters();
        if (filters == null) {
          filters = new ArrayList<Query>();
          rb.setFilters(filters);
        }
        for (String fq : fqs) {
          if (fq != null && fq.trim().length() != 0) {
            QParser fqp = QParser.getParser(fq, null, req);
            filters.add(fqp.getQuery());
          }
        }
      }
    } catch (ParseException e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    // TODO: temporary... this should go in a different component.
    String shards = params.get(ShardParams.SHARDS);
    if (shards != null) {
      List<String> lst = StrUtils.splitSmart(shards, ",", true);
      rb.shards = lst.toArray(new String[lst.size()]);
    }
    String shards_rows = params.get(ShardParams.SHARDS_ROWS);
    if (shards_rows != null) {
      rb.shards_rows = Integer.parseInt(shards_rows);
    }
    String shards_start = params.get(ShardParams.SHARDS_START);
    if (shards_start != null) {
      rb.shards_start = Integer.parseInt(shards_start);
    }
  }
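
The parameters this prepare() implementation reads can be sketched with SolrJ's ModifiableSolrParams; the parameter names come from the code above, the values are invented.

  // Illustrative parameters exercising the branches above (values invented).
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set(CommonParams.Q, "title:lucene");                        // main query string
  params.set(QueryParsing.DEFTYPE, "lucene");                        // which QParserPlugin parses it
  params.set(CommonParams.FL, "id,title,score");                     // return fields -> fieldFlags
  params.set(CommonParams.FQ, "inStock:true", "price:[0 TO 100]");   // becomes rb.getFilters()
  params.set(ShardParams.SHARDS, "host1:8983/solr,host2:8983/solr"); // distributed search targets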
Example no. 5
0
  /** Actually run the query */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrIndexSearcher searcher = req.getSearcher();

    if (rb.getQueryCommand().getOffset() < 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
    }

    // -1 as flag if not set.
    long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);

    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
      SchemaField idField = req.getSchema().getUniqueKeyField();
      List<String> idArr = StrUtils.splitSmart(ids, ",", true);
      int[] luceneIds = new int[idArr.size()];
      int docs = 0;
      for (int i = 0; i < idArr.size(); i++) {
        int id =
            req.getSearcher()
                .getFirstMatch(
                    new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
        if (id >= 0) luceneIds[docs++] = id;
      }

      DocListAndSet res = new DocListAndSet();
      res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
      if (rb.isNeedDocSet()) {
        List<Query> queries = new ArrayList<Query>();
        queries.add(rb.getQuery());
        List<Query> filters = rb.getFilters();
        if (filters != null) queries.addAll(filters);
        res.docSet = searcher.getDocSet(queries);
      }
      rb.setResults(res);
      rsp.add("response", rb.getResults().docList);
      return;
    }

    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();

    //
    // grouping / field collapsing
    //
    boolean doGroup = params.getBool(GroupParams.GROUP, false);
    if (doGroup) {
      try {
        cmd.groupCommands = new ArrayList<Grouping.Command>();

        String[] fields = params.getParams(GroupParams.GROUP_FIELD);
        String[] funcs = params.getParams(GroupParams.GROUP_FUNC);
        String[] queries = params.getParams(GroupParams.GROUP_QUERY);
        String groupSortStr = params.get(GroupParams.GROUP_SORT);
        Sort groupSort =
            groupSortStr != null ? QueryParsing.parseSort(groupSortStr, req.getSchema()) : null;

        int limitDefault = cmd.getLen(); // this is normally from "rows"
        int docsPerGroupDefault = params.getInt(GroupParams.GROUP_LIMIT, 1);

        // temporary: implement all group-by-field as group-by-func
        if (funcs == null) {
          funcs = fields;
        } else if (fields != null) {
          // catenate functions and fields
          String[] both = new String[fields.length + funcs.length];
          System.arraycopy(fields, 0, both, 0, fields.length);
          System.arraycopy(funcs, 0, both, fields.length, funcs.length);
          funcs = both;
        }

        if (funcs != null) {
          for (String groupByStr : funcs) {
            QParser parser = QParser.getParser(groupByStr, "func", rb.req);
            Query q = parser.getQuery();
            Grouping.CommandFunc gc = new Grouping.CommandFunc();
            gc.groupSort = groupSort;

            if (q instanceof FunctionQuery) {
              gc.groupBy = ((FunctionQuery) q).getValueSource();
            } else {
              gc.groupBy = new QueryValueSource(q, 0.0f);
            }
            gc.key = groupByStr;
            gc.groupLimit = limitDefault;
            gc.docsPerGroup = docsPerGroupDefault;

            cmd.groupCommands.add(gc);
          }
        }

        if (cmd.groupCommands.size() == 0) cmd.groupCommands = null;

        if (cmd.groupCommands != null) {
          if (rb.doHighlights || rb.isDebug()) {
            // we need a single list of the returned docs
            cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
          }

          searcher.search(result, cmd);
          rb.setResult(result);
          rsp.add("grouped", result.groupedResults);
          // TODO: get "hits" a different way to log
          return;
        }
      } catch (ParseException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
    }

    // normal search result
    searcher.search(result, cmd);
    rb.setResult(result);

    rsp.add("response", rb.getResults().docList);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());

    doFieldSortValues(rb, searcher);
    doPrefetch(rb);
  }
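
For the grouping branch in process(), a request might carry parameters like the following; the names come from the code above, the values are invented.

  // Illustrative grouping parameters (values invented).
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set(GroupParams.GROUP, true);              // enable grouping
  params.set(GroupParams.GROUP_FIELD, "category");  // rewritten internally as a group-by-func
  params.set(GroupParams.GROUP_SORT, "price asc");  // sort within each group
  params.set(GroupParams.GROUP_LIMIT, 3);           // docs returned per group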
  public InternalSearchResponse merge(
      ScoreDoc[] sortedDocs,
      AtomicArray<? extends QuerySearchResultProvider> queryResultsArr,
      AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr) {

    List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults =
        queryResultsArr.asList();
    List<? extends AtomicArray.Entry<? extends FetchSearchResultProvider>> fetchResults =
        fetchResultsArr.asList();

    if (queryResults.isEmpty()) {
      return InternalSearchResponse.empty();
    }

    QuerySearchResult firstResult = queryResults.get(0).value.queryResult();

    boolean sorted = false;
    int sortScoreIndex = -1;
    if (firstResult.topDocs() instanceof TopFieldDocs) {
      sorted = true;
      TopFieldDocs fieldDocs = (TopFieldDocs) firstResult.queryResult().topDocs();
      for (int i = 0; i < fieldDocs.fields.length; i++) {
        if (fieldDocs.fields[i].getType() == SortField.Type.SCORE) {
          sortScoreIndex = i;
        }
      }
    }

    // merge facets
    InternalFacets facets = null;
    if (!queryResults.isEmpty()) {
      // we rely on the fact that the order of facets is the same on all query results
      if (firstResult.facets() != null
          && firstResult.facets().facets() != null
          && !firstResult.facets().facets().isEmpty()) {
        List<Facet> aggregatedFacets = Lists.newArrayList();
        List<Facet> namedFacets = Lists.newArrayList();
        for (Facet facet : firstResult.facets()) {
          // collect the per-shard facets with this name into one list, then reduce them
          namedFacets.clear();
          for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
            for (Facet facet1 : entry.value.queryResult().facets()) {
              if (facet.getName().equals(facet1.getName())) {
                namedFacets.add(facet1);
              }
            }
          }
          if (!namedFacets.isEmpty()) {
            Facet aggregatedFacet =
                ((InternalFacet) namedFacets.get(0))
                    .reduce(new InternalFacet.ReduceContext(cacheRecycler, namedFacets));
            aggregatedFacets.add(aggregatedFacet);
          }
        }
        facets = new InternalFacets(aggregatedFacets);
      }
    }

    // count the total (we use the query result provider here, since we might not
    // get any hits (we scrolled past them))
    long totalHits = 0;
    float maxScore = Float.NEGATIVE_INFINITY;
    boolean timedOut = false;
    Boolean terminatedEarly = null;
    for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
      QuerySearchResult result = entry.value.queryResult();
      if (result.searchTimedOut()) {
        timedOut = true;
      }
      if (result.terminatedEarly() != null) {
        if (terminatedEarly == null) {
          terminatedEarly = result.terminatedEarly();
        } else if (result.terminatedEarly()) {
          terminatedEarly = true;
        }
      }
      totalHits += result.topDocs().totalHits;
      if (!Float.isNaN(result.topDocs().getMaxScore())) {
        maxScore = Math.max(maxScore, result.topDocs().getMaxScore());
      }
    }
    if (Float.isInfinite(maxScore)) {
      maxScore = Float.NaN;
    }

    // clean the fetch counter
    for (AtomicArray.Entry<? extends FetchSearchResultProvider> entry : fetchResults) {
      entry.value.fetchResult().initCounter();
    }

    // merge hits
    List<InternalSearchHit> hits = new ArrayList<>();
    if (!fetchResults.isEmpty()) {
      for (ScoreDoc shardDoc : sortedDocs) {
        FetchSearchResultProvider fetchResultProvider = fetchResultsArr.get(shardDoc.shardIndex);
        if (fetchResultProvider == null) {
          continue;
        }
        FetchSearchResult fetchResult = fetchResultProvider.fetchResult();
        int index = fetchResult.counterGetAndIncrement();
        if (index < fetchResult.hits().internalHits().length) {
          InternalSearchHit searchHit = fetchResult.hits().internalHits()[index];
          searchHit.score(shardDoc.score);
          searchHit.shard(fetchResult.shardTarget());

          if (sorted) {
            FieldDoc fieldDoc = (FieldDoc) shardDoc;
            searchHit.sortValues(fieldDoc.fields);
            if (sortScoreIndex != -1) {
              searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue());
            }
          }

          hits.add(searchHit);
        }
      }
    }

    // merge suggest results
    Suggest suggest = null;
    if (!queryResults.isEmpty()) {
      final Map<String, List<Suggest.Suggestion>> groupedSuggestions = new HashMap<>();
      boolean hasSuggestions = false;
      for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
        Suggest shardResult = entry.value.queryResult().queryResult().suggest();

        if (shardResult == null) {
          continue;
        }
        hasSuggestions = true;
        Suggest.group(groupedSuggestions, shardResult);
      }

      suggest =
          hasSuggestions
              ? new Suggest(Suggest.Fields.SUGGEST, Suggest.reduce(groupedSuggestions))
              : null;
    }

    // merge aggregations
    InternalAggregations aggregations = null;
    if (!queryResults.isEmpty()) {
      if (firstResult.aggregations() != null && firstResult.aggregations().asList() != null) {
        List<InternalAggregations> aggregationsList = new ArrayList<>(queryResults.size());
        for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
          aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations());
        }
        aggregations =
            InternalAggregations.reduce(
                aggregationsList, new ReduceContext(null, bigArrays, scriptService));
      }
    }

    InternalSearchHits searchHits =
        new InternalSearchHits(
            hits.toArray(new InternalSearchHit[hits.size()]), totalHits, maxScore);

    return new InternalSearchResponse(
        searchHits, facets, aggregations, suggest, timedOut, terminatedEarly);
  }
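
A hypothetical call shape for merge(); only the merge() signature appears in the snippet, while the surrounding names (controller, sortedDocs and how it was computed) are assumptions.

    // Hypothetical call shape (surrounding names are assumptions):
    // queryResults and fetchResults are the per-shard results of the query and fetch
    // phases, sortedDocs the globally sorted top docs computed beforehand.
    InternalSearchResponse internalResponse =
        controller.merge(sortedDocs, queryResults, fetchResults);
    // internalResponse now carries the merged hits, facets, aggregations and suggest
    // sections, plus the timedOut / terminatedEarly flags computed in merge().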