Example #1
  public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
    numRequests++;

    Query query = null;
    Filter filter = null;

    List<String> commands = StrUtils.splitSmart(req.getQueryString(), ';');

    String qs = commands.size() >= 1 ? commands.get(0) : "";
    query = QueryParsing.parseQuery(qs, req.getSchema());

    // If the first non-query, non-filter command is a simple sort on an indexed field, then
    // we can use the Lucene sort ability.
    Sort sort = null;
    if (commands.size() >= 2) {
      sort = QueryParsing.parseSort(commands.get(1), req);
    }

    try {

      int numHits;
      ScoreDoc[] scoreDocs;
      if (sort != null) {
        TopFieldDocs hits =
            req.getSearcher().search(query, filter, req.getStart() + req.getLimit(), sort);
        scoreDocs = hits.scoreDocs;
        numHits = hits.totalHits;
      } else {
        TopDocs hits = req.getSearcher().search(query, filter, req.getStart() + req.getLimit());
        scoreDocs = hits.scoreDocs;
        numHits = hits.totalHits;
      }

      int startRow = Math.min(numHits, req.getStart());
      int endRow = Math.min(numHits, req.getStart() + req.getLimit());
      int numRows = endRow - startRow;

      int[] ids = new int[numRows];
      Document[] data = new Document[numRows];
      for (int i = startRow; i < endRow; i++) {
        // fill the row arrays from index 0; the hit window starts at startRow
        ids[i - startRow] = scoreDocs[i].doc;
        data[i - startRow] = req.getSearcher().doc(ids[i - startRow]);
      }

      rsp.add(null, new DocSlice(0, numRows, ids, null, numHits, 0.0f));

      /*
       * rsp.setResults(new DocSlice(0, numRows, ids, null, numHits));
       *
       * // Setting the actual document objects is optional
       * rsp.setResults(data);
       */
    } catch (IOException e) {
      rsp.setException(e);
      numErrors++;
      return;
    }
  }
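  // Hedged usage sketch (not part of the original handler; the helper name and the literal
  // values "name:ipod" and "price asc" are illustrative only): the raw query string is expected
  // to be of the form "<query>[;<sort>]", split on ';' exactly as handleRequest does above.
  private void sketchParseLegacyQueryString(SolrQueryRequest req) {
    List<String> commands = StrUtils.splitSmart("name:ipod;price asc", ';');
    Query query = QueryParsing.parseQuery(commands.get(0), req.getSchema()); // "name:ipod"
    Sort sort =
        commands.size() >= 2 ? QueryParsing.parseSort(commands.get(1), req) : null; // "price asc"
  }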
 /** Builds a list of Strings which are stringified versions of a list of Queries. */
 public static List<String> toString(List<Query> queries, IndexSchema schema) {
   List<String> out = new ArrayList<String>(queries.size());
   for (Query q : queries) {
     out.add(QueryParsing.toString(q, schema));
   }
   return out;
 }
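 // Hedged usage sketch (hypothetical helper, not in the original source): the utility above is
 // handy for echoing parsed filter queries back in a debug-style response section.
 public static void sketchEchoQueries(
     List<Query> filters, IndexSchema schema, SolrQueryResponse rsp) {
   rsp.add("parsed_filter_queries", toString(filters, schema));
 }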
 @Override
 public SortSpec getSort(boolean useGlobal) throws ParseException {
   SortSpec sort = super.getSort(useGlobal);
   if (sortStr != null && sortStr.length() > 0 && sort.getSort() == null) {
     Sort oldSort = QueryParsing.parseSort(sortStr, getReq());
     if (oldSort != null) {
       sort.sort = oldSort;
     }
   }
   return sort;
 }
  /**
   * Called by FacetComponent's impl of {@link
   * org.apache.solr.handler.component.SearchComponent#modifyRequest(ResponseBuilder,
   * SearchComponent, ShardRequest)}.
   */
  public static void distribModifyRequest(
      ShardRequest sreq, LinkedHashMap<String, HeatmapFacet> heatmapFacets) {
    // Set the format to PNG because it's compressed and it's the only format we have code to
    // read at the moment.
    // Changing a param is sadly tricky because field-specific params can show up as local-params
    // (highest precedence) or as f.key.facet.heatmap.whatever. Ugh. So we re-write the
    // facet.heatmap list with the local-params moved out to the "f.key." prefix, but we need to
    // keep the key local-param because that's the only way to set an output key. This approach
    // means we only need to know about the parameter we're changing, not about all possible
    // heatmap params.

    // Remove existing heatmap field param vals; we will rewrite
    sreq.params.remove(FacetParams.FACET_HEATMAP);
    for (Map.Entry<String, HeatmapFacet> entry : heatmapFacets.entrySet()) {
      final String key = entry.getKey();
      final HeatmapFacet facet = entry.getValue();
      // add heatmap field param
      if (!key.equals(facet.facetOn)) {
        sreq.params.add(
            FacetParams.FACET_HEATMAP,
            "{!"
                + CommonParams.OUTPUT_KEY
                + "="
                + QueryParsing.encodeLocalParamVal(key)
                + "}"
                + facet.facetOn);
      } else {
        sreq.params.add(FacetParams.FACET_HEATMAP, facet.facetOn);
      }
      // Turn local-params into top-level f.key.param=value style params
      if (facet.localParams != null) {
        final Iterator<String> localNameIter = facet.localParams.getParameterNamesIterator();
        while (localNameIter.hasNext()) {
          String pname = localNameIter.next();
          if (!pname.startsWith(FacetParams.FACET_HEATMAP)) {
            continue; // could be 'key', or 'v' even
          }
          String pval = facet.localParams.get(pname);
          sreq.params.set("f." + key + "." + pname, pval);
        }
      }
      // Set format to PNG; it's the only one we parse
      sreq.params.set("f." + key + "." + FacetParams.FACET_HEATMAP_FORMAT, FORMAT_PNG);
    }
  }
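  // Hedged illustration (the param names and values below are an assumption about how the
  // rewrite plays out, not taken from the original source). A per-shard request carrying
  //   facet.heatmap={!facet.heatmap.gridLevel=6 key=hm}geo
  // would be rewritten so the heatmap local-params become field-specific params, roughly as:
  //   facet.heatmap={!key=hm}geo
  //   f.hm.facet.heatmap.gridLevel=6
  //   f.hm.facet.heatmap.format=png   (format forced to PNG, the only format we parse)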
  /**
   * "foo" returns null
   * "{!prefix f=myfield}yes" returns type="prefix",f="myfield",v="yes"
   * "{!prefix f=myfield v=$p}" returns type="prefix",f="myfield",v=params.get("p")
   */
  public static SolrParams getLocalParams(String txt, SolrParams params) throws SyntaxError {
    if (txt == null || !txt.startsWith(LOCALPARAM_START)) {
      return null;
    }
    Map<String, String> localParams = new HashMap<String, String>();
    int start = QueryParsing.parseLocalParams(txt, 0, localParams, params);

    String val = localParams.get(V);
    if (val == null) {
      val = txt.substring(start);
      localParams.put(V, val);
    } else {
      // localParams.put(VAL_EXPLICIT, "true");
    }
    return new MapSolrParams(localParams);
  }
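  // Hedged usage sketch (not in the original source; the literal keys "type", "f", and "v"
  // follow the examples in the javadoc above): reading the parser type, field, and value back
  // out of a local-params prefix.
  public static void sketchReadLocalParams(SolrParams requestParams) throws SyntaxError {
    SolrParams local = getLocalParams("{!prefix f=myfield}yes", requestParams);
    if (local != null) {
      String type = local.get("type"); // "prefix"
      String field = local.get("f"); // "myfield"
      String value = local.get("v"); // "yes"
    }
  }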
  public Query parse() throws ParseException {
    String qstr = getString();
    if (qstr == null) return null;

    String defaultField = getParam(CommonParams.DF);
    if (defaultField == null) {
      defaultField = getReq().getSchema().getDefaultSearchFieldName();
    }
    lparser = new SolrQueryParser(this, defaultField);

    lparser.setDefaultOperator(
        QueryParsing.getQueryParserDefaultOperator(
            getReq().getSchema(), getParam(QueryParsing.OP)));

    return lparser.parse(qstr);
  }
  protected ParsedParams parseParams(String type, String param) throws SyntaxError, IOException {
    SolrParams localParams = QueryParsing.getLocalParams(param, req.getParams());
    DocSet docs = docsOrig;
    String facetValue = param;
    String key = param;
    List<String> tags = Collections.emptyList();
    int threads = -1;

    if (localParams == null) {
      SolrParams params = global;
      SolrParams required = new RequiredSolrParams(params);
      return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
    }

    SolrParams params = SolrParams.wrapDefaults(localParams, global);
    SolrParams required = new RequiredSolrParams(params);

    // remove local params unless it's a query
    if (type != FacetParams.FACET_QUERY) { // TODO Cut over to an Enum here
      facetValue = localParams.get(CommonParams.VALUE);
    }

    // reset the default key now that localParams have been removed
    key = facetValue;

    // allow explicit set of the key
    key = localParams.get(CommonParams.OUTPUT_KEY, key);

    String tagStr = localParams.get(CommonParams.TAG);
    tags = tagStr == null ? Collections.<String>emptyList() : StrUtils.splitSmart(tagStr, ',');

    String threadStr = localParams.get(CommonParams.THREADS);
    if (threadStr != null) {
      threads = Integer.parseInt(threadStr);
    }

    // figure out if we need a new base DocSet
    String excludeStr = localParams.get(CommonParams.EXCLUDE);
    if (excludeStr == null)
      return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);

    List<String> excludeTagList = StrUtils.splitSmart(excludeStr, ',');
    docs = computeDocSet(docs, excludeTagList);
    return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
  }
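  // Hedged illustration (an assumed request, not taken from the original source): for a param
  // such as
  //   facet.field={!ex=colorTag key=cats}category
  // parseParams sees localParams {ex=colorTag, key=cats}, yielding facetValue="category",
  // key="cats", and a base DocSet recomputed with the filter tagged "colorTag" excluded.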
    public SimpleQParser(
        String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {

      super(qstr, localParams, params, req);
      // Some of the parameters may come in through localParams, so combine them with params.
      SolrParams defaultParams = SolrParams.wrapDefaults(localParams, params);

      // This will be used to specify what fields and boosts will be used by SimpleQueryParser.
      Map<String, Float> queryFields =
          SolrPluginUtils.parseFieldBoosts(defaultParams.get(SimpleParams.QF));

      if (queryFields.isEmpty()) {
        // If qf is not specified, set up the queryFields map to use the defaultField.
        String defaultField =
            QueryParsing.getDefaultField(req.getSchema(), defaultParams.get(CommonParams.DF));

        if (defaultField == null) {
          // A query cannot be run without having a field or set of fields to run against.
          throw new IllegalStateException(
              "Neither "
                  + SimpleParams.QF
                  + ", "
                  + CommonParams.DF
                  + ", nor the default search field are present.");
        }

        queryFields.put(defaultField, 1.0F);
      } else {
        for (Map.Entry<String, Float> queryField : queryFields.entrySet()) {
          if (queryField.getValue() == null) {
            // Some fields may be specified without a boost, so default the boost to 1.0 since
            // a null value will not be accepted by SimpleQueryParser.
            queryField.setValue(1.0F);
          }
        }
      }

      // Setup the operations that are enabled for the query.
      int enabledOps = 0;
      String opParam = defaultParams.get(SimpleParams.QO);

      if (opParam == null) {
        // All operations will be enabled.
        enabledOps = -1;
      } else {
        // Parse the specified enabled operations to be used by the query.
        String[] operations = opParam.split(",");

        for (String operation : operations) {
          Integer enabledOp = OPERATORS.get(operation.trim().toUpperCase(Locale.ROOT));

          if (enabledOp != null) {
            enabledOps |= enabledOp;
          }
        }
      }

      // Create a SimpleQueryParser using the analyzer from the schema.
      final IndexSchema schema = req.getSchema();
      parser =
          new SolrSimpleQueryParser(
              req.getSchema().getQueryAnalyzer(), queryFields, enabledOps, this, schema);

      // Set the default operator to be either 'AND' or 'OR' for the query.
      QueryParser.Operator defaultOp =
          QueryParsing.getQueryParserDefaultOperator(
              req.getSchema(), defaultParams.get(QueryParsing.OP));

      if (defaultOp == QueryParser.Operator.AND) {
        parser.setDefaultOperator(BooleanClause.Occur.MUST);
      }
    }
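    // Hedged sketch (not in the original source; "title" and "body" are assumed field names):
    // how the qf parameter above is interpreted before null boosts are defaulted to 1.0f.
    Map<String, Float> sketchParseQf() {
      // "title^10 body" -> {"title" -> 10.0f, "body" -> null}
      return SolrPluginUtils.parseFieldBoosts("title^10 body");
    }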
  /** Actually run the query */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrIndexSearcher searcher = req.getSearcher();

    if (rb.getQueryCommand().getOffset() < 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
    }

    // -1 as flag if not set.
    long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);

    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
      SchemaField idField = req.getSchema().getUniqueKeyField();
      List<String> idArr = StrUtils.splitSmart(ids, ",", true);
      int[] luceneIds = new int[idArr.size()];
      int docs = 0;
      for (int i = 0; i < idArr.size(); i++) {
        int id =
            req.getSearcher()
                .getFirstMatch(
                    new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
        if (id >= 0) luceneIds[docs++] = id;
      }

      DocListAndSet res = new DocListAndSet();
      res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
      if (rb.isNeedDocSet()) {
        List<Query> queries = new ArrayList<Query>();
        queries.add(rb.getQuery());
        List<Query> filters = rb.getFilters();
        if (filters != null) queries.addAll(filters);
        res.docSet = searcher.getDocSet(queries);
      }
      rb.setResults(res);
      rsp.add("response", rb.getResults().docList);
      return;
    }

    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();

    //
    // grouping / field collapsing
    //
    boolean doGroup = params.getBool(GroupParams.GROUP, false);
    if (doGroup) {
      try {
        cmd.groupCommands = new ArrayList<Grouping.Command>();

        String[] fields = params.getParams(GroupParams.GROUP_FIELD);
        String[] funcs = params.getParams(GroupParams.GROUP_FUNC);
        String[] queries = params.getParams(GroupParams.GROUP_QUERY);
        String groupSortStr = params.get(GroupParams.GROUP_SORT);
        Sort groupSort =
            groupSortStr != null ? QueryParsing.parseSort(groupSortStr, req.getSchema()) : null;

        int limitDefault = cmd.getLen(); // this is normally from "rows"
        int docsPerGroupDefault = params.getInt(GroupParams.GROUP_LIMIT, 1);

        // temporary: implement all group-by-field as group-by-func
        if (funcs == null) {
          funcs = fields;
        } else if (fields != null) {
          // catenate functions and fields
          String[] both = new String[fields.length + funcs.length];
          System.arraycopy(fields, 0, both, 0, fields.length);
          System.arraycopy(funcs, 0, both, fields.length, funcs.length);
          funcs = both;
        }

        if (funcs != null) {
          for (String groupByStr : funcs) {
            QParser parser = QParser.getParser(groupByStr, "func", rb.req);
            Query q = parser.getQuery();
            Grouping.CommandFunc gc = new Grouping.CommandFunc();
            gc.groupSort = groupSort;

            if (q instanceof FunctionQuery) {
              gc.groupBy = ((FunctionQuery) q).getValueSource();
            } else {
              gc.groupBy = new QueryValueSource(q, 0.0f);
            }
            gc.key = groupByStr;
            gc.groupLimit = limitDefault;
            gc.docsPerGroup = docsPerGroupDefault;

            cmd.groupCommands.add(gc);
          }
        }

        if (cmd.groupCommands.size() == 0) cmd.groupCommands = null;

        if (cmd.groupCommands != null) {
          if (rb.doHighlights || rb.isDebug()) {
            // we need a single list of the returned docs
            cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
          }

          searcher.search(result, cmd);
          rb.setResult(result);
          rsp.add("grouped", result.groupedResults);
          // TODO: get "hits" a different way to log
          return;
        }
      } catch (ParseException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
    }

    // normal search result
    searcher.search(result, cmd);
    rb.setResult(result);

    rsp.add("response", rb.getResults().docList);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());

    doFieldSortValues(rb, searcher);
    doPrefetch(rb);
  }
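  // Hedged illustration (the group.* parameter names follow GroupParams as used above; the field
  // and sort values are assumptions): a request such as
  //   q=*:*&group=true&group.field=category&group.limit=3&group.sort=price asc
  // takes the grouping branch above, building one Grouping.CommandFunc per group.field /
  // group.func value and returning the "grouped" response section instead of the normal doc list.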