Example 1
 private void processAliases(Aliases aliases, List<String> collectionsList) {
   String collection = solrReq.getParams().get(COLLECTION_PROP);
   if (collection != null) {
     collectionsList = StrUtils.splitSmart(collection, ",", true);
   }
   if (collectionsList != null) {
     Set<String> newCollectionsList = new HashSet<>(collectionsList.size());
     for (String col : collectionsList) {
       String al = aliases.getCollectionAlias(col);
       if (al != null) {
         List<String> aliasList = StrUtils.splitSmart(al, ",", true);
         newCollectionsList.addAll(aliasList);
       } else {
         newCollectionsList.add(col);
       }
     }
     if (newCollectionsList.size() > 0) {
       StringBuilder collectionString = new StringBuilder();
       Iterator<String> it = newCollectionsList.iterator();
       int sz = newCollectionsList.size();
       for (int i = 0; i < sz; i++) {
         collectionString.append(it.next());
         if (i < newCollectionsList.size() - 1) {
           collectionString.append(",");
         }
       }
       ModifiableSolrParams params = new ModifiableSolrParams(solrReq.getParams());
       params.set(COLLECTION_PROP, collectionString.toString());
       solrReq.setParams(params);
     }
   }
 }
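
A minimal stand-alone sketch of the same alias-expansion-and-rejoin step, assuming only the standard library (the Solr Aliases/StrUtils types above are replaced by a plain Map and String.split); String.join takes the place of the manual iterator/StringBuilder loop, and a LinkedHashSet is used so the joined order stays predictable:

import java.util.*;

class AliasJoinSketch {
  // expandAndJoin with {"all" -> "c1,c2"} over ["all", "c3"] yields "c1,c2,c3".
  static String expandAndJoin(Map<String, String> aliases, List<String> collectionsList) {
    Set<String> expanded = new LinkedHashSet<>();
    for (String col : collectionsList) {
      String alias = aliases.get(col); // stand-in for aliases.getCollectionAlias(col)
      if (alias != null) {
        expanded.addAll(Arrays.asList(alias.split(",")));
      } else {
        expanded.add(col);
      }
    }
    return String.join(",", expanded);
  }

  public static void main(String[] args) {
    System.out.println(expandAndJoin(Map.of("all", "c1,c2"), List.of("all", "c3")));
  }
}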
Example 2
  public FacetRange parse(Object arg) throws SyntaxError {
    parseCommonParams(arg);

    if (!(arg instanceof Map)) {
      throw err("Missing range facet arguments");
    }

    Map<String, Object> m = (Map<String, Object>) arg;

    facet.field = getString(m, "field", null);

    facet.start = m.get("start");
    facet.end = m.get("end");
    facet.gap = m.get("gap");
    facet.hardend = getBoolean(m, "hardend", facet.hardend);
    facet.mincount = getLong(m, "mincount", 0);

    // TODO: refactor list-of-options code

    Object o = m.get("include");
    String[] includeList = null;
    if (o != null) {
      List lst = null;

      if (o instanceof List) {
        lst = (List) o;
      } else if (o instanceof String) {
        lst = StrUtils.splitSmart((String) o, ',');
      }

      includeList = (String[]) lst.toArray(new String[lst.size()]);
    }
    facet.include = FacetParams.FacetRangeInclude.parseParam(includeList);

    facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);

    o = m.get("other");
    if (o != null) {
      List<String> lst = null;

      if (o instanceof List) {
        lst = (List) o;
      } else if (o instanceof String) {
        lst = StrUtils.splitSmart((String) o, ',');
      }

      for (String otherStr : lst) {
        facet.others.add(FacetParams.FacetRangeOther.get(otherStr));
      }
    }

    Object facetObj = m.get("facet");
    parseSubs(facetObj);

    return facet;
  }
  public Query parse() throws ParseException {
    // handle legacy "query;sort" syntax
    if (getLocalParams() == null) {
      String qstr = getString();
      sortStr = getParams().get(CommonParams.SORT);
      if (sortStr == null) {
        // sort may be legacy form, included in the query string
        List<String> commands = StrUtils.splitSmart(qstr, ';');
        if (commands.size() == 2) {
          qstr = commands.get(0);
          sortStr = commands.get(1);
        } else if (commands.size() == 1) {
          // This is needed to support the case where someone sends: "q=query;"
          qstr = commands.get(0);
        } else if (commands.size() > 2) {
          throw new SolrException(
              SolrException.ErrorCode.BAD_REQUEST,
              "If you want to use multiple ';' in the query, use the 'sort' param.");
        }
      }
      setString(qstr);
    }

    return super.parse();
  }
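
For the legacy "query;sort" handling above, a small self-contained sketch under the assumption that a plain String.split is an acceptable stand-in for StrUtils.splitSmart (quoted ';' characters are therefore not protected here):

import java.util.*;

class LegacySortSplitSketch {
  // Returns {query, sort-or-null}; more than one ';' is rejected, as in parse() above.
  static String[] splitQueryAndSort(String qstr) {
    String[] commands = qstr.split(";"); // trailing empty strings are dropped, so "q;" -> {"q"}
    if (commands.length > 2) {
      throw new IllegalArgumentException(
          "If you want to use multiple ';' in the query, use the 'sort' param.");
    }
    String query = commands.length >= 1 ? commands[0] : qstr;
    String sort = commands.length == 2 ? commands[1] : null;
    return new String[] {query, sort};
  }

  public static void main(String[] args) {
    System.out.println(Arrays.toString(splitQueryAndSort("name:solr;score desc")));
  }
}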
Example 4
  public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
    numRequests++;

    Query query = null;
    Filter filter = null;

    List<String> commands = StrUtils.splitSmart(req.getQueryString(), ';');

    String qs = commands.size() >= 1 ? commands.get(0) : "";
    query = QueryParsing.parseQuery(qs, req.getSchema());

    // If the first non-query, non-filter command is a simple sort on an indexed field, then
    // we can use the Lucene sort ability.
    Sort sort = null;
    if (commands.size() >= 2) {
      sort = QueryParsing.parseSort(commands.get(1), req);
    }

    try {

      int numHits;
      ScoreDoc[] scoreDocs;
      if (sort != null) {
        TopFieldDocs hits =
            req.getSearcher().search(query, filter, req.getStart() + req.getLimit(), sort);
        scoreDocs = hits.scoreDocs;
        numHits = hits.totalHits;
      } else {
        TopDocs hits = req.getSearcher().search(query, filter, req.getStart() + req.getLimit());
        scoreDocs = hits.scoreDocs;
        numHits = hits.totalHits;
      }

      int startRow = Math.min(numHits, req.getStart());
      int endRow = Math.min(numHits, req.getStart() + req.getLimit());
      int numRows = endRow - startRow;

      int[] ids = new int[numRows];
      Document[] data = new Document[numRows];
      for (int i = startRow; i < endRow; i++) {
        ids[i] = scoreDocs[i].doc;
        data[i] = req.getSearcher().doc(ids[i]);
      }

      rsp.add(null, new DocSlice(0, numRows, ids, null, numHits, 0.0f));

      /* *********************
      rsp.setResults(new DocSlice(0, numRows, ids, null, numHits));

      // Setting the actual document objects is optional
      rsp.setResults(data);
      ********************* */
    } catch (IOException e) {
      rsp.setException(e);
      numErrors++;
      return;
    }
  }
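
A tiny worked illustration of the paging arithmetic used above: the requested window [start, start + limit) is clamped to the hit count before any documents are copied.

class PagingWindowSketch {
  public static void main(String[] args) {
    int numHits = 7, start = 5, limit = 10;          // only 7 hits, page asks for rows 5..14
    int startRow = Math.min(numHits, start);         // 5
    int endRow = Math.min(numHits, start + limit);   // 7
    int numRows = endRow - startRow;                 // 2 rows actually returned
    System.out.println("rows " + startRow + ".." + (endRow - 1) + " => " + numRows + " docs");
  }
}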
Example 5
  protected ParsedParams parseParams(String type, String param) throws SyntaxError, IOException {
    SolrParams localParams = QueryParsing.getLocalParams(param, req.getParams());
    DocSet docs = docsOrig;
    String facetValue = param;
    String key = param;
    List<String> tags = Collections.emptyList();
    int threads = -1;

    if (localParams == null) {
      SolrParams params = global;
      SolrParams required = new RequiredSolrParams(params);
      return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
    }

    SolrParams params = SolrParams.wrapDefaults(localParams, global);
    SolrParams required = new RequiredSolrParams(params);

    // remove local params unless it's a query
    if (type != FacetParams.FACET_QUERY) { // TODO Cut over to an Enum here
      facetValue = localParams.get(CommonParams.VALUE);
    }

    // reset the default key now that localParams have been removed
    key = facetValue;

    // allow explicit set of the key
    key = localParams.get(CommonParams.OUTPUT_KEY, key);

    String tagStr = localParams.get(CommonParams.TAG);
    tags = tagStr == null ? Collections.<String>emptyList() : StrUtils.splitSmart(tagStr, ',');

    String threadStr = localParams.get(CommonParams.THREADS);
    if (threadStr != null) {
      threads = Integer.parseInt(threadStr);
    }

    // figure out if we need a new base DocSet
    String excludeStr = localParams.get(CommonParams.EXCLUDE);
    if (excludeStr == null)
      return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);

    List<String> excludeTagList = StrUtils.splitSmart(excludeStr, ',');
    docs = computeDocSet(docs, excludeTagList);
    return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
  }
Example 6
  private void createRetrieveDocs(ResponseBuilder rb) {

    // TODO: in a system with nTiers > 2, we could be passed "ids" here
    // unless those requests always go to the final destination shard

    // for each shard, collect the documents for that shard.
    HashMap<String, Collection<ShardDoc>> shardMap = new HashMap<String, Collection<ShardDoc>>();
    for (ShardDoc sdoc : rb.resultIds.values()) {
      Collection<ShardDoc> shardDocs = shardMap.get(sdoc.shard);
      if (shardDocs == null) {
        shardDocs = new ArrayList<ShardDoc>();
        shardMap.put(sdoc.shard, shardDocs);
      }
      shardDocs.add(sdoc);
    }

    SchemaField uniqueField = rb.req.getSchema().getUniqueKeyField();

    // Now create a request for each shard to retrieve the stored fields
    for (Collection<ShardDoc> shardDocs : shardMap.values()) {
      ShardRequest sreq = new ShardRequest();
      sreq.purpose = ShardRequest.PURPOSE_GET_FIELDS;

      sreq.shards = new String[] {shardDocs.iterator().next().shard};

      sreq.params = new ModifiableSolrParams();

      // add original params
      sreq.params.add(rb.req.getParams());

      // no need for a sort, we already have order
      sreq.params.remove(CommonParams.SORT);

      // we already have the field sort values
      sreq.params.remove(ResponseBuilder.FIELD_SORT_VALUES);

      // make sure that the id is returned for correlation.
      String fl = sreq.params.get(CommonParams.FL);
      if (fl != null) {
        fl = fl.trim();
        // currently, "score" is synonymous with "*,score" so
        // don't add "id" if the fl is empty or "score" or it would change the meaning.
        if (fl.length() != 0 && !"score".equals(fl) && !"*".equals(fl)) {
          sreq.params.set(CommonParams.FL, fl + ',' + uniqueField.getName());
        }
      }

      ArrayList<String> ids = new ArrayList<String>(shardDocs.size());
      for (ShardDoc shardDoc : shardDocs) {
        // TODO: depending on the type, we may need more than a simple toString()?
        ids.add(shardDoc.id.toString());
      }
      sreq.params.add(ShardParams.IDS, StrUtils.join(ids, ','));

      rb.addRequest(this, sreq);
    }
  }
 private Command(SolrQueryRequest req, SolrQueryResponse resp, String httpMethod) {
   this.req = req;
   this.resp = resp;
   this.method = httpMethod;
   path = (String) req.getContext().get("path");
   if (path == null) path = getDefaultPath();
   parts = StrUtils.splitSmart(path, '/');
   if (parts.get(0).isEmpty()) parts.remove(0);
 }
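
A minimal sketch of the path splitting in the constructor above, with String.split standing in for StrUtils.splitSmart(path, '/'); as in the original, the empty first segment produced by a leading '/' is dropped:

import java.util.*;

class PathPartsSketch {
  static List<String> pathParts(String path) {
    List<String> parts = new ArrayList<>(Arrays.asList(path.split("/")));
    // "/schema/fields".split("/") yields a leading "" element; remove it.
    if (!parts.isEmpty() && parts.get(0).isEmpty()) {
      parts.remove(0);
    }
    return parts;
  }

  public static void main(String[] args) {
    System.out.println(pathParts("/schema/fields")); // [schema, fields]
  }
}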
  public void processGetUpdates(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    String versionsStr = params.get("getUpdates");
    if (versionsStr == null) return;

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);

    List<Object> updates = new ArrayList<Object>(versions.size());

    long minVersion = Long.MAX_VALUE;

    // TODO: get this from cache instead of rebuilding?
    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      for (String versionStr : versions) {
        long version = Long.parseLong(versionStr);
        try {
          Object o = recentUpdates.lookup(version);
          if (o == null) continue;

          if (version > 0) {
            minVersion = Math.min(minVersion, version);
          }

          // TODO: do any kind of validation here?
          updates.add(o);

        } catch (SolrException e) {
          log.warn("Exception reading log for updates", e);
        } catch (ClassCastException e) {
          log.warn("Exception reading log for updates", e);
        }
      }

      // Must return all delete-by-query commands that occur after the first add requested
      // since they may apply.
      updates.addAll(recentUpdates.getDeleteByQuery(minVersion));

      rb.rsp.add("updates", updates);

    } finally {
      recentUpdates.close(); // cache this somehow?
    }
  }
  public void processSync(ResponseBuilder rb, int nVersions, String sync) {
    List<String> replicas = StrUtils.splitSmart(sync, ",", true);

    boolean cantReachIsSuccess = rb.req.getParams().getBool("cantReachIsSuccess", false);

    PeerSync peerSync =
        new PeerSync(rb.req.getCore(), replicas, nVersions, cantReachIsSuccess, true);
    boolean success = peerSync.sync();

    // TODO: more complex response?
    rb.rsp.add("sync", success);
  }
  public static Map testForResponseElement(
      RestTestHarness harness,
      String testServerBaseUrl,
      String uri,
      CloudSolrClient cloudSolrClient,
      List<String> jsonPath,
      Object expected,
      long maxTimeoutSeconds)
      throws Exception {

    boolean success = false;
    long startTime = System.nanoTime();
    Map m = null;

    while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS)
        < maxTimeoutSeconds) {
      try {
        m =
            testServerBaseUrl == null
                ? getRespMap(uri, harness)
                : TestSolrConfigHandlerConcurrent.getAsMap(
                    testServerBaseUrl + uri, cloudSolrClient);
      } catch (Exception e) {
        Thread.sleep(100);
        continue;
      }
      if (Objects.equals(expected, Utils.getObjectByPath(m, false, jsonPath))) {
        success = true;
        break;
      }
      Thread.sleep(100);
    }
    assertTrue(
        StrUtils.formatString(
            "Could not get expected value  ''{0}'' for path ''{1}'' full output: {2},  from server:  {3}",
            expected, StrUtils.join(jsonPath, '/'), getAsString(m), testServerBaseUrl),
        success);

    return m;
  }
  private Set<String> getCollectionList(ClusterState clusterState, String collection) {
    // Extract each comma separated collection name and store in a List.
    List<String> rawCollectionsList = StrUtils.splitSmart(collection, ",", true);
    Set<String> collectionsList = new HashSet<>();
    // validate collections
    for (String collectionName : rawCollectionsList) {
      if (!clusterState.getCollections().contains(collectionName)) {
        Aliases aliases = zkStateReader.getAliases();
        String alias = aliases.getCollectionAlias(collectionName);
        if (alias != null) {
          List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
          collectionsList.addAll(aliasList);
          continue;
        }

        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection not found: " + collectionName);
      }

      collectionsList.add(collectionName);
    }
    return collectionsList;
  }
Example 12
  /**
   * Examines a Node from the DOM representation of a NamedList and adds the contents of that node
   * to both the specified NamedList and List passed as arguments.
   *
   * @param nd The Node whose type will be used to determine how to parse the text content. If there
   *     is a 'name' attribute it will be used when adding to the NamedList
   * @param nlst A NamedList to add the item to, with name if applicable. If this param is null it
   *     will be ignored.
   * @param arr A List to add the item to. If this param is null it will be ignored.
   */
  @SuppressWarnings("unchecked")
  public static void addToNamedList(Node nd, NamedList nlst, List arr) {
    // Nodes often include whitespace, etc... so just return if this
    // is not an Element.
    if (nd.getNodeType() != Node.ELEMENT_NODE) return;

    final String type = nd.getNodeName();

    final String name = getAttr(nd, "name");

    Object val = null;

    if ("lst".equals(type)) {
      val = childNodesToNamedList(nd);
    } else if ("arr".equals(type)) {
      val = childNodesToList(nd);
    } else {
      final String textValue = getText(nd);
      try {
        if ("str".equals(type)) {
          val = textValue;
        } else if ("int".equals(type)) {
          val = Integer.valueOf(textValue);
        } else if ("long".equals(type)) {
          val = Long.valueOf(textValue);
        } else if ("float".equals(type)) {
          val = Float.valueOf(textValue);
        } else if ("double".equals(type)) {
          val = Double.valueOf(textValue);
        } else if ("bool".equals(type)) {
          val = StrUtils.parseBool(textValue);
        }
        // :NOTE: Unexpected Node names are ignored
        // :TODO: should we generate an error here?
      } catch (NumberFormatException nfe) {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            "Value "
                + (null != name ? ("of '" + name + "' ") : "")
                + "can not be parsed as '"
                + type
                + "': \""
                + textValue
                + "\"",
            nfe);
      }
    }

    if (nlst != null) nlst.add(name, val);
    if (arr != null) arr.add(val);
  }
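
A compact stand-alone sketch of the type-tag dispatch in addToNamedList (the lst/arr branches are omitted); Boolean.parseBoolean stands in here for Solr's stricter StrUtils.parseBool:

class TypedValueSketch {
  static Object parseTypedValue(String type, String textValue) {
    switch (type) {
      case "str":    return textValue;
      case "int":    return Integer.valueOf(textValue);
      case "long":   return Long.valueOf(textValue);
      case "float":  return Float.valueOf(textValue);
      case "double": return Double.valueOf(textValue);
      case "bool":   return Boolean.parseBoolean(textValue); // Solr's parseBool is stricter
      default:       return null; // unexpected tags are ignored, as in addToNamedList
    }
  }

  public static void main(String[] args) {
    System.out.println(parseTypedValue("int", "42"));
    System.out.println(parseTypedValue("bool", "true"));
  }
}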
Example 13
  public List<String> getStringList(Map<String, Object> args, String paramName) {
    Object o = args.get(paramName);
    if (o == null) {
      return null;
    }
    if (o instanceof List) {
      return (List<String>) o;
    }
    if (o instanceof String) {
      return StrUtils.splitSmart((String) o, ",", true);
    }

    throw err("Expected list of string or comma separated string values.");
  }
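
The same "list or comma-separated string" convention as a self-contained sketch, with a stream-based split standing in for StrUtils.splitSmart(s, ",", true) (empty tokens are trimmed away, roughly matching the trimEmpty flag) and a plain IllegalArgumentException in place of err(...):

import java.util.*;
import java.util.stream.*;

class StringListArgSketch {
  @SuppressWarnings("unchecked")
  static List<String> getStringList(Map<String, Object> args, String paramName) {
    Object o = args.get(paramName);
    if (o == null) return null;
    if (o instanceof List) return (List<String>) o;
    if (o instanceof String) {
      return Arrays.stream(((String) o).split(","))
          .map(String::trim)
          .filter(s -> !s.isEmpty())
          .collect(Collectors.toList());
    }
    throw new IllegalArgumentException(
        "Expected list of string or comma separated string values.");
  }

  public static void main(String[] args) {
    System.out.println(getStringList(Map.of("fields", "a, b,,c"), "fields")); // [a, b, c]
  }
}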
Example 14
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    if (params.getBool(TermsParams.TERMS, false)) {
      rb.doTerms = true;
    }

    // TODO: temporary... this should go in a different component.
    String shards = params.get(ShardParams.SHARDS);
    if (shards != null) {
      if (params.get(ShardParams.SHARDS_QT) == null) {
        throw new SolrException(
            SolrException.ErrorCode.BAD_REQUEST, "No shards.qt parameter specified");
      }
      List<String> lst = StrUtils.splitSmart(shards, ",", true);
      rb.shards = lst.toArray(new String[lst.size()]);
    }
  }
    /**
     * Retrieve all requests recorded by this queue which were sent to the given collection and shard
     *
     * @param zkStateReader the {@link org.apache.solr.common.cloud.ZkStateReader} from which
     *     cluster state is read
     * @param collectionName the given collection name for which requests have to be extracted
     * @param shardId the given shard name for which requests have to be extracted
     * @return a list of {@link
     *     org.apache.solr.handler.component.TrackingShardHandlerFactory.ShardRequestAndParams} or
     *     empty list if none are found
     */
    public List<ShardRequestAndParams> getShardRequests(
        ZkStateReader zkStateReader, String collectionName, String shardId) {
      DocCollection collection = zkStateReader.getClusterState().getCollection(collectionName);
      assert collection != null;
      Slice slice = collection.getSlice(shardId);
      assert slice != null;

      for (Map.Entry<String, List<ShardRequestAndParams>> entry : requests.entrySet()) {
        // multiple shard addresses may be present separated by '|'
        List<String> list = StrUtils.splitSmart(entry.getKey(), '|');
        for (Map.Entry<String, Replica> replica : slice.getReplicasMap().entrySet()) {
          String coreUrl = new ZkCoreNodeProps(replica.getValue()).getCoreUrl();
          if (list.contains(coreUrl)) {
            return new ArrayList<>(entry.getValue());
          }
        }
      }
      return Collections.emptyList();
    }
  protected void issueCreateJob(
      Integer numberOfSlices,
      Integer replicationFactor,
      Integer maxShardsPerNode,
      List<String> createNodeList,
      boolean sendCreateNodeList,
      boolean createNodeSetShuffle) {
    Map<String, Object> propMap =
        ZkNodeProps.makeMap(
            Overseer.QUEUE_OPERATION,
            CollectionParams.CollectionAction.CREATE.toLower(),
            ZkStateReader.REPLICATION_FACTOR,
            replicationFactor.toString(),
            "name",
            COLLECTION_NAME,
            "collection.configName",
            CONFIG_NAME,
            OverseerCollectionProcessor.NUM_SLICES,
            numberOfSlices.toString(),
            ZkStateReader.MAX_SHARDS_PER_NODE,
            maxShardsPerNode.toString());
    if (sendCreateNodeList) {
      propMap.put(
          OverseerCollectionProcessor.CREATE_NODE_SET,
          (createNodeList != null) ? StrUtils.join(createNodeList, ',') : null);
      if (OverseerCollectionProcessor.CREATE_NODE_SET_SHUFFLE_DEFAULT != createNodeSetShuffle
          || random().nextBoolean()) {
        propMap.put(OverseerCollectionProcessor.CREATE_NODE_SET_SHUFFLE, createNodeSetShuffle);
      }
    }

    ZkNodeProps props = new ZkNodeProps(propMap);
    QueueEvent qe =
        new QueueEvent("id", ZkStateReader.toJSON(props), null) {
          @Override
          public void setBytes(byte[] bytes) {
            lastProcessMessageResult = SolrResponse.deserialize(bytes);
          }
        };
    queue.add(qe);
  }
  @Override
  public int distributedProcess(ResponseBuilder rb) throws IOException {
    int result = ResponseBuilder.STAGE_DONE;
    if (rb.stage == ResponseBuilder.STAGE_GET_FIELDS) {
      // Go ask each shard for its vectors
      // for each shard, collect the documents for that shard.
      HashMap<String, Collection<ShardDoc>> shardMap = new HashMap<String, Collection<ShardDoc>>();
      for (ShardDoc sdoc : rb.resultIds.values()) {
        Collection<ShardDoc> shardDocs = shardMap.get(sdoc.shard);
        if (shardDocs == null) {
          shardDocs = new ArrayList<ShardDoc>();
          shardMap.put(sdoc.shard, shardDocs);
        }
        shardDocs.add(sdoc);
      }
      // Now create a request for each shard to retrieve the stored fields
      for (Collection<ShardDoc> shardDocs : shardMap.values()) {
        ShardRequest sreq = new ShardRequest();
        sreq.purpose = ShardRequest.PURPOSE_GET_FIELDS;

        sreq.shards = new String[] {shardDocs.iterator().next().shard};

        sreq.params = new ModifiableSolrParams();

        // add original params
        sreq.params.add(rb.req.getParams());
        sreq.params.remove(CommonParams.Q); // remove the query
        ArrayList<String> ids = new ArrayList<String>(shardDocs.size());
        for (ShardDoc shardDoc : shardDocs) {
          ids.add(shardDoc.id.toString());
        }
        sreq.params.add(TermVectorParams.DOC_IDS, StrUtils.join(ids, ','));

        rb.addRequest(this, sreq);
      }
      result = ResponseBuilder.STAGE_DONE;
    }
    return result;
  }
  public void inform(ResourceLoader loader) {
    mapping = args.get("mapping");

    if (mapping != null) {
      List<String> wlist = null;
      try {
        File mappingFile = new File(mapping);
        if (mappingFile.exists()) {
          wlist = loader.getLines(mapping);
        } else {
          List<String> files = StrUtils.splitFileNames(mapping);
          wlist = new ArrayList<String>();
          for (String file : files) {
            List<String> lines = loader.getLines(file.trim());
            wlist.addAll(lines);
          }
        }
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      normMap = new NormalizeCharMap();
      parseRules(wlist, normMap);
    }
  }
Example 19
  /**
   * Given the input stream, read a document
   *
   * @since solr 1.3
   */
  SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamException {
    SolrInputDocument doc = new SolrInputDocument();

    String attrName = "";
    for (int i = 0; i < parser.getAttributeCount(); i++) {
      attrName = parser.getAttributeLocalName(i);
      if ("boost".equals(attrName)) {
        doc.setDocumentBoost(Float.parseFloat(parser.getAttributeValue(i)));
      } else {
        XmlUpdateRequestHandler.log.warn("Unknown attribute doc/@" + attrName);
      }
    }

    StringBuilder text = new StringBuilder();
    String name = null;
    float boost = 1.0f;
    boolean isNull = false;
    while (true) {
      int event = parser.next();
      switch (event) {
          // Add everything to the text
        case XMLStreamConstants.SPACE:
        case XMLStreamConstants.CDATA:
        case XMLStreamConstants.CHARACTERS:
          text.append(parser.getText());
          break;

        case XMLStreamConstants.END_ELEMENT:
          if ("doc".equals(parser.getLocalName())) {
            return doc;
          } else if ("field".equals(parser.getLocalName())) {
            if (!isNull) {
              doc.addField(name, text.toString(), boost);
              boost = 1.0f;
            }
          }
          break;

        case XMLStreamConstants.START_ELEMENT:
          text.setLength(0);
          String localName = parser.getLocalName();
          if (!"field".equals(localName)) {
            XmlUpdateRequestHandler.log.warn("unexpected XML tag doc/" + localName);
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST, "unexpected XML tag doc/" + localName);
          }
          boost = 1.0f;
          String attrVal = "";
          for (int i = 0; i < parser.getAttributeCount(); i++) {
            attrName = parser.getAttributeLocalName(i);
            attrVal = parser.getAttributeValue(i);
            if ("name".equals(attrName)) {
              name = attrVal;
            } else if ("boost".equals(attrName)) {
              boost = Float.parseFloat(attrVal);
            } else if ("null".equals(attrName)) {
              isNull = StrUtils.parseBoolean(attrVal);
            } else {
              XmlUpdateRequestHandler.log.warn("Unknown attribute doc/field/@" + attrName);
            }
          }
          break;
      }
    }
  }
Example 20
  /** @since solr 1.3 */
  void processDelete(UpdateRequestProcessor processor, XMLStreamReader parser, SolrParams params)
      throws XMLStreamException, IOException {
    // Parse the command
    DeleteUpdateCommand deleteCmd = new DeleteUpdateCommand();
    deleteCmd.fromPending = true;
    deleteCmd.fromCommitted = true;

    Boolean updateStore = params.getBool(UpdateParams.UPDATE_STORE);
    if (updateStore != null) {
      deleteCmd.setUpdateStore(updateStore.booleanValue());
    }

    for (int i = 0; i < parser.getAttributeCount(); i++) {
      String attrName = parser.getAttributeLocalName(i);
      String attrVal = parser.getAttributeValue(i);
      if ("fromPending".equals(attrName)) {
        deleteCmd.fromPending = StrUtils.parseBoolean(attrVal);
      } else if ("fromCommitted".equals(attrName)) {
        deleteCmd.fromCommitted = StrUtils.parseBoolean(attrVal);
      } else {
        XmlUpdateRequestHandler.log.warn("unexpected attribute delete/@" + attrName);
      }
    }

    StringBuilder text = new StringBuilder();
    while (true) {
      int event = parser.next();
      switch (event) {
        case XMLStreamConstants.START_ELEMENT:
          String mode = parser.getLocalName();
          if (!("id".equals(mode) || "query".equals(mode))) {
            XmlUpdateRequestHandler.log.warn("unexpected XML tag /delete/" + mode);
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST, "unexpected XML tag /delete/" + mode);
          }
          text.setLength(0);
          break;

        case XMLStreamConstants.END_ELEMENT:
          String currTag = parser.getLocalName();
          if ("id".equals(currTag)) {
            deleteCmd.id = text.toString();
          } else if ("query".equals(currTag)) {
            deleteCmd.query = text.toString();
          } else if ("delete".equals(currTag)) {
            return;
          } else {
            XmlUpdateRequestHandler.log.warn("unexpected XML tag /delete/" + currTag);
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST, "unexpected XML tag /delete/" + currTag);
          }
          processor.processDelete(deleteCmd);
          deleteCmd.id = null;
          deleteCmd.query = null;
          break;

          // Add everything to the text
        case XMLStreamConstants.SPACE:
        case XMLStreamConstants.CDATA:
        case XMLStreamConstants.CHARACTERS:
          text.append(parser.getText());
          break;
      }
    }
  }
Example 21
  public static ArrayList<Node> getNodesForNewShard(
      ClusterState clusterState,
      String collectionName,
      int numSlices,
      int maxShardsPerNode,
      int repFactor,
      String createNodeSetStr) {
    List<String> createNodeList =
        createNodeSetStr == null ? null : StrUtils.splitSmart(createNodeSetStr, ",", true);

    Set<String> nodes = clusterState.getLiveNodes();

    List<String> nodeList = new ArrayList<String>(nodes.size());
    nodeList.addAll(nodes);
    if (createNodeList != null) nodeList.retainAll(createNodeList);

    HashMap<String, Node> nodeNameVsShardCount = new HashMap<String, Node>();
    for (String s : nodeList) nodeNameVsShardCount.put(s, new Node(s));
    for (String s : clusterState.getCollections()) {
      DocCollection c = clusterState.getCollection(s);
      // identify suitable nodes by checking the number of cores in each of them
      for (Slice slice : c.getSlices()) {
        Collection<Replica> replicas = slice.getReplicas();
        for (Replica replica : replicas) {
          Node count = nodeNameVsShardCount.get(replica.getNodeName());
          if (count != null) {
            count.totalNodes++;
            if (s.equals(collectionName)) {
              count.thisCollectionNodes++;
              if (count.thisCollectionNodes >= maxShardsPerNode)
                nodeNameVsShardCount.remove(replica.getNodeName());
            }
          }
        }
      }
    }

    if (nodeNameVsShardCount.size() <= 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST,
          "Cannot create collection "
              + collectionName
              + ". No live Solr-instances"
              + ((createNodeList != null)
                  ? " among Solr-instances specified in " + CREATE_NODE_SET + ":" + createNodeSetStr
                  : ""));
    }

    if (repFactor > nodeNameVsShardCount.size()) {
      log.warn(
          "Specified "
              + REPLICATION_FACTOR
              + " of "
              + repFactor
              + " on collection "
              + collectionName
              + " is higher than or equal to the number of Solr instances currently live or part of your "
              + CREATE_NODE_SET
              + "("
              + nodeList.size()
              + "). Its unusual to run two replica of the same slice on the same Solr-instance.");
    }

    int maxCoresAllowedToCreate = maxShardsPerNode * nodeList.size();
    int requestedCoresToCreate = numSlices * repFactor;
    int minCoresToCreate = requestedCoresToCreate;
    if (maxCoresAllowedToCreate < minCoresToCreate) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST,
          "Cannot create shards "
              + collectionName
              + ". Value of "
              + MAX_SHARDS_PER_NODE
              + " is "
              + maxShardsPerNode
              + ", and the number of live nodes is "
              + nodeList.size()
              + ". This allows a maximum of "
              + maxCoresAllowedToCreate
              + " to be created. Value of "
              + NUM_SLICES
              + " is "
              + numSlices
              + " and value of "
              + REPLICATION_FACTOR
              + " is "
              + repFactor
              + ". This requires "
              + requestedCoresToCreate
              + " shards to be created (higher than the allowed number)");
    }

    ArrayList<Node> sortedNodeList = new ArrayList<>(nodeNameVsShardCount.values());
    Collections.sort(
        sortedNodeList,
        new Comparator<Node>() {
          @Override
          public int compare(Node x, Node y) {
            return (x.weight() < y.weight()) ? -1 : ((x.weight() == y.weight()) ? 0 : 1);
          }
        });
    return sortedNodeList;
  }
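
The final sort in getNodesForNewShard can also be written with a method-reference comparator; a minimal sketch under the assumption that Node exposes an int weight() (the weighting below is invented for the example, the real Node class is Solr-internal):

import java.util.*;

class NodeSortSketch {
  static class Node {
    final String name;
    int totalNodes;
    int thisCollectionNodes;
    Node(String name) { this.name = name; }
    int weight() { return thisCollectionNodes * 100 + totalNodes; } // invented weighting
  }

  public static void main(String[] args) {
    Node a = new Node("a"); a.totalNodes = 3;
    Node b = new Node("b"); b.totalNodes = 1;
    List<Node> nodes = new ArrayList<>(List.of(a, b));
    nodes.sort(Comparator.comparingInt(Node::weight)); // replaces the anonymous Comparator
    System.out.println(nodes.get(0).name); // b (lowest weight first)
  }
}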
Example 22
  /** Actually run the query */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrIndexSearcher searcher = req.getSearcher();

    if (rb.getQueryCommand().getOffset() < 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
    }

    // -1 as flag if not set.
    long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);

    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
      SchemaField idField = req.getSchema().getUniqueKeyField();
      List<String> idArr = StrUtils.splitSmart(ids, ",", true);
      int[] luceneIds = new int[idArr.size()];
      int docs = 0;
      for (int i = 0; i < idArr.size(); i++) {
        int id =
            req.getSearcher()
                .getFirstMatch(
                    new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
        if (id >= 0) luceneIds[docs++] = id;
      }

      DocListAndSet res = new DocListAndSet();
      res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
      if (rb.isNeedDocSet()) {
        List<Query> queries = new ArrayList<Query>();
        queries.add(rb.getQuery());
        List<Query> filters = rb.getFilters();
        if (filters != null) queries.addAll(filters);
        res.docSet = searcher.getDocSet(queries);
      }
      rb.setResults(res);
      rsp.add("response", rb.getResults().docList);
      return;
    }

    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();

    //
    // grouping / field collapsing
    //
    boolean doGroup = params.getBool(GroupParams.GROUP, false);
    if (doGroup) {
      try {
        cmd.groupCommands = new ArrayList<Grouping.Command>();

        String[] fields = params.getParams(GroupParams.GROUP_FIELD);
        String[] funcs = params.getParams(GroupParams.GROUP_FUNC);
        String[] queries = params.getParams(GroupParams.GROUP_QUERY);
        String groupSortStr = params.get(GroupParams.GROUP_SORT);
        Sort groupSort =
            groupSortStr != null ? QueryParsing.parseSort(groupSortStr, req.getSchema()) : null;

        int limitDefault = cmd.getLen(); // this is normally from "rows"
        int docsPerGroupDefault = params.getInt(GroupParams.GROUP_LIMIT, 1);

        // temporary: implement all group-by-field as group-by-func
        if (funcs == null) {
          funcs = fields;
        } else if (fields != null) {
          // catenate functions and fields
          String[] both = new String[fields.length + funcs.length];
          System.arraycopy(fields, 0, both, 0, fields.length);
          System.arraycopy(funcs, 0, both, fields.length, funcs.length);
          funcs = both;
        }

        if (funcs != null) {
          for (String groupByStr : funcs) {
            QParser parser = QParser.getParser(groupByStr, "func", rb.req);
            Query q = parser.getQuery();
            Grouping.CommandFunc gc = new Grouping.CommandFunc();
            gc.groupSort = groupSort;

            if (q instanceof FunctionQuery) {
              gc.groupBy = ((FunctionQuery) q).getValueSource();
            } else {
              gc.groupBy = new QueryValueSource(q, 0.0f);
            }
            gc.key = groupByStr;
            gc.groupLimit = limitDefault;
            gc.docsPerGroup = docsPerGroupDefault;

            cmd.groupCommands.add(gc);
          }
        }

        if (cmd.groupCommands.size() == 0) cmd.groupCommands = null;

        if (cmd.groupCommands != null) {
          if (rb.doHighlights || rb.isDebug()) {
            // we need a single list of the returned docs
            cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
          }

          searcher.search(result, cmd);
          rb.setResult(result);
          rsp.add("grouped", result.groupedResults);
          // TODO: get "hits" a different way to log
          return;
        }
      } catch (ParseException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
    }

    // normal search result
    searcher.search(result, cmd);
    rb.setResult(result);

    rsp.add("response", rb.getResults().docList);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());

    doFieldSortValues(rb, searcher);
    doPrefetch(rb);
  }
  /** create the FieldAdders that control how each field is indexed */
  void prepareFields() {
    // Possible future optimization: for really rapid incremental indexing
    // from a POST, one could cache all of this setup info based on the params.
    // The link from FieldAdder to this would need to be severed for that to happen.

    adders = new CSVLoaderBase.FieldAdder[fieldnames.length];
    String skipStr = params.get(SKIP);
    List<String> skipFields = skipStr == null ? null : StrUtils.splitSmart(skipStr, ',');

    CSVLoaderBase.FieldAdder adder = new CSVLoaderBase.FieldAdder();
    CSVLoaderBase.FieldAdder adderKeepEmpty = new CSVLoaderBase.FieldAdderEmpty();

    for (int i = 0; i < fieldnames.length; i++) {
      String fname = fieldnames[i];
      // to skip a field, leave the entries in fields and adders null
      if (fname.length() == 0 || (skipFields != null && skipFields.contains(fname))) continue;

      boolean keepEmpty = params.getFieldBool(fname, EMPTY, false);
      adders[i] = keepEmpty ? adderKeepEmpty : adder;

      // Order that operations are applied: split -> trim -> map -> add
      // so create in reverse order.
      // Creation of FieldAdders could be optimized and shared among fields

      String[] fmap = params.getFieldParams(fname, MAP);
      if (fmap != null) {
        for (String mapRule : fmap) {
          String[] mapArgs = colonSplit.split(mapRule, -1);
          if (mapArgs.length != 2)
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST,
                "Map rules must be of the form 'from:to' ,got '" + mapRule + "'");
          adders[i] = new CSVLoaderBase.FieldMapperSingle(mapArgs[0], mapArgs[1], adders[i]);
        }
      }

      if (params.getFieldBool(fname, TRIM, false)) {
        adders[i] = new CSVLoaderBase.FieldTrimmer(adders[i]);
      }

      if (params.getFieldBool(fname, SPLIT, false)) {
        String sepStr = params.getFieldParam(fname, SEPARATOR);
        char fsep = sepStr == null || sepStr.length() == 0 ? ',' : sepStr.charAt(0);
        String encStr = params.getFieldParam(fname, ENCAPSULATOR);
        char fenc = encStr == null || encStr.length() == 0 ? (char) -2 : encStr.charAt(0);
        String escStr = params.getFieldParam(fname, ESCAPE);
        char fesc =
            escStr == null || escStr.length() == 0 ? CSVStrategy.ESCAPE_DISABLED : escStr.charAt(0);

        CSVStrategy fstrat =
            new CSVStrategy(
                fsep, fenc, CSVStrategy.COMMENTS_DISABLED, fesc, false, false, false, false);
        adders[i] = new CSVLoaderBase.FieldSplitter(fstrat, adders[i]);
      }
    }
    // look for any literal fields - literal.foo=xyzzy
    Iterator<String> paramNames = params.getParameterNamesIterator();
    while (paramNames.hasNext()) {
      String pname = paramNames.next();
      if (!pname.startsWith(LITERALS_PREFIX)) continue;

      String name = pname.substring(LITERALS_PREFIX.length());
      literals.put(name, params.get(pname));
    }
  }
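
A stand-alone sketch of the 'from:to' map-rule check used in prepareFields above (colonSplit is assumed to be a precompiled Pattern on ':', as the field name suggests):

import java.util.regex.*;

class MapRuleSketch {
  private static final Pattern COLON_SPLIT = Pattern.compile(":");

  // Mirrors the length check in prepareFields(): exactly one ':' is required.
  static String[] parseMapRule(String mapRule) {
    String[] mapArgs = COLON_SPLIT.split(mapRule, -1);
    if (mapArgs.length != 2) {
      throw new IllegalArgumentException(
          "Map rules must be of the form 'from:to', got '" + mapRule + "'");
    }
    return mapArgs;
  }

  public static void main(String[] args) {
    String[] rule = parseMapRule("NA:0");
    System.out.println(rule[0] + " -> " + rule[1]); // NA -> 0
  }
}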
Example 24
  /** @since solr 1.2 */
  void processUpdate(
      UpdateRequestProcessor processor, XMLStreamReader parser, SolrParams solrParams)
      throws XMLStreamException, IOException, FactoryConfigurationError, InstantiationException,
          IllegalAccessException, TransformerConfigurationException {
    AddUpdateCommand addCmd = null;
    while (true) {
      int event = parser.next();
      switch (event) {
        case XMLStreamConstants.END_DOCUMENT:
          parser.close();
          return;

        case XMLStreamConstants.START_ELEMENT:
          String currTag = parser.getLocalName();
          if (currTag.equals(XmlUpdateRequestHandler.ADD)) {
            XmlUpdateRequestHandler.log.trace("SolrCore.update(add)");

            addCmd = new AddUpdateCommand();
            boolean overwrite = true; // the default

            Boolean overwritePending = null;
            Boolean overwriteCommitted = null;
            for (int i = 0; i < parser.getAttributeCount(); i++) {
              String attrName = parser.getAttributeLocalName(i);
              String attrVal = parser.getAttributeValue(i);
              if (XmlUpdateRequestHandler.OVERWRITE.equals(attrName)) {
                overwrite = StrUtils.parseBoolean(attrVal);
              } else if (XmlUpdateRequestHandler.ALLOW_DUPS.equals(attrName)) {
                overwrite = !StrUtils.parseBoolean(attrVal);
              } else if (XmlUpdateRequestHandler.COMMIT_WITHIN.equals(attrName)) {
                addCmd.commitWithin = Integer.parseInt(attrVal);
              } else if (XmlUpdateRequestHandler.OVERWRITE_PENDING.equals(attrName)) {
                overwritePending = StrUtils.parseBoolean(attrVal);
              } else if (XmlUpdateRequestHandler.OVERWRITE_COMMITTED.equals(attrName)) {
                overwriteCommitted = StrUtils.parseBoolean(attrVal);
              } else {
                XmlUpdateRequestHandler.log.warn("Unknown attribute id in add:" + attrName);
              }
            }

            // check if these flags are set
            if (overwritePending != null && overwriteCommitted != null) {
              if (overwritePending != overwriteCommitted) {
                throw new SolrException(
                    SolrException.ErrorCode.BAD_REQUEST,
                    "can't have different values for 'overwritePending' and 'overwriteCommitted'");
              }
              overwrite = overwritePending;
            }
            addCmd.overwriteCommitted = overwrite;
            addCmd.overwritePending = overwrite;
            addCmd.allowDups = !overwrite;
          } else if ("doc".equals(currTag)) {
            XmlUpdateRequestHandler.log.trace("adding doc...");
            addCmd.clear();
            addCmd.solrDoc = readDoc(parser);
            processor.processAdd(addCmd);
          } else if (XmlUpdateRequestHandler.COMMIT.equals(currTag)
              || XmlUpdateRequestHandler.OPTIMIZE.equals(currTag)) {
            XmlUpdateRequestHandler.log.trace("parsing " + currTag);

            CommitUpdateCommand cmd =
                new CommitUpdateCommand(XmlUpdateRequestHandler.OPTIMIZE.equals(currTag));

            boolean sawWaitSearcher = false, sawWaitFlush = false;
            for (int i = 0; i < parser.getAttributeCount(); i++) {
              String attrName = parser.getAttributeLocalName(i);
              String attrVal = parser.getAttributeValue(i);
              if (XmlUpdateRequestHandler.WAIT_FLUSH.equals(attrName)) {
                cmd.waitFlush = StrUtils.parseBoolean(attrVal);
                sawWaitFlush = true;
              } else if (XmlUpdateRequestHandler.WAIT_SEARCHER.equals(attrName)) {
                cmd.waitSearcher = StrUtils.parseBoolean(attrVal);
                sawWaitSearcher = true;
              } else if (UpdateParams.MAX_OPTIMIZE_SEGMENTS.equals(attrName)) {
                cmd.maxOptimizeSegments = Integer.parseInt(attrVal);
              } else if (UpdateParams.EXPUNGE_DELETES.equals(attrName)) {
                cmd.expungeDeletes = StrUtils.parseBoolean(attrVal);
              } else {
                XmlUpdateRequestHandler.log.warn("unexpected attribute commit/@" + attrName);
              }
            }

            // If waitFlush is specified and waitSearcher wasn't, then
            // clear waitSearcher.
            if (sawWaitFlush && !sawWaitSearcher) {
              cmd.waitSearcher = false;
            }
            processor.processCommit(cmd);
          } // end commit
          else if (XmlUpdateRequestHandler.ROLLBACK.equals(currTag)) {
            XmlUpdateRequestHandler.log.trace("parsing " + currTag);

            RollbackUpdateCommand cmd = new RollbackUpdateCommand();

            processor.processRollback(cmd);
          } // end rollback
          else if (XmlUpdateRequestHandler.DELETE.equals(currTag)) {
            XmlUpdateRequestHandler.log.trace("parsing delete");
            processDelete(processor, parser, solrParams);
          } // end delete
          break;
      }
    }
  }
Example 25
  public void handleRequest(RequestGetter requestGetter) {
    MDCLoggingContext.reset();
    MDCLoggingContext.setNode(cores);

    String path = requestGetter.getPath();
    solrParams = requestGetter.getSolrParams();
    SolrRequestHandler handler = null;
    String corename = "";
    String origCorename = null;
    try {
      // set a request timer which can be reused by requests if needed
      // req.setAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE, new RTimer());
      // put the core container in request attribute
      // req.setAttribute("org.apache.solr.CoreContainer", cores);
      // check for management path
      String alternate = cores.getManagementPath();
      if (alternate != null && path.startsWith(alternate)) {
        path = path.substring(0, alternate.length());
      }
      // unused feature ?
      int idx = path.indexOf(':');
      if (idx > 0) {
        // save the portion after the ':' for a 'handler' path parameter
        path = path.substring(0, idx);
      }

      boolean usingAliases = false;
      List<String> collectionsList = null;

      // Check for container handlers
      handler = cores.getRequestHandler(path);
      if (handler != null) {
        solrReq = parseSolrQueryRequest(SolrRequestParsers.DEFAULT, requestGetter);
        handleAdminRequest(handler, solrReq);
        return;
      } else {
        // otherwise, we should find a core from the path
        idx = path.indexOf("/", 1);
        if (idx > 1) {
          // try to get the corename as a request parameter first
          corename = path.substring(1, idx);

          // look at aliases
          if (cores.isZooKeeperAware()) {
            origCorename = corename;
            ZkStateReader reader = cores.getZkController().getZkStateReader();
            aliases = reader.getAliases();
            if (aliases != null && aliases.collectionAliasSize() > 0) {
              usingAliases = true;
              String alias = aliases.getCollectionAlias(corename);
              if (alias != null) {
                collectionsList = StrUtils.splitSmart(alias, ",", true);
                corename = collectionsList.get(0);
              }
            }
          }

          core = cores.getCore(corename);

          if (core != null) {
            path = path.substring(idx);
          }
        }

        // add collection name
        if (core == null && StringUtils.isNotBlank(requestGetter.getCollection())) {
          corename = requestGetter.getCollection();
          core = cores.getCore(corename);
        }

        if (core == null) {
          if (!cores.isZooKeeperAware()) {
            core = cores.getCore("");
          }
        }
      }

      if (core == null && cores.isZooKeeperAware()) {
        // we couldn't find the core - lets make sure a collection was not specified instead
        core = getCoreByCollection(cores, corename);

        if (core != null) {
          // we found a core, update the path
          path = path.substring(idx);
        }

        // try the default core
        if (core == null) {
          core = cores.getCore("");
          if (core != null) {}
        }
      }

      // With a valid core...
      if (core != null) {
        MDCLoggingContext.setCore(core);
        final SolrConfig config = core.getSolrConfig();
        // get or create/cache the parser for the core
        SolrRequestParsers parser = config.getRequestParsers();

        // Determine the handler from the url path if not set
        // (we might already have selected the cores handler)
        if (handler == null && path.length() > 1) { // don't match "" or "/" as valid path
          handler = core.getRequestHandler(path);

          if (handler == null) {
            // may be a restlet path
            // Handle /schema/* paths via Restlet
            if (path.equals("/schema") || path.startsWith("/schema/")) {
              throw new SolrException(
                  SolrException.ErrorCode.BAD_REQUEST, "unsupport /schema/**, use http solr");
            }
          }
          // no handler yet but allowed to handle select; let's check
          if (handler == null && parser.isHandleSelect()) {
            if ("/select".equals(path) || "/select/".equals(path)) {
              solrReq = parseSolrQueryRequest(parser, requestGetter);

              invalidStates =
                  checkStateIsValid(cores, solrReq.getParams().get(CloudSolrClient.STATE_VERSION));
              String qt = solrReq.getParams().get(CommonParams.QT);
              handler = core.getRequestHandler(qt);
              if (handler == null) {
                throw new SolrException(
                    SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
              }
              if (qt != null
                  && qt.startsWith("/")
                  && (handler instanceof ContentStreamHandlerBase)) {
                // For security reasons it's a bad idea to allow a leading '/', ex:
                // /select?qt=/update see SOLR-3161
                // There was no restriction from Solr 1.4 thru 3.5 and it's not supported for update
                // handlers.
                throw new SolrException(
                    SolrException.ErrorCode.BAD_REQUEST,
                    "Invalid Request Handler ('qt').  Do not use /select to access: " + qt);
              }
            }
          }
        }

        // With a valid handler and a valid core...
        if (handler != null) {
          // if not a /select, create the request
          if (solrReq == null) {
            solrReq = parseSolrQueryRequest(parser, requestGetter);
          }

          if (usingAliases) {
            processAliases(solrReq, aliases, collectionsList);
          }

          SolrQueryResponse solrRsp = new SolrQueryResponse();
          SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, solrRsp));
          this.execute(handler, solrReq, solrRsp);
          QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq);
          if (invalidStates != null)
            solrReq.getContext().put(CloudSolrClient.STATE_VERSION, invalidStates);
          writeResponse(solrRsp, responseWriter, solrReq);

          return; // we are done with a valid handler
        }
      }
      logger.debug("no handler or core retrieved for {}, follow through...", path);
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "no handler or core retrieved for " + path);
    } catch (Throwable ex) {
      sendError(core, solrReq, ex);
      // walk the entire cause chain to search for an Error
      Throwable t = ex;
      while (t != null) {
        if (t instanceof Error) {
          if (t != ex) {
            logger.error(
                "An Error was wrapped in another exception - please report complete stacktrace on SOLR-6161",
                ex);
          }
          throw (Error) t;
        }
        t = t.getCause();
      }
      return;
    } finally {
      try {
        if (solrReq != null) {
          logger.debug("Closing out SolrRequest: {}", solrReq);
          solrReq.close();
        }
      } finally {
        try {
          if (core != null) {
            core.close();
          }
        } finally {
          SolrRequestInfo.clearRequestInfo();
        }
      }
      MDCLoggingContext.clear();
    }
  }
Example 26
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {

    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrQueryResponse rsp = rb.rsp;

    // Set field flags
    String fl = params.get(CommonParams.FL);
    int fieldFlags = 0;
    if (fl != null) {
      fieldFlags |= SolrPluginUtils.setReturnFields(fl, rsp);
    }
    rb.setFieldFlags(fieldFlags);

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);

    // get it from the response builder to give a different component a chance
    // to set it.
    String queryString = rb.getQueryString();
    if (queryString == null) {
      // this is the normal way it's set.
      queryString = params.get(CommonParams.Q);
      rb.setQueryString(queryString);
    }

    try {
      QParser parser = QParser.getParser(rb.getQueryString(), defType, req);
      Query q = parser.getQuery();
      if (q == null) {
        // normalize a null query to a query that matches nothing
        q = new BooleanQuery();
      }
      rb.setQuery(q);
      rb.setSortSpec(parser.getSort(true));
      rb.setQparser(parser);

      String[] fqs = req.getParams().getParams(CommonParams.FQ);
      if (fqs != null && fqs.length != 0) {
        List<Query> filters = rb.getFilters();
        if (filters == null) {
          filters = new ArrayList<Query>();
          rb.setFilters(filters);
        }
        for (String fq : fqs) {
          if (fq != null && fq.trim().length() != 0) {
            QParser fqp = QParser.getParser(fq, null, req);
            filters.add(fqp.getQuery());
          }
        }
      }
    } catch (ParseException e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    // TODO: temporary... this should go in a different component.
    String shards = params.get(ShardParams.SHARDS);
    if (shards != null) {
      List<String> lst = StrUtils.splitSmart(shards, ",", true);
      rb.shards = lst.toArray(new String[lst.size()]);
    }
    String shards_rows = params.get(ShardParams.SHARDS_ROWS);
    if (shards_rows != null) {
      rb.shards_rows = Integer.parseInt(shards_rows);
    }
    String shards_start = params.get(ShardParams.SHARDS_START);
    if (shards_start != null) {
      rb.shards_start = Integer.parseInt(shards_start);
    }
  }
Example 27
  /**
   * Get a map of property name -&gt; value for this field type.
   *
   * @param showDefaults if true, include default properties.
   */
  public SimpleOrderedMap<Object> getNamedPropertyValues(boolean showDefaults) {
    SimpleOrderedMap<Object> namedPropertyValues = new SimpleOrderedMap<>();
    namedPropertyValues.add(TYPE_NAME, getTypeName());
    namedPropertyValues.add(CLASS_NAME, getClassArg());
    if (showDefaults) {
      Map<String, String> fieldTypeArgs = getNonFieldPropertyArgs();
      if (null != fieldTypeArgs) {
        for (String key : fieldTypeArgs.keySet()) {
          if (!CLASS_NAME.equals(key) && !TYPE_NAME.equals(key)) {
            namedPropertyValues.add(key, fieldTypeArgs.get(key));
          }
        }
      }
      if (this instanceof TextField) {
        namedPropertyValues.add(
            AUTO_GENERATE_PHRASE_QUERIES, ((TextField) this).getAutoGeneratePhraseQueries());
      }
      namedPropertyValues.add(getPropertyName(INDEXED), hasProperty(INDEXED));
      namedPropertyValues.add(getPropertyName(STORED), hasProperty(STORED));
      namedPropertyValues.add(getPropertyName(DOC_VALUES), hasProperty(DOC_VALUES));
      namedPropertyValues.add(getPropertyName(STORE_TERMVECTORS), hasProperty(STORE_TERMVECTORS));
      namedPropertyValues.add(
          getPropertyName(STORE_TERMPOSITIONS), hasProperty(STORE_TERMPOSITIONS));
      namedPropertyValues.add(getPropertyName(STORE_TERMOFFSETS), hasProperty(STORE_TERMOFFSETS));
      namedPropertyValues.add(getPropertyName(OMIT_NORMS), hasProperty(OMIT_NORMS));
      namedPropertyValues.add(getPropertyName(OMIT_TF_POSITIONS), hasProperty(OMIT_TF_POSITIONS));
      namedPropertyValues.add(getPropertyName(OMIT_POSITIONS), hasProperty(OMIT_POSITIONS));
      namedPropertyValues.add(getPropertyName(STORE_OFFSETS), hasProperty(STORE_OFFSETS));
      namedPropertyValues.add(getPropertyName(MULTIVALUED), hasProperty(MULTIVALUED));
      if (hasProperty(SORT_MISSING_FIRST)) {
        namedPropertyValues.add(getPropertyName(SORT_MISSING_FIRST), true);
      } else if (hasProperty(SORT_MISSING_LAST)) {
        namedPropertyValues.add(getPropertyName(SORT_MISSING_LAST), true);
      }
      namedPropertyValues.add(getPropertyName(TOKENIZED), isTokenized());
      // The BINARY property is always false
      // namedPropertyValues.add(getPropertyName(BINARY), hasProperty(BINARY));
      if (null != getPostingsFormat()) {
        namedPropertyValues.add(POSTINGS_FORMAT, getPostingsFormat());
      }
      if (null != getDocValuesFormat()) {
        namedPropertyValues.add(DOC_VALUES_FORMAT, getDocValuesFormat());
      }
    } else { // Don't show defaults
      Set<String> fieldProperties = new HashSet<>();
      for (String propertyName : FieldProperties.propertyNames) {
        fieldProperties.add(propertyName);
      }

      for (String key : args.keySet()) {
        if (fieldProperties.contains(key)) {
          namedPropertyValues.add(key, StrUtils.parseBool(args.get(key)));
        } else if (!CLASS_NAME.equals(key) && !TYPE_NAME.equals(key)) {
          namedPropertyValues.add(key, args.get(key));
        }
      }
    }

    if (null != getSimilarityFactory()) {
      namedPropertyValues.add(SIMILARITY, getSimilarityFactory().getNamedPropertyValues());
    }

    if (this instanceof HasImplicitIndexAnalyzer) {
      if (isExplicitQueryAnalyzer()) {
        namedPropertyValues.add(QUERY_ANALYZER, getAnalyzerProperties(getQueryAnalyzer()));
      }
    } else {
      if (isExplicitAnalyzer()) {
        String analyzerProperty = isExplicitQueryAnalyzer() ? INDEX_ANALYZER : ANALYZER;
        namedPropertyValues.add(analyzerProperty, getAnalyzerProperties(getIndexAnalyzer()));
      }
      if (isExplicitQueryAnalyzer()) {
        String analyzerProperty = isExplicitAnalyzer() ? QUERY_ANALYZER : ANALYZER;
        namedPropertyValues.add(analyzerProperty, getAnalyzerProperties(getQueryAnalyzer()));
      }
    }
    if (this instanceof TextField) {
      if (((TextField) this).isExplicitMultiTermAnalyzer()) {
        namedPropertyValues.add(
            MULTI_TERM_ANALYZER, getAnalyzerProperties(((TextField) this).getMultiTermAnalyzer()));
      }
    }

    return namedPropertyValues;
  }
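As a usage note, a small sketch of how the map returned by getNamedPropertyValues might be consumed. FieldTypePropertiesDump is a hypothetical helper; it relies only on IndexSchema.getFieldTypes() and on the entry iteration that SimpleOrderedMap inherits from NamedList.

import java.util.Map;

import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;

public class FieldTypePropertiesDump {
  // Print every property reported for every field type in the schema.
  public static void dump(IndexSchema schema, boolean showDefaults) {
    for (Map.Entry<String, FieldType> e : schema.getFieldTypes().entrySet()) {
      SimpleOrderedMap<Object> props = e.getValue().getNamedPropertyValues(showDefaults);
      for (Map.Entry<String, Object> prop : props) {
        System.out.println(e.getKey() + "." + prop.getKey() + " = " + prop.getValue());
      }
    }
  }
}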
Esempio n. 28
0
  private void init() throws Exception {
    // Per-request state used to resolve the target core/collection
    Aliases aliases = null;
    String corename = "";
    String origCorename = null;
    // set a request timer which can be reused by requests if needed
    req.setAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE, new RTimerTree());
    // put the core container in request attribute
    req.setAttribute("org.apache.solr.CoreContainer", cores);
    path = req.getServletPath();
    if (req.getPathInfo() != null) {
      // this lets you handle /update/commit when /update is a servlet
      path += req.getPathInfo();
    }
    // check for management path
    String alternate = cores.getManagementPath();
    if (alternate != null && path.startsWith(alternate)) {
      path = path.substring(0, alternate.length());
    }
    // unused feature ?
    int idx = path.indexOf(':');
    if (idx > 0) {
      // save the portion after the ':' for a 'handler' path parameter
      path = path.substring(0, idx);
    }

    boolean usingAliases = false;

    // Check for container handlers
    handler = cores.getRequestHandler(path);
    if (handler != null) {
      solrReq = SolrRequestParsers.DEFAULT.parse(null, path, req);
      solrReq.getContext().put(CoreContainer.class.getName(), cores);
      requestType = RequestType.ADMIN;
      action = ADMIN;
      return;
    } else {
      // otherwise, we should find a core from the path
      idx = path.indexOf("/", 1);
      if (idx > 1) {
        // try to get the corename as a request parameter first
        corename = path.substring(1, idx);

        // look at aliases
        if (cores.isZooKeeperAware()) {
          origCorename = corename;
          ZkStateReader reader = cores.getZkController().getZkStateReader();
          aliases = reader.getAliases();
          if (aliases != null && aliases.collectionAliasSize() > 0) {
            usingAliases = true;
            String alias = aliases.getCollectionAlias(corename);
            if (alias != null) {
              collectionsList = StrUtils.splitSmart(alias, ",", true);
              corename = collectionsList.get(0);
            }
          }
        }

        core = cores.getCore(corename);
        if (core != null) {
          path = path.substring(idx);
        } else if (cores.isCoreLoading(
            corename)) { // extra mem barriers, so don't look at this before trying to get core
          throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "SolrCore is loading");
        } else {
          // the core may have just finished loading
          core = cores.getCore(corename);
          if (core != null) {
            path = path.substring(idx);
          }
        }
      }
      if (core == null) {
        if (!cores.isZooKeeperAware()) {
          core = cores.getCore("");
        }
      }
    }

    if (core == null && cores.isZooKeeperAware()) {
      // we couldn't find the core - lets make sure a collection was not specified instead
      core = getCoreByCollection(corename);
      if (core != null) {
        // we found a core, update the path
        path = path.substring(idx);
        if (collectionsList == null) collectionsList = new ArrayList<>();
        collectionsList.add(corename);
      }

      // if we couldn't find it locally, look on other nodes
      extractRemotePath(corename, origCorename, idx);
      if (action != null) return;
    }

    // With a valid core...
    if (core != null) {
      MDCLoggingContext.setCore(core);
      config = core.getSolrConfig();
      // get or create/cache the parser for the core
      SolrRequestParsers parser = config.getRequestParsers();

      // Determine the handler from the url path if not set
      // (we might already have selected the cores handler)
      extractHandlerFromURLPath(parser);
      if (action != null) return;

      // With a valid handler and a valid core...
      if (handler != null) {
        // if not a /select, create the request
        if (solrReq == null) {
          solrReq = parser.parse(core, path, req);
        }

        if (usingAliases) {
          processAliases(aliases, collectionsList);
        }

        action = PROCESS;
        return; // we are done with a valid handler
      }
    }
    log.debug("no handler or core retrieved for {}, follow through...", path);

    action = PASSTHROUGH;
  }
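To make the core-name extraction in init() concrete, a standalone sketch (plain Java, not Solr code) of how a path such as /collection1/update/json is split into a core name and a handler path, mirroring the substring logic above.

public class PathSplitSketch {
  public static void main(String[] args) {
    String path = "/collection1/update/json";
    int idx = path.indexOf("/", 1);
    if (idx > 1) {
      String corename = path.substring(1, idx); // "collection1"
      String handlerPath = path.substring(idx); // "/update/json"
      System.out.println(corename + " -> " + handlerPath);
    }
  }
}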
  private void testCollectionsAPI() throws Exception {

    // TODO: fragile - because we don't pass collection.confName, it will only
    // find a default if a conf set with a name matching the collection name is found, or
    // if there is only one conf set. That and the fact that other tests run first in this
    // env make this pretty fragile

    // create new collections rapid fire
    Map<String, List<Integer>> collectionInfos = new HashMap<String, List<Integer>>();
    int cnt = random().nextInt(TEST_NIGHTLY ? 6 : 3) + 1;

    for (int i = 0; i < cnt; i++) {
      int numShards = _TestUtil.nextInt(random(), 0, shardCount) + 1;
      int replicationFactor = _TestUtil.nextInt(random(), 0, 3) + 1;
      int maxShardsPerNode =
          (((numShards * replicationFactor)
                  / getCommonCloudSolrServer()
                      .getZkStateReader()
                      .getClusterState()
                      .getLiveNodes()
                      .size()))
              + 1;

      CloudSolrServer client = null;
      try {
        if (i == 0) {
          // Test if we can create a collection through CloudSolrServer where
          // you haven't set default-collection
          // This is nice because you want to be able to create your first
          // collection using CloudSolrServer, and in such a case there is
          // nothing reasonable to set as default-collection
          client = createCloudClient(null);
        } else if (i == 1) {
          // Test if we can create a collection through CloudSolrServer where
          // you have set default-collection to a non-existing collection
          // This is nice because you want to be able to create your first
          // collection using CloudSolrServer, and in such a case there is
          // nothing reasonable to set as default-collection, but you might want
          // to use the same CloudSolrServer throughout the entire
          // lifetime of your client-application, so it is nice to be able to
          // set a default-collection on this CloudSolrServer once and for all
          // and use this CloudSolrServer to create the collection
          client = createCloudClient("awholynewcollection_" + i);
        }
        if (secondConfigSet) {
          createCollection(
              collectionInfos,
              "awholynewcollection_" + i,
              numShards,
              replicationFactor,
              maxShardsPerNode,
              client,
              null,
              "conf2");
        } else {
          createCollection(
              collectionInfos,
              "awholynewcollection_" + i,
              numShards,
              replicationFactor,
              maxShardsPerNode,
              client,
              null);
        }
      } finally {
        if (client != null) client.shutdown();
      }
    }

    Set<Entry<String, List<Integer>>> collectionInfosEntrySet = collectionInfos.entrySet();
    for (Entry<String, List<Integer>> entry : collectionInfosEntrySet) {
      String collection = entry.getKey();
      List<Integer> list = entry.getValue();
      checkForCollection(collection, list, null);

      String url = getUrlFromZk(collection);

      HttpSolrServer collectionClient = new HttpSolrServer(url);

      // poll for a second - it can take a moment before we are ready to serve
      waitForNon403or404or503(collectionClient);
    }

    // sometimes we restart one of the jetty nodes
    if (random().nextBoolean()) {
      JettySolrRunner jetty = jettys.get(random().nextInt(jettys.size()));
      ChaosMonkey.stop(jetty);
      ChaosMonkey.start(jetty);

      for (Entry<String, List<Integer>> entry : collectionInfosEntrySet) {
        String collection = entry.getKey();
        List<Integer> list = entry.getValue();
        checkForCollection(collection, list, null);

        String url = getUrlFromZk(collection);

        HttpSolrServer collectionClient = new HttpSolrServer(url);

        // poll for a second - it can take a moment before we are ready to serve
        waitForNon403or404or503(collectionClient);
      }
    }

    // sometimes we restart zookeeper
    if (random().nextBoolean()) {
      zkServer.shutdown();
      zkServer = new ZkTestServer(zkServer.getZkDir(), zkServer.getPort());
      zkServer.run();
    }

    // sometimes we cause a connection loss - sometimes it will hit the overseer
    if (random().nextBoolean()) {
      JettySolrRunner jetty = jettys.get(random().nextInt(jettys.size()));
      ChaosMonkey.causeConnectionLoss(jetty);
    }

    ZkStateReader zkStateReader = getCommonCloudSolrServer().getZkStateReader();
    for (int j = 0; j < cnt; j++) {
      waitForRecoveriesToFinish("awholynewcollection_" + j, zkStateReader, false);

      if (secondConfigSet) {
        // let's see if they are using the second config set
        byte[] data =
            zkStateReader
                .getZkClient()
                .getData(
                    ZkStateReader.COLLECTIONS_ZKNODE + "/" + "awholynewcollection_" + j,
                    null,
                    null,
                    true);
        assertNotNull(data);
        ZkNodeProps props = ZkNodeProps.load(data);
        String configName = props.getStr(ZkController.CONFIGNAME_PROP);
        assertEquals("conf2", configName);
      }
    }

    checkInstanceDirs(jettys.get(0));

    List<String> collectionNameList = new ArrayList<String>();
    collectionNameList.addAll(collectionInfos.keySet());
    String collectionName = collectionNameList.get(random().nextInt(collectionNameList.size()));

    String url = getUrlFromZk(collectionName);

    HttpSolrServer collectionClient = new HttpSolrServer(url);

    // let's use the solrj client to index a couple of documents
    SolrInputDocument doc1 = getDoc(id, 6, i1, -600, tlong, 600, t1, "humpty dumpy sat on a wall");
    SolrInputDocument doc2 =
        getDoc(id, 7, i1, -600, tlong, 600, t1, "humpty dumpy3 sat on a walls");
    SolrInputDocument doc3 =
        getDoc(id, 8, i1, -600, tlong, 600, t1, "humpty dumpy2 sat on a walled");

    collectionClient.add(doc1);

    collectionClient.add(doc2);

    collectionClient.add(doc3);

    collectionClient.commit();

    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());

    // let's try a collection reload

    // get core open times
    Map<String, Long> urlToTimeBefore = new HashMap<String, Long>();
    collectStartTimes(collectionName, urlToTimeBefore);
    assertTrue(urlToTimeBefore.size() > 0);
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("action", CollectionAction.RELOAD.toString());
    params.set("name", collectionName);
    QueryRequest request = new QueryRequest(params);
    request.setPath("/admin/collections");

    // we can use this client because we just want the base url
    final String baseUrl = getBaseUrl((HttpSolrServer) clients.get(0));

    createNewSolrServer("", baseUrl).request(request);

    // reloads may take a short while
    boolean allTimesAreCorrect = waitForReloads(collectionName, urlToTimeBefore);
    assertTrue("some core start times did not change on reload", allTimesAreCorrect);

    waitForRecoveriesToFinish("awholynewcollection_" + (cnt - 1), zkStateReader, false);

    // remove a collection
    params = new ModifiableSolrParams();
    params.set("action", CollectionAction.DELETE.toString());
    params.set("name", collectionName);
    request = new QueryRequest(params);
    request.setPath("/admin/collections");

    createNewSolrServer("", baseUrl).request(request);

    // ensure it's out of the cluster state
    checkForMissingCollection(collectionName);

    // collectionNameList.remove(collectionName);

    // remove an unknown collection
    params = new ModifiableSolrParams();
    params.set("action", CollectionAction.DELETE.toString());
    params.set("name", "unknown_collection");
    request = new QueryRequest(params);
    request.setPath("/admin/collections");

    boolean exp = false;
    try {
      createNewSolrServer("", baseUrl).request(request);
    } catch (SolrException e) {
      exp = true;
    }
    assertTrue("Expected exception", exp);

    // create another collection should still work
    params = new ModifiableSolrParams();
    params.set("action", CollectionAction.CREATE.toString());

    params.set("numShards", 1);
    params.set(REPLICATION_FACTOR, 2);
    collectionName = "acollectionafterbaddelete";

    params.set("name", collectionName);
    if (secondConfigSet) {
      params.set("collection.configName", "conf1");
    }
    request = new QueryRequest(params);
    request.setPath("/admin/collections");
    createNewSolrServer("", baseUrl).request(request);

    List<Integer> list = new ArrayList<Integer>(2);
    list.add(1);
    list.add(2);
    checkForCollection(collectionName, list, null);

    url = getUrlFromZk(collectionName);

    collectionClient = new HttpSolrServer(url);

    // poll for a second - it can take a moment before we are ready to serve
    waitForNon403or404or503(collectionClient);

    for (int j = 0; j < cnt; j++) {
      waitForRecoveriesToFinish(collectionName, zkStateReader, false);
    }

    // test maxShardsPerNode
    int numLiveNodes =
        getCommonCloudSolrServer().getZkStateReader().getClusterState().getLiveNodes().size();
    int numShards = (numLiveNodes / 2) + 1;
    int replicationFactor = 2;
    int maxShardsPerNode = 1;
    collectionInfos = new HashMap<String, List<Integer>>();
    CloudSolrServer client = createCloudClient("awholynewcollection_" + cnt);
    try {
      exp = false;
      try {
        createCollection(
            collectionInfos,
            "awholynewcollection_" + cnt,
            numShards,
            replicationFactor,
            maxShardsPerNode,
            client,
            null,
            "conf1");
      } catch (SolrException e) {
        exp = true;
      }
      assertTrue("expected exception", exp);
    } finally {
      client.shutdown();
    }

    // Test createNodeSet
    numLiveNodes =
        getCommonCloudSolrServer().getZkStateReader().getClusterState().getLiveNodes().size();
    List<String> createNodeList = new ArrayList<String>();
    int numOfCreateNodes = numLiveNodes / 2;
    assertFalse(
        "createNodeSet test is pointless with only " + numLiveNodes + " nodes running",
        numOfCreateNodes == 0);
    int i = 0;
    for (String liveNode :
        getCommonCloudSolrServer().getZkStateReader().getClusterState().getLiveNodes()) {
      if (i < numOfCreateNodes) {
        createNodeList.add(liveNode);
        i++;
      } else {
        break;
      }
    }
    maxShardsPerNode = 2;
    numShards = createNodeList.size() * maxShardsPerNode;
    replicationFactor = 1;
    collectionInfos = new HashMap<String, List<Integer>>();
    client = createCloudClient("awholynewcollection_" + (cnt + 1));
    try {
      createCollection(
          collectionInfos,
          "awholynewcollection_" + (cnt + 1),
          numShards,
          replicationFactor,
          maxShardsPerNode,
          client,
          StrUtils.join(createNodeList, ','),
          "conf1");
    } finally {
      client.shutdown();
    }
    checkForCollection(
        collectionInfos.keySet().iterator().next(),
        collectionInfos.entrySet().iterator().next().getValue(),
        createNodeList);

    checkNoTwoShardsUseTheSameIndexDir();
  }
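A condensed, hypothetical helper mirroring the Collections API pattern the test exercises above (CREATE shown here; RELOAD and DELETE are analogous). The parameter names match the ones used in the test; everything else is illustrative.

import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.CollectionParams.CollectionAction;
import org.apache.solr.common.params.ModifiableSolrParams;

public class CollectionsApiSketch {
  // Build and send a CREATE request the same way the test builds RELOAD and DELETE.
  public static void createCollection(
      SolrServer server, String name, int numShards, int replicationFactor) throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("action", CollectionAction.CREATE.toString());
    params.set("name", name);
    params.set("numShards", numShards);
    params.set("replicationFactor", replicationFactor);
    QueryRequest request = new QueryRequest(params);
    request.setPath("/admin/collections");
    server.request(request);
  }
}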
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    // A runtime param can skip this component entirely
    if (!params.getBool(QueryElevationParams.ENABLE, true)) {
      return;
    }

    boolean exclusive = params.getBool(QueryElevationParams.EXCLUSIVE, false);
    // A runtime parameter can alter the config value for forceElevation
    boolean force = params.getBool(QueryElevationParams.FORCE_ELEVATION, forceElevation);
    boolean markExcludes = params.getBool(QueryElevationParams.MARK_EXCLUDES, false);
    String boostStr = params.get(QueryElevationParams.IDS);
    String exStr = params.get(QueryElevationParams.EXCLUDE);

    Query query = rb.getQuery();
    String qstr = rb.getQueryString();
    if (query == null || qstr == null) {
      return;
    }

    ElevationObj booster = null;
    try {
      if (boostStr != null || exStr != null) {
        List<String> boosts =
            (boostStr != null)
                ? StrUtils.splitSmart(boostStr, ",", true)
                : new ArrayList<String>(0);
        List<String> excludes =
            (exStr != null) ? StrUtils.splitSmart(exStr, ",", true) : new ArrayList<String>(0);
        booster = new ElevationObj(qstr, boosts, excludes);
      } else {
        IndexReader reader = req.getSearcher().getIndexReader();
        qstr = getAnalyzedQuery(qstr);
        booster = getElevationMap(reader, req.getCore()).get(qstr);
      }
    } catch (Exception ex) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error loading elevation", ex);
    }

    if (booster != null) {
      rb.req.getContext().put(BOOSTED, booster.ids);

      // Change the query to insert forced documents
      if (exclusive) {
        // we only want these results
        rb.setQuery(booster.include);
      } else {
        BooleanQuery newq = new BooleanQuery(true);
        newq.add(query, BooleanClause.Occur.SHOULD);
        newq.add(booster.include, BooleanClause.Occur.SHOULD);
        if (booster.exclude != null) {
          if (!markExcludes) {
            for (TermQuery tq : booster.exclude) {
              newq.add(new BooleanClause(tq, BooleanClause.Occur.MUST_NOT));
            }
          } else {
            // we are only going to mark items as excluded, not actually exclude them.  This works
            // with the EditorialMarkerFactory
            rb.req.getContext().put(EXCLUDED, booster.excludeIds);
          }
        }
        rb.setQuery(newq);
      }

      ElevationComparatorSource comparator = new ElevationComparatorSource(booster);
      // if the sort is 'score desc' use a custom sorting method to
      // insert documents in their proper place
      SortSpec sortSpec = rb.getSortSpec();
      if (sortSpec.getSort() == null) {
        sortSpec.setSortAndFields(
            new Sort(
                new SortField[] {
                  new SortField("_elevate_", comparator, true),
                  new SortField(null, SortField.Type.SCORE, false)
                }),
            Arrays.asList(new SchemaField[2]));
      } else {
        // Check if the sort is based on score
        SortSpec modSortSpec = this.modifySortSpec(sortSpec, force, comparator);
        if (null != modSortSpec) {
          rb.setSortSpec(modSortSpec);
        }
      }

      // alter the sorting in the grouping specification if there is one
      GroupingSpecification groupingSpec = rb.getGroupingSpec();
      if (groupingSpec != null) {
        SortField[] groupSort = groupingSpec.getGroupSort().getSort();
        Sort modGroupSort = this.modifySort(groupSort, force, comparator);
        if (modGroupSort != null) {
          groupingSpec.setGroupSort(modGroupSort);
        }
        SortField[] withinGroupSort = groupingSpec.getSortWithinGroup().getSort();
        Sort modWithinGroupSort = this.modifySort(withinGroupSort, force, comparator);
        if (modWithinGroupSort != null) {
          groupingSpec.setSortWithinGroup(modWithinGroupSort);
        }
      }
    }

    // Add debugging information
    if (rb.isDebug()) {
      List<String> match = null;
      if (booster != null) {
        // Extract the elevated terms into a list
        match = new ArrayList<String>(booster.priority.size());
        for (Object o : booster.include.clauses()) {
          TermQuery tq = (TermQuery) ((BooleanClause) o).getQuery();
          match.add(tq.getTerm().text());
        }
      }

      SimpleOrderedMap<Object> dbg = new SimpleOrderedMap<Object>();
      dbg.add("q", qstr);
      dbg.add("match", match);
      if (rb.isDebugQuery()) {
        rb.addDebugInfo("queryBoosting", dbg);
      }
    }
  }
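Finally, an illustrative client-side sketch of the runtime switches that prepare() reads. The constants come from QueryElevationParams as used above; the query text and document ids are made up.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.QueryElevationParams;

public class ElevationRequestSketch {
  public static SolrQuery buildQuery() {
    SolrQuery q = new SolrQuery("ipod");
    q.set(QueryElevationParams.ENABLE, true);          // false would make prepare() return early
    q.set(QueryElevationParams.FORCE_ELEVATION, true); // elevate even when a non-score sort is used
    q.set(QueryElevationParams.MARK_EXCLUDES, true);   // mark excluded docs instead of filtering them out
    q.set(QueryElevationParams.IDS, "doc1,doc2");      // ad-hoc boosts, split with StrUtils.splitSmart
    q.set(QueryElevationParams.EXCLUDE, "doc3");       // ad-hoc excludes
    return q;
  }
}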