Example 1
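 /**
  * Expands any collection aliases present in the request's collection list and rewrites the
  * request's collection parameter to the comma separated list of resolved collections.
  */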
 private void processAliases(Aliases aliases, List<String> collectionsList) {
   String collection = solrReq.getParams().get(COLLECTION_PROP);
   if (collection != null) {
     collectionsList = StrUtils.splitSmart(collection, ",", true);
   }
   if (collectionsList != null) {
     Set<String> newCollectionsList = new HashSet<>(collectionsList.size());
     for (String col : collectionsList) {
       String al = aliases.getCollectionAlias(col);
       if (al != null) {
         List<String> aliasList = StrUtils.splitSmart(al, ",", true);
         newCollectionsList.addAll(aliasList);
       } else {
         newCollectionsList.add(col);
       }
     }
     if (newCollectionsList.size() > 0) {
       StringBuilder collectionString = new StringBuilder();
       Iterator<String> it = newCollectionsList.iterator();
       int sz = newCollectionsList.size();
       for (int i = 0; i < sz; i++) {
         collectionString.append(it.next());
         if (i < newCollectionsList.size() - 1) {
           collectionString.append(",");
         }
       }
       ModifiableSolrParams params = new ModifiableSolrParams(solrReq.getParams());
       params.set(COLLECTION_PROP, collectionString.toString());
       solrReq.setParams(params);
     }
   }
 }
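
Every example in this section funnels comma or slash separated parameter values through StrUtils.splitSmart. Below is a minimal sketch of the two call shapes used here, with made-up inputs and an invented helper name; exact escaping and trimming behaviour should be checked against org.apache.solr.common.util.StrUtils for the Solr version in use.

 // requires: import java.util.List; import org.apache.solr.common.util.StrUtils;
 static void splitSmartSketch() {
   // String-separator overload, as in splitSmart(collection, ",", true) above;
   // the boolean argument controls backslash-escape decoding
   List<String> collections = StrUtils.splitSmart("logs,metrics", ",", true); // -> [logs, metrics]

   // char-separator overload, as in splitSmart(path, '/') used for request paths below;
   // a leading separator yields an empty first element, which callers drop explicitly
   List<String> segments = StrUtils.splitSmart("/admin/cores", '/'); // -> [, admin, cores]
 }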
Example 2
  public FacetRange parse(Object arg) throws SyntaxError {
    parseCommonParams(arg);

    if (!(arg instanceof Map)) {
      throw err("Missing range facet arguments");
    }

    Map<String, Object> m = (Map<String, Object>) arg;

    facet.field = getString(m, "field", null);

    facet.start = m.get("start");
    facet.end = m.get("end");
    facet.gap = m.get("gap");
    facet.hardend = getBoolean(m, "hardend", facet.hardend);
    facet.mincount = getLong(m, "mincount", 0);

    // TODO: refactor list-of-options code

    Object o = m.get("include");
    String[] includeList = null;
    if (o != null) {
      List lst = null;

      if (o instanceof List) {
        lst = (List) o;
      } else if (o instanceof String) {
        lst = StrUtils.splitSmart((String) o, ',');
      }

      includeList = (String[]) lst.toArray(new String[lst.size()]);
    }
    facet.include = FacetParams.FacetRangeInclude.parseParam(includeList);

    facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);

    o = m.get("other");
    if (o != null) {
      List<String> lst = null;

      if (o instanceof List) {
        lst = (List) o;
      } else if (o instanceof String) {
        lst = StrUtils.splitSmart((String) o, ',');
      }

      for (String otherStr : lst) {
        facet.others.add(FacetParams.FacetRangeOther.get(otherStr));
      }
    }

    Object facetObj = m.get("facet");
    parseSubs(facetObj);

    return facet;
  }
  public Query parse() throws ParseException {
    // handle legacy "query;sort" syntax
    if (getLocalParams() == null) {
      String qstr = getString();
      sortStr = getParams().get(CommonParams.SORT);
      if (sortStr == null) {
        // sort may be legacy form, included in the query string
        List<String> commands = StrUtils.splitSmart(qstr, ';');
        if (commands.size() == 2) {
          qstr = commands.get(0);
          sortStr = commands.get(1);
        } else if (commands.size() == 1) {
          // This is needed to support the case where someone sends: "q=query;"
          qstr = commands.get(0);
        } else if (commands.size() > 2) {
          throw new SolrException(
              SolrException.ErrorCode.BAD_REQUEST,
              "If you want to use multiple ';' in the query, use the 'sort' param.");
        }
      }
      setString(qstr);
    }

    return super.parse();
  }
Example 4
  public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
    numRequests++;

    Query query = null;
    Filter filter = null;

    List<String> commands = StrUtils.splitSmart(req.getQueryString(), ';');

    String qs = commands.size() >= 1 ? commands.get(0) : "";
    query = QueryParsing.parseQuery(qs, req.getSchema());

    // If the first non-query, non-filter command is a simple sort on an indexed field, then
    // we can use the Lucene sort ability.
    Sort sort = null;
    if (commands.size() >= 2) {
      sort = QueryParsing.parseSort(commands.get(1), req);
    }

    try {

      int numHits;
      ScoreDoc[] scoreDocs;
      if (sort != null) {
        TopFieldDocs hits =
            req.getSearcher().search(query, filter, req.getStart() + req.getLimit(), sort);
        scoreDocs = hits.scoreDocs;
        numHits = hits.totalHits;
      } else {
        TopDocs hits = req.getSearcher().search(query, filter, req.getStart() + req.getLimit());
        scoreDocs = hits.scoreDocs;
        numHits = hits.totalHits;
      }

      int startRow = Math.min(numHits, req.getStart());
      int endRow = Math.min(numHits, req.getStart() + req.getLimit());
      int numRows = endRow - startRow;

      int[] ids = new int[numRows];
      Document[] data = new Document[numRows];
      for (int i = startRow; i < endRow; i++) {
        // index relative to startRow so the numRows-sized arrays are not overrun when startRow > 0
        ids[i - startRow] = scoreDocs[i].doc;
        data[i - startRow] = req.getSearcher().doc(ids[i - startRow]);
      }

      rsp.add(null, new DocSlice(0, numRows, ids, null, numHits, 0.0f));

      /*
       * rsp.setResults(new DocSlice(0, numRows, ids, null, numHits));
       *
       * // Setting the actual document objects is optional
       * rsp.setResults(data);
       */
    } catch (IOException e) {
      rsp.setException(e);
      numErrors++;
      return;
    }
  }
Example 5
  protected ParsedParams parseParams(String type, String param) throws SyntaxError, IOException {
    SolrParams localParams = QueryParsing.getLocalParams(param, req.getParams());
    DocSet docs = docsOrig;
    String facetValue = param;
    String key = param;
    List<String> tags = Collections.emptyList();
    int threads = -1;

    if (localParams == null) {
      SolrParams params = global;
      SolrParams required = new RequiredSolrParams(params);
      return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
    }

    SolrParams params = SolrParams.wrapDefaults(localParams, global);
    SolrParams required = new RequiredSolrParams(params);

    // remove local params unless it's a query
    if (!FacetParams.FACET_QUERY.equals(type)) { // TODO Cut over to an Enum here
      facetValue = localParams.get(CommonParams.VALUE);
    }

    // reset the default key now that localParams have been removed
    key = facetValue;

    // allow explicit set of the key
    key = localParams.get(CommonParams.OUTPUT_KEY, key);

    String tagStr = localParams.get(CommonParams.TAG);
    tags = tagStr == null ? Collections.<String>emptyList() : StrUtils.splitSmart(tagStr, ',');

    String threadStr = localParams.get(CommonParams.THREADS);
    if (threadStr != null) {
      threads = Integer.parseInt(threadStr);
    }

    // figure out if we need a new base DocSet
    String excludeStr = localParams.get(CommonParams.EXCLUDE);
    if (excludeStr == null)
      return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);

    List<String> excludeTagList = StrUtils.splitSmart(excludeStr, ',');
    docs = computeDocSet(docs, excludeTagList);
    return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
  }
 private Command(SolrQueryRequest req, SolrQueryResponse resp, String httpMethod) {
   this.req = req;
   this.resp = resp;
   this.method = httpMethod;
   path = (String) req.getContext().get("path");
   if (path == null) path = getDefaultPath();
   parts = StrUtils.splitSmart(path, '/');
   if (parts.get(0).isEmpty()) parts.remove(0);
 }
  public void processGetUpdates(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    String versionsStr = params.get("getUpdates");
    if (versionsStr == null) return;

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);

    List<Object> updates = new ArrayList<Object>(versions.size());

    long minVersion = Long.MAX_VALUE;

    // TODO: get this from cache instead of rebuilding?
    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      for (String versionStr : versions) {
        long version = Long.parseLong(versionStr);
        try {
          Object o = recentUpdates.lookup(version);
          if (o == null) continue;

          if (version > 0) {
            minVersion = Math.min(minVersion, version);
          }

          // TODO: do any kind of validation here?
          updates.add(o);

        } catch (SolrException e) {
          log.warn("Exception reading log for updates", e);
        } catch (ClassCastException e) {
          log.warn("Exception reading log for updates", e);
        }
      }

      // Must return all delete-by-query commands that occur after the first add requested
      // since they may apply.
      updates.addAll(recentUpdates.getDeleteByQuery(minVersion));

      rb.rsp.add("updates", updates);

    } finally {
      recentUpdates.close(); // cache this somehow?
    }
  }
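  /** Runs a PeerSync against the comma separated list of replica URLs in {@code sync} and reports the outcome under the "sync" response key. */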
  public void processSync(ResponseBuilder rb, int nVersions, String sync) {
    List<String> replicas = StrUtils.splitSmart(sync, ",", true);

    boolean cantReachIsSuccess = rb.req.getParams().getBool("cantReachIsSuccess", false);

    PeerSync peerSync =
        new PeerSync(rb.req.getCore(), replicas, nVersions, cantReachIsSuccess, true);
    boolean success = peerSync.sync();

    // TODO: more complex response?
    rb.rsp.add("sync", success);
  }
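  /**
   * Resolves a comma separated collection string into concrete collection names, expanding
   * aliases and rejecting names that are neither a collection nor an alias in the cluster state.
   */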
  private Set<String> getCollectionList(ClusterState clusterState, String collection) {
    // Extract each comma separated collection name and store in a List.
    List<String> rawCollectionsList = StrUtils.splitSmart(collection, ",", true);
    Set<String> collectionsList = new HashSet<>();
    // validate collections
    for (String collectionName : rawCollectionsList) {
      if (!clusterState.getCollections().contains(collectionName)) {
        Aliases aliases = zkStateReader.getAliases();
        String alias = aliases.getCollectionAlias(collectionName);
        if (alias != null) {
          List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
          collectionsList.addAll(aliasList);
          continue;
        }

        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection not found: " + collectionName);
      }

      collectionsList.add(collectionName);
    }
    return collectionsList;
  }
Example 10
  public List<String> getStringList(Map<String, Object> args, String paramName) {
    Object o = args.get(paramName);
    if (o == null) {
      return null;
    }
    if (o instanceof List) {
      return (List<String>) o;
    }
    if (o instanceof String) {
      return StrUtils.splitSmart((String) o, ",", true);
    }

    throw err("Expected list of string or comma separated string values.");
  }
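
For illustration, both accepted argument shapes resolve to the same values; a hypothetical call site follows (the map contents, the "fields" key, and the sketch method name are invented, and the sketch is assumed to live in the same class as getStringList).

  // requires: import java.util.*;
  void getStringListSketch() {
    Map<String, Object> args = new HashMap<>();
    args.put("fields", "id,name,price");
    List<String> fromString = getStringList(args, "fields"); // -> [id, name, price]
    args.put("fields", Arrays.asList("id", "name", "price"));
    List<String> fromList = getStringList(args, "fields");   // the list is returned unchanged
  }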
Example 11
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    if (params.getBool(TermsParams.TERMS, false)) {
      rb.doTerms = true;
    }

    // TODO: temporary... this should go in a different component.
    String shards = params.get(ShardParams.SHARDS);
    if (shards != null) {
      if (params.get(ShardParams.SHARDS_QT) == null) {
        throw new SolrException(
            SolrException.ErrorCode.BAD_REQUEST, "No shards.qt parameter specified");
      }
      List<String> lst = StrUtils.splitSmart(shards, ",", true);
      rb.shards = lst.toArray(new String[lst.size()]);
    }
  }
    /**
     * Retrieve all requests recorded by this queue which were sent to the given collection and shard
     *
     * @param zkStateReader the {@link org.apache.solr.common.cloud.ZkStateReader} from which
     *     cluster state is read
     * @param collectionName the given collection name for which requests have to be extracted
     * @param shardId the given shard name for which requests have to be extracted
     * @return a list of {@link
     *     org.apache.solr.handler.component.TrackingShardHandlerFactory.ShardRequestAndParams} or
     *     empty list if none are found
     */
    public List<ShardRequestAndParams> getShardRequests(
        ZkStateReader zkStateReader, String collectionName, String shardId) {
      DocCollection collection = zkStateReader.getClusterState().getCollection(collectionName);
      assert collection != null;
      Slice slice = collection.getSlice(shardId);
      assert slice != null;

      for (Map.Entry<String, List<ShardRequestAndParams>> entry : requests.entrySet()) {
        // multiple shard addresses may be present separated by '|'
        List<String> list = StrUtils.splitSmart(entry.getKey(), '|');
        for (Map.Entry<String, Replica> replica : slice.getReplicasMap().entrySet()) {
          String coreUrl = new ZkCoreNodeProps(replica.getValue()).getCoreUrl();
          if (list.contains(coreUrl)) {
            return new ArrayList<>(entry.getValue());
          }
        }
      }
      return Collections.emptyList();
    }
Example 13
  /** Actually run the query */
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrIndexSearcher searcher = req.getSearcher();

    if (rb.getQueryCommand().getOffset() < 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "'start' parameter cannot be negative");
    }

    // -1 as flag if not set.
    long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);

    // Optional: This could also be implemented by the top-level searcher sending
    // a filter that lists the ids... that would be transparent to
    // the request handler, but would be more expensive (and would preserve score
    // too if desired).
    String ids = params.get(ShardParams.IDS);
    if (ids != null) {
      SchemaField idField = req.getSchema().getUniqueKeyField();
      List<String> idArr = StrUtils.splitSmart(ids, ",", true);
      int[] luceneIds = new int[idArr.size()];
      int docs = 0;
      for (int i = 0; i < idArr.size(); i++) {
        int id =
            req.getSearcher()
                .getFirstMatch(
                    new Term(idField.getName(), idField.getType().toInternal(idArr.get(i))));
        if (id >= 0) luceneIds[docs++] = id;
      }

      DocListAndSet res = new DocListAndSet();
      res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0);
      if (rb.isNeedDocSet()) {
        List<Query> queries = new ArrayList<Query>();
        queries.add(rb.getQuery());
        List<Query> filters = rb.getFilters();
        if (filters != null) queries.addAll(filters);
        res.docSet = searcher.getDocSet(queries);
      }
      rb.setResults(res);
      rsp.add("response", rb.getResults().docList);
      return;
    }

    SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
    cmd.setTimeAllowed(timeAllowed);
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();

    //
    // grouping / field collapsing
    //
    boolean doGroup = params.getBool(GroupParams.GROUP, false);
    if (doGroup) {
      try {
        cmd.groupCommands = new ArrayList<Grouping.Command>();

        String[] fields = params.getParams(GroupParams.GROUP_FIELD);
        String[] funcs = params.getParams(GroupParams.GROUP_FUNC);
        String[] queries = params.getParams(GroupParams.GROUP_QUERY);
        String groupSortStr = params.get(GroupParams.GROUP_SORT);
        Sort groupSort =
            groupSortStr != null ? QueryParsing.parseSort(groupSortStr, req.getSchema()) : null;

        int limitDefault = cmd.getLen(); // this is normally from "rows"
        int docsPerGroupDefault = params.getInt(GroupParams.GROUP_LIMIT, 1);

        // temporary: implement all group-by-field as group-by-func
        if (funcs == null) {
          funcs = fields;
        } else if (fields != null) {
          // catenate functions and fields
          String[] both = new String[fields.length + funcs.length];
          System.arraycopy(fields, 0, both, 0, fields.length);
          System.arraycopy(funcs, 0, both, fields.length, funcs.length);
          funcs = both;
        }

        if (funcs != null) {
          for (String groupByStr : funcs) {
            QParser parser = QParser.getParser(groupByStr, "func", rb.req);
            Query q = parser.getQuery();
            Grouping.CommandFunc gc = new Grouping.CommandFunc();
            gc.groupSort = groupSort;

            if (q instanceof FunctionQuery) {
              gc.groupBy = ((FunctionQuery) q).getValueSource();
            } else {
              gc.groupBy = new QueryValueSource(q, 0.0f);
            }
            gc.key = groupByStr;
            gc.groupLimit = limitDefault;
            gc.docsPerGroup = docsPerGroupDefault;

            cmd.groupCommands.add(gc);
          }
        }

        if (cmd.groupCommands.size() == 0) cmd.groupCommands = null;

        if (cmd.groupCommands != null) {
          if (rb.doHighlights || rb.isDebug()) {
            // we need a single list of the returned docs
            cmd.setFlags(SolrIndexSearcher.GET_DOCLIST);
          }

          searcher.search(result, cmd);
          rb.setResult(result);
          rsp.add("grouped", result.groupedResults);
          // TODO: get "hits" a different way to log
          return;
        }
      } catch (ParseException e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
      }
    }

    // normal search result
    searcher.search(result, cmd);
    rb.setResult(result);

    rsp.add("response", rb.getResults().docList);
    rsp.getToLog().add("hits", rb.getResults().docList.matches());

    doFieldSortValues(rb, searcher);
    doPrefetch(rb);
  }
Example 14
  public static ArrayList<Node> getNodesForNewShard(
      ClusterState clusterState,
      String collectionName,
      int numSlices,
      int maxShardsPerNode,
      int repFactor,
      String createNodeSetStr) {
    List<String> createNodeList =
        createNodeSetStr == null ? null : StrUtils.splitSmart(createNodeSetStr, ",", true);

    Set<String> nodes = clusterState.getLiveNodes();

    List<String> nodeList = new ArrayList<String>(nodes.size());
    nodeList.addAll(nodes);
    if (createNodeList != null) nodeList.retainAll(createNodeList);

    HashMap<String, Node> nodeNameVsShardCount = new HashMap<String, Node>();
    for (String s : nodeList) nodeNameVsShardCount.put(s, new Node(s));
    for (String s : clusterState.getCollections()) {
      DocCollection c = clusterState.getCollection(s);
      // identify suitable nodes by checking the number of cores on each of them
      for (Slice slice : c.getSlices()) {
        Collection<Replica> replicas = slice.getReplicas();
        for (Replica replica : replicas) {
          Node count = nodeNameVsShardCount.get(replica.getNodeName());
          if (count != null) {
            count.totalNodes++;
            if (s.equals(collectionName)) {
              count.thisCollectionNodes++;
              if (count.thisCollectionNodes >= maxShardsPerNode)
                nodeNameVsShardCount.remove(replica.getNodeName());
            }
          }
        }
      }
    }

    if (nodeNameVsShardCount.size() <= 0) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST,
          "Cannot create collection "
              + collectionName
              + ". No live Solr-instances"
              + ((createNodeList != null)
                  ? " among Solr-instances specified in " + CREATE_NODE_SET + ":" + createNodeSetStr
                  : ""));
    }

    if (repFactor > nodeNameVsShardCount.size()) {
      log.warn(
          "Specified "
              + REPLICATION_FACTOR
              + " of "
              + repFactor
              + " on collection "
              + collectionName
              + " is higher than or equal to the number of Solr instances currently live or part of your "
              + CREATE_NODE_SET
              + "("
              + nodeList.size()
              + "). Its unusual to run two replica of the same slice on the same Solr-instance.");
    }

    int maxCoresAllowedToCreate = maxShardsPerNode * nodeList.size();
    int requestedCoresToCreate = numSlices * repFactor;
    int minCoresToCreate = requestedCoresToCreate;
    if (maxCoresAllowedToCreate < minCoresToCreate) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST,
          "Cannot create shards "
              + collectionName
              + ". Value of "
              + MAX_SHARDS_PER_NODE
              + " is "
              + maxShardsPerNode
              + ", and the number of live nodes is "
              + nodeList.size()
              + ". This allows a maximum of "
              + maxCoresAllowedToCreate
              + " to be created. Value of "
              + NUM_SLICES
              + " is "
              + numSlices
              + " and value of "
              + REPLICATION_FACTOR
              + " is "
              + repFactor
              + ". This requires "
              + requestedCoresToCreate
              + " shards to be created (higher than the allowed number)");
    }

    ArrayList<Node> sortedNodeList = new ArrayList<>(nodeNameVsShardCount.values());
    Collections.sort(
        sortedNodeList,
        new Comparator<Node>() {
          @Override
          public int compare(Node x, Node y) {
            return (x.weight() < y.weight()) ? -1 : ((x.weight() == y.weight()) ? 0 : 1);
          }
        });
    return sortedNodeList;
  }
Example 15
  private void init() throws Exception {
    // The states of client that is invalid in this request
    Aliases aliases = null;
    String corename = "";
    String origCorename = null;
    // set a request timer which can be reused by requests if needed
    req.setAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE, new RTimerTree());
    // put the core container in request attribute
    req.setAttribute("org.apache.solr.CoreContainer", cores);
    path = req.getServletPath();
    if (req.getPathInfo() != null) {
      // this lets you handle /update/commit when /update is a servlet
      path += req.getPathInfo();
    }
    // check for management path
    String alternate = cores.getManagementPath();
    if (alternate != null && path.startsWith(alternate)) {
      path = path.substring(0, alternate.length());
    }
    // unused feature ?
    int idx = path.indexOf(':');
    if (idx > 0) {
      // save the portion after the ':' for a 'handler' path parameter
      path = path.substring(0, idx);
    }

    boolean usingAliases = false;

    // Check for container handlers
    handler = cores.getRequestHandler(path);
    if (handler != null) {
      solrReq = SolrRequestParsers.DEFAULT.parse(null, path, req);
      solrReq.getContext().put(CoreContainer.class.getName(), cores);
      requestType = RequestType.ADMIN;
      action = ADMIN;
      return;
    } else {
      // otherwise, we should find a core from the path
      idx = path.indexOf("/", 1);
      if (idx > 1) {
        // try to get the corename as a request parameter first
        corename = path.substring(1, idx);

        // look at aliases
        if (cores.isZooKeeperAware()) {
          origCorename = corename;
          ZkStateReader reader = cores.getZkController().getZkStateReader();
          aliases = reader.getAliases();
          if (aliases != null && aliases.collectionAliasSize() > 0) {
            usingAliases = true;
            String alias = aliases.getCollectionAlias(corename);
            if (alias != null) {
              collectionsList = StrUtils.splitSmart(alias, ",", true);
              corename = collectionsList.get(0);
            }
          }
        }

        core = cores.getCore(corename);
        if (core != null) {
          path = path.substring(idx);
        } else if (cores.isCoreLoading(corename)) {
          // extra mem barriers, so don't look at this before trying to get core
          throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "SolrCore is loading");
        } else {
          // the core may have just finished loading
          core = cores.getCore(corename);
          if (core != null) {
            path = path.substring(idx);
          }
        }
      }
      if (core == null) {
        if (!cores.isZooKeeperAware()) {
          core = cores.getCore("");
        }
      }
    }

    if (core == null && cores.isZooKeeperAware()) {
      // we couldn't find the core - lets make sure a collection was not specified instead
      core = getCoreByCollection(corename);
      if (core != null) {
        // we found a core, update the path
        path = path.substring(idx);
        if (collectionsList == null) collectionsList = new ArrayList<>();
        collectionsList.add(corename);
      }

      // if we couldn't find it locally, look on other nodes
      extractRemotePath(corename, origCorename, idx);
      if (action != null) return;
    }

    // With a valid core...
    if (core != null) {
      MDCLoggingContext.setCore(core);
      config = core.getSolrConfig();
      // get or create/cache the parser for the core
      SolrRequestParsers parser = config.getRequestParsers();

      // Determine the handler from the url path if not set
      // (we might already have selected the cores handler)
      extractHandlerFromURLPath(parser);
      if (action != null) return;

      // With a valid handler and a valid core...
      if (handler != null) {
        // if not a /select, create the request
        if (solrReq == null) {
          solrReq = parser.parse(core, path, req);
        }

        if (usingAliases) {
          processAliases(aliases, collectionsList);
        }

        action = PROCESS;
        return; // we are done with a valid handler
      }
    }
    log.debug("no handler or core retrieved for " + path + ", follow through...");

    action = PASSTHROUGH;
  }
Example 16
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    String val = params.get("getVersions");
    if (val != null) {
      processGetVersions(rb);
      return;
    }

    val = params.get("getUpdates");
    if (val != null) {
      processGetUpdates(rb);
      return;
    }

    String id[] = params.getParams("id");
    String ids[] = params.getParams("ids");

    if (id == null && ids == null) {
      return;
    }

    String[] allIds = id == null ? new String[0] : id;

    if (ids != null) {
      List<String> lst = new ArrayList<String>();
      for (String s : allIds) {
        lst.add(s);
      }
      for (String idList : ids) {
        lst.addAll(StrUtils.splitSmart(idList, ",", true));
      }
      allIds = lst.toArray(new String[lst.size()]);
    }

    SolrCore core = req.getCore();
    SchemaField idField = core.getLatestSchema().getUniqueKeyField();
    FieldType fieldType = idField.getType();

    SolrDocumentList docList = new SolrDocumentList();
    UpdateLog ulog = core.getUpdateHandler().getUpdateLog();

    RefCounted<SolrIndexSearcher> searcherHolder = null;

    DocTransformer transformer = rsp.getReturnFields().getTransformer();
    if (transformer != null) {
      TransformContext context = new TransformContext();
      context.req = req;
      transformer.setContext(context);
    }
    try {
      SolrIndexSearcher searcher = null;

      BytesRef idBytes = new BytesRef();
      for (String idStr : allIds) {
        fieldType.readableToIndexed(idStr, idBytes);
        if (ulog != null) {
          Object o = ulog.lookup(idBytes);
          if (o != null) {
            // should currently be a List<Oper,Ver,Doc/Id>
            List entry = (List) o;
            assert entry.size() >= 3;
            int oper = (Integer) entry.get(0) & UpdateLog.OPERATION_MASK;
            switch (oper) {
              case UpdateLog.ADD:
                SolrDocument doc =
                    toSolrDoc(
                        (SolrInputDocument) entry.get(entry.size() - 1), core.getLatestSchema());
                if (transformer != null) {
                  transformer.transform(doc, -1); // unknown docID
                }
                docList.add(doc);
                break;
              case UpdateLog.DELETE:
                break;
              default:
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR, "Unknown Operation! " + oper);
            }
            continue;
          }
        }

        // didn't find it in the update log, so it should be in the newest searcher opened
        if (searcher == null) {
          searcherHolder = core.getRealtimeSearcher();
          searcher = searcherHolder.get();
        }

        // SolrCore.verbose("RealTimeGet using searcher ", searcher);

        int docid = searcher.getFirstMatch(new Term(idField.getName(), idBytes));
        if (docid < 0) continue;
        StoredDocument luceneDocument = searcher.doc(docid);
        SolrDocument doc = toSolrDoc(luceneDocument, core.getLatestSchema());
        if (transformer != null) {
          transformer.transform(doc, docid);
        }
        docList.add(doc);
      }

    } finally {
      if (searcherHolder != null) {
        searcherHolder.decref();
      }
    }

    // if the client specified a single id=foo, then use "doc":{
    // otherwise use a standard doclist

    if (ids == null && allIds.length <= 1) {
      // if the doc was not found, then use a value of null.
      rsp.add("doc", docList.size() > 0 ? docList.get(0) : null);
    } else {
      docList.setNumFound(docList.size());
      rsp.add("response", docList);
    }
  }
  /** create the FieldAdders that control how each field is indexed */
  void prepareFields() {
    // Possible future optimization: for really rapid incremental indexing
    // from a POST, one could cache all of this setup info based on the params.
    // The link from FieldAdder to this would need to be severed for that to happen.

    adders = new CSVLoaderBase.FieldAdder[fieldnames.length];
    String skipStr = params.get(SKIP);
    List<String> skipFields = skipStr == null ? null : StrUtils.splitSmart(skipStr, ',');

    CSVLoaderBase.FieldAdder adder = new CSVLoaderBase.FieldAdder();
    CSVLoaderBase.FieldAdder adderKeepEmpty = new CSVLoaderBase.FieldAdderEmpty();

    for (int i = 0; i < fieldnames.length; i++) {
      String fname = fieldnames[i];
      // to skip a field, leave the entries in fields and addrs null
      if (fname.length() == 0 || (skipFields != null && skipFields.contains(fname))) continue;

      boolean keepEmpty = params.getFieldBool(fname, EMPTY, false);
      adders[i] = keepEmpty ? adderKeepEmpty : adder;

      // Order that operations are applied: split -> trim -> map -> add
      // so create in reverse order.
      // Creation of FieldAdders could be optimized and shared among fields

      String[] fmap = params.getFieldParams(fname, MAP);
      if (fmap != null) {
        for (String mapRule : fmap) {
          String[] mapArgs = colonSplit.split(mapRule, -1);
          if (mapArgs.length != 2)
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST,
                "Map rules must be of the form 'from:to' ,got '" + mapRule + "'");
          adders[i] = new CSVLoaderBase.FieldMapperSingle(mapArgs[0], mapArgs[1], adders[i]);
        }
      }

      if (params.getFieldBool(fname, TRIM, false)) {
        adders[i] = new CSVLoaderBase.FieldTrimmer(adders[i]);
      }

      if (params.getFieldBool(fname, SPLIT, false)) {
        String sepStr = params.getFieldParam(fname, SEPARATOR);
        char fsep = sepStr == null || sepStr.length() == 0 ? ',' : sepStr.charAt(0);
        String encStr = params.getFieldParam(fname, ENCAPSULATOR);
        char fenc = encStr == null || encStr.length() == 0 ? (char) -2 : encStr.charAt(0);
        String escStr = params.getFieldParam(fname, ESCAPE);
        char fesc =
            escStr == null || escStr.length() == 0 ? CSVStrategy.ESCAPE_DISABLED : escStr.charAt(0);

        CSVStrategy fstrat =
            new CSVStrategy(
                fsep, fenc, CSVStrategy.COMMENTS_DISABLED, fesc, false, false, false, false);
        adders[i] = new CSVLoaderBase.FieldSplitter(fstrat, adders[i]);
      }
    }
    // look for any literal fields - literal.foo=xyzzy
    Iterator<String> paramNames = params.getParameterNamesIterator();
    while (paramNames.hasNext()) {
      String pname = paramNames.next();
      if (!pname.startsWith(LITERALS_PREFIX)) continue;

      String name = pname.substring(LITERALS_PREFIX.length());
      literals.put(name, params.get(pname));
    }
  }
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    // A runtime param can skip
    if (!params.getBool(QueryElevationParams.ENABLE, true)) {
      return;
    }

    boolean exclusive = params.getBool(QueryElevationParams.EXCLUSIVE, false);
    // A runtime parameter can alter the config value for forceElevation
    boolean force = params.getBool(QueryElevationParams.FORCE_ELEVATION, forceElevation);
    boolean markExcludes = params.getBool(QueryElevationParams.MARK_EXCLUDES, false);
    String boostStr = params.get(QueryElevationParams.IDS);
    String exStr = params.get(QueryElevationParams.EXCLUDE);

    Query query = rb.getQuery();
    String qstr = rb.getQueryString();
    if (query == null || qstr == null) {
      return;
    }

    ElevationObj booster = null;
    try {
      if (boostStr != null || exStr != null) {
        List<String> boosts =
            (boostStr != null)
                ? StrUtils.splitSmart(boostStr, ",", true)
                : new ArrayList<String>(0);
        List<String> excludes =
            (exStr != null) ? StrUtils.splitSmart(exStr, ",", true) : new ArrayList<String>(0);
        booster = new ElevationObj(qstr, boosts, excludes);
      } else {
        IndexReader reader = req.getSearcher().getIndexReader();
        qstr = getAnalyzedQuery(qstr);
        booster = getElevationMap(reader, req.getCore()).get(qstr);
      }
    } catch (Exception ex) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error loading elevation", ex);
    }

    if (booster != null) {
      rb.req.getContext().put(BOOSTED, booster.ids);

      // Change the query to insert forced documents
      if (exclusive) {
        // we only want these results
        rb.setQuery(booster.include);
      } else {
        BooleanQuery newq = new BooleanQuery(true);
        newq.add(query, BooleanClause.Occur.SHOULD);
        newq.add(booster.include, BooleanClause.Occur.SHOULD);
        if (booster.exclude != null) {
          if (!markExcludes) {
            for (TermQuery tq : booster.exclude) {
              newq.add(new BooleanClause(tq, BooleanClause.Occur.MUST_NOT));
            }
          } else {
            // we are only going to mark items as excluded, not actually exclude them.  This works
            // with the EditorialMarkerFactory
            rb.req.getContext().put(EXCLUDED, booster.excludeIds);
          }
        }
        rb.setQuery(newq);
      }

      ElevationComparatorSource comparator = new ElevationComparatorSource(booster);
      // if the sort is 'score desc' use a custom sorting method to
      // insert documents in their proper place
      SortSpec sortSpec = rb.getSortSpec();
      if (sortSpec.getSort() == null) {
        sortSpec.setSortAndFields(
            new Sort(
                new SortField[] {
                  new SortField("_elevate_", comparator, true),
                  new SortField(null, SortField.Type.SCORE, false)
                }),
            Arrays.asList(new SchemaField[2]));
      } else {
        // Check if the sort is based on score
        SortSpec modSortSpec = this.modifySortSpec(sortSpec, force, comparator);
        if (null != modSortSpec) {
          rb.setSortSpec(modSortSpec);
        }
      }

      // alter the sorting in the grouping specification if there is one
      GroupingSpecification groupingSpec = rb.getGroupingSpec();
      if (groupingSpec != null) {
        SortField[] groupSort = groupingSpec.getGroupSort().getSort();
        Sort modGroupSort = this.modifySort(groupSort, force, comparator);
        if (modGroupSort != null) {
          groupingSpec.setGroupSort(modGroupSort);
        }
        SortField[] withinGroupSort = groupingSpec.getSortWithinGroup().getSort();
        Sort modWithinGroupSort = this.modifySort(withinGroupSort, force, comparator);
        if (modWithinGroupSort != null) {
          groupingSpec.setSortWithinGroup(modWithinGroupSort);
        }
      }
    }

    // Add debugging information
    if (rb.isDebug()) {
      List<String> match = null;
      if (booster != null) {
        // Extract the elevated terms into a list
        match = new ArrayList<String>(booster.priority.size());
        for (Object o : booster.include.clauses()) {
          TermQuery tq = (TermQuery) ((BooleanClause) o).getQuery();
          match.add(tq.getTerm().text());
        }
      }

      SimpleOrderedMap<Object> dbg = new SimpleOrderedMap<Object>();
      dbg.add("q", qstr);
      dbg.add("match", match);
      if (rb.isDebugQuery()) {
        rb.addDebugInfo("queryBoosting", dbg);
      }
    }
  }
Example 19
  public void handleRequest(RequestGetter requestGetter) {
    MDCLoggingContext.reset();
    MDCLoggingContext.setNode(cores);

    String path = requestGetter.getPath();
    solrParams = requestGetter.getSolrParams();
    SolrRequestHandler handler = null;
    String corename = "";
    String origCorename = null;
    try {
      // set a request timer which can be reused by requests if needed
      // req.setAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE, new RTimer());
      // put the core container in request attribute
      // req.setAttribute("org.apache.solr.CoreContainer", cores);
      // check for management path
      String alternate = cores.getManagementPath();
      if (alternate != null && path.startsWith(alternate)) {
        path = path.substring(0, alternate.length());
      }
      // unused feature ?
      int idx = path.indexOf(':');
      if (idx > 0) {
        // save the portion after the ':' for a 'handler' path parameter
        path = path.substring(0, idx);
      }

      boolean usingAliases = false;
      List<String> collectionsList = null;

      // Check for container handlers
      handler = cores.getRequestHandler(path);
      if (handler != null) {
        solrReq = parseSolrQueryRequest(SolrRequestParsers.DEFAULT, requestGetter);
        handleAdminRequest(handler, solrReq);
        return;
      } else {
        // otherwise, we should find a core from the path
        idx = path.indexOf("/", 1);
        if (idx > 1) {
          // try to get the corename as a request parameter first
          corename = path.substring(1, idx);

          // look at aliases
          if (cores.isZooKeeperAware()) {
            origCorename = corename;
            ZkStateReader reader = cores.getZkController().getZkStateReader();
            aliases = reader.getAliases();
            if (aliases != null && aliases.collectionAliasSize() > 0) {
              usingAliases = true;
              String alias = aliases.getCollectionAlias(corename);
              if (alias != null) {
                collectionsList = StrUtils.splitSmart(alias, ",", true);
                corename = collectionsList.get(0);
              }
            }
          }

          core = cores.getCore(corename);

          if (core != null) {
            path = path.substring(idx);
          }
        }

        // add collection name
        if (core == null && StringUtils.isNotBlank(requestGetter.getCollection())) {
          corename = requestGetter.getCollection();
          core = cores.getCore(corename);
        }

        if (core == null) {
          if (!cores.isZooKeeperAware()) {
            core = cores.getCore("");
          }
        }
      }

      if (core == null && cores.isZooKeeperAware()) {
        // we couldn't find the core - lets make sure a collection was not specified instead
        core = getCoreByCollection(cores, corename);

        if (core != null) {
          // we found a core, update the path
          path = path.substring(idx);
        }

        // try the default core
        if (core == null) {
          core = cores.getCore("");
        }
      }

      // With a valid core...
      if (core != null) {
        MDCLoggingContext.setCore(core);
        final SolrConfig config = core.getSolrConfig();
        // get or create/cache the parser for the core
        SolrRequestParsers parser = config.getRequestParsers();

        // Determine the handler from the url path if not set
        // (we might already have selected the cores handler)
        if (handler == null && path.length() > 1) { // don't match "" or "/" as valid path
          handler = core.getRequestHandler(path);

          if (handler == null) {
            // may be a restlet path
            // Handle /schema/* paths via Restlet
            if (path.equals("/schema") || path.startsWith("/schema/")) {
              throw new SolrException(
                  SolrException.ErrorCode.BAD_REQUEST, "unsupport /schema/**, use http solr");
            }
          }
          // no handler yet but allowed to handle select; let's check
          if (handler == null && parser.isHandleSelect()) {
            if ("/select".equals(path) || "/select/".equals(path)) {
              solrReq = parseSolrQueryRequest(parser, requestGetter);

              invalidStates =
                  checkStateIsValid(cores, solrReq.getParams().get(CloudSolrClient.STATE_VERSION));
              String qt = solrReq.getParams().get(CommonParams.QT);
              handler = core.getRequestHandler(qt);
              if (handler == null) {
                throw new SolrException(
                    SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
              }
              if (qt != null
                  && qt.startsWith("/")
                  && (handler instanceof ContentStreamHandlerBase)) {
                // For security reasons it's a bad idea to allow a leading '/', ex:
                // /select?qt=/update see SOLR-3161
                // There was no restriction from Solr 1.4 thru 3.5 and it's not supported for update
                // handlers.
                throw new SolrException(
                    SolrException.ErrorCode.BAD_REQUEST,
                    "Invalid Request Handler ('qt').  Do not use /select to access: " + qt);
              }
            }
          }
        }

        // With a valid handler and a valid core...
        if (handler != null) {
          // if not a /select, create the request
          if (solrReq == null) {
            solrReq = parseSolrQueryRequest(parser, requestGetter);
          }

          if (usingAliases) {
            processAliases(solrReq, aliases, collectionsList);
          }

          SolrQueryResponse solrRsp = new SolrQueryResponse();
          SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, solrRsp));
          this.execute(handler, solrReq, solrRsp);
          QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq);
          if (invalidStates != null)
            solrReq.getContext().put(CloudSolrClient.STATE_VERSION, invalidStates);
          writeResponse(solrRsp, responseWriter, solrReq);

          return; // we are done with a valid handler
        }
      }
      logger.debug("no handler or core retrieved for {}, follow through...", path);
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST, "no handler or core retrieved for " + path);
    } catch (Throwable ex) {
      sendError(core, solrReq, ex);
      // walk the entire cause chain to search for an Error
      Throwable t = ex;
      while (t != null) {
        if (t instanceof Error) {
          if (t != ex) {
            logger.error(
                "An Error was wrapped in another exception - please report complete stacktrace on SOLR-6161",
                ex);
          }
          throw (Error) t;
        }
        t = t.getCause();
      }
      return;
    } finally {
      try {
        if (solrReq != null) {
          logger.debug("Closing out SolrRequest: {}", solrReq);
          solrReq.close();
        }
      } finally {
        try {
          if (core != null) {
            core.close();
          }
        } finally {
          SolrRequestInfo.clearRequestInfo();
        }
      }
      MDCLoggingContext.clear();
    }
  }
Example 20
  @Override
  public void prepare(ResponseBuilder rb) throws IOException {

    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }
    SolrQueryResponse rsp = rb.rsp;

    // Set field flags
    String fl = params.get(CommonParams.FL);
    int fieldFlags = 0;
    if (fl != null) {
      fieldFlags |= SolrPluginUtils.setReturnFields(fl, rsp);
    }
    rb.setFieldFlags(fieldFlags);

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);

    // get it from the response builder to give a different component a chance
    // to set it.
    String queryString = rb.getQueryString();
    if (queryString == null) {
      // this is the normal way it's set.
      queryString = params.get(CommonParams.Q);
      rb.setQueryString(queryString);
    }

    try {
      QParser parser = QParser.getParser(rb.getQueryString(), defType, req);
      Query q = parser.getQuery();
      if (q == null) {
        // normalize a null query to a query that matches nothing
        q = new BooleanQuery();
      }
      rb.setQuery(q);
      rb.setSortSpec(parser.getSort(true));
      rb.setQparser(parser);

      String[] fqs = req.getParams().getParams(CommonParams.FQ);
      if (fqs != null && fqs.length != 0) {
        List<Query> filters = rb.getFilters();
        if (filters == null) {
          filters = new ArrayList<Query>();
          rb.setFilters(filters);
        }
        for (String fq : fqs) {
          if (fq != null && fq.trim().length() != 0) {
            QParser fqp = QParser.getParser(fq, null, req);
            filters.add(fqp.getQuery());
          }
        }
      }
    } catch (ParseException e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    // TODO: temporary... this should go in a different component.
    String shards = params.get(ShardParams.SHARDS);
    if (shards != null) {
      List<String> lst = StrUtils.splitSmart(shards, ",", true);
      rb.shards = lst.toArray(new String[lst.size()]);
    }
    String shards_rows = params.get(ShardParams.SHARDS_ROWS);
    if (shards_rows != null) {
      rb.shards_rows = Integer.parseInt(shards_rows);
    }
    String shards_start = params.get(ShardParams.SHARDS_START);
    if (shards_start != null) {
      rb.shards_start = Integer.parseInt(shards_start);
    }
  }
Example 21
  public int createSubRequests(ResponseBuilder rb) throws IOException {
    SolrParams params = rb.req.getParams();
    String id1[] = params.getParams("id");
    String ids[] = params.getParams("ids");

    if (id1 == null && ids == null) {
      return ResponseBuilder.STAGE_DONE;
    }

    List<String> allIds = new ArrayList<String>();
    if (id1 != null) {
      for (String s : id1) {
        allIds.add(s);
      }
    }
    if (ids != null) {
      for (String s : ids) {
        allIds.addAll(StrUtils.splitSmart(s, ",", true));
      }
    }

    // TODO: handle collection=...?

    ZkController zkController =
        rb.req.getCore().getCoreDescriptor().getCoreContainer().getZkController();

    // if shards=... then use that
    if (zkController != null && params.get("shards") == null) {
      CloudDescriptor cloudDescriptor = rb.req.getCore().getCoreDescriptor().getCloudDescriptor();

      String collection = cloudDescriptor.getCollectionName();
      ClusterState clusterState = zkController.getClusterState();
      DocCollection coll = clusterState.getCollection(collection);

      Map<String, List<String>> sliceToId = new HashMap<String, List<String>>();
      for (String id : allIds) {
        Slice slice = coll.getRouter().getTargetSlice(id, null, params, coll);

        List<String> idsForShard = sliceToId.get(slice.getName());
        if (idsForShard == null) {
          idsForShard = new ArrayList<String>(2);
          sliceToId.put(slice.getName(), idsForShard);
        }
        idsForShard.add(id);
      }

      for (Map.Entry<String, List<String>> entry : sliceToId.entrySet()) {
        String shard = entry.getKey();
        String shardIdList = StrUtils.join(entry.getValue(), ',');

        ShardRequest sreq = new ShardRequest();

        sreq.purpose = 1;
        // sreq.shards = new String[]{shard};    // TODO: would be nice if this would work...
        sreq.shards = sliceToShards(rb, collection, shard);
        sreq.actualShards = sreq.shards;
        sreq.params = new ModifiableSolrParams();
        sreq.params.set(
            ShardParams.SHARDS_QT,
            "/get"); // TODO: how to avoid hardcoding this and hit the same handler?
        sreq.params.set("distrib", false);
        sreq.params.set("ids", shardIdList);

        rb.addRequest(this, sreq);
      }
    } else {
      String shardIdList = StrUtils.join(allIds, ',');
      ShardRequest sreq = new ShardRequest();

      sreq.purpose = 1;
      sreq.shards = null; // ALL
      sreq.actualShards = sreq.shards;
      sreq.params = new ModifiableSolrParams();
      sreq.params.set(
          ShardParams.SHARDS_QT,
          "/get"); // TODO: how to avoid hardcoding this and hit the same handler?
      sreq.params.set("distrib", false);
      sreq.params.set("ids", shardIdList);

      rb.addRequest(this, sreq);
    }

    return ResponseBuilder.STAGE_DONE;
  }