/**
  * Get the Solr document list for a query. This differs from getResponseByParams in that it only
  * populates the document fields of the response; no search snippets and no facets are generated.
  *
  * @param params the query parameters
  * @return the list of matching Solr documents
  * @throws IOException
  * @throws SolrException
  */
 @Override
 public SolrDocumentList getDocumentListByParams(ModifiableSolrParams params)
     throws IOException, SolrException {
   SolrQueryRequest req = this.request(params);
   SolrQueryResponse response = null;
   String q = params.get(CommonParams.Q);
   String fq = params.get(CommonParams.FQ);
   String sort = params.get(CommonParams.SORT);
   String threadname = Thread.currentThread().getName();
   try {
     if (q != null)
       Thread.currentThread()
           .setName(
               "solr query: q = "
                   + q
                   + (fq == null ? "" : ", fq = " + fq)
                    + (sort == null ? "" : ", sort = " + sort)); // for debugging in thread dumps
     response = this.query(req);
     if (q != null) Thread.currentThread().setName(threadname);
     if (response == null) throw new IOException("response == null");
     return SolrQueryResponse2SolrDocumentList(req, response);
   } finally {
     req.close();
     SolrRequestInfo.clearRequestInfo();
   }
 }
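  // Hedged usage sketch (added for illustration, not part of the original source). "SolrConnector"
  // stands in for whatever type declares getDocumentListByParams; field names and query values
  // are illustrative only.
  static void printIds(SolrConnector connector) throws IOException {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(CommonParams.Q, "text_t:example");
    params.set(CommonParams.SORT, "last_modified desc");
    params.set(CommonParams.ROWS, 10);
    SolrDocumentList docs = connector.getDocumentListByParams(params);
    for (SolrDocument doc : docs) {
      System.out.println(doc.getFieldValue("id")); // plain stored fields: no snippets, no facets
    }
  }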
  public void processGetVersions(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    int nVersions = params.getInt("getVersions", -1);
    if (nVersions == -1) return;

    String sync = params.get("sync");
    if (sync != null) {
      processSync(rb, nVersions, sync);
      return;
    }

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      rb.rsp.add("versions", recentUpdates.getVersions(nVersions));
    } finally {
      recentUpdates.close(); // cache this somehow?
    }
  }
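   // Hedged illustration (added): a peer typically triggers the code above through a
   // real-time-get style request carrying a getVersions parameter; the "/get" path and the
   // count are assumptions.
   //   /get?getVersions=100                      -> "versions": the 100 most recent versions
   //   /get?getVersions=100&sync=<replicaUrl>    -> runs processSync against that replica instead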
    private void handleGET() {
      if (parts.size() == 1) {
        resp.add("solrConfig", req.getCore().getSolrConfig().toMap());
      } else {
        if (ConfigOverlay.NAME.equals(parts.get(1))) {
          resp.add(ConfigOverlay.NAME, req.getCore().getSolrConfig().getOverlay().toMap());
        } else if (RequestParams.NAME.equals(parts.get(1))) {
          if (parts.size() == 3) {
            RequestParams params = req.getCore().getSolrConfig().getRequestParams();
            MapSolrParams p = params.getParams(parts.get(2));
            Map m = new LinkedHashMap<>();
            m.put(ConfigOverlay.ZNODEVER, params.getZnodeVersion());
            if (p != null) {
              m.put(RequestParams.NAME, ZkNodeProps.makeMap(parts.get(2), p.getMap()));
            }
            resp.add(SolrQueryResponse.NAME, m);
          } else {
            resp.add(
                SolrQueryResponse.NAME, req.getCore().getSolrConfig().getRequestParams().toMap());
          }

        } else {
          Map<String, Object> m = req.getCore().getSolrConfig().toMap();
          resp.add("solrConfig", ZkNodeProps.makeMap(parts.get(1), m.get(parts.get(1))));
        }
      }
    }
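    // Hedged map of the GET sub-paths handled above (added for illustration; the exact URL
    // prefix, e.g. "/config", depends on how this handler is registered):
    //   /config                   -> whole solrconfig as a map ("solrConfig")
    //   /config/overlay           -> only the config overlay (ConfigOverlay.NAME)
    //   /config/params            -> all stored request-parameter sets (RequestParams.NAME)
    //   /config/params/<setName>  -> one named parameter set plus its znode version
    //   /config/<section>         -> a single top-level section of the config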
  int doListGen(int iter, Query q, List<Query> filt, boolean cacheQuery, boolean cacheFilt)
      throws Exception {
    SolrQueryRequest req = lrf.makeRequest();

    SolrIndexSearcher searcher = req.getSearcher();

    long start = System.currentTimeMillis();

    // These aren't public in SolrIndexSearcher
    int NO_CHECK_QCACHE = 0x80000000;
    int GET_DOCSET = 0x40000000;
    int NO_CHECK_FILTERCACHE = 0x20000000;
    int GET_SCORES = 0x01;

    int ret = 0;
    for (int i = 0; i < iter; i++) {
      DocList l =
          searcher.getDocList(
              q,
              filt,
              (Sort) null,
              0,
              10,
              (cacheQuery ? 0 : NO_CHECK_QCACHE) | (cacheFilt ? 0 : NO_CHECK_FILTERCACHE));
      ret += l.matches();
    }

    long end = System.currentTimeMillis();
    System.out.println(
        "ret=" + ret + " time=" + (end - start) + " throughput=" + iter * 1000 / (end - start + 1));

    req.close();
    assertTrue(ret > 0); // make sure we did some work
    return ret;
  }
  /** test range query performance */
  public void XtestFilteringPerformance() throws Exception {
    int indexSize = 19999;
    float fractionCovered = .1f;

    String l = t(0);
    String u = t((int) (indexSize * 10 * fractionCovered));

    SolrQueryRequest req = lrf.makeRequest();

    QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req);
    Query rangeQ = parser.parse();
    List<Query> filters = new ArrayList<Query>();
    filters.add(rangeQ);
    // note: req is reused by the dismax QParser below, so it is closed only at the end

    parser =
        QParser.getParser(
            "{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}" + t(0) + ' ' + t(1) + ' ' + t(2),
            null,
            req);
    Query q = parser.parse();

    // SolrIndexSearcher searcher = req.getSearcher();
    // DocSet range = searcher.getDocSet(rangeQ, null);

    createIndex2(indexSize, "foomany_s", "t10_100_ws");

    // doListGen(100, q, filters, false, true);
    doListGen(500, q, filters, false, true);

    req.close();
  }
  @Test
  public void testCommitWithin() throws Exception {
    ExtractingRequestHandler handler =
        (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
    assertTrue("handler is null and it shouldn't be", handler != null);

    SolrQueryRequest req =
        req(
            "literal.id",
            "one",
            ExtractingParams.RESOURCE_NAME,
            "extraction/version_control.txt",
            "commitWithin",
            "200");
    SolrQueryResponse rsp = new SolrQueryResponse();
    BufferingRequestProcessor p = new BufferingRequestProcessor(null);

    ExtractingDocumentLoader loader = (ExtractingDocumentLoader) handler.newLoader(req, p);
    loader.load(
        req, rsp, new ContentStreamBase.FileStream(getFile("extraction/version_control.txt")), p);

    AddUpdateCommand add = p.addCommands.get(0);
    assertEquals(200, add.commitWithin);

    req.close();
  }
  public LanguageIdentifierUpdateProcessor(
      SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
    super(next);
    schema = req.getSchema();

    initParams(req.getParams());
  }
 protected void execute(SolrQueryResponse rsp) {
   // a custom filter could add more stuff to the request before passing it on.
   // for example: sreq.getContext().put( "HttpServletRequest", req );
   // used for logging query stats in SolrCore.execute()
   solrReq.getContext().put("webapp", req.getContextPath());
   solrReq.getCore().execute(handler, solrReq, rsp);
 }
  @Test
  public void testCollateWithFilter() throws Exception {
    SolrCore core = h.getCore();
    SearchComponent speller = core.getSearchComponent("spellcheck");
    assertTrue("speller is null and it shouldn't be", speller != null);

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.add(SpellCheckComponent.COMPONENT_NAME, "true");
    params.add(SpellCheckComponent.SPELLCHECK_BUILD, "true");
    params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10");
    params.add(SpellCheckComponent.SPELLCHECK_COLLATE, "true");
    params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "5");
    params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "2");
    params.add(CommonParams.Q, "lowerfilt:(+fauth +home +loane)");
    params.add(CommonParams.FQ, "NOT(id:1)");

    // Because a filter query is applied which removes doc id#1 from the possible hits, we do not
    // want the collations to return "lowerfilt:(+faith +hope +loaves)", as that only matches
    // doc id#1.
    SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH");
    SolrQueryResponse rsp = new SolrQueryResponse();
    rsp.add("responseHeader", new SimpleOrderedMap());
    SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
    handler.handleRequest(req, rsp);
    req.close();
    NamedList values = rsp.getValues();
    NamedList spellCheck = (NamedList) values.get("spellcheck");
    NamedList suggestions = (NamedList) spellCheck.get("suggestions");
    List<String> collations = suggestions.getAll("collation");
    assertTrue(collations.size() == 1);
    assertTrue(collations.get(0).equals("lowerfilt:(+faith +hope +love)"));
  }
 private void processAliases(Aliases aliases, List<String> collectionsList) {
   String collection = solrReq.getParams().get(COLLECTION_PROP);
   if (collection != null) {
     collectionsList = StrUtils.splitSmart(collection, ",", true);
   }
   if (collectionsList != null) {
     Set<String> newCollectionsList = new HashSet<>(collectionsList.size());
     for (String col : collectionsList) {
       String al = aliases.getCollectionAlias(col);
       if (al != null) {
         List<String> aliasList = StrUtils.splitSmart(al, ",", true);
         newCollectionsList.addAll(aliasList);
       } else {
         newCollectionsList.add(col);
       }
     }
     if (newCollectionsList.size() > 0) {
       StringBuilder collectionString = new StringBuilder();
       Iterator<String> it = newCollectionsList.iterator();
       int sz = newCollectionsList.size();
       for (int i = 0; i < sz; i++) {
         collectionString.append(it.next());
         if (i < newCollectionsList.size() - 1) {
           collectionString.append(",");
         }
       }
       ModifiableSolrParams params = new ModifiableSolrParams(solrReq.getParams());
       params.set(COLLECTION_PROP, collectionString.toString());
       solrReq.setParams(params);
     }
   }
 }
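 // Hedged worked example (added): with collection=books on the request and an alias
 // books -> "books_2014,books_2015", the loop above rewrites the request so that the collection
 // parameter reads books_2014,books_2015 (order not guaranteed, since a HashSet is used).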
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();
    params = adjustParams(params);
    req.setParams(params);
    TupleStream tupleStream = null;

    try {
      tupleStream = this.streamFactory.constructStream(params.get("expr"));
    } catch (Exception e) {
      // Catch exceptions that occur while the stream is being created. This includes streaming
      // expression parse errors.
      SolrException.log(logger, e);
      rsp.add("result-set", new DummyErrorStream(e));

      return;
    }

    int worker = params.getInt("workerID", 0);
    int numWorkers = params.getInt("numWorkers", 1);
    StreamContext context = new StreamContext();
    context.workerID = worker;
    context.numWorkers = numWorkers;
    context.setSolrClientCache(clientCache);
    tupleStream.setStreamContext(context);
    rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream)));
  }
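  // Hedged request sketch (added): the handler above expects a streaming expression in the
  // "expr" parameter; the handler path "/stream" and the collection name are assumptions.
  //   /stream?expr=search(collection1, q="*:*", fl="id,score", sort="score desc")
  //          &workerID=0&numWorkers=1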
 private void doHighlightingByFastVectorHighlighter(
     FastVectorHighlighter highlighter,
     FieldQuery fieldQuery,
     SolrQueryRequest req,
     NamedList docSummaries,
     int docId,
     Document doc,
     String fieldName)
     throws IOException {
   SolrParams params = req.getParams();
   SolrFragmentsBuilder solrFb = getSolrFragmentsBuilder(fieldName, params);
   String[] snippets =
       highlighter.getBestFragments(
           fieldQuery,
           req.getSearcher().getIndexReader(),
           docId,
           fieldName,
           params.getFieldInt(fieldName, HighlightParams.FRAGSIZE, 100),
           params.getFieldInt(fieldName, HighlightParams.SNIPPETS, 1),
           getFragListBuilder(fieldName, params),
           getFragmentsBuilder(fieldName, params),
           solrFb.getPreTags(params, fieldName),
           solrFb.getPostTags(params, fieldName),
           getEncoder(fieldName, params));
   if (snippets != null && snippets.length > 0) docSummaries.add(fieldName, snippets);
   else alternateField(docSummaries, params, doc, fieldName);
 }
 static void setWt(SolrQueryRequest req, String wt) {
   SolrParams params = req.getParams();
   if (params.get(CommonParams.WT) != null) return; // wt is set by user
   Map<String, String> map = new HashMap<>(1);
   map.put(CommonParams.WT, wt);
   map.put("indent", "true");
   req.setParams(SolrParams.wrapDefaults(params, new MapSolrParams(map)));
 }
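 // Hedged usage note (added): setWt only wraps defaults around the existing params, so an
 // explicit wt supplied by the client always wins.
 //   setWt(req, "json");   // the request now defaults to wt=json and indent=true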
  // make sure that log isn't needlessly replayed after a clean close
  @Test
  public void testCleanShutdown() throws Exception {
    DirectUpdateHandler2.commitOnClose = true;
    final Semaphore logReplay = new Semaphore(0);
    final Semaphore logReplayFinish = new Semaphore(0);

    UpdateLog.testing_logReplayHook =
        new Runnable() {
          @Override
          public void run() {
            try {
              assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS));
            } catch (Exception e) {
              throw new RuntimeException(e);
            }
          }
        };

    UpdateLog.testing_logReplayFinishHook =
        new Runnable() {
          @Override
          public void run() {
            logReplayFinish.release();
          }
        };

    SolrQueryRequest req = req();
    UpdateHandler uhandler = req.getCore().getUpdateHandler();
    UpdateLog ulog = uhandler.getUpdateLog();

    try {
      clearIndex();
      assertU(commit());

      assertU(adoc("id", "E1", "val_i", "1"));
      assertU(adoc("id", "E2", "val_i", "1"));

      // set to a high enough number so this test won't hang on a bug
      logReplay.release(10);

      h.close();
      createCore();

      // make sure the docs got committed
      assertJQ(req("q", "*:*"), "/response/numFound==2");

      // make sure no replay happened
      assertEquals(10, logReplay.availablePermits());

    } finally {
      DirectUpdateHandler2.commitOnClose = true;
      UpdateLog.testing_logReplayHook = null;
      UpdateLog.testing_logReplayFinishHook = null;

      req.close();
    }
  }
  @Override
  public void processAdd(AddUpdateCommand cmd) throws IOException {
    // TODO: check for id field?
    int hash = 0;
    if (zkEnabled) {
      zkCheck();
      hash = hash(cmd);
      nodes = setupRequest(hash);
    } else {
      isLeader = getNonZkLeaderAssumption(req);
    }

    boolean dropCmd = false;
    if (!forwardToLeader) {
      dropCmd = versionAdd(cmd);
    }

    if (dropCmd) {
      // TODO: do we need to add anything to the response?
      return;
    }

    ModifiableSolrParams params = null;
    if (nodes != null) {

      params = new ModifiableSolrParams(filterParams(req.getParams()));
      params.set(
          DISTRIB_UPDATE_PARAM,
          (isLeader ? DistribPhase.FROMLEADER.toString() : DistribPhase.TOLEADER.toString()));
      params.set(
          "distrib.from",
          ZkCoreNodeProps.getCoreUrl(zkController.getBaseUrl(), req.getCore().getName()));
      cmdDistrib.distribAdd(cmd, nodes, params);
    }

    // TODO: what to do when no idField?
    if (returnVersions && rsp != null && idField != null) {
      if (addsResponse == null) {
        addsResponse = new NamedList<String>();
        rsp.add("adds", addsResponse);
      }
      if (scratch == null) scratch = new CharsRef();
      idField.getType().indexedToReadable(cmd.getIndexedId(), scratch);
      addsResponse.add(scratch.toString(), cmd.getVersion());
    }

    // TODO: keep track of errors?  needs to be done at a higher level though since
    // an id may fail before it gets to this processor.
    // Given that, it may also make sense to move the version reporting out of this
    // processor too.
  }
  @Override
  public void processDelete(DeleteUpdateCommand cmd) throws IOException {
    if (!cmd.isDeleteById()) {
      doDeleteByQuery(cmd);
      return;
    }

    int hash = 0;
    if (zkEnabled) {
      zkCheck();
      hash = hash(cmd);
      nodes = setupRequest(hash);
    } else {
      isLeader = getNonZkLeaderAssumption(req);
    }

    boolean dropCmd = false;
    if (!forwardToLeader) {
      dropCmd = versionDelete(cmd);
    }

    if (dropCmd) {
      // TODO: do we need to add anything to the response?
      return;
    }

    ModifiableSolrParams params = null;
    if (nodes != null) {

      params = new ModifiableSolrParams(filterParams(req.getParams()));
      params.set(
          DISTRIB_UPDATE_PARAM,
          (isLeader ? DistribPhase.FROMLEADER.toString() : DistribPhase.TOLEADER.toString()));
      if (isLeader) {
        params.set(
            "distrib.from",
            ZkCoreNodeProps.getCoreUrl(zkController.getBaseUrl(), req.getCore().getName()));
      }
      cmdDistrib.distribDelete(cmd, nodes, params);
    }

    // cmd.getIndexedId() == null when deleting by query
    // TODO: what to do when no idField?
    if (returnVersions && rsp != null && cmd.getIndexedId() != null && idField != null) {
      if (deleteResponse == null) {
        deleteResponse = new NamedList<String>();
        rsp.add("deletes", deleteResponse);
      }
      if (scratch == null) scratch = new CharsRef();
      idField.getType().indexedToReadable(cmd.getIndexedId(), scratch);
      // we're returning the version of the delete, not the version of the doc we deleted
      deleteResponse.add(scratch.toString(), cmd.getVersion());
    }
  }
 /**
  * Return a {@link org.apache.lucene.search.highlight.Scorer} suitable for this Query and field.
  *
  * @param query The current query
  * @param fieldName The name of the field
  * @param request The SolrQueryRequest
  */
 private Scorer getQueryScorer(Query query, String fieldName, SolrQueryRequest request) {
   boolean reqFieldMatch =
       request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false);
   if (reqFieldMatch) {
     return new QueryTermScorer(query, request.getSearcher().getIndexReader(), fieldName);
   } else {
     return new QueryTermScorer(query);
   }
 }
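 // Hedged illustration (added): hl.requireFieldMatch=true (or its per-field form
 // f.<field>.hl.requireFieldMatch=true) selects the reader-backed, field-restricted
 // QueryTermScorer above; otherwise terms from any field of the query may be highlighted.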
  @Test
  public void testInterface() throws Exception {
    try {
      init("schema12.xml");
      SolrCore core = h.getCore();

      NamedList<String> args = new NamedList<>();
      args.add(QueryElevationComponent.FIELD_TYPE, "string");
      args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml");

      QueryElevationComponent comp = new QueryElevationComponent();
      comp.init(args);
      comp.inform(core);

      SolrQueryRequest req = req();
      IndexReader reader = req.getSearcher().getIndexReader();
      Map<String, ElevationObj> map = comp.getElevationMap(reader, core);
      req.close();

      // Make sure the boosts loaded properly
      assertEquals(7, map.size());
      assertEquals(1, map.get("XXXX").priority.size());
      assertEquals(2, map.get("YYYY").priority.size());
      assertEquals(3, map.get("ZZZZ").priority.size());
      assertEquals(null, map.get("xxxx"));
      assertEquals(null, map.get("yyyy"));
      assertEquals(null, map.get("zzzz"));

      // Now test the same thing with a lowercase filter: 'lowerfilt'
      args = new NamedList<>();
      args.add(QueryElevationComponent.FIELD_TYPE, "lowerfilt");
      args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml");

      comp = new QueryElevationComponent();
      comp.init(args);
      comp.inform(core);
      map = comp.getElevationMap(reader, core);
      assertEquals(7, map.size());
      assertEquals(null, map.get("XXXX"));
      assertEquals(null, map.get("YYYY"));
      assertEquals(null, map.get("ZZZZ"));
      assertEquals(1, map.get("xxxx").priority.size());
      assertEquals(2, map.get("yyyy").priority.size());
      assertEquals(3, map.get("zzzz").priority.size());

      assertEquals("xxxx", comp.getAnalyzedQuery("XXXX"));
      assertEquals("xxxxyyyy", comp.getAnalyzedQuery("XXXX YYYY"));

      assertQ(
          "Make sure QEC handles null queries",
          req("qt", "/elevate", "q.alt", "*:*", "defType", "dismax"),
          "//*[@numFound='0']");
    } finally {
      delete();
    }
  }
  // Skip encoding for updating the index
  void createIndex2(int nDocs, String... fields) throws IOException {
    Set<String> fieldSet = new HashSet<String>(Arrays.asList(fields));

    SolrQueryRequest req = lrf.makeRequest();
    SolrQueryResponse rsp = new SolrQueryResponse();
    UpdateRequestProcessorChain processorChain = req.getCore().getUpdateProcessingChain(null);
    UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);

    boolean foomany_s = fieldSet.contains("foomany_s");
    boolean foo1_s = fieldSet.contains("foo1_s");
    boolean foo2_s = fieldSet.contains("foo2_s");
    boolean foo4_s = fieldSet.contains("foo4_s");
    boolean foo8_s = fieldSet.contains("foo8_s");
    boolean t10_100_ws = fieldSet.contains("t10_100_ws");

    for (int i = 0; i < nDocs; i++) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", Float.toString(i));
      if (foomany_s) {
        doc.addField("foomany_s", t(r.nextInt(nDocs * 10)));
      }
      if (foo1_s) {
        doc.addField("foo1_s", t(0));
      }
      if (foo2_s) {
        doc.addField("foo2_s", r.nextInt(2));
      }
      if (foo4_s) {
        doc.addField("foo4_s", r.nextInt(4));
      }
      if (foo8_s) {
        doc.addField("foo8_s", r.nextInt(8));
      }
      if (t10_100_ws) {
        StringBuilder sb = new StringBuilder(9 * 100);
        for (int j = 0; j < 100; j++) {
          sb.append(' ');
          sb.append(t(r.nextInt(10)));
        }
        doc.addField("t10_100_ws", sb.toString());
      }

      AddUpdateCommand cmd = new AddUpdateCommand();
      cmd.solrDoc = doc;
      processor.processAdd(cmd);
    }
    processor.finish();
    req.close();

    assertU(commit());

    req = lrf.makeRequest();
    assertEquals(nDocs, req.getSearcher().maxDoc());
    req.close();
  }
  public void processGetUpdates(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    SolrParams params = req.getParams();

    if (!params.getBool(COMPONENT_NAME, true)) {
      return;
    }

    String versionsStr = params.get("getUpdates");
    if (versionsStr == null) return;

    UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
    if (ulog == null) return;

    List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);

    List<Object> updates = new ArrayList<Object>(versions.size());

    long minVersion = Long.MAX_VALUE;

    // TODO: get this from cache instead of rebuilding?
    UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
    try {
      for (String versionStr : versions) {
        long version = Long.parseLong(versionStr);
        try {
          Object o = recentUpdates.lookup(version);
          if (o == null) continue;

          if (version > 0) {
            minVersion = Math.min(minVersion, version);
          }

          // TODO: do any kind of validation here?
          updates.add(o);

        } catch (SolrException | ClassCastException e) {
          log.warn("Exception reading log for updates", e);
        }
      }

      // Must return all delete-by-query commands that occur after the first add requested
      // since they may apply.
      updates.addAll(recentUpdates.getDeleteByQuery(minVersion));

      rb.rsp.add("updates", updates);

    } finally {
      recentUpdates.close(); // cache this somehow?
    }
  }
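  // Hedged request sketch (added): getUpdates carries a comma-separated list of version numbers,
  // typically ones previously returned by getVersions; the "/get" handler path is an assumption.
  //   /get?getUpdates=1635465789123456789,1635465789123456790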
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();
    params = adjustParams(params);
    req.setParams(params);

    if (params.get("action") != null) {
      handleAdmin(req, rsp, params);
      return;
    }

    TupleStream tupleStream;

    try {
      tupleStream = this.streamFactory.constructStream(params.get("expr"));
    } catch (Exception e) {
      // Catch exceptions that occur while the stream is being created. This includes streaming
      // expression parse errors.
      SolrException.log(logger, e);
      rsp.add("result-set", new DummyErrorStream(e));

      return;
    }

    int worker = params.getInt("workerID", 0);
    int numWorkers = params.getInt("numWorkers", 1);
    StreamContext context = new StreamContext();
    context.workerID = worker;
    context.numWorkers = numWorkers;
    context.setSolrClientCache(clientCache);
    context.setModelCache(modelCache);
    context.put("core", this.coreName);
    context.put("solr-core", req.getCore());
    tupleStream.setStreamContext(context);

    // if asking for explanation then go get it
    if (params.getBool("explain", false)) {
      rsp.add("explanation", tupleStream.toExplanation(this.streamFactory));
    }

    if (tupleStream instanceof DaemonStream) {
      DaemonStream daemonStream = (DaemonStream) tupleStream;
      if (daemons.containsKey(daemonStream.getId())) {
        daemons.remove(daemonStream.getId()).close();
      }
      daemonStream.setDaemons(daemons);
      daemonStream.open(); // This will start the daemonStream
      daemons.put(daemonStream.getId(), daemonStream);
      rsp.add(
          "result-set",
          new DaemonResponseStream("Deamon:" + daemonStream.getId() + " started on " + coreName));
    } else {
      rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream)));
    }
  }
 protected void doPrefetch(ResponseBuilder rb) throws IOException {
   SolrQueryRequest req = rb.req;
   SolrQueryResponse rsp = rb.rsp;
   // pre-fetch returned documents
   if (!req.getParams().getBool(ShardParams.IS_SHARD, false)
       && rb.getResults().docList != null
       && rb.getResults().docList.size() <= 50) {
     // TODO: this may depend on the highlighter component (or other components?)
     SolrPluginUtils.optimizePreFetchDocs(rb.getResults().docList, rb.getQuery(), req, rsp);
   }
 }
 @Override
 public DocTransformer create(String field, SolrParams params, SolrQueryRequest req) {
   String v = req.getParams().get(ShardParams.SHARD_URL);
   if (v == null) {
     if (req.getParams().getBool(ShardParams.IS_SHARD, false)) {
       v = "[unknown]";
     } else {
       v = "[not a shard request]";
     }
   }
   return new ValueAugmenter(field, v);
 }
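 // Hedged usage note (added): assuming this factory is registered as the [shard] transformer, a
 // distributed request with fl=id,score,[shard] adds the shard URL to each returned document;
 // the fallbacks above yield "[unknown]" for a shard sub-request without shard.url and
 // "[not a shard request]" for a plain, non-distributed request.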
  /**
   * For example:
   *
   * <p>String json = solr.request( "/select?qt=dismax&wt=json&q=...", null ); String xml =
   * solr.request( "/update", "<add><doc><field ..." );
   */
  public String request(String pathAndParams, String body) throws Exception {
    String path = null;
    SolrParams params = null;
    int idx = pathAndParams.indexOf('?');
    if (idx > 0) {
      path = pathAndParams.substring(0, idx);
      params = SolrRequestParsers.parseQueryString(pathAndParams.substring(idx + 1));
    } else {
      path = pathAndParams;
      params = new MapSolrParams(new HashMap<String, String>());
    }

    // Extract the handler from the path or params
    SolrRequestHandler handler = core.getRequestHandler(path);
    if (handler == null) {
      if ("/select".equals(path) || "/select/".equalsIgnoreCase(path)) {
        String qt = params.get(CommonParams.QT);
        handler = core.getRequestHandler(qt);
        if (handler == null) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
        }
      }
    }
    if (handler == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + path);
    }

    // Make a stream for the 'body' content
    List<ContentStream> streams = new ArrayList<ContentStream>(1);
    if (body != null && body.length() > 0) {
      streams.add(new ContentStreamBase.StringStream(body));
    }

    SolrQueryRequest req = null;
    try {
      req = parser.buildRequestFrom(core, params, streams);
      SolrQueryResponse rsp = new SolrQueryResponse();
      core.execute(handler, req, rsp);
      if (rsp.getException() != null) {
        throw rsp.getException();
      }

      // Now write it out
      QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
      StringWriter out = new StringWriter();
      responseWriter.write(out, req, rsp);
      return out.toString();
    } finally {
      if (req != null) {
        req.close();
      }
    }
  }
    @Override
    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
      FileFloatSource.resetCache();
      log.debug("readerCache has been reset.");

      UpdateRequestProcessor processor =
          req.getCore().getUpdateProcessingChain(null).createProcessor(req, rsp);
      try {
        RequestHandlerUtils.handleCommit(processor, req.getParams(), true);
      } finally {
        processor.finish();
      }
    }
  @Override
  public void process(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    if (rb.doHighlights) {
      SolrParams params = req.getParams();

      String[] defaultHighlightFields; // TODO: get from builder by default?

      if (rb.getQparser() != null) {
        defaultHighlightFields = rb.getQparser().getDefaultHighlightFields();
      } else {
        defaultHighlightFields = params.getParams(CommonParams.DF);
      }

      Query highlightQuery = rb.getHighlightQuery();
      if (highlightQuery == null) {
        if (rb.getQparser() != null) {
          try {
            highlightQuery = rb.getQparser().getHighlightQuery();
            rb.setHighlightQuery(highlightQuery);
          } catch (Exception e) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
          }
        } else {
          highlightQuery = rb.getQuery();
          rb.setHighlightQuery(highlightQuery);
        }
      }

      if (highlightQuery != null) {
        boolean rewrite =
            !(Boolean.valueOf(req.getParams().get(HighlightParams.USE_PHRASE_HIGHLIGHTER, "true"))
                && Boolean.valueOf(
                    req.getParams().get(HighlightParams.HIGHLIGHT_MULTI_TERM, "true")));
        highlightQuery =
            rewrite ? highlightQuery.rewrite(req.getSearcher().getIndexReader()) : highlightQuery;
      }

      // No highlighting if there is no query -- consider q.alt=*:*
      if (highlightQuery != null) {
        NamedList sumData =
            highlighter.doHighlighting(
                rb.getResults().docList, highlightQuery, req, defaultHighlightFields);

        if (sumData != null) {
          // TODO ???? add this directly to the response?
          rb.rsp.add("highlighting", sumData);
        }
      }
    }
  }
  /**
   * Generates a list of highlighted query fragments for each item in a list of documents, or
   * returns null if highlighting is disabled.
   *
   * @param docs query results
   * @param query the query
   * @param req the current request
   * @param defaultFields default list of fields to summarize
   * @return NamedList containing a NamedList for each document, which in turn contains a set of
   *     (field, summary) pairs.
   */
  @Override
  @SuppressWarnings("unchecked")
  public NamedList<Object> doHighlighting(
      DocList docs, Query query, SolrQueryRequest req, String[] defaultFields) throws IOException {
    SolrParams params = req.getParams();
    if (!isHighlightingEnabled(params)) return null;

    SolrIndexSearcher searcher = req.getSearcher();
    IndexSchema schema = searcher.getSchema();
    NamedList fragments = new SimpleOrderedMap();
    String[] fieldNames = getHighlightFields(query, req, defaultFields);
    Set<String> fset = new HashSet<String>();

    {
      // pre-fetch documents using the Searcher's doc cache
      for (String f : fieldNames) {
        fset.add(f);
      }
      // fetch unique key if one exists.
      SchemaField keyField = schema.getUniqueKeyField();
      if (null != keyField) fset.add(keyField.getName());
    }

    // get FastVectorHighlighter instance out of the processing loop
    FastVectorHighlighter fvh =
        new FastVectorHighlighter(
            // FVH cannot process the hl.usePhraseHighlighter parameter on a per-field basis
            params.getBool(HighlightParams.USE_PHRASE_HIGHLIGHTER, true),
            // FVH cannot process the hl.requireFieldMatch parameter on a per-field basis
            params.getBool(HighlightParams.FIELD_MATCH, false));
    fvh.setPhraseLimit(params.getInt(HighlightParams.PHRASE_LIMIT, Integer.MAX_VALUE));
    FieldQuery fieldQuery = fvh.getFieldQuery(query, searcher.getIndexReader());

    // Highlight each document
    DocIterator iterator = docs.iterator();
    for (int i = 0; i < docs.size(); i++) {
      int docId = iterator.nextDoc();
      Document doc = searcher.doc(docId, fset);
      NamedList docSummaries = new SimpleOrderedMap();
      for (String fieldName : fieldNames) {
        fieldName = fieldName.trim();
        if (useFastVectorHighlighter(params, schema, fieldName))
          doHighlightingByFastVectorHighlighter(
              fvh, fieldQuery, req, docSummaries, docId, doc, fieldName);
        else doHighlightingByHighlighter(query, req, docSummaries, docId, doc, fieldName);
      }
      String printId = schema.printableUniqueKey(doc);
      fragments.add(printId == null ? null : printId, docSummaries);
    }
    return fragments;
  }
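  // Hedged sketch of the resulting structure (added), as it typically appears in the response
  // once the caller adds it under "highlighting":
  //   "highlighting" : {
  //     "<uniqueKey of doc>" : { "<fieldName>" : [ "snippet 1", "snippet 2", ... ] }
  //   }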
  @Override
  public void processCommit(CommitUpdateCommand cmd) throws IOException {
    if (zkEnabled) {
      zkCheck();
    }

    if (vinfo != null) {
      vinfo.lockForUpdate();
    }
    try {

      if (ulog == null
          || ulog.getState() == UpdateLog.State.ACTIVE
          || (cmd.getFlags() & UpdateCommand.REPLAY) != 0) {
        super.processCommit(cmd);
      } else {
        log.info(
            "Ignoring commit while not ACTIVE - state: "
                + ulog.getState()
                + " replay:"
                + (cmd.getFlags() & UpdateCommand.REPLAY));
      }

    } finally {
      if (vinfo != null) {
        vinfo.unlockForUpdate();
      }
    }
    // TODO: we should consider this? commit everyone in the current collection

    if (zkEnabled) {
      ModifiableSolrParams params = new ModifiableSolrParams(filterParams(req.getParams()));
      if (!req.getParams().getBool(COMMIT_END_POINT, false)) {
        params.set(COMMIT_END_POINT, true);

        String nodeName =
            req.getCore().getCoreDescriptor().getCoreContainer().getZkController().getNodeName();
        String shardZkNodeName = nodeName + "_" + req.getCore().getName();
        List<Node> nodes =
            getCollectionUrls(
                req,
                req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName(),
                shardZkNodeName);

        if (nodes != null) {
          cmdDistrib.distribCommit(cmd, nodes, params);
          finish();
        }
      }
    }
  }
  protected SolrQueryRequest parseSolrQueryRequest(
      SolrRequestParsers parser, RequestGetter requestGetter) throws Exception {
    ArrayList<ContentStream> streams = new ArrayList<>(1);
    if (requestGetter.getContentStreams() != null && requestGetter.getContentStreams().size() > 0) {
      streams.addAll(requestGetter.getContentStreams());
    }
    SolrQueryRequest sreq = parser.buildRequestFrom(core, requestGetter.getSolrParams(), streams);

    // Handlers and logging will want to know the path. If it contains a ':',
    // the handler could use it for RESTful URLs
    sreq.getContext().put("path", RequestHandlers.normalize(requestGetter.getPath()));

    return sreq;
  }
  @Before
  public void setUp() throws Exception {

    Answer<GroupCollapseSummary> answer =
        new Answer<GroupCollapseSummary>() {

          @Override
          public GroupCollapseSummary answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            DummyGroupCollapseSummary dummyObject =
                new DummyGroupCollapseSummary(
                    (String) args[0],
                    (SolrIndexSearcher) args[1],
                    (Set<String>) args[2],
                    (String) args[3]);
            return dummyObject;
          }
        };

    PowerMockito.whenNew(GroupCollapseSummary.class).withAnyArguments().thenAnswer(answer);

    initMocks(this);

    schema = PowerMockito.mock(IndexSchema.class);

    rb.req = req;
    rb.rsp = rsp;
    when(rb.getGroupingSpec()).thenReturn(groupSpec);
    when(req.getParams()).thenReturn(params);
    when(req.getSchema()).thenReturn(schema);
    when(req.getSearcher()).thenReturn(searcher);
    mockResponse();

    when(schema.getFieldType(FIELD_PRICE)).thenReturn(priceType);
    when(schema.getFieldType(FIELD_DISCOUNT)).thenReturn(discountType);
    when(schema.getFieldType(FIELD_CLOSEOUT)).thenReturn(booleanType);
    when(schema.getFieldType(FIELD_COLOR)).thenReturn(stringType);
    when(schema.getFieldType(FIELD_COLORFAMILY)).thenReturn(stringType);

    numericType = PowerMockito.mock(org.apache.lucene.document.FieldType.NumericType.class);
    when(priceType.getNumericType()).thenReturn(numericType);
    when(priceType.getTypeName()).thenReturn("tfloat");
    when(discountType.getNumericType()).thenReturn(numericType);
    when(discountType.getTypeName()).thenReturn("tint");
    when(booleanType.getTypeName()).thenReturn("boolean");
    when(stringType.getTypeName()).thenReturn("string");

    when(groupSpec.getFields()).thenReturn(new String[] {"productId"});
  }