/** test range query performance */ public void XtestFilteringPerformance() throws Exception { int indexSize = 19999; float fractionCovered = .1f; String l = t(0); String u = t((int) (indexSize * 10 * fractionCovered)); SolrQueryRequest req = lrf.makeRequest(); QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req); Query rangeQ = parser.parse(); List<Query> filters = new ArrayList<Query>(); filters.add(rangeQ); req.close(); parser = QParser.getParser( "{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}" + t(0) + ' ' + t(1) + ' ' + t(2), null, req); Query q = parser.parse(); // SolrIndexSearcher searcher = req.getSearcher(); // DocSet range = searcher.getDocSet(rangeQ, null); createIndex2(indexSize, "foomany_s", "t10_100_ws"); // doListGen(100, q, filters, false, true); doListGen(500, q, filters, false, true); req.close(); }
// Skip encoding for updating the index void createIndex2(int nDocs, String... fields) throws IOException { Set<String> fieldSet = new HashSet<String>(Arrays.asList(fields)); SolrQueryRequest req = lrf.makeRequest(); SolrQueryResponse rsp = new SolrQueryResponse(); UpdateRequestProcessorChain processorChain = req.getCore().getUpdateProcessingChain(null); UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp); boolean foomany_s = fieldSet.contains("foomany_s"); boolean foo1_s = fieldSet.contains("foo1_s"); boolean foo2_s = fieldSet.contains("foo2_s"); boolean foo4_s = fieldSet.contains("foo4_s"); boolean foo8_s = fieldSet.contains("foo8_s"); boolean t10_100_ws = fieldSet.contains("t10_100_ws"); for (int i = 0; i < nDocs; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", Float.toString(i)); if (foomany_s) { doc.addField("foomany_s", t(r.nextInt(nDocs * 10))); } if (foo1_s) { doc.addField("foo1_s", t(0)); } if (foo2_s) { doc.addField("foo2_s", r.nextInt(2)); } if (foo4_s) { doc.addField("foo4_s", r.nextInt(4)); } if (foo8_s) { doc.addField("foo8_s", r.nextInt(8)); } if (t10_100_ws) { StringBuilder sb = new StringBuilder(9 * 100); for (int j = 0; j < 100; j++) { sb.append(' '); sb.append(t(r.nextInt(10))); } doc.addField("t10_100_ws", sb.toString()); } AddUpdateCommand cmd = new AddUpdateCommand(); cmd.solrDoc = doc; processor.processAdd(cmd); } processor.finish(); req.close(); assertU(commit()); req = lrf.makeRequest(); assertEquals(nDocs, req.getSearcher().maxDoc()); req.close(); }
/** * Tests the {@link * FieldAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)} */ @Test public void testResolveAnalysisRequest() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.add(AnalysisParams.FIELD_NAME, "text,nametext"); params.add(AnalysisParams.FIELD_TYPE, "whitetok,keywordtok"); params.add(AnalysisParams.FIELD_VALUE, "the quick red fox jumped over the lazy brown dogs"); params.add(CommonParams.Q, "fox brown"); SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params); FieldAnalysisRequest request = handler.resolveAnalysisRequest(req); List<String> fieldNames = request.getFieldNames(); assertEquals("Expecting 2 field names", 2, fieldNames.size()); assertEquals("text", fieldNames.get(0)); assertEquals("nametext", fieldNames.get(1)); List<String> fieldTypes = request.getFieldTypes(); assertEquals("Expecting 2 field types", 2, fieldTypes.size()); assertEquals("whitetok", fieldTypes.get(0)); assertEquals("keywordtok", fieldTypes.get(1)); assertEquals("the quick red fox jumped over the lazy brown dogs", request.getFieldValue()); assertEquals("fox brown", request.getQuery()); assertFalse(request.isShowMatch()); req.close(); // testing override of query value using analysis.query param params.add(AnalysisParams.QUERY, "quick lazy"); req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertEquals("quick lazy", request.getQuery()); req.close(); // testing analysis.showmatch param params.add(AnalysisParams.SHOW_MATCH, "false"); req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertFalse(request.isShowMatch()); req.close(); params.set(AnalysisParams.SHOW_MATCH, "true"); req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertTrue(request.isShowMatch()); req.close(); // testing absence of query value params.remove(CommonParams.Q); params.remove(AnalysisParams.QUERY); req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertNull(request.getQuery()); req.close(); }
/** * Get the Solr document list for a query. This differs from getResponseByParams in that it only * populates the fields of the response; it never generates search snippets and no facets are * computed. * * @param params the query parameters * @return the list of matching documents * @throws IOException if the query cannot be executed or no response is produced * @throws SolrException if Solr rejects the request */ @Override public SolrDocumentList getDocumentListByParams(ModifiableSolrParams params) throws IOException, SolrException { SolrQueryRequest req = this.request(params); SolrQueryResponse response = null; String q = params.get(CommonParams.Q); String fq = params.get(CommonParams.FQ); String sort = params.get(CommonParams.SORT); String threadname = Thread.currentThread().getName(); try { if (q != null) Thread.currentThread() .setName( "solr query: q = " + q + (fq == null ? "" : ", fq = " + fq) + (sort == null ? "" : ", sort = " + sort)); // for debugging in thread dumps response = this.query(req); if (q != null) Thread.currentThread().setName(threadname); if (response == null) throw new IOException("response == null"); return SolrQueryResponse2SolrDocumentList(req, response); } finally { req.close(); SolrRequestInfo.clearRequestInfo(); } }
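// Hedged usage sketch for getDocumentListByParams above (not part of the original source).
// "connector" is a hypothetical instance of the class that declares the method; the query, sort,
// and rows values are purely illustrative, and the usual org.apache.solr.common imports are assumed.
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(CommonParams.Q, "text_t:lucene");   // main query; also surfaces in the worker thread name
params.set(CommonParams.SORT, "id asc");       // optional sort
params.set(CommonParams.ROWS, "10");           // page size
SolrDocumentList docs = connector.getDocumentListByParams(params);
System.out.println("matches: " + docs.getNumFound());   // total hits, independent of rows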
@Test public void testCommitWithin() throws Exception { ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract"); assertTrue("handler is null and it shouldn't be", handler != null); SolrQueryRequest req = req( "literal.id", "one", ExtractingParams.RESOURCE_NAME, "extraction/version_control.txt", "commitWithin", "200"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); ExtractingDocumentLoader loader = (ExtractingDocumentLoader) handler.newLoader(req, p); loader.load( req, rsp, new ContentStreamBase.FileStream(getFile("extraction/version_control.txt")), p); AddUpdateCommand add = p.addCommands.get(0); assertEquals(200, add.commitWithin); req.close(); }
int doListGen(int iter, Query q, List<Query> filt, boolean cacheQuery, boolean cacheFilt) throws Exception { SolrQueryRequest req = lrf.makeRequest(); SolrIndexSearcher searcher = req.getSearcher(); long start = System.currentTimeMillis(); // These aren't public in SolrIndexSearcher int NO_CHECK_QCACHE = 0x80000000; int GET_DOCSET = 0x40000000; int NO_CHECK_FILTERCACHE = 0x20000000; int GET_SCORES = 0x01; int ret = 0; for (int i = 0; i < iter; i++) { DocList l = searcher.getDocList( q, filt, (Sort) null, 0, 10, (cacheQuery ? 0 : NO_CHECK_QCACHE) | (cacheFilt ? 0 : NO_CHECK_FILTERCACHE)); ret += l.matches(); } long end = System.currentTimeMillis(); System.out.println( "ret=" + ret + " time=" + (end - start) + " throughput=" + iter * 1000 / (end - start + 1)); req.close(); assertTrue(ret > 0); // make sure we did some work return ret; }
@Test public void testCollateWithFilter() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); ModifiableSolrParams params = new ModifiableSolrParams(); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellCheckComponent.SPELLCHECK_BUILD, "true"); params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); params.add(SpellCheckComponent.SPELLCHECK_COLLATE, "true"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "5"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "2"); params.add(CommonParams.Q, "lowerfilt:(+fauth +home +loane)"); params.add(CommonParams.FQ, "NOT(id:1)"); // Because a FilterQuery is applied which removes doc id#1 from possible hits, we would // not want the collations to return us "lowerfilt:(+faith +hope +loaves)" as this only matches // doc id#1. SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); SolrQueryRequest req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); NamedList values = rsp.getValues(); NamedList spellCheck = (NamedList) values.get("spellcheck"); NamedList suggestions = (NamedList) spellCheck.get("suggestions"); List<String> collations = suggestions.getAll("collation"); assertTrue(collations.size() == 1); assertTrue(collations.get(0).equals("lowerfilt:(+faith +hope +love)")); }
@Test public void testCollateWithMultipleRequestHandlers() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); ModifiableSolrParams params = new ModifiableSolrParams(); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellCheckComponent.SPELLCHECK_DICT, "multipleFields"); params.add(SpellCheckComponent.SPELLCHECK_BUILD, "true"); params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); params.add(SpellCheckComponent.SPELLCHECK_COLLATE, "true"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "1"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "1"); params.add(CommonParams.Q, "peac"); // SpellCheckCompRH has no "qf" defined. It will not find "peace" from "peac" despite it being // in the dictionary // because re-querying against this Request Handler results in 0 hits. SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); SolrQueryRequest req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); NamedList values = rsp.getValues(); NamedList spellCheck = (NamedList) values.get("spellcheck"); NamedList suggestions = (NamedList) spellCheck.get("suggestions"); String singleCollation = (String) suggestions.get("collation"); assertNull(singleCollation); // SpellCheckCompRH1 has "lowerfilt1" defined in the "qf" param. It will find "peace" from // "peac" because // re-querying field "lowerfilt1" returns the hit. params.remove(SpellCheckComponent.SPELLCHECK_BUILD); handler = core.getRequestHandler("spellCheckCompRH1"); rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); values = rsp.getValues(); spellCheck = (NamedList) values.get("spellcheck"); suggestions = (NamedList) spellCheck.get("suggestions"); singleCollation = (String) suggestions.get("collation"); assertEquals(singleCollation, "peace"); }
@Test public void testInterface() throws Exception { try { init("schema12.xml"); SolrCore core = h.getCore(); NamedList<String> args = new NamedList<>(); args.add(QueryElevationComponent.FIELD_TYPE, "string"); args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml"); QueryElevationComponent comp = new QueryElevationComponent(); comp.init(args); comp.inform(core); SolrQueryRequest req = req(); IndexReader reader = req.getSearcher().getIndexReader(); Map<String, ElevationObj> map = comp.getElevationMap(reader, core); req.close(); // Make sure the boosts loaded properly assertEquals(7, map.size()); assertEquals(1, map.get("XXXX").priority.size()); assertEquals(2, map.get("YYYY").priority.size()); assertEquals(3, map.get("ZZZZ").priority.size()); assertEquals(null, map.get("xxxx")); assertEquals(null, map.get("yyyy")); assertEquals(null, map.get("zzzz")); // Now test the same thing with a lowercase filter: 'lowerfilt' args = new NamedList<>(); args.add(QueryElevationComponent.FIELD_TYPE, "lowerfilt"); args.add(QueryElevationComponent.CONFIG_FILE, "elevate.xml"); comp = new QueryElevationComponent(); comp.init(args); comp.inform(core); map = comp.getElevationMap(reader, core); assertEquals(7, map.size()); assertEquals(null, map.get("XXXX")); assertEquals(null, map.get("YYYY")); assertEquals(null, map.get("ZZZZ")); assertEquals(1, map.get("xxxx").priority.size()); assertEquals(2, map.get("yyyy").priority.size()); assertEquals(3, map.get("zzzz").priority.size()); assertEquals("xxxx", comp.getAnalyzedQuery("XXXX")); assertEquals("xxxxyyyy", comp.getAnalyzedQuery("XXXX YYYY")); assertQ( "Make sure QEC handles null queries", req("qt", "/elevate", "q.alt", "*:*", "defType", "dismax"), "//*[@numFound='0']"); } finally { delete(); } }
@Test public void testComponent() throws Exception { SolrCore core = h.getCore(); SearchComponent sc = core.getSearchComponent("clustering"); assertTrue("sc is null and it shouldn't be", sc != null); ModifiableSolrParams params = new ModifiableSolrParams(); params.add(ClusteringComponent.COMPONENT_NAME, "true"); params.add(CommonParams.Q, "*:*"); params.add(ClusteringParams.USE_SEARCH_RESULTS, "true"); SolrRequestHandler handler = core.getRequestHandler("standard"); SolrQueryResponse rsp; rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); SolrQueryRequest req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); NamedList values = rsp.getValues(); Object clusters = values.get("clusters"); // System.out.println("Clusters: " + clusters); assertTrue("clusters is null and it shouldn't be", clusters != null); req.close(); params = new ModifiableSolrParams(); params.add(ClusteringComponent.COMPONENT_NAME, "true"); params.add(ClusteringParams.ENGINE_NAME, "mock"); params.add(ClusteringParams.USE_COLLECTION, "true"); params.add(QueryComponent.COMPONENT_NAME, "false"); handler = core.getRequestHandler("docClustering"); rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); values = rsp.getValues(); clusters = values.get("clusters"); // System.out.println("Clusters: " + clusters); assertTrue("clusters is null and it shouldn't be", clusters != null); req.close(); }
/** * For example: * * <p>String json = solr.request( "/select?qt=dismax&wt=json&q=...", null ); String xml = * solr.request( "/update", "<add><doc><field ..." ); */ public String request(String pathAndParams, String body) throws Exception { String path = null; SolrParams params = null; int idx = pathAndParams.indexOf('?'); if (idx > 0) { path = pathAndParams.substring(0, idx); params = SolrRequestParsers.parseQueryString(pathAndParams.substring(idx + 1)); } else { path = pathAndParams; params = new MapSolrParams(new HashMap<String, String>()); } // Extract the handler from the path or params SolrRequestHandler handler = core.getRequestHandler(path); if (handler == null) { if ("/select".equals(path) || "/select/".equalsIgnoreCase(path)) { String qt = params.get(CommonParams.QT); handler = core.getRequestHandler(qt); if (handler == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt); } } } if (handler == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + path); } // Make a stream for the 'body' content List<ContentStream> streams = new ArrayList<ContentStream>(1); if (body != null && body.length() > 0) { streams.add(new ContentStreamBase.StringStream(body)); } SolrQueryRequest req = null; try { req = parser.buildRequestFrom(core, params, streams); SolrQueryResponse rsp = new SolrQueryResponse(); core.execute(handler, req, rsp); if (rsp.getException() != null) { throw rsp.getException(); } // Now write it out QueryResponseWriter responseWriter = core.getQueryResponseWriter(req); StringWriter out = new StringWriter(); responseWriter.write(out, req, rsp); return out.toString(); } finally { if (req != null) { req.close(); } } }
@Test public void testElevationReloading() throws Exception { try { init("schema12.xml"); String testfile = "data-elevation.xml"; File f = new File(h.getCore().getDataDir(), testfile); writeFile(f, "aaa", "A"); QueryElevationComponent comp = (QueryElevationComponent) h.getCore().getSearchComponent("elevate"); NamedList<String> args = new NamedList<>(); args.add(QueryElevationComponent.CONFIG_FILE, testfile); comp.init(args); comp.inform(h.getCore()); SolrQueryRequest req = req(); IndexReader reader = req.getSearcher().getIndexReader(); Map<String, ElevationObj> map = comp.getElevationMap(reader, h.getCore()); assertTrue(map.get("aaa").priority.containsKey(new BytesRef("A"))); assertNull(map.get("bbb")); req.close(); // now change the file writeFile(f, "bbb", "B"); assertU(adoc("id", "10000")); // will get same reader if no index change assertU(commit()); req = req(); reader = req.getSearcher().getIndexReader(); map = comp.getElevationMap(reader, h.getCore()); assertNull(map.get("aaa")); assertTrue(map.get("bbb").priority.containsKey(new BytesRef("B"))); req.close(); } finally { delete(); } }
void destroy() { try { if (solrReq != null) { log.debug("Closing out SolrRequest: {}", solrReq); solrReq.close(); } } finally { try { if (core != null) core.close(); } finally { SolrRequestInfo.clearRequestInfo(); } AuthenticationPlugin authcPlugin = cores.getAuthenticationPlugin(); if (authcPlugin != null) authcPlugin.closeRequest(); } }
int doSetGen(int iter, Query q) throws Exception { SolrQueryRequest req = lrf.makeRequest(); SolrIndexSearcher searcher = req.getSearcher(); long start = System.currentTimeMillis(); int ret = 0; for (int i = 0; i < iter; i++) { DocSet set = searcher.getDocSetNC(q, null); ret += set.size(); } long end = System.currentTimeMillis(); System.out.println( "ret=" + ret + " time=" + (end - start) + " throughput=" + iter * 1000 / (end - start + 1)); req.close(); assertTrue(ret > 0); // make sure we did some work return ret; }
@Test public void testSolrIsLoadedAndQueryIsAnswered() { String q = "a"; SolrQueryRequest req = req( "q", q, DisMaxParams.QF, "f1 f2 f3", DisMaxParams.MM, "1", QueryParsing.OP, "OR", "defType", "querqy"); assertQ("Solr filter query fails", req, "//result[@name='response' and @numFound='3']"); req.close(); }
/** test range query performance */ public void XtestRangePerformance() throws Exception { int indexSize = 1999; float fractionCovered = 1.0f; String l = t(0); String u = t((int) (indexSize * 10 * fractionCovered)); SolrQueryRequest req = lrf.makeRequest(); QParser parser = QParser.getParser("foomany_s:[" + l + " TO " + u + "]", null, req); Query range = parser.parse(); QParser parser2 = QParser.getParser("{!frange l=" + l + " u=" + u + "}foomany_s", null, req); Query frange = parser2.parse(); req.close(); createIndex2(indexSize, "foomany_s"); doSetGen(1, range); doSetGen(1, frange); // load field cache doSetGen(100, range); doSetGen(10000, frange); }
public void handleRequest(RequestGetter requestGetter) { MDCLoggingContext.reset(); MDCLoggingContext.setNode(cores); String path = requestGetter.getPath(); solrParams = requestGetter.getSolrParams(); SolrRequestHandler handler = null; String corename = ""; String origCorename = null; try { // set a request timer which can be reused by requests if needed // req.setAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE, new RTimer()); // put the core container in request attribute // req.setAttribute("org.apache.solr.CoreContainer", cores); // check for management path String alternate = cores.getManagementPath(); if (alternate != null && path.startsWith(alternate)) { path = path.substring(0, alternate.length()); } // unused feature ? int idx = path.indexOf(':'); if (idx > 0) { // save the portion after the ':' for a 'handler' path parameter path = path.substring(0, idx); } boolean usingAliases = false; List<String> collectionsList = null; // Check for container handlers handler = cores.getRequestHandler(path); if (handler != null) { solrReq = parseSolrQueryRequest(SolrRequestParsers.DEFAULT, requestGetter); handleAdminRequest(handler, solrReq); return; } else { // otherwise, we should find a core from the path idx = path.indexOf("/", 1); if (idx > 1) { // try to get the corename as a request parameter first corename = path.substring(1, idx); // look at aliases if (cores.isZooKeeperAware()) { origCorename = corename; ZkStateReader reader = cores.getZkController().getZkStateReader(); aliases = reader.getAliases(); if (aliases != null && aliases.collectionAliasSize() > 0) { usingAliases = true; String alias = aliases.getCollectionAlias(corename); if (alias != null) { collectionsList = StrUtils.splitSmart(alias, ",", true); corename = collectionsList.get(0); } } } core = cores.getCore(corename); if (core != null) { path = path.substring(idx); } } // add collection name if (core == null && StringUtils.isNotBlank(requestGetter.getCollection())) { corename = requestGetter.getCollection(); core = cores.getCore(corename); } if (core == null) { if (!cores.isZooKeeperAware()) { core = cores.getCore(""); } } } if (core == null && cores.isZooKeeperAware()) { // we couldn't find the core - lets make sure a collection was not specified instead core = getCoreByCollection(cores, corename); if (core != null) { // we found a core, update the path path = path.substring(idx); } // try the default core if (core == null) { core = cores.getCore(""); if (core != null) {} } } // With a valid core... 
if (core != null) { MDCLoggingContext.setCore(core); final SolrConfig config = core.getSolrConfig(); // get or create/cache the parser for the core SolrRequestParsers parser = config.getRequestParsers(); // Determine the handler from the url path if not set // (we might already have selected the cores handler) if (handler == null && path.length() > 1) { // don't match "" or "/" as valid path handler = core.getRequestHandler(path); if (handler == null) { // may be a restlet path // Handle /schema/* paths via Restlet if (path.equals("/schema") || path.startsWith("/schema/")) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "unsupported /schema/** path, use the HTTP Solr interface"); } } // no handler yet but allowed to handle select; let's check if (handler == null && parser.isHandleSelect()) { if ("/select".equals(path) || "/select/".equals(path)) { solrReq = parseSolrQueryRequest(parser, requestGetter); invalidStates = checkStateIsValid(cores, solrReq.getParams().get(CloudSolrClient.STATE_VERSION)); String qt = solrReq.getParams().get(CommonParams.QT); handler = core.getRequestHandler(qt); if (handler == null) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt); } if (qt != null && qt.startsWith("/") && (handler instanceof ContentStreamHandlerBase)) { // For security reasons it's a bad idea to allow a leading '/', ex: // /select?qt=/update see SOLR-3161 // There was no restriction from Solr 1.4 thru 3.5 and it's not supported for update // handlers. throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Invalid Request Handler ('qt'). Do not use /select to access: " + qt); } } } } // With a valid handler and a valid core... if (handler != null) { // if not a /select, create the request if (solrReq == null) { solrReq = parseSolrQueryRequest(parser, requestGetter); } if (usingAliases) { processAliases(solrReq, aliases, collectionsList); } SolrQueryResponse solrRsp = new SolrQueryResponse(); SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, solrRsp)); this.execute(handler, solrReq, solrRsp); QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq); if (invalidStates != null) solrReq.getContext().put(CloudSolrClient.STATE_VERSION, invalidStates); writeResponse(solrRsp, responseWriter, solrReq); return; // we are done with a valid handler } } logger.debug("no handler or core retrieved for {}, follow through...", path); throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "no handler or core retrieved for " + path); } catch (Throwable ex) { sendError(core, solrReq, ex); // walk the entire cause chain to search for an Error Throwable t = ex; while (t != null) { if (t instanceof Error) { if (t != ex) { logger.error( "An Error was wrapped in another exception - please report complete stacktrace on SOLR-6161", ex); } throw (Error) t; } t = t.getCause(); } return; } finally { try { if (solrReq != null) { logger.debug("Closing out SolrRequest: {}", solrReq); solrReq.close(); } } finally { try { if (core != null) { core.close(); } } finally { SolrRequestInfo.clearRequestInfo(); } } MDCLoggingContext.clear(); } }
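// Illustrative walk-through of the dispatch logic above for a hypothetical request path
// "/collection1/select" with qt=standard (values invented for the example, not taken from the source):
//   1. cores.getRequestHandler("/collection1/select") finds no container-level handler.
//   2. idx = path.indexOf("/", 1) isolates corename = "collection1"; cores.getCore("collection1")
//      succeeds, so path is trimmed to "/select".
//   3. core.getRequestHandler("/select") returns null, but parser.isHandleSelect() allows the legacy
//      select path, so the handler is resolved from the qt parameter (here a handler registered as "standard").
//   4. The request is executed and written out; the finally block closes solrReq, closes the core,
//      clears SolrRequestInfo, and clears the MDC logging context.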
@Test public void testExtendedCollate() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); ModifiableSolrParams params = new ModifiableSolrParams(); params.add(CommonParams.QT, "spellCheckCompRH"); params.add(CommonParams.Q, "lowerfilt:(+fauth +home +loane)"); params.add(SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true"); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellCheckComponent.SPELLCHECK_BUILD, "true"); params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); params.add(SpellCheckComponent.SPELLCHECK_COLLATE, "true"); // Testing backwards-compatible behavior. // Returns 1 collation as a single string. // All words are "correct" per the dictionary, but this collation would // return no results if tried. SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); SolrQueryRequest req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); NamedList values = rsp.getValues(); NamedList spellCheck = (NamedList) values.get("spellcheck"); NamedList suggestions = (NamedList) spellCheck.get("suggestions"); String singleCollation = (String) suggestions.get("collation"); assertEquals("lowerfilt:(+faith +homer +loaves)", singleCollation); // Testing backwards-compatible response format but will only return a // collation that would return results. params.remove(SpellCheckComponent.SPELLCHECK_BUILD); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "5"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "1"); handler = core.getRequestHandler("spellCheckCompRH"); rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); values = rsp.getValues(); spellCheck = (NamedList) values.get("spellcheck"); suggestions = (NamedList) spellCheck.get("suggestions"); singleCollation = (String) suggestions.get("collation"); assertEquals("lowerfilt:(+faith +hope +loaves)", singleCollation); // Testing returning multiple collations if more than one valid // combination exists. params.remove(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES); params.remove(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "5"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "2"); handler = core.getRequestHandler("spellCheckCompRH"); rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); values = rsp.getValues(); spellCheck = (NamedList) values.get("spellcheck"); suggestions = (NamedList) spellCheck.get("suggestions"); List<String> collations = suggestions.getAll("collation"); assertTrue(collations.size() == 2); for (String multipleCollation : collations) { assertTrue( multipleCollation.equals("lowerfilt:(+faith +hope +love)") || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)")); } // Testing return multiple collations with expanded collation response // format. 
params.add(SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true"); handler = core.getRequestHandler("spellCheckCompRH"); rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); req = new LocalSolrQueryRequest(core, params); handler.handleRequest(req, rsp); req.close(); values = rsp.getValues(); spellCheck = (NamedList) values.get("spellcheck"); suggestions = (NamedList) spellCheck.get("suggestions"); List<NamedList> expandedCollationList = suggestions.getAll("collation"); Set<String> usedcollations = new HashSet<String>(); assertTrue(expandedCollationList.size() == 2); for (NamedList expandedCollation : expandedCollationList) { String multipleCollation = (String) expandedCollation.get("collationQuery"); assertTrue( multipleCollation.equals("lowerfilt:(+faith +hope +love)") || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)")); assertTrue(!usedcollations.contains(multipleCollation)); usedcollations.add(multipleCollation); int hits = (Integer) expandedCollation.get("hits"); assertTrue(hits == 1); NamedList misspellingsAndCorrections = (NamedList) expandedCollation.get("misspellingsAndCorrections"); assertTrue(misspellingsAndCorrections.size() == 3); String correctionForFauth = (String) misspellingsAndCorrections.get("fauth"); String correctionForHome = (String) misspellingsAndCorrections.get("home"); String correctionForLoane = (String) misspellingsAndCorrections.get("loane"); assertTrue(correctionForFauth.equals("faith")); assertTrue(correctionForHome.equals("hope")); assertTrue(correctionForLoane.equals("love") || correctionForLoane.equals("loaves")); } }
@Test public void testSorting() throws Exception { try { init("schema12.xml"); assertU(adoc("id", "a", "title", "ipod trash trash", "str_s1", "a")); assertU(adoc("id", "b", "title", "ipod ipod trash", "str_s1", "b")); assertU(adoc("id", "c", "title", "ipod ipod ipod ", "str_s1", "c")); assertU(adoc("id", "x", "title", "boosted", "str_s1", "x")); assertU(adoc("id", "y", "title", "boosted boosted", "str_s1", "y")); assertU(adoc("id", "z", "title", "boosted boosted boosted", "str_s1", "z")); assertU(commit()); String query = "title:ipod"; Map<String, String> args = new HashMap<>(); // reusing args & requests this way is a solr-test-antipattern. PLEASE // DO NOT COPY THIS CODE args.put(CommonParams.Q, query); args.put(CommonParams.QT, "/elevate"); args.put(CommonParams.FL, "id,score"); args.put("indent", "true"); // args.put( CommonParams.FL, "id,title,score" ); SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); IndexReader reader = req.getSearcher().getIndexReader(); QueryElevationComponent booster = (QueryElevationComponent) req.getCore().getSearchComponent("elevate"); assertQ( "Make sure standard sort works as expected", req, "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='c']", "//result/doc[2]/str[@name='id'][.='b']", "//result/doc[3]/str[@name='id'][.='a']"); // Explicitly set what gets boosted booster.elevationCache.clear(); booster.setTopQueryResults(reader, query, new String[] {"x", "y", "z"}, null); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( "All six should make it", req, "//*[@numFound='6']", "//result/doc[1]/str[@name='id'][.='x']", "//result/doc[2]/str[@name='id'][.='y']", "//result/doc[3]/str[@name='id'][.='z']", "//result/doc[4]/str[@name='id'][.='c']", "//result/doc[5]/str[@name='id'][.='b']", "//result/doc[6]/str[@name='id'][.='a']"); booster.elevationCache.clear(); // now switch the order: req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); booster.setTopQueryResults(reader, query, new String[] {"a", "x"}, null); assertQ( "All four should make it", req, "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='a']", "//result/doc[2]/str[@name='id'][.='x']", "//result/doc[3]/str[@name='id'][.='c']", "//result/doc[4]/str[@name='id'][.='b']"); // Test reverse sort args.put(CommonParams.SORT, "score asc"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( "All four should make it", req, "//*[@numFound='4']" // NOTE REVERSED doc[X] indices , "//result/doc[4]/str[@name='id'][.='a']", "//result/doc[3]/str[@name='id'][.='x']", "//result/doc[2]/str[@name='id'][.='c']", "//result/doc[1]/str[@name='id'][.='b']"); // Try normal sort by 'id' // default 'forceBoost' should be false assertEquals(false, booster.forceElevation); args.put(CommonParams.SORT, "str_s1 asc"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( null, req, "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='a']", "//result/doc[2]/str[@name='id'][.='b']", "//result/doc[3]/str[@name='id'][.='c']", "//result/doc[4]/str[@name='id'][.='x']"); args.put(CommonParams.SORT, "id asc"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( null, req, "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='a']", "//result/doc[2]/str[@name='id'][.='b']", "//result/doc[3]/str[@name='id'][.='c']", "//result/doc[4]/str[@name='id'][.='x']"); booster.forceElevation = true; 
args.put(CommonParams.SORT, "id asc"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( null, req, "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='a']", "//result/doc[2]/str[@name='id'][.='x']", "//result/doc[3]/str[@name='id'][.='b']", "//result/doc[4]/str[@name='id'][.='c']"); // Test exclusive (not to be confused with exclusion) args.put(QueryElevationParams.EXCLUSIVE, "true"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); booster.setTopQueryResults(reader, query, new String[] {"x", "a"}, new String[] {}); assertQ( null, req, "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='x']", "//result/doc[2]/str[@name='id'][.='a']"); // Test exclusion booster.elevationCache.clear(); args.remove(CommonParams.SORT); args.remove(QueryElevationParams.EXCLUSIVE); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); booster.setTopQueryResults(reader, query, new String[] {"x"}, new String[] {"a"}); assertQ( null, req, "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='x']", "//result/doc[2]/str[@name='id'][.='c']", "//result/doc[3]/str[@name='id'][.='b']"); // Test setting ids and excludes from http parameters booster.elevationCache.clear(); args.put(QueryElevationParams.IDS, "x,y,z"); args.put(QueryElevationParams.EXCLUDE, "b"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( "All five should make it", req, "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='x']", "//result/doc[2]/str[@name='id'][.='y']", "//result/doc[3]/str[@name='id'][.='z']", "//result/doc[4]/str[@name='id'][.='c']", "//result/doc[5]/str[@name='id'][.='a']"); args.put(QueryElevationParams.IDS, "x,z,y"); args.put(QueryElevationParams.EXCLUDE, "b,c"); req.close(); req = new LocalSolrQueryRequest(h.getCore(), new MapSolrParams(args)); assertQ( "All four should make it", req, "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='x']", "//result/doc[2]/str[@name='id'][.='z']", "//result/doc[3]/str[@name='id'][.='y']", "//result/doc[4]/str[@name='id'][.='a']"); req.close(); } finally { delete(); } }
public void testCopyFieldsAndFieldBoostsAndDocBoosts() throws Exception { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); SolrInputDocument doc = new SolrInputDocument(); final float DOC_BOOST = 3.0F; doc.setDocumentBoost(DOC_BOOST); doc.addField("id", "42"); SolrInputField inTitle = new SolrInputField("title"); inTitle.addValue("titleA", 2.0F); inTitle.addValue("titleB", 7.0F); final float TITLE_BOOST = 2.0F * 7.0F; assertEquals(TITLE_BOOST, inTitle.getBoost(), 0.0F); doc.put(inTitle.getName(), inTitle); SolrInputField inFoo = new SolrInputField("foo_t"); inFoo.addValue("summer time", 1.0F); inFoo.addValue("in the city", 5.0F); inFoo.addValue("living is easy", 11.0F); final float FOO_BOOST = 1.0F * 5.0F * 11.0F; assertEquals(FOO_BOOST, inFoo.getBoost(), 0.0F); doc.put(inFoo.getName(), inFoo); Document out = DocumentBuilder.toDocument(doc, schema); IndexableField[] outTitle = out.getFields(inTitle.getName()); assertEquals("wrong number of title values", 2, outTitle.length); IndexableField[] outNoNorms = out.getFields("title_stringNoNorms"); assertEquals("wrong number of nonorms values", 2, outNoNorms.length); IndexableField[] outFoo = out.getFields(inFoo.getName()); assertEquals("wrong number of foo values", 3, outFoo.length); IndexableField[] outText = out.getFields("text"); assertEquals("wrong number of text values", 5, outText.length); // since Lucene no longer has native document boosts, we should find // the doc boost multiplied into the boost on the first field value // of each field. All other field values should be 1.0f // (lucene will multiply all of the field value boosts later) assertEquals(TITLE_BOOST * DOC_BOOST, outTitle[0].boost(), 0.0F); assertEquals(1.0F, outTitle[1].boost(), 0.0F); // assertEquals(FOO_BOOST * DOC_BOOST, outFoo[0].boost(), 0.0F); assertEquals(1.0F, outFoo[1].boost(), 0.0F); assertEquals(1.0F, outFoo[2].boost(), 0.0F); // assertEquals(TITLE_BOOST * DOC_BOOST, outText[0].boost(), 0.0F); assertEquals(1.0F, outText[1].boost(), 0.0F); assertEquals(FOO_BOOST, outText[2].boost(), 0.0F); assertEquals(1.0F, outText[3].boost(), 0.0F); assertEquals(1.0F, outText[4].boost(), 0.0F); // copyField dest with no norms should not have received any boost assertEquals(1.0F, outNoNorms[0].boost(), 0.0F); assertEquals(1.0F, outNoNorms[1].boost(), 0.0F); // now index that SolrInputDocument to check the computed norms assertU(adoc(doc)); assertU(commit()); SolrQueryRequest req = req("q", "id:42"); try { // very hack-ish SolrQueryResponse rsp = new SolrQueryResponse(); core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp); DocList dl = ((ResultContext) rsp.getValues().get("response")).docs; assertTrue("can't find the doc we just added", 1 == dl.size()); int docid = dl.iterator().nextDoc(); SolrIndexSearcher searcher = req.getSearcher(); AtomicReader reader = SlowCompositeReaderWrapper.wrap(searcher.getTopReaderContext().reader()); assertTrue( "similarity doesn't extend DefaultSimilarity, " + "config or defaults have changed since test was written", searcher.getSimilarity() instanceof DefaultSimilarity); DefaultSimilarity sim = (DefaultSimilarity) searcher.getSimilarity(); NumericDocValues titleNorms = reader.getNormValues("title"); NumericDocValues fooNorms = reader.getNormValues("foo_t"); NumericDocValues textNorms = reader.getNormValues("text"); assertEquals(expectedNorm(sim, 2, TITLE_BOOST * DOC_BOOST), titleNorms.get(docid)); assertEquals(expectedNorm(sim, 8 - 3, FOO_BOOST * DOC_BOOST), fooNorms.get(docid)); 
assertEquals( expectedNorm(sim, 2 + 8 - 3, TITLE_BOOST * FOO_BOOST * DOC_BOOST), textNorms.get(docid)); } finally { req.close(); } }
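// Worked arithmetic behind the expectedNorm assertions above (an illustrative note, not part of the
// original test; the "3 dropped tokens" reading assumes the analyzer removes the stopwords "in",
// "the", "is" from the foo_t values):
//   title : 2 tokens,         boost = TITLE_BOOST * DOC_BOOST              = (2*7) * 3         = 42
//   foo_t : 8 - 3 = 5 tokens, boost = FOO_BOOST * DOC_BOOST                = (1*5*11) * 3      = 165
//   text  : 2 + 8 - 3 = 7 tokens (copyField of title and foo_t),
//           boost = TITLE_BOOST * FOO_BOOST * DOC_BOOST                    = (2*7)*(1*5*11)*3  = 2310
// With the DefaultSimilarity checked above, each stored norm is roughly boost / sqrt(numTokens),
// byte-encoded, which is what the expectedNorm helper is presumed to compute.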
@Test public void testBufferingFlags() throws Exception { DirectUpdateHandler2.commitOnClose = false; final Semaphore logReplayFinish = new Semaphore(0); UpdateLog.testing_logReplayFinishHook = new Runnable() { @Override public void run() { logReplayFinish.release(); } }; SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); try { clearIndex(); assertU(commit()); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); ulog.bufferUpdates(); // simulate updates from a leader updateJ( jsonAdd(sdoc("id", "Q1", "_version_", "101")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); updateJ( jsonAdd(sdoc("id", "Q2", "_version_", "102")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); updateJ( jsonAdd(sdoc("id", "Q3", "_version_", "103")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); logReplayFinish.acquire(); // wait for replay to finish assertTrue( (ulog.getStartingOperation() & UpdateLog.FLAG_GAP) != 0); // since we died while buffering, we should see this last // // Try again to ensure that the previous log replay didn't wipe out our flags // req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) != 0); // now do some normal non-buffered adds updateJ( jsonAdd(sdoc("id", "Q4", "_version_", "114")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); updateJ( jsonAdd(sdoc("id", "Q5", "_version_", "115")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); updateJ( jsonAdd(sdoc("id", "Q6", "_version_", "116")), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertU(commit()); req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) == 0); ulog.bufferUpdates(); // simulate receiving no updates ulog.applyBufferedUpdates(); updateJ( jsonAdd(sdoc("id", "Q7", "_version_", "117")), params( DISTRIB_UPDATE_PARAM, FROM_LEADER)); // do another add to make sure flags are back to normal req.close(); h.close(); createCore(); req = req(); uhandler = req.getCore().getUpdateHandler(); ulog = uhandler.getUpdateLog(); assertTrue((ulog.getStartingOperation() & UpdateLog.FLAG_GAP) == 0); // check flags on Q7 logReplayFinish.acquire(); assertEquals( UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state } finally { DirectUpdateHandler2.commitOnClose = true; UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; req().close(); } }