public boolean sync(
    ZkController zkController,
    SolrCore core,
    ZkNodeProps leaderProps,
    boolean peerSyncOnlyWithActive) {
  if (SKIP_AUTO_RECOVERY) {
    return true;
  }

  boolean success;
  SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
  SolrQueryResponse rsp = new SolrQueryResponse();
  SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
  try {
    if (isClosed) {
      log.warn("Closed, skipping sync up.");
      return false;
    }
    log.info("Sync replicas to " + ZkCoreNodeProps.getCoreUrl(leaderProps));

    if (core.getUpdateHandler().getUpdateLog() == null) {
      log.error("No UpdateLog found - cannot sync");
      return false;
    }

    success = syncReplicas(zkController, core, leaderProps, peerSyncOnlyWithActive);
  } finally {
    SolrRequestInfo.clearRequestInfo();
  }
  return success;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
  SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
  CoreContainer container = info.getReq().getCore().getCoreDescriptor().getCoreContainer();

  final SolrCore fromCore = container.getCore(fromIndex);
  if (fromCore == null) {
    throw new SolrException(
        SolrException.ErrorCode.BAD_REQUEST, "Cross-core join: no such core " + fromIndex);
  }

  RefCounted<SolrIndexSearcher> fromHolder = fromCore.getRegisteredSearcher();
  final Query joinQuery;
  try {
    joinQuery =
        JoinUtil.createJoinQuery(
            fromField, true, toField, fromQuery, fromHolder.get(), scoreMode);
  } finally {
    fromCore.close();
    if (fromHolder != null) fromHolder.decref();
  }
  return joinQuery.rewrite(reader);
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
  SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
  final Query jq =
      JoinUtil.createJoinQuery(
          fromField, true, toField, fromQuery, info.getReq().getSearcher(), scoreMode);
  return jq.rewrite(reader);
}
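/*
 * Both rewrite() implementations above assume SolrRequestInfo.getRequestInfo() is non-null,
 * which only holds on a thread that currently has a request bound (setRequestInfo was called
 * and clearRequestInfo has not yet run). A minimal defensive sketch, assuming the same fields
 * as the method above; the exception type and message are illustrative, not Solr's actual
 * behavior:
 */
@Override
public Query rewrite(IndexReader reader) throws IOException {
  SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
  if (info == null) {
    // No request is bound to this thread (e.g. a background or test thread),
    // so there is no request-scoped searcher to join against.
    throw new IllegalStateException("join query must be rewritten inside a Solr request");
  }
  final Query jq =
      JoinUtil.createJoinQuery(
          fromField, true, toField, fromQuery, info.getReq().getSearcher(), scoreMode);
  return jq.rewrite(reader);
}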
@Override
public FuncValues getValues(QueryContext context, AtomicReaderContext readerContext)
    throws IOException {
  if (context.get(this) == null) {
    SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo();
    throw new SolrException(
        SolrException.ErrorCode.BAD_REQUEST,
        "testfunc: unweighted value source detected. delegate="
            + source
            + " request="
            + (requestInfo == null ? "null" : requestInfo.getReq()));
  }
  return source.getValues(context, readerContext);
}
public TopDocsCollector getTopDocsCollector(
    int len, SolrIndexSearcher.QueryCommand cmd, IndexSearcher searcher) throws IOException {
  if (this.boostedPriority == null) {
    SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
    if (info != null) {
      Map context = info.getReq().getContext();
      this.boostedPriority =
          (Map<BytesRef, Integer>) context.get(QueryElevationComponent.BOOSTED_PRIORITY);
    }
  }

  return new ReRankCollector(
      reRankDocs, length, reRankQuery, reRankWeight, cmd, searcher, boostedPriority, scale);
}
/**
 * Get the Solr document list from a query response. This differs from getResponseByParams in
 * that it only creates the fields of the response; no search snippets and no facets are
 * generated.
 *
 * @param params the query parameters
 * @return the list of matching documents
 * @throws IOException if the query fails or returns no response
 * @throws SolrException if the request is invalid
 */
@Override
public SolrDocumentList getDocumentListByParams(ModifiableSolrParams params)
    throws IOException, SolrException {
  SolrQueryRequest req = this.request(params);
  SolrQueryResponse response = null;
  String q = params.get(CommonParams.Q);
  String fq = params.get(CommonParams.FQ);
  String sort = params.get(CommonParams.SORT);
  String threadname = Thread.currentThread().getName();
  try {
    if (q != null)
      Thread.currentThread()
          .setName(
              "solr query: q = "
                  + q
                  + (fq == null ? "" : ", fq = " + fq)
                  + (sort == null ? "" : ", sort = " + sort)); // for debugging in Threaddump
    response = this.query(req);
    if (q != null) Thread.currentThread().setName(threadname);
    if (response == null) throw new IOException("response == null");
    return SolrQueryResponse2SolrDocumentList(req, response);
  } finally {
    req.close();
    SolrRequestInfo.clearRequestInfo();
  }
}
private void handleFilterExclusions() throws IOException {
  List<String> excludeTags = freq.domain.excludeTags;

  if (excludeTags == null || excludeTags.size() == 0) {
    return;
  }

  // TODO: somehow remove responsebuilder dependency
  ResponseBuilder rb = SolrRequestInfo.getRequestInfo().getResponseBuilder();
  Map tagMap = (Map) rb.req.getContext().get("tags");
  if (tagMap == null) {
    // no filters were tagged
    return;
  }

  IdentityHashMap<Query, Boolean> excludeSet = new IdentityHashMap<>();
  for (String excludeTag : excludeTags) {
    Object olst = tagMap.get(excludeTag);
    // tagMap has entries of List<String,List<QParser>>, but subject to change in the future
    if (!(olst instanceof Collection)) continue;
    for (Object o : (Collection<?>) olst) {
      if (!(o instanceof QParser)) continue;
      QParser qp = (QParser) o;
      try {
        excludeSet.put(qp.getQuery(), Boolean.TRUE);
      } catch (SyntaxError syntaxError) {
        // This should not happen since we should only be retrieving a previously parsed query
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, syntaxError);
      }
    }
  }
  if (excludeSet.size() == 0) return;

  List<Query> qlist = new ArrayList<>();

  // add the base query
  if (!excludeSet.containsKey(rb.getQuery())) {
    qlist.add(rb.getQuery());
  }

  // add the filters
  if (rb.getFilters() != null) {
    for (Query q : rb.getFilters()) {
      if (!excludeSet.containsKey(q)) {
        qlist.add(q);
      }
    }
  }

  // now walk back up the context tree
  // TODO: we lose parent exclusions...
  for (FacetContext curr = fcontext; curr != null; curr = curr.parent) {
    if (curr.filter != null) {
      qlist.add(curr.filter);
    }
  }

  // recompute the base domain
  fcontext.base = fcontext.searcher.getDocSet(qlist);
}
void destroy() {
  try {
    if (solrReq != null) {
      log.debug("Closing out SolrRequest: {}", solrReq);
      solrReq.close();
    }
  } finally {
    try {
      if (core != null) core.close();
    } finally {
      SolrRequestInfo.clearRequestInfo();
    }
    AuthenticationPlugin authcPlugin = cores.getAuthenticationPlugin();
    if (authcPlugin != null) authcPlugin.closeRequest();
  }
}
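/*
 * destroy() is the tear-down half of the request lifecycle: SolrRequestInfo is bound to the
 * current thread elsewhere (see the setRequestInfo calls in the surrounding snippets) and must
 * always be cleared, even on error. A minimal sketch of the overall pairing, assuming a
 * hypothetical handleRequestBody stands in for whatever work runs in between (the method names
 * here are illustrative, not Solr's dispatch code):
 */
void handleWithRequestInfo(SolrQueryRequest req) {
  SolrQueryResponse rsp = new SolrQueryResponse();
  SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); // bind to this thread
  try {
    handleRequestBody(req, rsp); // anything on this thread can now call getRequestInfo()
  } finally {
    SolrRequestInfo.clearRequestInfo(); // never leak the thread-local to the next request
  }
}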
/**
 * Extract the handler from the URL path if it is not already set. For /schema paths no handler
 * is resolved; the action field is set instead.
 */
private void extractHandlerFromURLPath(SolrRequestParsers parser) throws Exception {
  if (handler == null && path.length() > 1) { // don't match "" or "/" as valid path
    handler = core.getRequestHandler(path);

    if (handler == null) {
      // may be a restlet path
      // Handle /schema/* paths via Restlet
      if (path.equals("/schema") || path.startsWith("/schema/")) {
        solrReq = parser.parse(core, path, req);
        SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, new SolrQueryResponse()));
        if (path.equals(req.getServletPath())) {
          // avoid endless loop - pass through to Restlet via webapp
          action = PASSTHROUGH;
          return;
        } else {
          // forward rewritten URI (without path prefix and core/collection name) to Restlet
          action = FORWARD;
          return;
        }
      }
    }

    // no handler yet but allowed to handle select; let's check
    if (handler == null && parser.isHandleSelect()) {
      if ("/select".equals(path) || "/select/".equals(path)) {
        solrReq = parser.parse(core, path, req);

        invalidStates = checkStateIsValid(solrReq.getParams().get(CloudSolrClient.STATE_VERSION));
        String qt = solrReq.getParams().get(CommonParams.QT);
        handler = core.getRequestHandler(qt);
        if (handler == null) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
        }
        if (qt != null && qt.startsWith("/") && (handler instanceof ContentStreamHandlerBase)) {
          // For security reasons it's a bad idea to allow a leading '/', ex: /select?qt=/update
          // see SOLR-3161
          // There was no restriction from Solr 1.4 thru 3.5 and it's not supported for update
          // handlers.
          throw new SolrException(
              SolrException.ErrorCode.BAD_REQUEST,
              "Invalid Request Handler ('qt'). Do not use /select to access: " + qt);
        }
      }
    }
  }
}
public DistributedUpdateProcessor(
    SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
  super(next);
  this.rsp = rsp;
  this.next = next;
  this.idField = req.getSchema().getUniqueKeyField();

  // version init
  this.updateHandler = req.getCore().getUpdateHandler();
  this.ulog = updateHandler.getUpdateLog();
  this.vinfo = ulog == null ? null : ulog.getVersionInfo();
  versionsStored = this.vinfo != null && this.vinfo.getVersionField() != null;
  returnVersions = req.getParams().getBool(UpdateParams.VERSIONS, false);

  // TODO: better way to get the response, or pass back info to it?
  SolrRequestInfo reqInfo = returnVersions ? SolrRequestInfo.getRequestInfo() : null;

  this.req = req;

  CoreDescriptor coreDesc = req.getCore().getCoreDescriptor();

  this.zkEnabled = coreDesc.getCoreContainer().isZooKeeperAware();
  zkController = req.getCore().getCoreDescriptor().getCoreContainer().getZkController();
  if (zkEnabled) {
    numNodes = zkController.getZkStateReader().getClusterState().getLiveNodes().size();
    cmdDistrib =
        new SolrCmdDistributor(
            numNodes, coreDesc.getCoreContainer().getZkController().getCmdDistribExecutor());
  }
  // this.rsp = reqInfo != null ? reqInfo.getRsp() : null;

  cloudDesc = coreDesc.getCloudDescriptor();

  if (cloudDesc != null) {
    collection = cloudDesc.getCollectionName();
  }
}
public DelegatingCollector getFilterCollector(IndexSearcher indexSearcher) {
  try {
    SolrIndexSearcher searcher = (SolrIndexSearcher) indexSearcher;
    SortedDocValues docValues = null;
    FunctionQuery funcQuery = null;

    docValues = DocValues.getSorted(searcher.getLeafReader(), this.field);

    FieldType fieldType = null;

    if (this.max != null) {
      if (this.max.indexOf("(") == -1) {
        fieldType = searcher.getSchema().getField(this.max).getType();
      } else {
        LocalSolrQueryRequest request = null;
        try {
          SolrParams params = new ModifiableSolrParams();
          request = new LocalSolrQueryRequest(searcher.getCore(), params);
          FunctionQParser functionQParser = new FunctionQParser(this.max, null, null, request);
          funcQuery = (FunctionQuery) functionQParser.parse();
        } catch (Exception e) {
          throw new IOException(e);
        } finally {
          request.close();
        }
      }
    }

    if (this.min != null) {
      if (this.min.indexOf("(") == -1) {
        fieldType = searcher.getSchema().getField(this.min).getType();
      } else {
        LocalSolrQueryRequest request = null;
        try {
          SolrParams params = new ModifiableSolrParams();
          request = new LocalSolrQueryRequest(searcher.getCore(), params);
          FunctionQParser functionQParser = new FunctionQParser(this.min, null, null, request);
          funcQuery = (FunctionQuery) functionQParser.parse();
        } catch (Exception e) {
          throw new IOException(e);
        } finally {
          request.close();
        }
      }
    }

    int maxDoc = searcher.maxDoc();
    int leafCount = searcher.getTopReaderContext().leaves().size();

    // Deal with boosted docs.
    // We have to deal with it here rather than in the constructor because
    // the QueryElevationComponent runs after the Queries are constructed.
    IntIntOpenHashMap boostDocs = null;
    Map context = null;
    SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
    if (info != null) {
      context = info.getReq().getContext();
    }

    if (this.boosted == null && context != null) {
      this.boosted =
          (Map<BytesRef, Integer>) context.get(QueryElevationComponent.BOOSTED_PRIORITY);
    }

    boostDocs = getBoostDocs(searcher, this.boosted, context);

    if (this.min != null || this.max != null) {
      return new CollapsingFieldValueCollector(
          maxDoc,
          leafCount,
          docValues,
          this.nullPolicy,
          max != null ? this.max : this.min,
          max != null,
          this.needsScores,
          fieldType,
          boostDocs,
          funcQuery,
          searcher);
    } else {
      return new CollapsingScoreCollector(maxDoc, leafCount, docValues, this.nullPolicy, boostDocs);
    }
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/** This method processes the request. */
public Action call() throws IOException {
  MDCLoggingContext.reset();
  MDCLoggingContext.setNode(cores);

  if (cores == null) {
    sendError(503, "Server is shutting down or failed to initialize");
    return RETURN;
  }

  if (solrDispatchFilter.abortErrorMessage != null) {
    sendError(500, solrDispatchFilter.abortErrorMessage);
    return RETURN;
  }

  try {
    init();
    /* Authorize the request if
       1. Authorization is enabled, and
       2. The requested resource is not a known static file
    */
    if (cores.getAuthorizationPlugin() != null && shouldAuthorize()) {
      AuthorizationContext context = getAuthCtx();
      log.debug("AuthorizationContext : {}", context);
      AuthorizationResponse authResponse = cores.getAuthorizationPlugin().authorize(context);
      if (authResponse.statusCode == AuthorizationResponse.PROMPT.statusCode) {
        Map<String, String> headers =
            (Map) getReq().getAttribute(AuthenticationPlugin.class.getName());
        if (headers != null) {
          for (Map.Entry<String, String> e : headers.entrySet())
            response.setHeader(e.getKey(), e.getValue());
        }
        log.debug(
            "USER_REQUIRED " + req.getHeader("Authorization") + " " + req.getUserPrincipal());
      }
      if (!(authResponse.statusCode == HttpStatus.SC_ACCEPTED)
          && !(authResponse.statusCode == HttpStatus.SC_OK)) {
        log.info(
            "USER_REQUIRED auth header {} context : {} ",
            req.getHeader("Authorization"),
            context);
        sendError(
            authResponse.statusCode,
            "Unauthorized request, Response code: " + authResponse.statusCode);
        return RETURN;
      }
    }

    HttpServletResponse resp = response;
    switch (action) {
      case ADMIN:
        handleAdminRequest();
        return RETURN;
      case REMOTEQUERY:
        remoteQuery(coreUrl + path, resp);
        return RETURN;
      case PROCESS:
        final Method reqMethod = Method.getMethod(req.getMethod());
        HttpCacheHeaderUtil.setCacheControlHeader(config, resp, reqMethod);
        // unless we have been explicitly told not to, do cache validation
        // if we fail cache validation, execute the query
        if (config.getHttpCachingConfig().isNever304()
            || !HttpCacheHeaderUtil.doCacheHeaderValidation(solrReq, req, reqMethod, resp)) {
          SolrQueryResponse solrRsp = new SolrQueryResponse();
          /* even for HEAD requests, we need to execute the handler to
           * ensure we don't get an error (and to make sure the correct
           * QueryResponseWriter is selected and we get the correct
           * Content-Type)
           */
          SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, solrRsp));
          execute(solrRsp);
          HttpCacheHeaderUtil.checkHttpCachingVeto(solrRsp, resp, reqMethod);
          Iterator<Map.Entry<String, String>> headers = solrRsp.httpHeaders();
          while (headers.hasNext()) {
            Map.Entry<String, String> entry = headers.next();
            resp.addHeader(entry.getKey(), entry.getValue());
          }
          QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq);
          if (invalidStates != null)
            solrReq.getContext().put(CloudSolrClient.STATE_VERSION, invalidStates);
          writeResponse(solrRsp, responseWriter, reqMethod);
        }
        return RETURN;
      default:
        return action;
    }
  } catch (Throwable ex) {
    sendError(ex);
    // walk the entire cause chain to search for an Error
    Throwable t = ex;
    while (t != null) {
      if (t instanceof Error) {
        if (t != ex) {
          log.error(
              "An Error was wrapped in another exception - please report complete stacktrace on SOLR-6161",
              ex);
        }
        throw (Error) t;
      }
      t = t.getCause();
    }
    return RETURN;
  } finally {
    MDCLoggingContext.clear();
  }
}
public TopDocs topDocs(int start, int howMany) {
  try {
    TopDocs mainDocs = mainCollector.topDocs(0, Math.max(reRankDocs, length));

    if (mainDocs.totalHits == 0 || mainDocs.scoreDocs.length == 0) {
      return mainDocs;
    }

    if (boostedPriority != null) {
      SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
      Map requestContext = null;
      if (info != null) {
        requestContext = info.getReq().getContext();
      }

      IntIntOpenHashMap boostedDocs =
          QueryElevationComponent.getBoostDocs(
              (SolrIndexSearcher) searcher, boostedPriority, requestContext);

      ScoreDoc[] mainScoreDocs = mainDocs.scoreDocs;
      ScoreDoc[] reRankScoreDocs = new ScoreDoc[Math.min(mainScoreDocs.length, reRankDocs)];
      System.arraycopy(mainScoreDocs, 0, reRankScoreDocs, 0, reRankScoreDocs.length);

      mainDocs.scoreDocs = reRankScoreDocs;

      Map<Integer, Float> scoreMap = getScoreMap(mainDocs.scoreDocs, mainDocs.scoreDocs.length);

      TopDocs rescoredDocs =
          new QueryRescorer(reRankQuery) {
            @Override
            protected float combine(
                float firstPassScore, boolean secondPassMatches, float secondPassScore) {
              float score = firstPassScore;
              if (secondPassMatches) {
                score += reRankWeight * secondPassScore;
              }
              return score;
            }
          }.rescore(searcher, mainDocs, mainDocs.scoreDocs.length);

      Arrays.sort(
          rescoredDocs.scoreDocs,
          new BoostedComp(boostedDocs, mainDocs.scoreDocs, rescoredDocs.getMaxScore()));

      // Lower howMany if we've collected fewer documents.
      howMany = Math.min(howMany, mainScoreDocs.length);

      if (howMany == rescoredDocs.scoreDocs.length) {
        if (scale) {
          scaleScores(rescoredDocs, scoreMap);
        }
        return rescoredDocs; // Just return the rescoredDocs
      } else if (howMany > rescoredDocs.scoreDocs.length) {
        // We need to return more than we've reRanked, so create the combined page.
        ScoreDoc[] scoreDocs = new ScoreDoc[howMany];
        // lay down the initial docs
        System.arraycopy(mainScoreDocs, 0, scoreDocs, 0, scoreDocs.length);
        // overlay the re-ranked docs
        System.arraycopy(rescoredDocs.scoreDocs, 0, scoreDocs, 0, rescoredDocs.scoreDocs.length);
        rescoredDocs.scoreDocs = scoreDocs;
        if (scale) {
          scaleScores(rescoredDocs, scoreMap);
        }
        return rescoredDocs;
      } else {
        // We've rescored more than we need to return.
        ScoreDoc[] scoreDocs = new ScoreDoc[howMany];
        System.arraycopy(rescoredDocs.scoreDocs, 0, scoreDocs, 0, howMany);
        rescoredDocs.scoreDocs = scoreDocs;
        if (scale) {
          scaleScores(rescoredDocs, scoreMap);
        }
        return rescoredDocs;
      }
    } else {
      ScoreDoc[] mainScoreDocs = mainDocs.scoreDocs;

      /*
       * Create the array for the reRankScoreDocs.
       */
      ScoreDoc[] reRankScoreDocs = new ScoreDoc[Math.min(mainScoreDocs.length, reRankDocs)];

      /*
       * Copy the initial results into the reRankScoreDocs array.
       */
      System.arraycopy(mainScoreDocs, 0, reRankScoreDocs, 0, reRankScoreDocs.length);

      mainDocs.scoreDocs = reRankScoreDocs;

      Map<Integer, Float> scoreMap = getScoreMap(mainDocs.scoreDocs, mainDocs.scoreDocs.length);

      TopDocs rescoredDocs =
          new QueryRescorer(reRankQuery) {
            @Override
            protected float combine(
                float firstPassScore, boolean secondPassMatches, float secondPassScore) {
              float score = firstPassScore;
              if (secondPassMatches) {
                score += reRankWeight * secondPassScore;
              }
              return score;
            }
          }.rescore(searcher, mainDocs, mainDocs.scoreDocs.length);

      // Lower howMany to return if we've collected fewer documents.
      howMany = Math.min(howMany, mainScoreDocs.length);

      if (howMany == rescoredDocs.scoreDocs.length) {
        if (scale) {
          scaleScores(rescoredDocs, scoreMap);
        }
        return rescoredDocs; // Just return the rescoredDocs
      } else if (howMany > rescoredDocs.scoreDocs.length) {
        // We need to return more than we've reRanked, so create the combined page.
        ScoreDoc[] scoreDocs = new ScoreDoc[howMany];
        // lay down the initial docs
        System.arraycopy(mainScoreDocs, 0, scoreDocs, 0, scoreDocs.length);
        // overlay the re-ranked docs
        System.arraycopy(rescoredDocs.scoreDocs, 0, scoreDocs, 0, rescoredDocs.scoreDocs.length);
        rescoredDocs.scoreDocs = scoreDocs;
        if (scale) {
          assert (scoreMap != null);
          scaleScores(rescoredDocs, scoreMap);
        }
        return rescoredDocs;
      } else {
        // We've rescored more than we need to return.
        ScoreDoc[] scoreDocs = new ScoreDoc[howMany];
        System.arraycopy(rescoredDocs.scoreDocs, 0, scoreDocs, 0, howMany);
        rescoredDocs.scoreDocs = scoreDocs;
        if (scale) {
          scaleScores(rescoredDocs, scoreMap);
        }
        return rescoredDocs;
      }
    }
  } catch (Exception e) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
  }
}
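/*
 * The combine() overrides above implement an additive re-rank: the first-pass score is kept
 * and, when the re-rank query also matches, reRankWeight * secondPassScore is added on top.
 * For example, with firstPassScore = 2.0f, reRankWeight = 3.0f and secondPassScore = 0.5f,
 * the combined score is 2.0f + 3.0f * 0.5f = 3.5f; documents the re-rank query does not match
 * keep their original 2.0f.
 */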
public String _format(LoggingEvent event) {
  String message = (String) event.getMessage();
  if (message == null) {
    message = "";
  }
  StringBuilder sb = new StringBuilder(message.length() + 80);

  long now = event.timeStamp;
  long timeFromStart = now - startTime;
  long timeSinceLast = now - lastTime;
  lastTime = now;
  String shortClassName =
      getShortClassName(
          event.getLocationInformation().getClassName(),
          event.getLocationInformation().getMethodName());

  /*
   * sb.append(timeFromStart).append(' ').append(timeSinceLast);
   * sb.append(' ');
   * sb.append(record.getSourceClassName()).append('.').append(record.getSourceMethodName());
   * sb.append(' ');
   * sb.append(record.getLevel());
   */

  SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo();
  SolrQueryRequest req = requestInfo == null ? null : requestInfo.getReq();
  SolrCore core = req == null ? null : req.getCore();
  ZkController zkController = null;
  CoreInfo info = null;

  if (core != null) {
    info = coreInfoMap.get(core.hashCode());
    if (info == null) {
      info = new CoreInfo();
      info.shortId = "C" + Integer.toString(CoreInfo.maxCoreNum++);
      coreInfoMap.put(core.hashCode(), info);

      if (sb.length() == 0) sb.append("ASYNC ");
      sb.append(" NEW_CORE " + info.shortId);
      sb.append(" name=" + core.getName());
      sb.append(" " + core);
    }

    zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
    if (zkController != null) {
      if (info.url == null) {
        info.url = zkController.getBaseUrl() + "/" + core.getName();
        sb.append(" url=" + info.url + " node=" + zkController.getNodeName());
      }

      Map<String, Object> coreProps = getReplicaProps(zkController, core);
      if (info.coreProps == null || !coreProps.equals(info.coreProps)) {
        info.coreProps = coreProps;
        final String corePropsString =
            "coll:"
                + core.getCoreDescriptor().getCloudDescriptor().getCollectionName()
                + " core:"
                + core.getName()
                + " props:"
                + coreProps;
        sb.append(" " + info.shortId + "_STATE=" + corePropsString);
      }
    }
  }

  if (sb.length() > 0) sb.append('\n');
  sb.append(timeFromStart);

  // sb.append("\nL").append(record.getSequenceNumber()); // log number is
  // useful for sequencing when looking at multiple parts of a log file, but
  // ms since start should be fine.
  appendThread(sb, event);

  appendMDC(sb);

  // todo: should be able to get port from core container for non zk tests
  if (info != null) {
    sb.append(' ').append(info.shortId); // core
  }
  if (shortClassName.length() > 0) {
    sb.append(' ').append(shortClassName);
  }
  if (event.getLevel() != Level.INFO) {
    sb.append(' ').append(event.getLevel());
  }
  sb.append(' ');
  appendMultiLineString(sb, message);
  ThrowableInformation thInfo = event.getThrowableInformation();
  if (thInfo != null) {
    Throwable th = event.getThrowableInformation().getThrowable();
    if (th != null) {
      sb.append(' ');
      String err = SolrException.toStr(th);
      String ignoredMsg = SolrException.doIgnore(th, err);
      if (ignoredMsg != null) {
        sb.append(ignoredMsg);
      } else {
        sb.append(err);
      }
    }
  }

  sb.append('\n');

  /*
   * Isn't core specific... prob better logged from zkController
   * if (info != null) {
   *   ClusterState clusterState = zkController.getClusterState();
   *   if (info.clusterState != clusterState) {
   *     // something has changed in the matrix...
   *     sb.append(zkController.getBaseUrl() + " sees new ClusterState:");
   *   }
   * }
   */

  return sb.toString();
}
@SuppressWarnings("unchecked") @Override public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException { final FunctionValues targetFuncValues = targetValueSource.getValues(context, readerContext); // The key is a pair of leaf reader with a docId relative to that reader. The value is a Map // from field to Shape. final SolrCache<PerSegCacheKey, Shape> cache = SolrRequestInfo.getRequestInfo() .getReq() .getSearcher() .getCache(CACHE_KEY_PREFIX + fieldName); if (cache == null) { return targetFuncValues; // no caching; no configured cache } return new FunctionValues() { int docId = -1; Shape shape = null; private void setShapeFromDoc(int doc) { if (docId == doc) { return; } docId = doc; // lookup in cache PerSegCacheKey key = new PerSegCacheKey(readerContext.reader().getCoreCacheKey(), doc); shape = cache.get(key); if (shape == null) { shape = (Shape) targetFuncValues.objectVal(doc); if (shape != null) { cache.put(key, shape); } } else { // optimize shape on a cache hit if possible. This must be thread-safe and it is. if (shape instanceof JtsGeometry) { ((JtsGeometry) shape) .index(); // TODO would be nice if some day we didn't have to cast } } } // Use the cache for exists & objectVal; @Override public boolean exists(int doc) { setShapeFromDoc(doc); return shape != null; } @Override public Object objectVal(int doc) { setShapeFromDoc(doc); return shape; } @Override public Explanation explain(int doc) { return targetFuncValues.explain(doc); } @Override public String toString(int doc) { return targetFuncValues.toString(doc); } }; }
public void handleRequest(RequestGetter requestGetter) {
  MDCLoggingContext.reset();
  MDCLoggingContext.setNode(cores);

  String path = requestGetter.getPath();
  solrParams = requestGetter.getSolrParams();
  SolrRequestHandler handler = null;
  String corename = "";
  String origCorename = null;
  try {
    // set a request timer which can be reused by requests if needed
    // req.setAttribute(SolrRequestParsers.REQUEST_TIMER_SERVLET_ATTRIBUTE, new RTimer());
    // put the core container in request attribute
    // req.setAttribute("org.apache.solr.CoreContainer", cores);

    // check for management path
    String alternate = cores.getManagementPath();
    if (alternate != null && path.startsWith(alternate)) {
      path = path.substring(0, alternate.length());
    }
    // unused feature ?
    int idx = path.indexOf(':');
    if (idx > 0) {
      // save the portion after the ':' for a 'handler' path parameter
      path = path.substring(0, idx);
    }

    boolean usingAliases = false;
    List<String> collectionsList = null;

    // Check for container handlers
    handler = cores.getRequestHandler(path);
    if (handler != null) {
      solrReq = parseSolrQueryRequest(SolrRequestParsers.DEFAULT, requestGetter);
      handleAdminRequest(handler, solrReq);
      return;
    } else {
      // otherwise, we should find a core from the path
      idx = path.indexOf("/", 1);
      if (idx > 1) {
        // try to get the corename as a request parameter first
        corename = path.substring(1, idx);

        // look at aliases
        if (cores.isZooKeeperAware()) {
          origCorename = corename;
          ZkStateReader reader = cores.getZkController().getZkStateReader();
          aliases = reader.getAliases();
          if (aliases != null && aliases.collectionAliasSize() > 0) {
            usingAliases = true;
            String alias = aliases.getCollectionAlias(corename);
            if (alias != null) {
              collectionsList = StrUtils.splitSmart(alias, ",", true);
              corename = collectionsList.get(0);
            }
          }
        }

        core = cores.getCore(corename);
        if (core != null) {
          path = path.substring(idx);
        }
      }

      // add collection name
      if (core == null && StringUtils.isNotBlank(requestGetter.getCollection())) {
        corename = requestGetter.getCollection();
        core = cores.getCore(corename);
      }

      if (core == null) {
        if (!cores.isZooKeeperAware()) {
          core = cores.getCore("");
        }
      }
    }

    if (core == null && cores.isZooKeeperAware()) {
      // we couldn't find the core - lets make sure a collection was not specified instead
      core = getCoreByCollection(cores, corename);

      if (core != null) {
        // we found a core, update the path
        path = path.substring(idx);
      }

      // try the default core
      if (core == null) {
        core = cores.getCore("");
        if (core != null) {
          // a default core was found; fall through and handle the request with it
        }
      }
    }

    // With a valid core...
    if (core != null) {
      MDCLoggingContext.setCore(core);
      final SolrConfig config = core.getSolrConfig();
      // get or create/cache the parser for the core
      SolrRequestParsers parser = config.getRequestParsers();

      // Determine the handler from the url path if not set
      // (we might already have selected the cores handler)
      if (handler == null && path.length() > 1) { // don't match "" or "/" as valid path
        handler = core.getRequestHandler(path);

        if (handler == null) {
          // may be a restlet path
          // Handle /schema/* paths via Restlet
          if (path.equals("/schema") || path.startsWith("/schema/")) {
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST, "unsupport /schema/**, use http solr");
          }
        }
        // no handler yet but allowed to handle select; let's check
        if (handler == null && parser.isHandleSelect()) {
          if ("/select".equals(path) || "/select/".equals(path)) {
            solrReq = parseSolrQueryRequest(parser, requestGetter);

            invalidStates =
                checkStateIsValid(cores, solrReq.getParams().get(CloudSolrClient.STATE_VERSION));
            String qt = solrReq.getParams().get(CommonParams.QT);
            handler = core.getRequestHandler(qt);
            if (handler == null) {
              throw new SolrException(
                  SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
            }
            if (qt != null && qt.startsWith("/") && (handler instanceof ContentStreamHandlerBase)) {
              // For security reasons it's a bad idea to allow a leading '/', ex:
              // /select?qt=/update see SOLR-3161
              // There was no restriction from Solr 1.4 thru 3.5 and it's not supported for update
              // handlers.
              throw new SolrException(
                  SolrException.ErrorCode.BAD_REQUEST,
                  "Invalid Request Handler ('qt'). Do not use /select to access: " + qt);
            }
          }
        }
      }

      // With a valid handler and a valid core...
      if (handler != null) {
        // if not a /select, create the request
        if (solrReq == null) {
          solrReq = parseSolrQueryRequest(parser, requestGetter);
        }

        if (usingAliases) {
          processAliases(solrReq, aliases, collectionsList);
        }

        SolrQueryResponse solrRsp = new SolrQueryResponse();
        SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, solrRsp));
        this.execute(handler, solrReq, solrRsp);
        QueryResponseWriter responseWriter = core.getQueryResponseWriter(solrReq);
        if (invalidStates != null)
          solrReq.getContext().put(CloudSolrClient.STATE_VERSION, invalidStates);
        writeResponse(solrRsp, responseWriter, solrReq);
        return; // we are done with a valid handler
      }
    }

    logger.debug("no handler or core retrieved for {}, follow through...", path);
    throw new SolrException(
        SolrException.ErrorCode.BAD_REQUEST, "no handler or core retrieved for " + path);
  } catch (Throwable ex) {
    sendError(core, solrReq, ex);
    // walk the entire cause chain to search for an Error
    Throwable t = ex;
    while (t != null) {
      if (t instanceof Error) {
        if (t != ex) {
          logger.error(
              "An Error was wrapped in another exception - please report complete stacktrace on SOLR-6161",
              ex);
        }
        throw (Error) t;
      }
      t = t.getCause();
    }
    return;
  } finally {
    try {
      if (solrReq != null) {
        logger.debug("Closing out SolrRequest: {}", solrReq);
        solrReq.close();
      }
    } finally {
      try {
        if (core != null) {
          core.close();
        }
      } finally {
        SolrRequestInfo.clearRequestInfo();
      }
    }
    MDCLoggingContext.clear();
  }
}