/**
 * Handles daemon-administration actions ("stop", "start", "list", "kill") dispatched from
 * handleRequestBody via the "action" request parameter. Responses are written to the
 * "result-set" key of rsp.
 *
 * <p>Note: the "Deamon:" spelling in the response messages is preserved for backward
 * compatibility with existing clients that parse these messages.
 *
 * @param req the current request (unused directly; kept for handler-signature symmetry)
 * @param rsp the response to which the result stream is added
 * @param params request parameters carrying "action" and (except for "list") "id"
 */
private void handleAdmin(SolrQueryRequest req, SolrQueryResponse rsp, SolrParams params) {
  String action = params.get("action");
  if ("stop".equalsIgnoreCase(action)) {
    String id = params.get("id");
    DaemonStream d = daemons.get(id);
    if (d != null) {
      d.close();
      rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " stopped on " + coreName));
    } else {
      rsp.add(
          "result-set", new DaemonResponseStream("Deamon:" + id + " not found on " + coreName));
    }
  } else if ("start".equalsIgnoreCase(action)) {
    String id = params.get("id");
    DaemonStream d = daemons.get(id);
    // Bug fix: previously d.open() was called unconditionally and threw an NPE when the
    // id was unknown; now mirror the null handling of the "stop" branch.
    if (d != null) {
      d.open();
      rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " started on " + coreName));
    } else {
      rsp.add(
          "result-set", new DaemonResponseStream("Deamon:" + id + " not found on " + coreName));
    }
  } else if ("list".equalsIgnoreCase(action)) {
    // Stream back one tuple per registered daemon.
    Collection<DaemonStream> vals = daemons.values();
    rsp.add("result-set", new DaemonCollectionStream(vals));
  } else if ("kill".equalsIgnoreCase(action)) {
    // "kill" removes the daemon from the registry (unlike "stop", which leaves it registered).
    String id = params.get("id");
    DaemonStream d = daemons.remove(id);
    if (d != null) {
      d.close();
    }
    rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " killed on " + coreName));
  }
}
/**
 * Decides whether this request should be proxied to a remote node hosting the target core.
 *
 * <p>Only runs when no local core was resolved ({@code core == null}) and the path contains a
 * core segment ({@code idx > 0}). Sets the instance fields {@code coreUrl}, {@code
 * invalidStates}, {@code path} and {@code action} as side effects; {@code action} becomes
 * either REMOTEQUERY (proxy the request) or RETRY (reload aliases and try resolution again).
 *
 * @param corename the core name extracted from the request path
 * @param origCorename the core name as originally supplied (before any alias resolution)
 * @param idx index of the core-name boundary within {@code path}; > 0 means a core segment exists
 * @throws SolrException with INVALID_STATE when the client supplied stale collection state
 */
private void extractRemotePath(String corename, String origCorename, int idx)
    throws UnsupportedEncodingException, KeeperException, InterruptedException {
  if (core == null && idx > 0) {
    coreUrl = getRemotCoreUrl(corename, origCorename);
    // don't proxy for internal update requests
    invalidStates = checkStateIsValid(queryParams.get(CloudSolrClient.STATE_VERSION));
    if (coreUrl != null
        && queryParams.get(DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM) == null) {
      // Strip the core prefix so the remote node sees the handler-relative path.
      path = path.substring(idx);
      if (invalidStates != null) {
        // it does not make sense to send the request to a remote node
        // Report the stale state back to the client as JSON instead of proxying.
        throw new SolrException(
            SolrException.ErrorCode.INVALID_STATE,
            new String(Utils.toJSON(invalidStates), org.apache.lucene.util.IOUtils.UTF_8));
      }
      action = REMOTEQUERY;
    } else {
      if (!retry) {
        // we couldn't find a core to work with, try reloading aliases
        // TODO: it would be nice if admin ui elements skipped this...
        ZkStateReader reader = cores.getZkController().getZkStateReader();
        reader.updateAliases();
        action = RETRY;
      }
    }
  }
}
/**
 * Group-collapse scenario (per the test name) where the filter field (isCloseout) filters
 * every document: per-product summaries must still be computed, including color families.
 */
@Test
public void testGroupCollapseFilterFieldAllFiltered() throws IOException {
  // Arrange: grouped response with collapsing enabled and a filter field configured.
  mockResponse(true);
  when(rb.grouping()).thenReturn(true);
  when(params.getBool(GroupCollapseParams.GROUP_COLLAPSE, false)).thenReturn(true);
  when(params.get(GroupCollapseParams.GROUP_COLLAPSE_FL))
      .thenReturn("price,discount,isCloseout,color,colorFamily");
  when(params.get(GroupCollapseParams.GROUP_COLLAPSE_FF)).thenReturn(FIELD_CLOSEOUT);

  // Act.
  component.process(rb);

  // Assert: exactly these collaborator interactions, nothing else.
  verify(rb).grouping();
  verify(rb).getGroupingSpec();
  verify(params).getBool(GroupCollapseParams.GROUP_COLLAPSE, false);
  verify(params).get(GroupCollapseParams.GROUP_COLLAPSE_FL);
  verify(params).get(GroupCollapseParams.GROUP_COLLAPSE_FF);
  verify(params).getParams(GroupCollapseParams.GROUP_COLLAPSE_FQ);
  verifyNoMoreInteractions(rb);
  verifyNoMoreInteractions(params);

  // Capture the summary added to the response and inspect the per-product entries.
  ArgumentCaptor<NamedList> summaryCaptor = ArgumentCaptor.forClass(NamedList.class);
  verify(rsp).add(eq("groups_summary"), summaryCaptor.capture());
  NamedList byProductId = (NamedList) summaryCaptor.getValue().get("productId");
  assertNotNull(byProductId);

  Set<String> expectedFamilies = new HashSet<String>();
  expectedFamilies.add("RedColorFamily");
  expectedFamilies.add("BlackColorFamily");
  verifyProductSummary(
      (NamedList) byProductId.get("product1"), 80.0f, 100.0f, 0.0f, 20.0f, 2, expectedFamilies);

  expectedFamilies = new HashSet<String>();
  expectedFamilies.add("OrangeColorFamily");
  expectedFamilies.add("BrownColorFamily");
  verifyProductSummary(
      (NamedList) byProductId.get("product2"), 60.0f, 80.0f, 20.0f, 40.0f, 2, expectedFamilies);
}
private void initKerberos() { String keytabFile = params.get(KERBEROS_KEYTAB, "").trim(); if (keytabFile.length() == 0) { throw new IllegalArgumentException( KERBEROS_KEYTAB + " required because " + KERBEROS_ENABLED + " set to true"); } String principal = params.get(KERBEROS_PRINCIPAL, ""); if (principal.length() == 0) { throw new IllegalArgumentException( KERBEROS_PRINCIPAL + " required because " + KERBEROS_ENABLED + " set to true"); } synchronized (HdfsDirectoryFactory.class) { if (kerberosInit == null) { kerberosInit = new Boolean(true); Configuration conf = new Configuration(); conf.set("hadoop.security.authentication", "kerberos"); // UserGroupInformation.setConfiguration(conf); LOG.info( "Attempting to acquire kerberos ticket with keytab: {}, principal: {} ", keytabFile, principal); // try { // UserGroupInformation.loginUserFromKeytab(principal, keytabFile); // } catch (IOException ioe) { // throw new RuntimeException(ioe); // } LOG.info("Got Kerberos ticket"); } } }
private boolean needsScores(SolrParams params) { String sortSpec = params.get("sort"); if (sortSpec != null && sortSpec.length() != 0) { String[] sorts = sortSpec.split(","); for (String s : sorts) { String parts[] = s.split(" "); if (parts[0].equals("score")) { return true; } } } else { // No sort specified so it defaults to score. return true; } String fl = params.get("fl"); if (fl != null) { String[] fls = fl.split(","); for (String f : fls) { if (f.trim().equals("score")) { return true; } } } if (this.boosted != null) { return true; } return false; }
/**
 * Applies the component configuration: enabled flag, input/boost field names and the
 * optional boost file name. A null parameter set leaves all defaults untouched.
 *
 * @param parameters component configuration, may be null
 */
private void initParameters(SolrParams parameters) {
  if (parameters == null) {
    return;
  }
  setEnabled(parameters.getBool("enabled", true));
  inputFieldname = parameters.get(INPUT_FIELD_PARAM, DEFAULT_INPUT_FIELDNAME);
  boostFieldname = parameters.get(BOOST_FIELD_PARAM, DEFAULT_BOOST_FIELDNAME);
  boostFilename = parameters.get(BOOST_FILENAME_PARAM);
}
public NamedList buildResponse() { NamedList<Object> response = new SimpleOrderedMap<>(); // determine if we are going index or count sort boolean sort = !TermsParams.TERMS_SORT_INDEX.equals( params.get(TermsParams.TERMS_SORT, TermsParams.TERMS_SORT_COUNT)); // init minimum frequency long freqmin = 1; String s = params.get(TermsParams.TERMS_MINCOUNT); if (s != null) freqmin = Long.parseLong(s); // init maximum frequency, default to max int long freqmax = -1; s = params.get(TermsParams.TERMS_MAXCOUNT); if (s != null) freqmax = Long.parseLong(s); if (freqmax < 0) { freqmax = Long.MAX_VALUE; } // init limit, default to max int long limit = 10; s = params.get(TermsParams.TERMS_LIMIT); if (s != null) limit = Long.parseLong(s); if (limit < 0) { limit = Long.MAX_VALUE; } // loop though each field we want terms from for (String key : fieldmap.keySet()) { NamedList<Number> fieldterms = new SimpleOrderedMap<>(); TermsResponse.Term[] data = null; if (sort) { data = getCountSorted(fieldmap.get(key)); } else { data = getLexSorted(fieldmap.get(key)); } // loop though each term until we hit limit int cnt = 0; for (TermsResponse.Term tc : data) { if (tc.getFrequency() >= freqmin && tc.getFrequency() <= freqmax) { fieldterms.add(tc.getTerm(), num(tc.getFrequency())); cnt++; } if (cnt >= limit) { break; } } response.add(key, fieldterms); } return response; }
/**
 * Backs up a single core's index to the named backup repository location.
 *
 * <p>Requires SolrCloud mode (a ZkController must exist). Reads the core name, snapshot
 * name, optional repository name, location and optional commit name from the request
 * parameters, then delegates to {@link SnapShooter} to create the snapshot.
 *
 * @param it call info carrying the request, response and core container
 * @throws Exception on any failure; backup failures are wrapped in a SERVER_ERROR SolrException
 */
@Override
public void execute(CoreAdminHandler.CallInfo it) throws Exception {
  ZkController zkController = it.handler.coreContainer.getZkController();
  if (zkController == null) {
    // This API is only meaningful in SolrCloud mode.
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Internal SolrCloud API");
  }

  final SolrParams params = it.req.getParams();
  String cname = params.get(CoreAdminParams.CORE);
  if (cname == null) {
    throw new IllegalArgumentException(CoreAdminParams.CORE + " is required");
  }

  String name = params.get(NAME);
  if (name == null) {
    throw new IllegalArgumentException(CoreAdminParams.NAME + " is required");
  }

  // Repository selection falls back to the container's default when not specified.
  String repoName = params.get(CoreAdminParams.BACKUP_REPOSITORY);
  BackupRepository repository =
      it.handler.coreContainer.newBackupRepository(Optional.ofNullable(repoName));

  // Location may come from the request or from the repository's default property.
  String location = repository.getBackupLocation(params.get(CoreAdminParams.BACKUP_LOCATION));
  if (location == null) {
    throw new SolrException(
        SolrException.ErrorCode.BAD_REQUEST,
        "'location' is not specified as a query"
            + " parameter or as a default repository property");
  }

  // An optional parameter to describe the snapshot to be backed-up. If this
  // parameter is not supplied, the latest index commit is backed-up.
  String commitName = params.get(CoreAdminParams.COMMIT_NAME);

  URI locationUri = repository.createURI(location);
  // try-with-resources: the core reference is released even if snapshot creation fails.
  try (SolrCore core = it.handler.coreContainer.getCore(cname)) {
    SnapShooter snapShooter = new SnapShooter(repository, core, locationUri, name, commitName);
    // validateCreateSnapshot will create parent dirs instead of throw; that choice is dubious.
    // But we want to throw. One reason is that
    // this dir really should, in fact must, already exist here if triggered via a collection
    // backup on a shared
    // file system. Otherwise, perhaps the FS location isn't shared -- we want an error.
    if (!snapShooter.getBackupRepository().exists(snapShooter.getLocation())) {
      throw new SolrException(
          SolrException.ErrorCode.BAD_REQUEST,
          "Directory to contain snapshots doesn't exist: " + snapShooter.getLocation());
    }
    snapShooter.validateCreateSnapshot();
    snapShooter.createSnapshot();
  } catch (Exception e) {
    // Wrap every failure with the core name for easier diagnosis; original cause preserved.
    throw new SolrException(
        SolrException.ErrorCode.SERVER_ERROR,
        "Failed to backup core=" + cname + " because " + e,
        e);
  }
}
/**
 * Handle "SWAP" action: exchanges the registrations of two cores in the container.
 *
 * @param req the admin request; must carry CORE and OTHER parameters
 * @param rsp the admin response (not written to here)
 * @return true if a modification has resulted that requires persistence of the
 *     CoreContainer configuration
 */
protected boolean handleSwapAction(SolrQueryRequest req, SolrQueryResponse rsp) {
  final SolrParams params = req.getParams();
  final String cname = params.get(CoreAdminParams.CORE);
  final boolean doPersist =
      params.getBool(CoreAdminParams.PERSISTENT, coreContainer.isPersistent());
  // OTHER is mandatory — required() throws a descriptive error when it is missing.
  final String other = params.required().get(CoreAdminParams.OTHER);
  coreContainer.swap(cname, other);
  return doPersist;
}
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { SolrParams params = req.getParams(); params = adjustParams(params); req.setParams(params); if (params.get("action") != null) { handleAdmin(req, rsp, params); return; } TupleStream tupleStream; try { tupleStream = this.streamFactory.constructStream(params.get("expr")); } catch (Exception e) { // Catch exceptions that occur while the stream is being created. This will include streaming // expression parse rules. SolrException.log(logger, e); rsp.add("result-set", new DummyErrorStream(e)); return; } int worker = params.getInt("workerID", 0); int numWorkers = params.getInt("numWorkers", 1); StreamContext context = new StreamContext(); context.workerID = worker; context.numWorkers = numWorkers; context.setSolrClientCache(clientCache); context.setModelCache(modelCache); context.put("core", this.coreName); context.put("solr-core", req.getCore()); tupleStream.setStreamContext(context); // if asking for explanation then go get it if (params.getBool("explain", false)) { rsp.add("explanation", tupleStream.toExplanation(this.streamFactory)); } if (tupleStream instanceof DaemonStream) { DaemonStream daemonStream = (DaemonStream) tupleStream; if (daemons.containsKey(daemonStream.getId())) { daemons.remove(daemonStream.getId()).close(); } daemonStream.setDaemons(daemons); daemonStream.open(); // This will start the deamonStream daemons.put(daemonStream.getId(), daemonStream); rsp.add( "result-set", new DaemonResponseStream("Deamon:" + daemonStream.getId() + " started on " + coreName)); } else { rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream))); } }
@Test public void withFieldAliasesWhenNoSupportedLocales() { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("qf", "comment^0.40"); parameters.put("xwiki.multilingualFields", "title, comment"); SolrParams paramsWithAliases = plugin.withFieldAliases("title:text", new MapSolrParams(parameters)); // Aliases for the ROOT locale. assertEquals("title__", paramsWithAliases.get("f.title.qf")); assertEquals("comment__", paramsWithAliases.get("f.comment.qf")); }
/**
 * Match up search results and add corresponding data for each result (if we have query results
 * available).
 *
 * <p>Collects the join-field values from every document in the current result set, looks each
 * one up in the previously-computed {@code XJoinResults} (placed in the request context by
 * {@code prepare}), and appends the matching external data under the component's name in the
 * response.
 */
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public void process(ResponseBuilder rb) throws IOException {
  SolrParams params = rb.req.getParams();
  // Component must be explicitly enabled per request.
  if (!params.getBool(getName(), false)) {
    return;
  }

  // Nothing to do unless prepare() produced results and the query matched documents.
  XJoinResults<?> results = (XJoinResults<?>) rb.req.getContext().get(getResultsTag());
  if (results == null || rb.getResults() == null) {
    return;
  }

  // general results
  FieldAppender appender =
      new FieldAppender(
          (String) params.get(getName() + "." + XJoinParameters.RESULTS_FIELD_LIST, "*"));
  NamedList general = appender.addNamedList(rb.rsp.getValues(), getName(), results);

  // per join id results
  FieldAppender docAppender =
      new FieldAppender(
          (String) params.get(getName() + "." + XJoinParameters.DOC_FIELD_LIST, "*"));
  Set<String> joinFields = new HashSet<>();
  joinFields.add(joinField);

  // Gather distinct join ids in first-seen order from the matched documents.
  // (List.contains is O(n) per doc — fine for the expected small id counts.)
  List<String> joinIds = new ArrayList<>();
  for (Iterator<Integer> it = docIterator(rb); it.hasNext(); ) {
    // Only the join field is loaded from each stored document.
    StoredDocument doc = rb.req.getSearcher().doc(it.next(), joinFields);
    for (String joinId : doc.getValues(joinField)) {
      if (!joinIds.contains(joinId)) {
        joinIds.add(joinId);
      }
    }
  }

  for (String joinId : joinIds) {
    Object object = results.getResult(joinId);
    if (object == null) continue;
    // One "external" entry per join id; iterable results expand to multiple "doc" entries.
    NamedList external = new NamedList<>();
    general.add("external", external);
    external.add("joinId", joinId);
    if (object instanceof Iterable) {
      for (Object item : (Iterable) object) {
        docAppender.addNamedList(external, "doc", item);
      }
    } else {
      docAppender.addNamedList(external, "doc", object);
    }
  }
}
@Test public void withFieldAliasesWhenNoMultilingualFields() { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("qf", "title^0.4 comment^0.40 date^1.0"); parameters.put("xwiki.supportedLocales", "en, ro"); SolrParams paramsWithAliases = plugin.withFieldAliases("title:text", new MapSolrParams(parameters)); // The existing parameters should have been preserved. assertEquals(2, paramsWithAliases.toNamedList().size()); assertEquals("title^0.4 comment^0.40 date^1.0", paramsWithAliases.get("qf")); assertEquals("en, ro", paramsWithAliases.get("xwiki.supportedLocales")); }
/** Generate external process results (if they have not already been generated). */ @Override public void prepare(ResponseBuilder rb) throws IOException { SolrParams params = rb.req.getParams(); if (!params.getBool(getName(), false)) { return; } XJoinResults<?> results = (XJoinResults<?>) rb.req.getContext().get(getResultsTag()); if (results != null) { return; } // generate external process results, by passing 'external' prefixed parameters // from the query string to our factory String prefix = getName() + "." + XJoinParameters.EXTERNAL_PREFIX + "."; ModifiableSolrParams externalParams = new ModifiableSolrParams(); for (Iterator<String> it = params.getParameterNamesIterator(); it.hasNext(); ) { String name = it.next(); if (name.startsWith(prefix)) { externalParams.set(name.substring(prefix.length()), params.get(name)); } } results = factory.getResults(externalParams); rb.req.getContext().put(getResultsTag(), results); }
// get the elevation map from the data dir Map<String, ElevationObj> getElevationMap(IndexReader reader, SolrCore core) throws Exception { synchronized (elevationCache) { Map<String, ElevationObj> map = elevationCache.get(null); if (map != null) return map; map = elevationCache.get(reader); if (map == null) { String f = initArgs.get(CONFIG_FILE); if (f == null) { throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "QueryElevationComponent must specify argument: " + CONFIG_FILE); } log.info("Loading QueryElevation from data dir: " + f); Config cfg; ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController(); if (zkController != null) { cfg = new Config(core.getResourceLoader(), f, null, null); } else { InputStream is = VersionedFile.getLatestFile(core.getDataDir(), f); cfg = new Config(core.getResourceLoader(), f, new InputSource(is), null); } map = loadElevationMap(cfg); elevationCache.put(reader, map); } return map; } }
public void processGetVersions(ResponseBuilder rb) throws IOException { SolrQueryRequest req = rb.req; SolrQueryResponse rsp = rb.rsp; SolrParams params = req.getParams(); if (!params.getBool(COMPONENT_NAME, true)) { return; } int nVersions = params.getInt("getVersions", -1); if (nVersions == -1) return; String sync = params.get("sync"); if (sync != null) { processSync(rb, nVersions, sync); return; } UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog(); if (ulog == null) return; UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates(); try { rb.rsp.add("versions", recentUpdates.getVersions(nVersions)); } finally { recentUpdates.close(); // cache this somehow? } }
@Test public void testStandardParseParamsAndFillStreams() throws Exception { ArrayList<ContentStream> streams = new ArrayList<ContentStream>(); Map<String, String[]> params = new HashMap<String, String[]>(); params.put("q", new String[] {"hello"}); // Set up the expected behavior String[] ct = new String[] { "application/x-www-form-urlencoded", "Application/x-www-form-urlencoded", "application/x-www-form-urlencoded; charset=utf-8", "application/x-www-form-urlencoded;" }; for (String contentType : ct) { HttpServletRequest request = createMock(HttpServletRequest.class); expect(request.getMethod()).andReturn("POST").anyTimes(); expect(request.getContentType()).andReturn(contentType).anyTimes(); expect(request.getParameterMap()).andReturn(params).anyTimes(); replay(request); MultipartRequestParser multipart = new MultipartRequestParser(1000000); RawRequestParser raw = new RawRequestParser(); StandardRequestParser standard = new StandardRequestParser(multipart, raw); SolrParams p = standard.parseParamsAndFillStreams(request, streams); assertEquals("contentType: " + contentType, "hello", p.get("q")); } }
private ShardRequest createShardQuery(SolrParams params) { ShardRequest sreq = new ShardRequest(); sreq.purpose = ShardRequest.PURPOSE_GET_TERMS; // base shard request on original parameters sreq.params = new ModifiableSolrParams(params); // don't pass through the shards param sreq.params.remove(ShardParams.SHARDS); // remove any limits for shards, we want them to return all possible // responses // we want this so we can calculate the correct counts // dont sort by count to avoid that unnecessary overhead on the shards sreq.params.remove(TermsParams.TERMS_MAXCOUNT); sreq.params.remove(TermsParams.TERMS_MINCOUNT); sreq.params.set(TermsParams.TERMS_LIMIT, -1); sreq.params.set(TermsParams.TERMS_SORT, TermsParams.TERMS_SORT_INDEX); // TODO: is there a better way to handle this? String qt = params.get(CommonParams.QT); if (qt != null) { sreq.params.add(CommonParams.QT, qt); } return sreq; }
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { SolrParams params = req.getParams(); params = adjustParams(params); req.setParams(params); TupleStream tupleStream = null; try { tupleStream = this.streamFactory.constructStream(params.get("expr")); } catch (Exception e) { // Catch exceptions that occur while the stream is being created. This will include streaming // expression parse rules. SolrException.log(logger, e); rsp.add("result-set", new DummyErrorStream(e)); return; } int worker = params.getInt("workerID", 0); int numWorkers = params.getInt("numWorkers", 1); StreamContext context = new StreamContext(); context.workerID = worker; context.numWorkers = numWorkers; context.setSolrClientCache(clientCache); tupleStream.setStreamContext(context); rsp.add("result-set", new TimerStream(new ExceptionStream(tupleStream))); }
/**
 * Handle "UNLOAD" Action: removes the named core from the container and closes it,
 * optionally deleting its index directory via a close hook.
 *
 * @param req the admin request; must carry CORE, may carry DELETE_INDEX
 * @param rsp the admin response (not written to here)
 * @return true if a modification has resulted that requires persistence of the
 *     CoreContainer configuration
 * @throws SolrException BAD_REQUEST if the named core does not exist
 */
protected boolean handleUnloadAction(SolrQueryRequest req, SolrQueryResponse rsp)
    throws SolrException {
  SolrParams params = req.getParams();
  String cname = params.get(CoreAdminParams.CORE);
  SolrCore core = coreContainer.remove(cname);
  if (core == null) {
    throw new SolrException(
        SolrException.ErrorCode.BAD_REQUEST, "No such core exists '" + cname + "'");
  }
  if (params.getBool(CoreAdminParams.DELETE_INDEX, false)) {
    core.addCloseHook(
        new CloseHook() {
          @Override
          public void preClose(SolrCore core) {}

          @Override
          public void postClose(SolrCore core) {
            File dataDir = new File(core.getIndexDir());
            // Bug fix: File.listFiles() returns null when the directory is missing or
            // unreadable; iterating it directly threw an NPE inside the close hook.
            File[] files = dataDir.listFiles();
            if (files == null) {
              log.error(dataDir.getAbsolutePath() + " could not be listed on core unload");
            } else {
              for (File file : files) {
                if (!file.delete()) {
                  log.error(file.getAbsolutePath() + " could not be deleted on core unload");
                }
              }
            }
            // Directory delete only succeeds if all children were removed above.
            if (!dataDir.delete())
              log.error(dataDir.getAbsolutePath() + " could not be deleted on core unload");
          }
        });
  }
  core.close();
  return coreContainer.isPersistent();
}
static void setWt(SolrQueryRequest req, String wt) { SolrParams params = req.getParams(); if (params.get(CommonParams.WT) != null) return; // wt is set by user Map<String, String> map = new HashMap<>(1); map.put(CommonParams.WT, wt); map.put("indent", "true"); req.setParams(SolrParams.wrapDefaults(params, new MapSolrParams(map))); }
/** * Handle "ALIAS" action * * @param req * @param rsp * @return true if a modification has resulted that requires persistance of the CoreContainer * configuration. */ @Deprecated protected boolean handleAliasAction(SolrQueryRequest req, SolrQueryResponse rsp) { SolrParams params = req.getParams(); String name = params.get(CoreAdminParams.OTHER); String cname = params.get(CoreAdminParams.CORE); boolean doPersist = false; if (cname.equals(name)) return doPersist; SolrCore core = coreContainer.getCore(cname); if (core != null) { doPersist = coreContainer.isPersistent(); coreContainer.register(name, core, false); // no core.close() since each entry in the cores map should increase the ref } return doPersist; }
@Test public void withFieldAliases() { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("qf", "title^0.4 comment^0.40 date^1.0"); parameters.put("xwiki.multilingualFields", "title, property.*, foo, comment"); parameters.put("xwiki.supportedLocales", "en, fr, zh_TW"); parameters.put("xwiki.typedDynamicFields", "property.*"); parameters.put("xwiki.dynamicFieldTypes", "boolean, int"); String query = "title:text AND x:y AND property.Blog.BlogPostClass.summary:wiki AND title_ro:value"; SolrParams paramsWithAliases = plugin.withFieldAliases(query, new MapSolrParams(parameters)); assertEquals("title__ title_en title_fr title_zh_TW", paramsWithAliases.get("f.title.qf")); assertEquals( "property.Blog.BlogPostClass.summary__ property.Blog.BlogPostClass.summary_en " + "property.Blog.BlogPostClass.summary_fr property.Blog.BlogPostClass.summary_zh_TW " + "property.Blog.BlogPostClass.summary_boolean property.Blog.BlogPostClass.summary_int", paramsWithAliases.get("f.property.Blog.BlogPostClass.summary.qf")); // Event if this field doesn't appear in the query, it's a default field so it has to have the // alias. assertEquals( "comment__ comment_en comment_fr comment_zh_TW", paramsWithAliases.get("f.comment.qf")); // These fields are not declared as multilingual. assertNull(paramsWithAliases.get("f.x.qf")); assertNull(paramsWithAliases.get("f.title_ro.qf")); // This is a default field but it's not declared as multilingual. assertNull(paramsWithAliases.get("f.date.qf")); // This multilingual field doesn't appear in the query and it's not a default field either. assertNull(paramsWithAliases.get("f.foo.qf")); }
/**
 * Parses a facet parameter that may carry Solr local-params syntax ({!key=... tag=... ...}),
 * producing the effective facet value, output key, tag list, thread count and (possibly
 * tag-excluded) base DocSet.
 *
 * @param type the facet type constant (e.g. FacetParams.FACET_QUERY)
 * @param param the raw facet parameter value, possibly prefixed with local params
 * @return the fully-resolved ParsedParams for this facet
 * @throws SyntaxError if the local params cannot be parsed
 * @throws IOException from DocSet computation for tag exclusions
 */
protected ParsedParams parseParams(String type, String param) throws SyntaxError, IOException {
  SolrParams localParams = QueryParsing.getLocalParams(param, req.getParams());
  DocSet docs = docsOrig;
  String facetValue = param;
  String key = param;
  List<String> tags = Collections.emptyList();
  int threads = -1;

  // Fast path: no local params — the raw value is both facet value and key.
  if (localParams == null) {
    SolrParams params = global;
    SolrParams required = new RequiredSolrParams(params);
    return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
  }

  // Local params act as defaults-overriding layer on top of the global params.
  SolrParams params = SolrParams.wrapDefaults(localParams, global);
  SolrParams required = new RequiredSolrParams(params);

  // remove local params unless it's a query
  // NOTE(review): reference (!=) comparison on the type constant — relies on callers passing
  // the interned FacetParams constant; the TODO below acknowledges this.
  if (type != FacetParams.FACET_QUERY) { // TODO Cut over to an Enum here
    facetValue = localParams.get(CommonParams.VALUE);
  }

  // reset set the default key now that localParams have been removed
  key = facetValue;
  // allow explicit set of the key
  key = localParams.get(CommonParams.OUTPUT_KEY, key);

  String tagStr = localParams.get(CommonParams.TAG);
  tags = tagStr == null ? Collections.<String>emptyList() : StrUtils.splitSmart(tagStr, ',');

  String threadStr = localParams.get(CommonParams.THREADS);
  if (threadStr != null) {
    threads = Integer.parseInt(threadStr);
  }

  // figure out if we need a new base DocSet
  String excludeStr = localParams.get(CommonParams.EXCLUDE);
  if (excludeStr == null)
    return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);

  // Exclusion tags present: recompute the base DocSet without the excluded filters.
  List<String> excludeTagList = StrUtils.splitSmart(excludeStr, ',');
  docs = computeDocSet(docs, excludeTagList);
  return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
}
/**
 * Issues the request against the collection named in its parameters, falling back to the
 * client's default collection, with stale-cluster-state retry handling.
 *
 * @param request the Solr request to execute
 * @return the raw response
 * @throws SolrServerException on server-side failure
 * @throws IOException on communication failure
 */
@Override
public NamedList<Object> request(SolrRequest request) throws SolrServerException, IOException {
  SolrParams reqParams = request.getParams();
  String collection;
  if (reqParams == null) {
    collection = getDefaultCollection();
  } else {
    collection = reqParams.get("collection", getDefaultCollection());
  }
  return requestWithRetryOnStaleState(request, 0, collection);
}
@Override public void prepare(ResponseBuilder rb) throws IOException { SolrParams params = rb.req.getParams(); if (params.getBool(TermsParams.TERMS, false)) { rb.doTerms = true; } // TODO: temporary... this should go in a different component. String shards = params.get(ShardParams.SHARDS); if (shards != null) { if (params.get(ShardParams.SHARDS_QT) == null) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "No shards.qt parameter specified"); } List<String> lst = StrUtils.splitSmart(shards, ",", true); rb.shards = lst.toArray(new String[lst.size()]); } }
/**
 * Handle "RENAME" Action: registers the core under the new name, removes the old
 * registration, and releases the reference taken by getCore().
 *
 * @param req the admin request; must carry CORE and OTHER parameters
 * @param rsp the admin response (not written to here)
 * @return true if a modification has resulted that requires persistence of the
 *     CoreContainer configuration
 * @throws SolrException on container-level failures
 */
protected boolean handleRenameAction(SolrQueryRequest req, SolrQueryResponse rsp)
    throws SolrException {
  SolrParams params = req.getParams();
  String newName = params.get(CoreAdminParams.OTHER);
  String cname = params.get(CoreAdminParams.CORE);

  // Renaming to the same name is a no-op.
  if (cname.equals(newName)) {
    return false;
  }

  boolean doPersist = false;
  SolrCore core = coreContainer.getCore(cname);
  if (core != null) {
    doPersist = coreContainer.isPersistent();
    coreContainer.register(newName, core, false);
    coreContainer.remove(cname);
    core.close(); // release the reference acquired by getCore()
  }
  return doPersist;
}
/** * Handler "RELOAD" action * * @param req * @param rsp * @return true if a modification has resulted that requires persistance of the CoreContainer * configuration. */ protected boolean handleReloadAction(SolrQueryRequest req, SolrQueryResponse rsp) { SolrParams params = req.getParams(); String cname = params.get(CoreAdminParams.CORE); try { coreContainer.reload(cname); return false; // no change on reload } catch (Exception ex) { throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Error handling 'reload' action", ex); } }
/**
 * Handles the "getUpdates" real-time-get request used by peer sync: looks up each requested
 * version in the update log's recent updates and returns the raw log entries, plus any
 * delete-by-query commands at or after the lowest requested add version (since those may
 * still apply to the returned adds).
 *
 * @param rb the response builder; "getUpdates" holds a comma-separated version list
 * @throws IOException from update-log access
 */
public void processGetUpdates(ResponseBuilder rb) throws IOException {
  SolrQueryRequest req = rb.req;
  SolrQueryResponse rsp = rb.rsp;
  SolrParams params = req.getParams();

  if (!params.getBool(COMPONENT_NAME, true)) {
    return;
  }

  String versionsStr = params.get("getUpdates");
  if (versionsStr == null) return;

  UpdateLog ulog = req.getCore().getUpdateHandler().getUpdateLog();
  if (ulog == null) return;

  List<String> versions = StrUtils.splitSmart(versionsStr, ",", true);

  List<Object> updates = new ArrayList<Object>(versions.size());

  // Track the smallest positive (add/update) version seen, to bound the DBQ lookup below.
  long minVersion = Long.MAX_VALUE;

  // TODO: get this from cache instead of rebuilding?
  UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates();
  try {
    for (String versionStr : versions) {
      long version = Long.parseLong(versionStr);
      try {
        Object o = recentUpdates.lookup(version);
        if (o == null) continue;

        // Negative versions denote deletes; only positive ones bound the DBQ window.
        if (version > 0) {
          minVersion = Math.min(minVersion, version);
        }

        // TODO: do any kind of validation here?
        updates.add(o);

      } catch (SolrException e) {
        log.warn("Exception reading log for updates", e);
      } catch (ClassCastException e) {
        log.warn("Exception reading log for updates", e);
      }
    }

    // Must return all delete-by-query commands that occur after the first add requested
    // since they may apply.
    updates.addAll(recentUpdates.getDeleteByQuery(minVersion));

    rb.rsp.add("updates", updates);

  } finally {
    recentUpdates.close(); // cache this somehow?
  }
}
/**
 * For example:
 *
 * <p>String json = solr.request( "/select?qt=dismax&wt=json&q=...", null ); String xml =
 * solr.request( "/update", "<add><doc><field ..." );
 *
 * <p>Executes a request against the embedded core: resolves the handler from the path (or,
 * for /select, from the "qt" parameter), wraps the optional body as a content stream, runs
 * the request, and serializes the response with the core's configured response writer.
 *
 * @param pathAndParams handler path, optionally followed by ?-separated query parameters
 * @param body optional request body (e.g. an update XML document), may be null/empty
 * @return the response rendered as a string by the matching QueryResponseWriter
 * @throws Exception the response's own exception if set, or any request-processing failure
 */
public String request(String pathAndParams, String body) throws Exception {
  String path = null;
  SolrParams params = null;
  int idx = pathAndParams.indexOf('?');
  if (idx > 0) {
    path = pathAndParams.substring(0, idx);
    params = SolrRequestParsers.parseQueryString(pathAndParams.substring(idx + 1));
  } else {
    path = pathAndParams;
    // No query string: empty parameter set.
    params = new MapSolrParams(new HashMap<String, String>());
  }

  // Extract the handler from the path or params
  SolrRequestHandler handler = core.getRequestHandler(path);
  if (handler == null) {
    // NOTE(review): the bare "/select" is matched case-sensitively while "/select/" is
    // matched case-insensitively — confirm whether this asymmetry is intentional.
    if ("/select".equals(path) || "/select/".equalsIgnoreCase(path)) {
      // For /select the actual handler is named by the "qt" parameter.
      String qt = params.get(CommonParams.QT);
      handler = core.getRequestHandler(qt);
      if (handler == null) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + qt);
      }
    }
  }
  if (handler == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unknown handler: " + path);
  }

  // Make a stream for the 'body' content
  List<ContentStream> streams = new ArrayList<ContentStream>(1);
  if (body != null && body.length() > 0) {
    streams.add(new ContentStreamBase.StringStream(body));
  }

  SolrQueryRequest req = null;
  try {
    req = parser.buildRequestFrom(core, params, streams);
    SolrQueryResponse rsp = new SolrQueryResponse();
    core.execute(handler, req, rsp);
    if (rsp.getException() != null) {
      throw rsp.getException();
    }

    // Now write it out
    QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
    StringWriter out = new StringWriter();
    responseWriter.write(out, req, rsp);
    return out.toString();
  } finally {
    // Always release the request, even when execution or serialization throws.
    if (req != null) {
      req.close();
    }
  }
}