public void testBOMMarkers() throws Exception {
  final String fileWithBom = "stopwithbom.txt";
  SolrResourceLoader loader = new SolrResourceLoader("solr/collection1");

  // preliminary sanity check
  InputStream bomStream = loader.openResource(fileWithBom);
  try {
    final byte[] bomExpected = new byte[] {-17, -69, -65};
    final byte[] firstBytes = new byte[3];
    assertEquals(
        "Should have been able to read 3 bytes from bomStream",
        3,
        bomStream.read(firstBytes));
    assertTrue(
        "This test only works if "
            + fileWithBom
            + " contains a BOM -- it appears someone removed it.",
        Arrays.equals(bomExpected, firstBytes));
  } finally {
    try {
      bomStream.close();
    } catch (Exception e) {
      /* IGNORE */
    }
  }

  // now make sure getLines skips the BOM...
  List<String> lines = loader.getLines(fileWithBom);
  assertEquals(1, lines.size());
  assertEquals("BOMsAreEvil", lines.get(0));
}
public void testInstanceDir() throws Exception {
  SolrResourceLoader loader = new SolrResourceLoader(null);
  String instDir = loader.getInstanceDir();
  assertTrue(instDir + " is not equal to " + "solr/", instDir.equals("solr/") == true);

  loader = new SolrResourceLoader("solr");
  instDir = loader.getInstanceDir();
  assertTrue(
      instDir + " is not equal to " + "solr/",
      instDir.equals("solr" + File.separator) == true);
}
/**
 * Builds a config.
 *
 * <p>Note that the 'name' parameter is used to obtain a valid input stream if no valid one is
 * provided through 'is'. If no valid stream is provided, a valid SolrResourceLoader instance
 * should be provided through 'loader' so the resource can be opened (@see
 * SolrResourceLoader#openResource); if no SolrResourceLoader instance is provided, a default one
 * will be created.
 *
 * <p>Consider passing a non-null 'name' parameter in all use-cases since it is used for logging
 * &amp; exception reporting.
 *
 * @param loader the resource loader used to obtain an input stream if 'is' is null
 * @param name the resource name used if the input stream 'is' is null
 * @param is the resource as a SAX InputSource
 * @param prefix an optional prefix that will be prepended to all non-absolute xpath expressions
 */
public Config(
    SolrResourceLoader loader, String name, InputSource is, String prefix, boolean substituteProps)
    throws ParserConfigurationException, IOException, SAXException {
  if (loader == null) {
    loader = new SolrResourceLoader(SolrResourceLoader.locateSolrHome());
  }
  this.loader = loader;
  this.name = name;
  this.prefix = (prefix != null && !prefix.endsWith("/")) ? prefix + '/' : prefix;
  try {
    javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();

    if (is == null) {
      InputStream in = loader.openConfig(name);
      if (in instanceof ZkSolrResourceLoader.ZkByteArrayInputStream) {
        zkVersion = ((ZkSolrResourceLoader.ZkByteArrayInputStream) in).getStat().getVersion();
        log.info("loaded config {} with version {} ", name, zkVersion);
      }
      is = new InputSource(in);
      is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(name));
    }

    // only enable xinclude if a SystemId is available
    if (is.getSystemId() != null) {
      try {
        dbf.setXIncludeAware(true);
        dbf.setNamespaceAware(true);
      } catch (UnsupportedOperationException e) {
        log.warn(name + " XML parser doesn't support XInclude option");
      }
    }

    final DocumentBuilder db = dbf.newDocumentBuilder();
    db.setEntityResolver(new SystemIdResolver(loader));
    db.setErrorHandler(xmllog);
    try {
      doc = db.parse(is);
      origDoc = copyDoc(doc);
    } finally {
      // some XML parsers are broken and don't close the byte stream (but they should according to spec)
      IOUtils.closeQuietly(is.getByteStream());
    }

    if (substituteProps) {
      DOMUtil.substituteProperties(doc, getSubstituteProperties());
    }
  } catch (ParserConfigurationException | SAXException | TransformerException e) {
    SolrException.log(log, "Exception during parsing file: " + name, e);
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
  }
}
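The Javadoc above describes two call patterns: supply an InputSource directly, or supply only a resource name and let the loader open it. Below is a minimal sketch of the second pattern; the loader path and resource name are example values, exception handling and imports are as in the surrounding file.

// Sketch only (example values throughout); the checked exceptions declared by the
// constructor (ParserConfigurationException, IOException, SAXException) are left to the caller.
SolrResourceLoader loader = new SolrResourceLoader("solr/collection1"); // example instance dir
Config cfg =
    new Config(
        loader,           // opens the resource, because 'is' is null below
        "solrconfig.xml", // resource name; also used for logging and error reporting
        null,             // no InputSource: the loader opens 'name' instead
        null,             // no xpath prefix
        true);            // substitute ${...} properties in the parsed document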
public void testWrongEncoding() throws Exception {
  String wrongEncoding = "stopwordsWrongEncoding.txt";
  SolrResourceLoader loader = new SolrResourceLoader("solr/collection1");
  // ensure we get our exception
  try {
    List<String> lines = loader.getLines(wrongEncoding);
    fail();
  } catch (SolrException expected) {
    assertTrue(expected.getCause() instanceof CharacterCodingException);
  }
}
public static ConfigOverlay getConfigOverlay(SolrResourceLoader loader) {
  InputStream in = null;
  InputStreamReader isr = null;
  try {
    try {
      in = loader.openResource(ConfigOverlay.RESOURCE_NAME);
    } catch (IOException e) {
      // TODO: we should be explicitly looking for file not found exceptions
      // and logging if it's not the expected IOException
      // hopefully no problem, assume no overlay.json file
      return new ConfigOverlay(Collections.EMPTY_MAP, -1);
    }

    int version = 0; // will always be 0 for a file based resourceLoader
    if (in instanceof ZkSolrResourceLoader.ZkByteArrayInputStream) {
      version = ((ZkSolrResourceLoader.ZkByteArrayInputStream) in).getStat().getVersion();
      log.info("config overlay loaded . version : {} ", version);
    }
    isr = new InputStreamReader(in, StandardCharsets.UTF_8);
    Map m = (Map) ObjectBuilder.getVal(new JSONParser(isr));
    return new ConfigOverlay(m, version);
  } catch (Exception e) {
    throw new SolrException(ErrorCode.SERVER_ERROR, "Error reading config overlay", e);
  } finally {
    IOUtils.closeQuietly(isr);
    IOUtils.closeQuietly(in);
  }
}
@Override
public String getDataHome(CoreDescriptor cd) throws IOException {
  if (hdfsDataDir == null) {
    throw new SolrException(
        ErrorCode.SERVER_ERROR,
        "You must set the "
            + this.getClass().getSimpleName()
            + " param "
            + HDFS_HOME
            + " for relative dataDir paths to work");
  }

  // by default, we go off the instance directory
  String path;
  if (cd.getCloudDescriptor() != null) {
    path =
        URLEncoder.encode(cd.getCloudDescriptor().getCollectionName(), "UTF-8")
            + "/"
            + URLEncoder.encode(cd.getCloudDescriptor().getCoreNodeName(), "UTF-8");
  } else {
    path = cd.getName();
  }

  return normalize(
      SolrResourceLoader.normalizeDir(
          ZkController.trimLeadingAndTrailingSlashes(hdfsDataDir)
              + "/"
              + path
              + "/"
              + cd.getDataDir()));
}
public CoreContainer(
    NodeConfig config, Properties properties, CoresLocator locator, boolean asyncSolrCoreLoad) {
  this.loader = config.getSolrResourceLoader();
  this.solrHome = loader.getInstancePath().toString();
  this.cfg = checkNotNull(config);
  this.coresLocator = locator;
  this.containerProperties = new Properties(properties);
  this.asyncSolrCoreLoad = asyncSolrCoreLoad;
}
public static SolrConfig readFromResourceLoader(SolrResourceLoader loader, String name) {
  try {
    return new SolrConfig(loader, name, null);
  } catch (Exception e) {
    String resource;
    if (loader instanceof ZkSolrResourceLoader) {
      resource = name;
    } else {
      resource = Paths.get(loader.getConfigDir()).resolve(name).toString();
    }
    throw new SolrException(
        ErrorCode.SERVER_ERROR, "Error loading solr config from " + resource, e);
  }
}
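A caller-side sketch of the factory above, assuming (as its return type suggests) that it is a static member of SolrConfig; the loader path and resource name are illustrative values only.

// Sketch: build a SolrConfig by resource name through the factory above.
SolrResourceLoader loader = new SolrResourceLoader("solr/collection1");              // example path
SolrConfig solrConfig = SolrConfig.readFromResourceLoader(loader, "solrconfig.xml"); // example name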
private void initLibs() {
  NodeList nodes = (NodeList) evaluate("lib", XPathConstants.NODESET);
  if (nodes == null || nodes.getLength() == 0) return;

  log.info("Adding specified lib dirs to ClassLoader");
  SolrResourceLoader loader = getResourceLoader();

  try {
    for (int i = 0; i < nodes.getLength(); i++) {
      Node node = nodes.item(i);
      String baseDir = DOMUtil.getAttr(node, "dir");
      String path = DOMUtil.getAttr(node, "path");
      if (null != baseDir) {
        // :TODO: add support for a simpler 'glob' mutually exclusive of regex
        String regex = DOMUtil.getAttr(node, "regex");
        FileFilter filter = (null == regex) ? null : new RegexFileFilter(regex);
        loader.addToClassLoader(baseDir, filter, false);
      } else if (null != path) {
        final File file = FileUtils.resolvePath(new File(loader.getInstanceDir()), path);
        loader.addToClassLoader(
            file.getParent(),
            new FileFilter() {
              @Override
              public boolean accept(File pathname) {
                return pathname.equals(file);
              }
            },
            false);
      } else {
        throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'");
      }
    }
  } finally {
    loader.reloadLuceneSPI();
  }
}
public void testAwareCompatibility() {
  SolrResourceLoader loader = new SolrResourceLoader(".");

  Class clazz = ResourceLoaderAware.class;
  // Check ResourceLoaderAware valid objects
  loader.assertAwareCompatibility(clazz, new NGramFilterFactory(new HashMap<String, String>()));
  loader.assertAwareCompatibility(
      clazz, new KeywordTokenizerFactory(new HashMap<String, String>()));

  // Make sure it throws an error for invalid objects
  Object[] invalid =
      new Object[] {
        // new NGramTokenFilter( null ),
        "hello",
        new Float(12.3f),
        new LukeRequestHandler(),
        new JSONResponseWriter()
      };
  for (Object obj : invalid) {
    try {
      loader.assertAwareCompatibility(clazz, obj);
      Assert.fail("Should be invalid class: " + obj + " FOR " + clazz);
    } catch (SolrException ex) {
      // OK
    }
  }

  clazz = SolrCoreAware.class;
  // Check SolrCoreAware valid objects
  loader.assertAwareCompatibility(clazz, new LukeRequestHandler());
  loader.assertAwareCompatibility(clazz, new FacetComponent());
  loader.assertAwareCompatibility(clazz, new JSONResponseWriter());

  // Make sure it throws an error for invalid objects
  invalid =
      new Object[] {
        new NGramFilterFactory(new HashMap<String, String>()),
        "hello",
        new Float(12.3f),
        new KeywordTokenizerFactory(new HashMap<String, String>())
      };
  for (Object obj : invalid) {
    try {
      loader.assertAwareCompatibility(clazz, obj);
      Assert.fail("Should be invalid class: " + obj + " FOR " + clazz);
    } catch (SolrException ex) {
      // OK
    }
  }
}
private void initLibs() {
  NodeList nodes = (NodeList) evaluate("lib", XPathConstants.NODESET);
  if (nodes == null || nodes.getLength() == 0) return;

  log.info("Adding specified lib dirs to ClassLoader");
  SolrResourceLoader loader = getResourceLoader();
  List<URL> urls = new ArrayList<>();

  for (int i = 0; i < nodes.getLength(); i++) {
    Node node = nodes.item(i);
    String baseDir = DOMUtil.getAttr(node, "dir");
    String path = DOMUtil.getAttr(node, PATH);

    if (null != baseDir) {
      // :TODO: add support for a simpler 'glob' mutually exclusive of regex
      Path dir = loader.getInstancePath().resolve(baseDir);
      String regex = DOMUtil.getAttr(node, "regex");
      try {
        if (regex == null) urls.addAll(SolrResourceLoader.getURLs(dir));
        else urls.addAll(SolrResourceLoader.getFilteredURLs(dir, regex));
      } catch (IOException e) {
        log.warn(
            "Couldn't add files from {} filtered by {} to classpath: {}",
            dir,
            regex,
            e.getMessage());
      }
    } else if (null != path) {
      final Path dir = loader.getInstancePath().resolve(path);
      try {
        urls.add(dir.toUri().toURL());
      } catch (MalformedURLException e) {
        log.warn("Couldn't add file {} to classpath: {}", dir, e.getMessage());
      }
    } else {
      throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'");
    }
  }

  if (urls.size() > 0) {
    loader.addToClassLoader(urls);
    loader.reloadLuceneSPI();
  }
}
/** Load the cores defined for this CoreContainer */
public void load() {
  log.info("Loading cores into CoreContainer [instanceDir={}]", loader.getInstancePath());

  // add the sharedLib to the shared resource loader before initializing cfg based plugins
  String libDir = cfg.getSharedLibDirectory();
  if (libDir != null) {
    Path libPath = loader.getInstancePath().resolve(libDir);
    try {
      loader.addToClassLoader(SolrResourceLoader.getURLs(libPath));
      loader.reloadLuceneSPI();
    } catch (IOException e) {
      log.warn("Couldn't add files from {} to classpath: {}", libPath, e.getMessage());
    }
  }

  shardHandlerFactory =
      ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);

  updateShardHandler = new UpdateShardHandler(cfg.getUpdateShardHandlerConfig());

  solrCores.allocateLazyCores(cfg.getTransientCacheSize(), loader);

  logging = LogWatcher.newRegisteredLogWatcher(cfg.getLogWatcherConfig(), loader);

  hostName = cfg.getNodeName();

  zkSys.initZooKeeper(this, solrHome, cfg.getCloudConfig());
  if (isZooKeeperAware())
    pkiAuthenticationPlugin =
        new PKIAuthenticationPlugin(this, zkSys.getZkController().getNodeName());

  ZkStateReader.ConfigData securityConfig =
      isZooKeeperAware()
          ? getZkController().getZkStateReader().getSecurityProps(false)
          : new ZkStateReader.ConfigData(EMPTY_MAP, -1);
  initializeAuthorizationPlugin((Map<String, Object>) securityConfig.data.get("authorization"));
  initializeAuthenticationPlugin((Map<String, Object>) securityConfig.data.get("authentication"));

  this.backupRepoFactory = new BackupRepositoryFactory(cfg.getBackupRepositoryPlugins());

  containerHandlers.put(ZK_PATH, new ZookeeperInfoHandler(this));
  securityConfHandler = new SecurityConfHandler(this);
  collectionsHandler = createHandler(cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
  containerHandlers.put(COLLECTIONS_HANDLER_PATH, collectionsHandler);
  infoHandler = createHandler(cfg.getInfoHandlerClass(), InfoHandler.class);
  containerHandlers.put(INFO_HANDLER_PATH, infoHandler);
  coreAdminHandler = createHandler(cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
  containerHandlers.put(CORES_HANDLER_PATH, coreAdminHandler);
  configSetsHandler = createHandler(cfg.getConfigSetsHandlerClass(), ConfigSetsHandler.class);
  containerHandlers.put(CONFIGSETS_HANDLER_PATH, configSetsHandler);
  containerHandlers.put(AUTHZ_PATH, securityConfHandler);
  containerHandlers.put(AUTHC_PATH, securityConfHandler);
  if (pkiAuthenticationPlugin != null)
    containerHandlers.put(
        PKIAuthenticationPlugin.PATH, pkiAuthenticationPlugin.getRequestHandler());

  coreConfigService = ConfigSetService.createConfigSetService(cfg, loader, zkSys.zkController);

  containerProperties.putAll(cfg.getSolrProperties());

  // setup executor to load cores in parallel
  ExecutorService coreLoadExecutor =
      ExecutorUtil.newMDCAwareFixedThreadPool(
          cfg.getCoreLoadThreadCount(
              isZooKeeperAware() ? DEFAULT_CORE_LOAD_THREADS_IN_CLOUD : DEFAULT_CORE_LOAD_THREADS),
          new DefaultSolrThreadFactory("coreLoadExecutor"));
  final List<Future<SolrCore>> futures = new ArrayList<>();
  try {
    List<CoreDescriptor> cds = coresLocator.discover(this);
    if (isZooKeeperAware()) {
      // sort the cores if it is in SolrCloud. In standalone node the order does not matter
      CoreSorter coreComparator = new CoreSorter().init(this);
      cds = new ArrayList<>(cds); // make a copy
      Collections.sort(cds, coreComparator::compare);
    }
    checkForDuplicateCoreNames(cds);

    for (final CoreDescriptor cd : cds) {
      if (cd.isTransient() || !cd.isLoadOnStartup()) {
        solrCores.putDynamicDescriptor(cd.getName(), cd);
      } else if (asyncSolrCoreLoad) {
        solrCores.markCoreAsLoading(cd);
      }
      if (cd.isLoadOnStartup()) {
        futures.add(
            coreLoadExecutor.submit(
                () -> {
                  SolrCore core;
                  try {
                    if (zkSys.getZkController() != null) {
                      zkSys.getZkController().throwErrorIfReplicaReplaced(cd);
                    }
                    core = create(cd, false);
                  } finally {
                    if (asyncSolrCoreLoad) {
                      solrCores.markCoreAsNotLoading(cd);
                    }
                  }
                  try {
                    zkSys.registerInZk(core, true);
                  } catch (RuntimeException e) {
                    SolrException.log(log, "Error registering SolrCore", e);
                  }
                  return core;
                }));
      }
    }

    // Start the background thread
    backgroundCloser = new CloserThread(this, solrCores, cfg);
    backgroundCloser.start();

  } finally {
    if (asyncSolrCoreLoad && futures != null) {
      coreContainerWorkExecutor.submit(
          (Runnable)
              () -> {
                try {
                  for (Future<SolrCore> future : futures) {
                    try {
                      future.get();
                    } catch (InterruptedException e) {
                      Thread.currentThread().interrupt();
                    } catch (ExecutionException e) {
                      log.error("Error waiting for SolrCore to be created", e);
                    }
                  }
                } finally {
                  ExecutorUtil.shutdownAndAwaitTermination(coreLoadExecutor);
                }
              });
    } else {
      ExecutorUtil.shutdownAndAwaitTermination(coreLoadExecutor);
    }
  }

  if (isZooKeeperAware()) {
    zkSys.getZkController().checkOverseerDesignate();
  }
}
/**
 * Create a new CoreContainer using the given SolrResourceLoader. The container's cores are not
 * loaded.
 *
 * @param loader the SolrResourceLoader
 * @see #load()
 */
public CoreContainer(SolrResourceLoader loader) {
  this(SolrXmlConfig.fromSolrHome(loader, loader.getInstancePath()));
}
/**
 * Create a new CoreContainer using system properties to detect the solr home directory. The
 * container's cores are not loaded.
 *
 * @see #load()
 */
public CoreContainer() {
  this(new SolrResourceLoader(SolrResourceLoader.locateSolrHome()));
}
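Taken together with load() above, the two constructors suggest the following bootstrap sequence. A minimal sketch; the explicit instance dir in the second variant is an example value.

// Sketch: detect solr home from system properties, then load the configured cores.
CoreContainer cores = new CoreContainer(); // uses SolrResourceLoader.locateSolrHome()
cores.load();                              // discovers CoreDescriptors and creates the cores

// Or with an explicit resource loader, as in the constructor above ("solr" is an example dir):
CoreContainer cores2 = new CoreContainer(new SolrResourceLoader("solr"));
cores2.load();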
protected <T> T createHandler(String handlerClass, Class<T> clazz) {
  return loader.newInstance(
      handlerClass, clazz, null, new Class[] {CoreContainer.class}, new Object[] {this});
}
protected Properties getSubstituteProperties() {
  return loader.getCoreProperties();
}