public void inform(SolrCore core) {
  if (initParams != null) {
    log.info("Initializing Clustering Engines");
    // Track whether a default engine has already been registered for each engine
    // type so duplicate defaults can be reported as configuration errors.
    boolean searchHasDefault = false;
    boolean documentHasDefault = false;
    for (int i = 0; i < initParams.size(); i++) {
      if (initParams.getName(i).equals("engine")) {
        NamedList engineNL = (NamedList) initParams.getVal(i);
        String className = (String) engineNL.get("classname");
        if (className == null) {
          // Fall back to the Carrot2-based engine when no class is configured.
          className = CarrotClusteringEngine.class.getName();
        }
        SolrResourceLoader loader = core.getResourceLoader();
        ClusteringEngine clusterer = (ClusteringEngine) loader.newInstance(className);
        if (clusterer != null) {
          String name = clusterer.init(engineNL, core);
          if (name != null) {
            boolean isDefault = name.equals(ClusteringEngine.DEFAULT_ENGINE_NAME);
            if (clusterer instanceof SearchClusteringEngine) {
              if (isDefault && !searchHasDefault) {
                searchHasDefault = true;
              } else if (isDefault && searchHasDefault) {
                // FIX: this branch detects a duplicate default engine, not a missing name.
                throw new RuntimeException(
                    "More than one search clustering engine with default name: " + engineNL);
              }
              searchClusteringEngines.put(name, (SearchClusteringEngine) clusterer);
            } else if (clusterer instanceof DocumentClusteringEngine) {
              if (isDefault && !documentHasDefault) {
                // FIX: was erroneously setting searchHasDefault in the document branch,
                // which both broke document-default tracking and could block a later
                // unnamed search engine from becoming the default.
                documentHasDefault = true;
              } else if (isDefault && documentHasDefault) {
                throw new RuntimeException(
                    "More than one document clustering engine with default name: " + engineNL);
              }
              documentClusteringEngines.put(name, (DocumentClusteringEngine) clusterer);
            }
          } else {
            // Engine did not report a name: register it as the default of its type.
            // Only one unnamed engine per type is allowed.
            if (clusterer instanceof SearchClusteringEngine && !searchHasDefault) {
              searchClusteringEngines.put(
                  ClusteringEngine.DEFAULT_ENGINE_NAME, (SearchClusteringEngine) clusterer);
              searchHasDefault = true;
            } else if (clusterer instanceof DocumentClusteringEngine && !documentHasDefault) {
              documentClusteringEngines.put(
                  ClusteringEngine.DEFAULT_ENGINE_NAME, (DocumentClusteringEngine) clusterer);
              documentHasDefault = true;
            } else {
              throw new RuntimeException("More than one engine is missing name: " + engineNL);
            }
          }
        }
      }
    }
    log.info("Finished Initializing Clustering Engines");
  }
}
// ---- Example #2 (separator from the original source listing) ----
 /**
  * Constructs a schema using the specified resource name and stream. If the stream is null, the
  * resource loader will load the schema resource by name.
  *
  * @param path the schema resource path, resolved by the resource loader
  * @param luceneMatchVersion the Lucene compatibility version used when parsing the schema
  * @throws FileNotFoundException if the schema resource cannot be located
  * @see SolrResourceLoader#openSchema By default, this follows the normal config path directory
  *     searching rules.
  * @see Config#openResource
  */
 public SubIndexSchema(final String path, final Version luceneMatchVersion)
     throws FileNotFoundException {
   this.luceneMatchVersion = luceneMatchVersion;
   // important - Get the class loader of one of the SIREn classes as parent
   // class loader, otherwise it is not possible to find the SIREn classes
   loader = new SolrResourceLoader(null, SubTextField.class.getClassLoader());
   final InputStream lis = loader.openSchema(path);
   this.readSchema(lis);
   // important - allows filters to complete loading.
   // For example, StopWordFilterFactory will load the stopwords list at this
   // point
   loader.inform(loader);
 }
  /**
   * Creates a new StorageIO instance for a Solr core, taking into account whether the core is
   * running in cloud mode as well as initArgs.
   *
   * @param collection the collection name, used to resolve the ZooKeeper config name in cloud mode
   * @param resourceLoader loader used to detect cloud mode and to instantiate a custom StorageIO
   * @param initArgs init arguments; may supply a custom StorageIO class and/or a storage dir
   * @return a configured StorageIO (custom class, ZooKeeper-backed, or local file storage)
   */
  public static StorageIO newStorageIO(
      String collection, SolrResourceLoader resourceLoader, NamedList<String> initArgs) {
    StorageIO storageIO = null;

    SolrZkClient zkClient = null;
    String zkConfigName = null;
    if (resourceLoader instanceof ZkSolrResourceLoader) {
      zkClient = ((ZkSolrResourceLoader) resourceLoader).getZkController().getZkClient();
      try {
        zkConfigName =
            ((ZkSolrResourceLoader) resourceLoader)
                .getZkController()
                .getZkStateReader()
                .readConfigName(collection);
      } catch (Exception e) {
        log.error(
            "Failed to get config name for collection {} due to: {}", collection, e.toString());
      }
      if (zkConfigName == null) {
        // Without a config name we cannot compute the znode base path below.
        throw new SolrException(
            ErrorCode.SERVER_ERROR, "Could not find config name for collection:" + collection);
      }
    }

    if (initArgs.get(STORAGE_IO_CLASS_INIT_ARG) != null) {
      // An explicitly configured StorageIO implementation wins over the defaults below.
      storageIO =
          resourceLoader.newInstance(initArgs.get(STORAGE_IO_CLASS_INIT_ARG), StorageIO.class);
    } else {
      if (zkClient != null) {
        String znodeBase = "/configs/" + zkConfigName;
        // FIX: use parameterized SLF4J logging instead of string concatenation.
        log.info(
            "Setting up ZooKeeper-based storage for the RestManager with znodeBase: {}", znodeBase);
        storageIO = new ManagedResourceStorage.ZooKeeperStorageIO(zkClient, znodeBase);
      } else {
        storageIO = new FileStorageIO();
      }
    }

    if (storageIO instanceof FileStorageIO) {
      // using local fs, if storageDir is not set in the solrconfig.xml, assume the configDir for
      // the core
      if (initArgs.get(STORAGE_DIR_INIT_ARG) == null) {
        initArgs.add(STORAGE_DIR_INIT_ARG, resourceLoader.getConfigDir());
      }
    }

    storageIO.configure(resourceLoader, initArgs);

    return storageIO;
  }
  /**
   * This constructor is designed to make it easy for JNI embedded applications to setup the entire
   * solr environment with a simple interface. It takes three parameters: <code>instanceDir:</code>
   * The solr instance directory. If null, it will check the standard places first
   * (JNDI,properties,"solr" directory) <code>dataDir:</code> where the index is stored. <code>
   * loggingPath:</code> Path to a java.util.logging.config.file. If the path represents an absolute
   * path or is relative to the CWD, it will use that. Next it will try a path relative to the
   * instanceDir. If none of these files exist, it will error.
   */
  public DirectSolrConnection(String instanceDir, String dataDir, String loggingPath) {
    // If a loggingPath is specified, try using that (this needs to happen first)
    if (loggingPath != null) {
      File loggingConfig = new File(loggingPath);
      // An absolute or CWD-relative path takes precedence; otherwise resolve against instanceDir.
      if (!loggingConfig.exists() && instanceDir != null) {
        loggingConfig = new File(new File(instanceDir), loggingPath);
      }
      if (loggingConfig.exists()) {
        System.setProperty("java.util.logging.config.file", loggingConfig.getAbsolutePath());
      } else {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR, "can not find logging file: " + loggingConfig);
      }
    }

    if (instanceDir == null) {
      // Fall back to the standard instance-dir lookup (JNDI, properties, "solr" directory).
      instanceDir = SolrResourceLoader.locateInstanceDir();
    }

    // Initialize
    try {
      // NOTE(review): any failure in this sequence (config parse, schema parse, core creation)
      // is wrapped in a RuntimeException below; the partially-built CoreContainer is discarded.
      CoreContainer cores = new CoreContainer(new SolrResourceLoader(instanceDir));
      SolrConfig solrConfig = new SolrConfig(instanceDir, SolrConfig.DEFAULT_CONF_FILE, null);
      CoreDescriptor dcore =
          new CoreDescriptor(cores, "", solrConfig.getResourceLoader().getInstanceDir());
      IndexSchema indexSchema = new IndexSchema(solrConfig, instanceDir + "/conf/schema.xml", null);
      core = new SolrCore(null, dataDir, solrConfig, indexSchema, dcore);
      // Register under the default (empty) core name.
      cores.register("", core, false);
      parser = new SolrRequestParsers(solrConfig);
    } catch (Exception ee) {
      throw new RuntimeException(ee);
    }
  }
// ---- Example #5 (separator from the original source listing) ----
  /**
   * Resolves the MMSeg dictionary to use. A relative path is anchored at the Solr instance
   * directory when a SolrResourceLoader is available; a null path yields the built-in default
   * dictionary.
   */
  public static MMSegDictionary getDict(String dicPath, ResourceLoader loader) {
    if (dicPath == null) {
      return MMSegDictionary.getInstance();
    }
    File dicFile = new File(dicPath);
    if (!dicFile.isAbsolute() && loader instanceof SolrResourceLoader) {
      // relative directory: resolve it against the Solr instance dir
      String resolved = ((SolrResourceLoader) loader).getInstanceDir() + dicPath;
      dicFile = new File(resolved);
    }
    return MMSegDictionary.getInstance(dicFile);
  }
 // TODO: Remove for 5.x, this should fail when we don't have a real solr.xml file after we take
 // out the remove
 // the hard-coded default from ConifgSolrXmlOld
 @Test
 public void testHardCodedSolrXml() throws IOException {
   SolrResourceLoader loader = null;
   final File solrHome =
       new File(
           TEMP_DIR, SolrXmlInZkTest.getClassName() + File.separator + "testHardCodedSolrXml");
   try {
     loader = new SolrResourceLoader(solrHome.getAbsolutePath());
     ConfigSolr.fromSolrHome(loader, solrHome.getAbsolutePath());
   } catch (Exception e) {
     // FIX: include the actual exception so a regression is diagnosable from test output
     // instead of being silently swallowed.
     fail(
         "Should NOT have thrown any exception here, solr.xml should have been received from the hard-coded string: "
             + e);
   } finally {
     // FIX: loader stays null if the SolrResourceLoader constructor itself threw; guard
     // against an NPE in finally that would mask the original failure.
     if (loader != null) {
       loader.close();
     }
   }
 }
  /**
   * Initializes all configured clustering engines, splitting them into search-results and
   * document clustering maps, then selects a default engine for each type.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void inform(SolrCore core) {
    if (initParams != null) {
      log.info("Initializing Clustering Engines");

      // Our target list of engines, split into search-results and document clustering.
      SolrResourceLoader loader = core.getResourceLoader();

      for (Map.Entry<String, Object> entry : initParams) {
        if ("engine".equals(entry.getKey())) {
          NamedList<Object> engineInitParams = (NamedList<Object>) entry.getValue();

          // Fall back to the Carrot2-based engine when no class is configured.
          String engineClassName =
              StringUtils.defaultIfBlank(
                  (String) engineInitParams.get("classname"),
                  CarrotClusteringEngine.class.getName());

          // Instantiate the clustering engine and split to appropriate map.
          final ClusteringEngine engine =
              loader.newInstance(engineClassName, ClusteringEngine.class);
          final String name = StringUtils.defaultIfBlank(engine.init(engineInitParams, core), "");
          final ClusteringEngine previousEntry;
          if (engine instanceof SearchClusteringEngine) {
            previousEntry = searchClusteringEngines.put(name, (SearchClusteringEngine) engine);
          } else if (engine instanceof DocumentClusteringEngine) {
            previousEntry = documentClusteringEngines.put(name, (DocumentClusteringEngine) engine);
          } else {
            // FIX: use parameterized SLF4J logging instead of string concatenation.
            log.warn("Unknown type of a clustering engine for class: {}", engineClassName);
            continue;
          }
          if (previousEntry != null) {
            // FIX: parameterized logging here as well.
            log.warn("Duplicate clustering engine component named '{}'.", name);
          }
        }
      }

      // Set up the default engine key for both types of engines.
      setupDefaultEngine("search results clustering", searchClusteringEngines);
      setupDefaultEngine("document clustering", documentClusteringEngines);

      log.info("Finished Initializing Clustering Engines");
    }
  }
  /**
   * Loads the spell-check dictionary from an external file. When a field type is configured, the
   * source lines are analyzed with that type's analyzer into an in-memory index and wrapped in a
   * HighFrequencyDictionary; otherwise the file is used directly as a plain-text dictionary.
   */
  @SuppressWarnings("unchecked")
  private void loadExternalFileDictionary(IndexSchema schema, SolrResourceLoader loader) {
    try {

      // Get the field's analyzer
      if (fieldTypeName != null && schema.getFieldTypeNoEx(fieldTypeName) != null) {
        FieldType fieldType = schema.getFieldTypes().get(fieldTypeName);
        // Do index-time analysis using the given fieldType's analyzer
        RAMDirectory ramDir = new RAMDirectory();
        IndexWriter writer =
            new IndexWriter(
                ramDir, fieldType.getAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
        try {
          writer.setMergeFactor(300);
          writer.setMaxBufferedDocs(150);

          List<String> lines = loader.getLines(sourceLocation, characterEncoding);

          for (String s : lines) {
            Document d = new Document();
            d.add(new Field(WORD_FIELD_NAME, s, Field.Store.NO, Field.Index.TOKENIZED));
            writer.addDocument(d);
          }
          writer.optimize();
        } finally {
          // FIX: close the writer even if reading or indexing fails, so the writer's
          // resources are not leaked when the IOException is caught below.
          writer.close();
        }

        dictionary = new HighFrequencyDictionary(IndexReader.open(ramDir), WORD_FIELD_NAME, 0.0f);
      } else {
        // check if character encoding is defined
        if (characterEncoding == null) {
          dictionary = new PlainTextDictionary(loader.openResource(sourceLocation));
        } else {
          dictionary =
              new PlainTextDictionary(
                  new InputStreamReader(loader.openResource(sourceLocation), characterEncoding));
        }
      }

    } catch (IOException e) {
      log.error("Unable to load spellings", e);
    }
  }
  /**
   * Verifies that each live core's instance directory matches the expected location under the
   * Jetty Solr home, and (for new-style layouts) that a core.properties file is present.
   */
  private void checkInstanceDirs(JettySolrRunner jetty) {
    CoreContainer container =
        ((SolrDispatchFilter) jetty.getDispatchFilter().getFilter()).getCores();
    for (SolrCore core : container.getCores()) {
      String instanceDir = (String) core.getStatistics().get("instanceDir");
      if (!oldStyleSolrXml) {
        // New-style layouts must carry a core.properties file in the instance dir.
        File coreProps = new File(instanceDir, "core.properties");
        assertTrue("Could not find expected core.properties file", coreProps.exists());
      }

      String expectedPath =
          new File(
                  SolrResourceLoader.normalizeDir(
                      jetty.getSolrHome() + File.separator + core.getName()))
              .getAbsolutePath();
      String actualPath =
          new File(SolrResourceLoader.normalizeDir(instanceDir)).getAbsolutePath();
      assertEquals(expectedPath, actualPath);
    }
  }
    /**
     * Applies a batch of config-overlay commands and persists the resulting overlay: to ZooKeeper
     * in cloud mode (watchers trigger the reload) or to the local config dir otherwise (followed
     * by an explicit core reload). If any command reported an error, the errors are returned in
     * the response and nothing is persisted.
     */
    private void handleCommands(List<CommandOperation> ops, ConfigOverlay overlay)
        throws IOException {
      for (CommandOperation op : ops) {
        switch (op.name) {
          case SET_PROPERTY:
            overlay = applySetProp(op, overlay);
            break;
          case UNSET_PROPERTY:
            overlay = applyUnset(op, overlay);
            break;
          case SET_USER_PROPERTY:
            overlay = applySetUserProp(op, overlay);
            break;
          case UNSET_USER_PROPERTY:
            overlay = applyUnsetUserProp(op, overlay);
            break;
          case UPDATE_REQHANDLER:
          case CREATE_REQHANDLER:
            overlay = applyRequestHandler(op, overlay);
            break;
          case DELETE_REQHANDLER:
            overlay = applyDeleteHandler(op, overlay);
            break;
        }
      }
      // FIX: avoid the raw List type; the element type is opaque here, so use a wildcard.
      List<?> errs = CommandOperation.captureErrors(ops);
      if (!errs.isEmpty()) {
        resp.add(CommandOperation.ERR_MSGS, errs);
        return;
      }

      SolrResourceLoader loader = req.getCore().getResourceLoader();
      if (loader instanceof ZkSolrResourceLoader) {
        // Cloud mode: persist the overlay znode; ZK watchers handle the core reload.
        ZkController.persistConfigResourceToZooKeeper(
            loader,
            overlay.getZnodeVersion(),
            ConfigOverlay.RESOURCE_NAME,
            overlay.toByteArray(),
            true);

      } else {
        // Standalone mode: persist to disk and reload the core explicitly.
        SolrResourceLoader.persistConfLocally(
            loader, ConfigOverlay.RESOURCE_NAME, overlay.toByteArray());
        req.getCore().getCoreDescriptor().getCoreContainer().reload(req.getCore().getName());
      }
    }
  /**
   * A trick to add resources to a classpath so that we can run tests inside the development dir,
   * but not necessarily install and compile the whole solr distribution.
   *
   * <p>We cannot guarantee which resource will be loaded first if it is present in both locations.
   * So a warning is emitted. Also, we are adding the default Solr example/solr/conf
   *
   * <p>This method, if run by a test, should be called from inside getSchemaFile() because at that
   * stage the instance already contains a config
   *
   * @param loader the resource loader whose (URL) class loader is extended
   * @param paths files or directories to make visible; a file argument contributes its parent dir
   */
  public static void makeResourcesVisible(SolrResourceLoader loader, String... paths) {
    try {
      URLClassLoader innerLoader = (URLClassLoader) loader.getClassLoader();
      // URLClassLoader.addURL is protected; open it up reflectively for this dev-only trick.
      // FIX: use Class<?>/varargs instead of raw Class[] and a misleadingly named local.
      Method addUrl = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
      addUrl.setAccessible(true);

      for (String p : paths) {
        File f = new File(p);
        f = f.isDirectory() ? f : f.getParentFile();
        addUrl.invoke(innerLoader, f.toURI().toURL());
        System.err.println("MontyDevel warning - adding resource path: " + f);
        System.err.println(
            "If you encounter strange errors, then first check for duplicate files!!!");
      }
    } catch (Exception e) {
      // Dev-only helper: report and continue rather than failing the test run.
      e.printStackTrace();
    }
  }
  /**
   * Determines whether secure communications must be enforced by inspecting the
   * alfresco.secureComms setting of every active core under the Solr home directory.
   *
   * @return true when secureComms is configured to a value other than "none"; false when it is
   *     "none" or not configured at all
   * @throws IOException when the cores disagree on the alfresco.secureComms value
   */
  @Override
  protected boolean checkEnforce(ServletContext context) throws IOException {
    /*
     * Rely on the SolrResourceLoader to locate the solr home directory.
     */

    int httpsPort = getHttpsPort();

    if (httpsPort > -1) {
      setHttpsPort(httpsPort);
    }

    String solrHome = SolrResourceLoader.locateSolrHome();

    if (logger.isDebugEnabled()) {
      logger.debug("solrHome:" + solrHome);
    }

    /*
     * Find the active cores.
     */
    // FIX: use parameterized collections instead of raw types.
    List<File> cores = new ArrayList<File>();
    findCores(new File(solrHome), cores);

    /*
     * Get the alfresco.secureComms value for each core.
     */
    Set<String> secureCommsSet = new HashSet<String>();
    for (File core : cores) {
      collectSecureComms(core, secureCommsSet);
    }

    /*
     * alfresco.secureComms values should be in sync for each core
     */
    if (secureCommsSet.size() > 1) {
      StringBuilder buf = new StringBuilder();
      for (String s : secureCommsSet) {
        if (buf.length() > 0) {
          buf.append(" | ");
        }
        buf.append(s);
      }

      // FIX: "More then" -> "More than" in the error message.
      throw new IOException(
          "More than one distinct value found for alfresco.secureComms:"
              + buf
              + ". All alfresco.secureComms values must be set to the same value.");
    }

    if (secureCommsSet.isEmpty()) {
      // No secureComms were found.
      return false;
    }

    String secureComms = secureCommsSet.iterator().next();

    if (logger.isDebugEnabled()) {
      logger.debug("secureComms:" + secureComms);
    }

    // Enforce unless explicitly disabled.
    return !"none".equals(secureComms);
  }
// ---- Example #13 (separator from the original source listing) ----
  //
  // <analyzer><tokenizer class="...."/><tokenizer class="...." arg="....">
  //
  //
  /**
   * Reads an {@code <analyzer>} schema node and builds the corresponding Analyzer: either a
   * concrete Analyzer class (versioned when a ctor taking Version exists), or a TokenizerChain
   * assembled from the configured charFilter/tokenizer/filter factories.
   */
  private Analyzer readAnalyzer(final Node node) throws XPathExpressionException {
    // parent node used to be passed in as "fieldtype"
    // if (!fieldtype.hasChildNodes()) return null;
    // Node node = DOMUtil.getChild(fieldtype,"analyzer");

    if (node == null) return null;
    final NamedNodeMap attrs = node.getAttributes();
    final String analyzerName = DOMUtil.getAttr(attrs, "class");
    if (analyzerName != null) {
      // No need to be core-aware as Analyzers are not in the core-aware list
      final Class<? extends Analyzer> clazz =
          loader.findClass(analyzerName).asSubclass(Analyzer.class);
      try {
        try {
          // first try to use a ctor with version parameter (needed for many new Analyzers that have
          // no default one anymore)
          final Constructor<? extends Analyzer> cnstr = clazz.getConstructor(Version.class);
          final String matchVersionStr = DOMUtil.getAttr(attrs, LUCENE_MATCH_VERSION_PARAM);
          final Version luceneMatchVersion =
              (matchVersionStr == null)
                  ? this.luceneMatchVersion
                  : Config.parseLuceneVersionString(matchVersionStr);
          if (luceneMatchVersion == null) {
            throw new SolrException(
                SolrException.ErrorCode.SERVER_ERROR,
                "Configuration Error: Analyzer '"
                    + clazz.getName()
                    + "' needs a 'luceneMatchVersion' parameter");
          }
          return cnstr.newInstance(luceneMatchVersion);
        } catch (final NoSuchMethodException nsme) {
          // otherwise use default ctor
          return clazz.newInstance();
        }
      } catch (final Exception e) {
        // FIX: preserve the original exception as the cause instead of discarding it, so
        // configuration failures remain debuggable.
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR, "Cannot load analyzer: " + analyzerName, e);
      }
    }

    final XPath xpath = XPathFactory.newInstance().newXPath();

    // Load the CharFilters
    // --------------------------------------------------------------------------------
    final ArrayList<CharFilterFactory> charFilters = new ArrayList<CharFilterFactory>();
    final AbstractPluginLoader<CharFilterFactory> charFilterLoader =
        new AbstractPluginLoader<CharFilterFactory>(
            "[schema.xml] analyzer/charFilter", false, false) {
          @Override
          protected void init(final CharFilterFactory plugin, final Node node) throws Exception {
            if (plugin != null) {
              final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "class");
              // copy the luceneMatchVersion from config, if not set
              if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM))
                params.put(LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion.toString());
              plugin.init(params);
              charFilters.add(plugin);
            }
          }

          @Override
          protected CharFilterFactory register(final String name, final CharFilterFactory plugin)
              throws Exception {
            return null; // used for map registration
          }
        };
    charFilterLoader.load(
        loader, (NodeList) xpath.evaluate("./charFilter", node, XPathConstants.NODESET));

    // Load the Tokenizer
    // Although an analyzer only allows a single Tokenizer, we load a list to make sure
    // the configuration is ok
    // --------------------------------------------------------------------------------
    final ArrayList<TokenizerFactory> tokenizers = new ArrayList<TokenizerFactory>(1);
    final AbstractPluginLoader<TokenizerFactory> tokenizerLoader =
        new AbstractPluginLoader<TokenizerFactory>(
            "[schema.xml] analyzer/tokenizer", false, false) {
          @Override
          protected void init(final TokenizerFactory plugin, final Node node) throws Exception {
            if (!tokenizers.isEmpty()) {
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "The schema defines multiple tokenizers for: " + node);
            }
            final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "class");
            // copy the luceneMatchVersion from config, if not set
            if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM))
              params.put(LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion.toString());
            plugin.init(params);
            tokenizers.add(plugin);
          }

          @Override
          protected TokenizerFactory register(final String name, final TokenizerFactory plugin)
              throws Exception {
            return null; // used for map registration
          }
        };
    tokenizerLoader.load(
        loader, (NodeList) xpath.evaluate("./tokenizer", node, XPathConstants.NODESET));

    // Make sure something was loaded
    if (tokenizers.isEmpty()) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR,
          "analyzer without class or tokenizer & filter list");
    }

    // Load the Filters
    // --------------------------------------------------------------------------------
    final ArrayList<TokenFilterFactory> filters = new ArrayList<TokenFilterFactory>();
    final AbstractPluginLoader<TokenFilterFactory> filterLoader =
        new AbstractPluginLoader<TokenFilterFactory>("[schema.xml] analyzer/filter", false, false) {
          @Override
          protected void init(final TokenFilterFactory plugin, final Node node) throws Exception {
            if (plugin != null) {
              final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "class");
              // copy the luceneMatchVersion from config, if not set
              if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM))
                params.put(LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion.toString());
              plugin.init(params);
              filters.add(plugin);
            }
          }

          @Override
          protected TokenFilterFactory register(final String name, final TokenFilterFactory plugin)
              throws Exception {
            return null; // used for map registration
          }
        };
    filterLoader.load(loader, (NodeList) xpath.evaluate("./filter", node, XPathConstants.NODESET));

    return new TokenizerChain(
        charFilters.toArray(new CharFilterFactory[charFilters.size()]),
        tokenizers.get(0),
        filters.toArray(new TokenFilterFactory[filters.size()]));
  } // FIX: removed stray ';' after the method body
    /**
     * Applies create/update/delete operations to the named request-parameter sets and persists the
     * result: to ZooKeeper in cloud mode, or to the local config directory otherwise. If any
     * command reported an error, the errors are returned in the response and nothing is persisted.
     */
    private void handleParams(ArrayList<CommandOperation> ops, RequestParams params) {
      for (CommandOperation op : ops) {
        switch (op.name) {
          case CREATE:
          case UPDATE:
            {
              Map<String, Object> map = op.getDataMap();
              if (op.hasError()) break;

              for (Map.Entry<String, Object> entry : map.entrySet()) {

                Map val = null;
                String key = entry.getKey();
                if (key == null || key.trim().isEmpty()) {
                  op.addError("null key ");
                  continue;
                }
                key = key.trim();
                if (!validName(key)) {
                  op.addError(
                      MessageFormat.format(
                          "''{0}'' name should only have chars [a-zA-Z_-.0-9] ", key));
                  continue;
                }

                try {
                  val = (Map) entry.getValue();
                } catch (Exception e1) {
                  op.addError("invalid params for key : " + key);
                  continue;
                }

                if (val.containsKey("")) {
                  op.addError("Empty keys are not allowed in params");
                  continue;
                }

                MapSolrParams old = params.getParams(key);
                if (op.name.equals(UPDATE)) {
                  // FIX: guard against updating a non-existent param set; old.getMap() would
                  // otherwise throw a NullPointerException.
                  if (old == null) {
                    op.addError(
                        MessageFormat.format("can''t update. No such params ''{0}'' exist", key));
                    continue;
                  }
                  Map<String, Object> merged = new LinkedHashMap<String, Object>(old.getMap());
                  merged.putAll(val);
                  val = merged;
                }
                params = params.setParams(key, val);
              }
              break;
            }
          case "delete":
            {
              List<String> name = op.getStrs(CommandOperation.ROOT_OBJ);
              if (op.hasError()) break;
              for (String s : name) {
                if (params.getParams(s) == null) {
                  // FIX: MessageFormat treats a lone apostrophe ("can't") as the start of a
                  // quoted section, which prevented {0} from being substituted; escape it.
                  op.addError(
                      MessageFormat.format("can''t delete. No such params ''{0}'' exist", s));
                }
                params = params.setParams(s, null);
              }
            }
        }
      }

      // FIX: avoid the raw List type.
      List<?> errs = CommandOperation.captureErrors(ops);
      if (!errs.isEmpty()) {
        resp.add(CommandOperation.ERR_MSGS, errs);
        return;
      }

      SolrResourceLoader loader = req.getCore().getResourceLoader();
      if (loader instanceof ZkSolrResourceLoader) {
        ZkController.persistConfigResourceToZooKeeper(
            loader, params.getZnodeVersion(), RequestParams.RESOURCE, params.toByteArray(), true);

      } else {
        // FIX: was persisting under ConfigOverlay.RESOURCE_NAME, which would clobber the config
        // overlay file; use RequestParams.RESOURCE to match the ZooKeeper branch above.
        SolrResourceLoader.persistConfLocally(loader, RequestParams.RESOURCE, params.toByteArray());
        req.getCore().getSolrConfig().refreshRequestParams();
      }
    }