Example #1
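 // Looks up this core's Replica in the current cluster state and returns its
 // properties; falls back to an empty map when the replica cannot be found.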
 private Map<String, Object> getReplicaProps(ZkController zkController, SolrCore core) {
   final String collection = core.getCoreDescriptor().getCloudDescriptor().getCollectionName();
   Replica replica =
       zkController
           .getClusterState()
           .getReplica(collection, zkController.getCoreNodeName(core.getCoreDescriptor()));
   if (replica != null) {
     return replica.getProperties();
   }
   return Collections.emptyMap();
 }
  // get the elevation map from the data dir
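   // A map loaded from the conf dir (or ZooKeeper) is cached under the null key;
   // maps loaded from the data dir are cached per IndexReader.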
  Map<String, ElevationObj> getElevationMap(IndexReader reader, SolrCore core) throws Exception {
    synchronized (elevationCache) {
      Map<String, ElevationObj> map = elevationCache.get(null);
      if (map != null) return map;

      map = elevationCache.get(reader);
      if (map == null) {
        String f = initArgs.get(CONFIG_FILE);
        if (f == null) {
          throw new SolrException(
              SolrException.ErrorCode.SERVER_ERROR,
              "QueryElevationComponent must specify argument: " + CONFIG_FILE);
        }
        log.info("Loading QueryElevation from data dir: " + f);

        Config cfg;

        ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
        if (zkController != null) {
          cfg = new Config(core.getResourceLoader(), f, null, null);
        } else {
          InputStream is = VersionedFile.getLatestFile(core.getDataDir(), f);
          cfg = new Config(core.getResourceLoader(), f, new InputSource(is), null);
        }

        map = loadElevationMap(cfg);
        elevationCache.put(reader, map);
      }
      return map;
    }
  }
Example #3
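  // Peer-syncs this core (the leader candidate) with the other replicas of its
  // shard and, if that succeeds, asks the replicas to sync back to this node.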
  private boolean syncReplicas(
      ZkController zkController,
      SolrCore core,
      ZkNodeProps leaderProps,
      boolean peerSyncOnlyWithActive) {
    boolean success = false;
    CloudDescriptor cloudDesc = core.getCoreDescriptor().getCloudDescriptor();
    String collection = cloudDesc.getCollectionName();
    String shardId = cloudDesc.getShardId();

    if (isClosed) {
      log.info("We have been closed, won't sync with replicas");
      return false;
    }

    // first sync ourselves - we are the potential leader after all
    try {
      success =
          syncWithReplicas(
              zkController, core, leaderProps, collection, shardId, peerSyncOnlyWithActive);
    } catch (Exception e) {
      SolrException.log(log, "Sync Failed", e);
    }
    try {
      if (isClosed) {
        log.info("We have been closed, won't attempt to sync replicas back to leader");
        return false;
      }

      if (success) {
        log.info("Sync Success - now sync replicas to me");

        syncToMe(zkController, collection, shardId, leaderProps, core.getCoreDescriptor());

      } else {
        log.info("Leader's attempt to sync with shard failed, moving to the next candidate");
        // lets see who seems ahead...
      }

    } catch (Exception e) {
      SolrException.log(log, "Sync Failed", e);
    }

    return success;
  }
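  // Builds the config-update listener: it compares the cached znode versions of
  // solrconfig, the config overlay, and the managed schema with what is currently
  // stored in ZooKeeper, and reloads the core if any of them has changed.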
  private static Runnable getListener(SolrCore core, ZkSolrResourceLoader zkSolrResourceLoader) {
    final String coreName = core.getName();
    final CoreContainer cc = core.getCoreDescriptor().getCoreContainer();
    final String overlayPath =
        zkSolrResourceLoader.getConfigSetZkPath() + "/" + ConfigOverlay.RESOURCE_NAME;
    final String solrConfigPath =
        zkSolrResourceLoader.getConfigSetZkPath() + "/" + core.getSolrConfig().getName();
    String schemaRes = null;
    if (core.getLatestSchema().isMutable()
        && core.getLatestSchema() instanceof ManagedIndexSchema) {
      ManagedIndexSchema mis = (ManagedIndexSchema) core.getLatestSchema();
      schemaRes = mis.getResourceName();
    }
    final String managedSchemaResourcePath =
        schemaRes == null ? null : zkSolrResourceLoader.getConfigSetZkPath() + "/" + schemaRes;
    return new Runnable() {
      @Override
      public void run() {
        log.info("config update listener called for core {}", coreName);
        SolrZkClient zkClient = cc.getZkController().getZkClient();
        int solrConfigVersion, overlayVersion, managedSchemaVersion = 0;
        SolrConfig cfg = null;
        try (SolrCore core = cc.getCore(coreName)) {
          if (core.isClosed()) return;
          cfg = core.getSolrConfig();
          overlayVersion = core.getSolrConfig().getOverlay().getZnodeVersion();
          solrConfigVersion = core.getSolrConfig().getZnodeVersion();
          if (managedSchemaResourcePath != null) {
            managedSchemaVersion =
                ((ManagedIndexSchema) core.getLatestSchema()).getSchemaZkVersion();
          }
        }
        if (cfg != null) {
          cfg.refreshRequestParams();
        }

        if (checkStale(zkClient, overlayPath, overlayVersion)
            || checkStale(zkClient, solrConfigPath, solrConfigVersion)
            || checkStale(zkClient, managedSchemaResourcePath, managedSchemaVersion)) {
          log.info("core reload {}", coreName);
          cc.reload(coreName);
        }
      }
    };
  }
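The listener above relies on a checkStale helper that is not part of this listing. Purely as an illustration, a minimal version-comparison helper could look roughly like the sketch below; it assumes SolrZkClient.exists(path, watcher, retryOnConnLoss), org.apache.zookeeper.data.Stat / KeeperException, and a class-level log field, and it is not the actual Solr implementation.

  // Sketch only: a znode is considered stale when ZooKeeper holds a newer
  // version than the one the core was loaded with.
  private static boolean checkStale(SolrZkClient zkClient, String zkPath, int currentVersion) {
    if (zkPath == null) return false; // e.g. no managed schema path configured
    try {
      Stat stat = zkClient.exists(zkPath, null, true);
      if (stat == null) {
        // znode is gone; treat as stale only if we previously held a real version
        return currentVersion > -1;
      }
      // a newer version in ZooKeeper means the locally cached config is stale
      return stat.getVersion() > currentVersion;
    } catch (KeeperException e) {
      log.warn("Unable to check version of " + zkPath, e);
      return false;
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      return false;
    }
  }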
Example #5
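  // Peer-syncs this core with the other replicas of its shard; returns true when
  // the sync succeeds or there are no replicas to sync with.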
  private boolean syncWithReplicas(
      ZkController zkController,
      SolrCore core,
      ZkNodeProps props,
      String collection,
      String shardId,
      boolean peerSyncOnlyWithActive) {
    List<ZkCoreNodeProps> nodes =
        zkController
            .getZkStateReader()
            .getReplicaProps(
                collection,
                shardId,
                core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName());

    if (nodes == null) {
      // I have no replicas
      return true;
    }

    List<String> syncWith = new ArrayList<>();
    for (ZkCoreNodeProps node : nodes) {
      syncWith.add(node.getCoreUrl());
    }

    // if we can't reach a replica for sync, we still consider the overall sync a success
    // TODO: as an assurance, we should still try and tell the sync nodes that we couldn't reach
    // to recover once more?
    PeerSync peerSync =
        new PeerSync(
            core,
            syncWith,
            core.getUpdateHandler().getUpdateLog().getNumRecordsToKeep(),
            true,
            true,
            peerSyncOnlyWithActive);
    return peerSync.sync();
  }
  @Test
  public void testCoreAdminHandler() throws Exception {
    final File workDir = createTempDir().toFile();

    final CoreContainer cores = h.getCoreContainer();

    final CoreAdminHandler admin = new CoreAdminHandler(cores);

    Path instDir;
    try (SolrCore template = cores.getCore("collection1")) {
      assertNotNull(template);
      instDir = template.getCoreDescriptor().getInstanceDir();
    }

    assertTrue("instDir doesn't exist: " + instDir, Files.exists(instDir));
    final File instPropFile = new File(workDir, "instProp");
    FileUtils.copyDirectory(instDir.toFile(), instPropFile);

    SolrQueryResponse resp = new SolrQueryResponse();
    // Sneaking in a test for using a bad core name
    try {
      admin.handleRequestBody(
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.CREATE.toString(),
              CoreAdminParams.INSTANCE_DIR,
              instPropFile.getAbsolutePath(),
              CoreAdminParams.NAME,
              "ugly$core=name"),
          resp);

    } catch (SolrException se) {
      assertTrue(
          "Expected error message for bad core name.", se.toString().contains("Invalid core"));
    }
    CoreDescriptor cd = cores.getCoreDescriptor("ugly$core=name");
    assertNull("Should NOT have added this core!", cd);

    // create a new core (using CoreAdminHandler) w/ properties

    admin.handleRequestBody(
        req(
            CoreAdminParams.ACTION,
            CoreAdminParams.CoreAdminAction.CREATE.toString(),
            CoreAdminParams.INSTANCE_DIR,
            instPropFile.getAbsolutePath(),
            CoreAdminParams.NAME,
            "props",
            CoreAdminParams.PROPERTY_PREFIX + "hoss",
            "man",
            CoreAdminParams.PROPERTY_PREFIX + "foo",
            "baz"),
        resp);
    assertNull("Exception on create", resp.getException());

    cd = cores.getCoreDescriptor("props");
    assertNotNull("Core not added!", cd);
    assertEquals(cd.getCoreProperty("hoss", null), "man");
    assertEquals(cd.getCoreProperty("foo", null), "baz");

    // attempt to create a bogus core and confirm failure
    ignoreException("Could not load config");
    try {
      resp = new SolrQueryResponse();
      admin.handleRequestBody(
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.CREATE.toString(),
              CoreAdminParams.NAME,
              "bogus_dir_core",
              CoreAdminParams.INSTANCE_DIR,
              "dir_does_not_exist_127896"),
          resp);
      fail("bogus collection created ok");
    } catch (SolrException e) {
      // :NOOP:
      // :TODO: CoreAdminHandler's exception messages are terrible, otherwise we could assert
      // something useful here
    }
    unIgnoreException("Could not load config");

    // check specifically for status of the failed core name
    resp = new SolrQueryResponse();
    admin.handleRequestBody(
        req(
            CoreAdminParams.ACTION,
            CoreAdminParams.CoreAdminAction.STATUS.toString(),
            CoreAdminParams.CORE,
            "bogus_dir_core"),
        resp);
    Map<String, Exception> failures = (Map<String, Exception>) resp.getValues().get("initFailures");
    assertNotNull("core failures is null", failures);

    NamedList<Object> status = (NamedList<Object>) resp.getValues().get("status");
    assertNotNull("core status is null", status);

    assertEquals("wrong number of core failures", 1, failures.size());
    Exception fail = failures.get("bogus_dir_core");
    assertNotNull("null failure for test core", fail);
    assertTrue(
        "init failure doesn't mention problem: " + fail.getCause().getMessage(),
        0 < fail.getCause().getMessage().indexOf("dir_does_not_exist"));

    assertEquals(
        "bogus_dir_core status isn't empty", 0, ((NamedList) status.get("bogus_dir_core")).size());

    // Try renaming the core, we should fail
    // First assert that the props core exists
    cd = cores.getCoreDescriptor("props");
    assertNotNull("Core disappeared!", cd);

    // now rename it something else just for kicks since we don't actually test this that I could
    // find.
    admin.handleRequestBody(
        req(
            CoreAdminParams.ACTION,
            CoreAdminParams.CoreAdminAction.RENAME.toString(),
            CoreAdminParams.CORE,
            "props",
            CoreAdminParams.OTHER,
            "rename_me"),
        resp);

    cd = cores.getCoreDescriptor("rename_me");
    assertNotNull("Core should have been renamed!", cd);

    // Rename it something bogus and see if you get an exception, the old core is still there and
    // the bogus one isn't
    try {
      admin.handleRequestBody(
          req(
              CoreAdminParams.ACTION,
              CoreAdminParams.CoreAdminAction.RENAME.toString(),
              CoreAdminParams.CORE,
              "rename_me",
              CoreAdminParams.OTHER,
              "bad$name"),
          resp);
    } catch (SolrException e) {
      // why the heck does create return a SolrException (admittedly wrapping an IAE)
      assertTrue(
          "Expected error message for bad core name.", e.getMessage().contains("Invalid core"));
    }

    cd = cores.getCoreDescriptor("bad$name");
    assertNull("Core should NOT exist!", cd);

    cd = cores.getCoreDescriptor("rename_me");
    assertNotNull("Core should have been renamed!", cd);

    // :TODO: because of SOLR-3665 we can't ask for status from all cores

  }
 /**
  * This will be called on each
  *
  * <ul>
  *   <li>update to the Component configuration (activate, deactivate)
  *   <li>update to the SolrCore
  * </ul>
  *
  * On any detected change it will update the registered EnhancementEngine.
  *
  * <p>This also initialises the FST configuration.
  *
  * @param reference the ServiceReference for the SolrServer, or <code>null</code> in case the
  *     service is no longer available.
  * @param server the SolrServer (or <code>null</code>)
  */
 protected void updateEngineRegistration(ServiceReference reference, SolrServer server) {
   log.info(" ... updateEngineRegistration for {}: {}", getClass().getSimpleName(), engineName);
   if (reference != null && server == null) {
     server = solrServerTracker.getService(reference);
   }
   if (reference == null && this.indexReference == null) {
     // unregisterEngine(); //unregister existing
     return; // and return
   }
   BundleContext bundleContext = this.bundleContext;
   synchronized (this) { // init one after the other in case of multiple calls
     SolrCore core;
     IndexConfiguration indexConfig; // the indexConfig build by this call
     try { // try to init - finally unregisterEngine
       if (bundleContext == null) { // already deactivated
         return; // NOTE: unregistering is done in finally block
       }
       if (reference != null) {
         if (reference.equals(this.solrServerReference)) {
           // use the current core
           core = solrCore;
         } else { // get the SolrCore from the EmbeddedSolrServer
           core = getSolrCore(server);
         }
       } else { // SolrCore not available
         core = null;
       }
       if (core == null) { // no SolrCore
         log.info("   - SolrCore {} present", this.solrCore == null ? "not yet" : "no longer");
         return; // NOTE: unregistering is done in finally block
       } // else - we do have a SolrCore
       // File fstDir = new File(dataDir,"fst");
       // now collect the FST configuration
       indexConfig = new IndexConfiguration(fstConfig, core, fieldEncoding);
       indexConfig.setTypeField(solrTypeField);
       indexConfig.setRankingField(solrRankingField);
       // set fields parsed in the activate method
       indexConfig.setExecutorService(fstCreatorService);
       indexConfig.setRedirectField(null); // TODO add support
       // NOTE: the FST config is processed even if the SolrCore has not changed
       //      because there might be config changes and/or new FST files in the
       //      FST directory of the SolrCore.
       indexConfig.setFstDirectory(getFstDirectory(core, fstFolder));
       // set the DocumentCacheFactory
       if (entityCacheSize > 0) {
         indexConfig.setEntityCacheManager(new FastLRUCacheManager(entityCacheSize));
       } // else no entityCache is used
       if (skipAltTokensConfig != null) {
         indexConfig.setSkipAltTokens(skipAltTokensConfig);
       }
       // create a new searcher for creating FSTs
       if (!indexConfig.activate()) {
         log.warn(
             "Processing of the FST configuration was not successfull "
                 + "for any language. See WARN level loggings for more details!");
         log.warn(
             "  ... FstLinkingEnigne wiht name {} will be registered but"
                 + "be inactive as there seam to be no data for linking available"
                 + "in the SolrCore {} (dir: {})",
             new Object[] {engineName, core.getName(), core.getCoreDescriptor().getInstanceDir()});
       } else { // some FST corpora initialised
         if (log.isInfoEnabled()) { // log the initialised languages
           Set<String> langSet = new HashSet<String>(indexConfig.getCorpusLanguages());
           if (langSet.remove(null)) { // replace the null for the default language
             langSet.add(""); // with an empty string
           }
           String[] langArray = langSet.toArray(new String[langSet.size()]);
           Arrays.sort(langArray, String.CASE_INSENSITIVE_ORDER);
           log.info(" ... initialised FST corpora for languages {}", Arrays.toString(langArray));
         }
       }
     } finally {
       // in any case (even an Exception) ensure that the current
       // engine registration is unregistered and the currently used
       // SolrCore is unregistered!
       unregisterEngine();
     }
     // check if we need to create some FST files
     for (CorpusInfo fstInfo : indexConfig.getCorpora()) {
       // check if the fst does not exist and the fstInfo allows creation
       if (!fstInfo.fst.exists() && fstInfo.allowCreation) {
         // create a task on the FST corpus creation service
         fstCreatorService.execute(new CorpusCreationTask(indexConfig, fstInfo));
       }
     }
     // set the default linking corpora
     String defaultLanguage = entityLinkerConfig.getDefaultLanguage();
     if (defaultLanguage == null) {
       defaultLanguage = ""; // FST uses an empty string for the default
     }
      CorpusInfo defaultCorpus = indexConfig.getCorpus(defaultLanguage);
      if (defaultCorpus != null) {
        log.info(
            " ... set '{}' as default FST Corpus: {}", defaultCorpus.language, defaultCorpus);
        indexConfig.setDefaultCorpus(defaultCorpus);
      } else {
        log.info("  ... no corpus for default language '{}' available", defaultLanguage);
     }
     // set the index configuration to the field;
     this.indexConfig = indexConfig;
     FstLinkingEngine engine =
         new FstLinkingEngine(engineName, indexConfig, textProcessingConfig, entityLinkerConfig);
     String[] services =
         new String[] {EnhancementEngine.class.getName(), ServiceProperties.class.getName()};
     log.info(" ... register {}: {}", engine.getClass().getSimpleName(), engineName);
     this.engineRegistration = bundleContext.registerService(services, engine, engineMetadata);
     this.solrServerReference = reference;
     this.solrCore = core;
   }
 }
Example #8
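 // Keeps a handle to the container's log watcher for later use by this component.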
 @Override
 public void inform(SolrCore core) {
   watcher = core.getCoreDescriptor().getCoreContainer().getLogging();
 }
  public void inform(SolrCore core) {

    /* The stream factory will always contain the zkUrl for the given collection
     * Adds default streams with their corresponding function names. These
     * defaults can be overridden or added to in the solrConfig in the stream
     * RequestHandler def. Example config override
     *  <lst name="streamFunctions">
     *    <str name="group">org.apache.solr.client.solrj.io.stream.ReducerStream</str>
     *    <str name="count">org.apache.solr.client.solrj.io.stream.RecordCountStream</str>
     *  </lst>
     * */

    String defaultCollection = null;
    String defaultZkhost = null;
    CoreContainer coreContainer = core.getCoreDescriptor().getCoreContainer();

    if (coreContainer.isZooKeeperAware()) {
      defaultCollection = core.getCoreDescriptor().getCollectionName();
      defaultZkhost =
          core.getCoreDescriptor().getCoreContainer().getZkController().getZkServerAddress();
      streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost);
    }

    streamFactory
        // streams
        .withFunctionName("search", CloudSolrStream.class)
        .withFunctionName("merge", MergeStream.class)
        .withFunctionName("unique", UniqueStream.class)
        .withFunctionName("top", RankStream.class)
        .withFunctionName("group", GroupOperation.class)
        .withFunctionName("reduce", ReducerStream.class)
        .withFunctionName("parallel", ParallelStream.class)
        .withFunctionName("rollup", RollupStream.class)
        .withFunctionName("stats", StatsStream.class)
        .withFunctionName("innerJoin", InnerJoinStream.class)
        .withFunctionName("leftOuterJoin", LeftOuterJoinStream.class)
        .withFunctionName("hashJoin", HashJoinStream.class)
        .withFunctionName("outerHashJoin", OuterHashJoinStream.class)
        .withFunctionName("facet", FacetStream.class)

        // metrics
        .withFunctionName("min", MinMetric.class)
        .withFunctionName("max", MaxMetric.class)
        .withFunctionName("avg", MeanMetric.class)
        .withFunctionName("sum", SumMetric.class)
        .withFunctionName("count", CountMetric.class);

    // This pulls all the overrides and additions from the config
    Object functionMappingsObj = initArgs.get("streamFunctions");
    if (null != functionMappingsObj) {
      NamedList<?> functionMappings = (NamedList<?>) functionMappingsObj;
      for (Entry<String, ?> functionMapping : functionMappings) {
        Class<?> clazz =
            core.getResourceLoader()
                .findClass((String) functionMapping.getValue(), Expressible.class);
        streamFactory.withFunctionName(functionMapping.getKey(), clazz);
      }
    }

    core.addCloseHook(
        new CloseHook() {
          @Override
          public void preClose(SolrCore core) {
            // To change body of implemented methods use File | Settings | File Templates.
          }

          @Override
          public void postClose(SolrCore core) {
            clientCache.close();
          }
        });
  }
  @Override
  public void inform(SolrCore core) {
    IndexSchema schema = core.getLatestSchema();
    String a = initArgs.get(FIELD_TYPE);
    if (a != null) {
      FieldType ft = schema.getFieldTypes().get(a);
      if (ft == null) {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            "Unknown FieldType: '" + a + "' used in QueryElevationComponent");
      }
      analyzer = ft.getQueryAnalyzer();
    }

    SchemaField sf = schema.getUniqueKeyField();
    if (sf == null) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR,
          "QueryElevationComponent requires the schema to have a uniqueKeyField.");
    }
    idSchemaFT = sf.getType();
    idField = sf.getName();
    // register the EditorialMarkerFactory
    String excludeName = initArgs.get(QueryElevationParams.EXCLUDE_MARKER_FIELD_NAME, "excluded");
    if (excludeName == null || excludeName.isEmpty()) {
      excludeName = "excluded";
    }
    ExcludedMarkerFactory excludedMarkerFactory = new ExcludedMarkerFactory();
    core.addTransformerFactory(excludeName, excludedMarkerFactory);
    ElevatedMarkerFactory elevatedMarkerFactory = new ElevatedMarkerFactory();
    String markerName = initArgs.get(QueryElevationParams.EDITORIAL_MARKER_FIELD_NAME, "elevated");
    if (markerName == null || markerName.isEmpty()) {
      markerName = "elevated";
    }
    core.addTransformerFactory(markerName, elevatedMarkerFactory);
    forceElevation = initArgs.getBool(QueryElevationParams.FORCE_ELEVATION, forceElevation);
    try {
      synchronized (elevationCache) {
        elevationCache.clear();
        String f = initArgs.get(CONFIG_FILE);
        if (f == null) {
          throw new SolrException(
              SolrException.ErrorCode.SERVER_ERROR,
              "QueryElevationComponent must specify argument: '"
                  + CONFIG_FILE
                  + "' -- path to elevate.xml");
        }
        boolean exists = false;

        // check if using ZooKeeper
        ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
        if (zkController != null) {
          // TODO : shouldn't have to keep reading the config name when it has been read before
          exists =
              zkController.configFileExists(
                  zkController
                      .getZkStateReader()
                      .readConfigName(
                          core.getCoreDescriptor().getCloudDescriptor().getCollectionName()),
                  f);
        } else {
          File fC = new File(core.getResourceLoader().getConfigDir(), f);
          File fD = new File(core.getDataDir(), f);
          if (fC.exists() == fD.exists()) {
            throw new SolrException(
                SolrException.ErrorCode.SERVER_ERROR,
                "QueryElevationComponent missing config file: '"
                    + f
                    + "\n"
                    + "either: "
                    + fC.getAbsolutePath()
                    + " or "
                    + fD.getAbsolutePath()
                    + " must exist, but not both.");
          }
          if (fC.exists()) {
            exists = true;
            log.info("Loading QueryElevation from: " + fC.getAbsolutePath());
            Config cfg = new Config(core.getResourceLoader(), f);
            elevationCache.put(null, loadElevationMap(cfg));
          }
        }
        // in other words, we think this is in the data dir, not the conf dir
        if (!exists) {
          // preload the first data
          RefCounted<SolrIndexSearcher> searchHolder = null;
          try {
            searchHolder = core.getNewestSearcher(false);
            IndexReader reader = searchHolder.get().getIndexReader();
            getElevationMap(reader, core);
          } finally {
            if (searchHolder != null) searchHolder.decref();
          }
        }
      }
    } catch (Exception ex) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Error initializing QueryElevationComponent.", ex);
    }
  }
Example #11
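  // A later variant of the stream handler's inform(): besides the base streams and
  // metrics it registers many more decorator streams and boolean/comparison
  // operations, sets a default zkHost, creates a ModelCache, and picks up
  // Expressible plugins declared in solrconfig.xml instead of "streamFunctions"
  // init args.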
  public void inform(SolrCore core) {

    /* The stream factory will always contain the zkUrl for the given collection
     * Adds default streams with their corresponding function names. These
     * defaults can be overridden or added to in the solrConfig in the stream
     * RequestHandler def. Example config override
     *  <lst name="streamFunctions">
     *    <str name="group">org.apache.solr.client.solrj.io.stream.ReducerStream</str>
     *    <str name="count">org.apache.solr.client.solrj.io.stream.RecordCountStream</str>
     *  </lst>
     * */

    String defaultCollection;
    String defaultZkhost;
    CoreContainer coreContainer = core.getCoreDescriptor().getCoreContainer();
    this.coreName = core.getName();

    if (coreContainer.isZooKeeperAware()) {
      defaultCollection = core.getCoreDescriptor().getCollectionName();
      defaultZkhost =
          core.getCoreDescriptor().getCoreContainer().getZkController().getZkServerAddress();
      streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost);
      streamFactory.withDefaultZkHost(defaultZkhost);
      modelCache = new ModelCache(250, defaultZkhost, clientCache);
    }

    streamFactory
        // source streams
        .withFunctionName("search", CloudSolrStream.class)
        .withFunctionName("facet", FacetStream.class)
        .withFunctionName("update", UpdateStream.class)
        .withFunctionName("jdbc", JDBCStream.class)
        .withFunctionName("topic", TopicStream.class)
        .withFunctionName("commit", CommitStream.class)
        .withFunctionName("random", RandomStream.class)

        // decorator streams
        .withFunctionName("merge", MergeStream.class)
        .withFunctionName("unique", UniqueStream.class)
        .withFunctionName("top", RankStream.class)
        .withFunctionName("group", GroupOperation.class)
        .withFunctionName("reduce", ReducerStream.class)
        .withFunctionName("parallel", ParallelStream.class)
        .withFunctionName("rollup", RollupStream.class)
        .withFunctionName("stats", StatsStream.class)
        .withFunctionName("innerJoin", InnerJoinStream.class)
        .withFunctionName("leftOuterJoin", LeftOuterJoinStream.class)
        .withFunctionName("hashJoin", HashJoinStream.class)
        .withFunctionName("outerHashJoin", OuterHashJoinStream.class)
        .withFunctionName("intersect", IntersectStream.class)
        .withFunctionName("complement", ComplementStream.class)
        .withFunctionName("sort", SortStream.class)
        .withFunctionName("train", TextLogitStream.class)
        .withFunctionName("features", FeaturesSelectionStream.class)
        .withFunctionName("daemon", DaemonStream.class)
        .withFunctionName("shortestPath", ShortestPathStream.class)
        .withFunctionName("gatherNodes", GatherNodesStream.class)
        .withFunctionName("nodes", GatherNodesStream.class)
        .withFunctionName("select", SelectStream.class)
        .withFunctionName("scoreNodes", ScoreNodesStream.class)
        .withFunctionName("model", ModelStream.class)
        .withFunctionName("classify", ClassifyStream.class)
        .withFunctionName("fetch", FetchStream.class)
        .withFunctionName("executor", ExecutorStream.class)
        .withFunctionName("null", NullStream.class)
        .withFunctionName("priority", PriorityStream.class)
        // metrics
        .withFunctionName("min", MinMetric.class)
        .withFunctionName("max", MaxMetric.class)
        .withFunctionName("avg", MeanMetric.class)
        .withFunctionName("sum", SumMetric.class)
        .withFunctionName("count", CountMetric.class)

        // tuple manipulation operations
        .withFunctionName("replace", ReplaceOperation.class)
        .withFunctionName("concat", ConcatOperation.class)

        // stream reduction operations
        .withFunctionName("group", GroupOperation.class)
        .withFunctionName("distinct", DistinctOperation.class)
        .withFunctionName("having", HavingStream.class)
        .withFunctionName("and", AndOperation.class)
        .withFunctionName("or", OrOperation.class)
        .withFunctionName("not", NotOperation.class)
        .withFunctionName("gt", GreaterThanOperation.class)
        .withFunctionName("lt", LessThanOperation.class)
        .withFunctionName("eq", EqualsOperation.class)
        .withFunctionName("lteq", LessThanEqualToOperation.class)
        .withFunctionName("gteq", GreaterThanEqualToOperation.class);

    // This pulls all the overrides and additions from the config
    List<PluginInfo> pluginInfos = core.getSolrConfig().getPluginInfos(Expressible.class.getName());
    for (PluginInfo pluginInfo : pluginInfos) {
      Class<? extends Expressible> clazz =
          core.getResourceLoader().findClass(pluginInfo.className, Expressible.class);
      streamFactory.withFunctionName(pluginInfo.name, clazz);
    }

    core.addCloseHook(
        new CloseHook() {
          @Override
          public void preClose(SolrCore core) {
            // To change body of implemented methods use File | Settings | File Templates.
          }

          @Override
          public void postClose(SolrCore core) {
            clientCache.close();
          }
        });
  }
Example #12
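  // Formats a log4j LoggingEvent into a condensed log line: time since start,
  // thread, MDC values, a short per-core id, and extra SolrCloud state (replica
  // properties) the first time a core is seen or when its properties change.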
  public String _format(LoggingEvent event) {
    String message = (String) event.getMessage();
    if (message == null) {
      message = "";
    }
    StringBuilder sb = new StringBuilder(message.length() + 80);

    long now = event.timeStamp;
    long timeFromStart = now - startTime;
    long timeSinceLast = now - lastTime;
    lastTime = now;
    String shortClassName =
        getShortClassName(
            event.getLocationInformation().getClassName(),
            event.getLocationInformation().getMethodName());

    /*
     * sb.append(timeFromStart).append(' ').append(timeSinceLast);
     * sb.append(' ');
     * sb.append(record.getSourceClassName()).append('.').append(record.getSourceMethodName());
     * sb.append(' ');
     * sb.append(record.getLevel());
     */
    SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo();
    SolrQueryRequest req = requestInfo == null ? null : requestInfo.getReq();
    SolrCore core = req == null ? null : req.getCore();
    ZkController zkController = null;
    CoreInfo info = null;

    if (core != null) {
      info = coreInfoMap.get(core.hashCode());
      if (info == null) {
        info = new CoreInfo();
        info.shortId = "C" + Integer.toString(CoreInfo.maxCoreNum++);
        coreInfoMap.put(core.hashCode(), info);

        if (sb.length() == 0) sb.append("ASYNC ");
        sb.append(" NEW_CORE " + info.shortId);
        sb.append(" name=" + core.getName());
        sb.append(" " + core);
      }

      zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
      if (zkController != null) {
        if (info.url == null) {
          info.url = zkController.getBaseUrl() + "/" + core.getName();
          sb.append(" url=" + info.url + " node=" + zkController.getNodeName());
        }

        Map<String, Object> coreProps = getReplicaProps(zkController, core);
        if (info.coreProps == null || !coreProps.equals(info.coreProps)) {
          info.coreProps = coreProps;
          final String corePropsString =
              "coll:"
                  + core.getCoreDescriptor().getCloudDescriptor().getCollectionName()
                  + " core:"
                  + core.getName()
                  + " props:"
                  + coreProps;
          sb.append(" " + info.shortId + "_STATE=" + corePropsString);
        }
      }
    }

    if (sb.length() > 0) sb.append('\n');
    sb.append(timeFromStart);

    // sb.append("\nL").append(record.getSequenceNumber()); // log number is
    // useful for sequencing when looking at multiple parts of a log file, but
    // ms since start should be fine.
    appendThread(sb, event);

    appendMDC(sb);

    // todo: should be able to get port from core container for non zk tests

    if (info != null) {
      sb.append(' ').append(info.shortId); // core
    }

    if (shortClassName.length() > 0) {
      sb.append(' ').append(shortClassName);
    }

    if (event.getLevel() != Level.INFO) {
      sb.append(' ').append(event.getLevel());
    }

    sb.append(' ');
    appendMultiLineString(sb, message);
    ThrowableInformation thInfo = event.getThrowableInformation();
    if (thInfo != null) {
      Throwable th = event.getThrowableInformation().getThrowable();
      if (th != null) {
        sb.append(' ');
        String err = SolrException.toStr(th);
        String ignoredMsg = SolrException.doIgnore(th, err);
        if (ignoredMsg != null) {
          sb.append(ignoredMsg);
        } else {
          sb.append(err);
        }
      }
    }

    sb.append('\n');

    /*
     * Isn't core specific... prob better logged from zkController
     * if (info != null) {
     *   ClusterState clusterState = zkController.getClusterState();
     *   if (info.clusterState != clusterState) {
     *     // something has changed in the matrix...
     *     sb.append(zkController.getBaseUrl() + " sees new ClusterState:");
     *   }
     * }
     */
    return sb.toString();
  }