/**
 * Translates the application's typesafe config into a Storm {@code backtype.storm.Config}:
 * fixed batching/buffer tuning, nimbus host/port (environment config overrides defaults),
 * the simple thrift transport plugin, and optional worker count / message timeout.
 */
private backtype.storm.Config getStormConfig(com.typesafe.config.Config config) {
  // Resolve nimbus coordinates first; environment config overrides the compiled-in defaults.
  String host = STORM_NIMBUS_HOST_DEFAULT;
  if (environment.config().hasPath(STORM_NIMBUS_HOST_CONF_PATH)) {
    host = environment.config().getString(STORM_NIMBUS_HOST_CONF_PATH);
    LOG.info("Overriding {} = {}", STORM_NIMBUS_HOST_CONF_PATH, host);
  } else {
    LOG.info("Using default {} = {}", STORM_NIMBUS_HOST_CONF_PATH, STORM_NIMBUS_HOST_DEFAULT);
  }
  Integer thriftPort = STORM_NIMBUS_THRIFT_DEFAULT;
  if (environment.config().hasPath(STORM_NIMBUS_THRIFT_CONF_PATH)) {
    thriftPort = environment.config().getInt(STORM_NIMBUS_THRIFT_CONF_PATH);
    LOG.info("Overriding {} = {}", STORM_NIMBUS_THRIFT_CONF_PATH, thriftPort);
  } else {
    LOG.info("Using default {} = {}", STORM_NIMBUS_THRIFT_CONF_PATH, STORM_NIMBUS_THRIFT_DEFAULT);
  }

  backtype.storm.Config conf = new backtype.storm.Config();
  // Fixed batching and buffer tuning (Int.box: Storm expects boxed Integer values).
  conf.put(RichSpoutBatchExecutor.MAX_BATCH_SIZE_CONF, Int.box(64 * 1024));
  conf.put(backtype.storm.Config.TOPOLOGY_RECEIVER_BUFFER_SIZE, Int.box(8));
  conf.put(backtype.storm.Config.TOPOLOGY_TRANSFER_BUFFER_SIZE, Int.box(32));
  conf.put(backtype.storm.Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE, Int.box(16384));
  conf.put(backtype.storm.Config.TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE, Int.box(16384));
  conf.put(backtype.storm.Config.NIMBUS_THRIFT_MAX_BUFFER_SIZE, Int.box(20480000));
  conf.put(backtype.storm.Config.NIMBUS_HOST, host);
  conf.put(backtype.storm.Config.NIMBUS_THRIFT_PORT, thriftPort);
  conf.put(
      Config.STORM_THRIFT_TRANSPORT_PLUGIN,
      "backtype.storm.security.auth.SimpleTransportPlugin");
  // Optional topology settings, applied only when present in the passed-in config.
  if (config.hasPath(WORKERS)) {
    conf.setNumWorkers(config.getInt(WORKERS));
  }
  if (config.hasPath(TOPOLOGY_MESSAGE_TIMEOUT_SECS)) {
    conf.put(TOPOLOGY_MESSAGE_TIMEOUT_SECS, config.getInt(TOPOLOGY_MESSAGE_TIMEOUT_SECS));
  }
  return conf;
}
/**
 * Constructs the YARN service: wires metrics/eventing (when enabled), creates and
 * initializes the async AM-RM and NM clients, and reads container sizing settings
 * from the given config.
 *
 * <p>NOTE(review): client creation order matters here — both clients are registered
 * with {@code closer} before {@code init}, so they are closed even if a later step throws.
 *
 * @param config            application configuration (container sizing keys are required)
 * @param applicationName   YARN application name
 * @param applicationId     YARN application id
 * @param yarnConfiguration Hadoop/YARN configuration used to init the async clients
 * @param fs                file system handle retained for later use
 * @param eventBus          event bus retained for later use
 * @throws Exception if security-token retrieval fails (see getSecurityTokens())
 */
public YarnService(
    Config config,
    String applicationName,
    String applicationId,
    YarnConfiguration yarnConfiguration,
    FileSystem fs,
    EventBus eventBus)
    throws Exception {
  this.applicationName = applicationName;
  this.applicationId = applicationId;
  this.config = config;
  this.eventBus = eventBus;
  // Metrics and event submitter are both gated on the same METRICS_ENABLED_KEY flag.
  this.gobblinMetrics =
      config.getBoolean(ConfigurationKeys.METRICS_ENABLED_KEY)
          ? Optional.of(buildGobblinMetrics())
          : Optional.<GobblinMetrics>absent();
  this.eventSubmitter =
      config.getBoolean(ConfigurationKeys.METRICS_ENABLED_KEY)
          ? Optional.of(buildEventSubmitter())
          : Optional.<EventSubmitter>absent();
  this.yarnConfiguration = yarnConfiguration;
  this.fs = fs;
  // AM-RM client: 1000 ms heartbeat interval; registered with closer for cleanup.
  this.amrmClientAsync =
      closer.register(
          AMRMClientAsync.createAMRMClientAsync(1000, new AMRMClientCallbackHandler()));
  this.amrmClientAsync.init(this.yarnConfiguration);
  this.nmClientAsync =
      closer.register(NMClientAsync.createNMClientAsync(new NMClientCallbackHandler()));
  this.nmClientAsync.init(this.yarnConfiguration);
  // Container sizing/placement settings; all keys are required (getInt/getBoolean throw if missing).
  this.initialContainers = config.getInt(GobblinYarnConfigurationKeys.INITIAL_CONTAINERS_KEY);
  this.requestedContainerMemoryMbs =
      config.getInt(GobblinYarnConfigurationKeys.CONTAINER_MEMORY_MBS_KEY);
  this.requestedContainerCores = config.getInt(GobblinYarnConfigurationKeys.CONTAINER_CORES_KEY);
  this.containerHostAffinityEnabled =
      config.getBoolean(GobblinYarnConfigurationKeys.CONTAINER_HOST_AFFINITY_ENABLED);
  this.helixInstanceMaxRetries =
      config.getInt(GobblinYarnConfigurationKeys.HELIX_INSTANCE_MAX_RETRIES);
  // Extra JVM args for launched containers are optional.
  this.containerJvmArgs =
      config.hasPath(GobblinYarnConfigurationKeys.CONTAINER_JVM_ARGS_KEY)
          ? Optional.of(config.getString(GobblinYarnConfigurationKeys.CONTAINER_JVM_ARGS_KEY))
          : Optional.<String>absent();
  // Fixed-size pool for launching containers concurrently (10 is hard-coded here).
  this.containerLaunchExecutor =
      Executors.newFixedThreadPool(
          10,
          ExecutorsUtils.newThreadFactory(
              Optional.of(LOGGER), Optional.of("ContainerLaunchExecutor")));
  this.tokens = getSecurityTokens();
}
/**
 * Expands the configured workload into concrete task strings.
 *
 * <p>Reads the "workload" section: "task" is a format template applied to every
 * integer from "start" through "stop" (both inclusive).
 *
 * @return one formatted task string per index in the closed range
 */
public List<String> getWorkload() {
  final Config workload = vampires.getConfig("workload");
  final String taskTemplate = workload.getString("task");
  final int first = workload.getInt("start");
  final int last = workload.getInt("stop");
  return IntStream.rangeClosed(first, last)
      .boxed()
      .map(index -> String.format(taskTemplate, index))
      .collect(Collectors.toList());
}
/**
 * Initializes the task-container configuration from {@code taskContainer.conf}.
 *
 * <p>Optional keys (router, worker, alertPhone, projectName) override the field defaults
 * when present and non-empty. {@code processTaskClass} is required: it is loaded and
 * instantiated reflectively, and failure to do so aborts construction.
 *
 * @throws TaskContainerConfigException if processTaskClass is empty, cannot be found,
 *     or cannot be instantiated/accessed
 */
public TaskContainerConf() {
  Config config = ConfigFactory.load("taskContainer.conf");
  // Value is discarded; getString throws if the key is missing, so this acts as a
  // required-key check on taskContainer.version — TODO confirm that is the intent.
  config.getString("taskContainer.version");
  if (StringUtils.isNotEmpty(config.getString("taskContainer.router"))) {
    router = config.getInt("taskContainer.router");
  }
  if (StringUtils.isNotEmpty(config.getString("taskContainer.worker"))) {
    worker = config.getInt("taskContainer.worker");
  }
  if (StringUtils.isNotEmpty(config.getString("taskContainer.alertPhone"))) {
    alertPhone = config.getString("taskContainer.alertPhone");
  }
  if (StringUtils.isNotEmpty(config.getString("taskContainer.projectName"))) {
    projectName = config.getString("taskContainer.projectName");
  }
  if (StringUtils.isNotEmpty(config.getString("taskContainer.processTaskClass"))) {
    processTaskClass = config.getString("taskContainer.processTaskClass");
  } else {
    LOAD_STATE = FAIL_LOAD;
    throw new TaskContainerConfigException("processTaskClass is empty");
  }
  try {
    aClass = Class.forName(processTaskClass);
    // Pessimistically mark as failed before instantiation so that an unchecked
    // exception escaping newInstance() still leaves LOAD_STATE at FAIL_LOAD.
    LOAD_STATE = FAIL_LOAD;
    processQueueInstance = aClass.newInstance();
  } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
    // Multi-catch replaces three identical catch blocks; behavior is unchanged.
    LOAD_STATE = FAIL_LOAD;
    throw new TaskContainerConfigException(e);
  }
  maxParallel = router * worker;
  queues = config.getStringList("taskContainer.taskList");
  logger.info("------------task container suc load conf---------------");
  logger.info("project.name:{}", projectName);
  logger.info("router:{}", router);
  logger.info("worker:{}", worker);
  logger.info("max.parallel:{}", maxParallel);
  logger.info("task.list:{}", queues);
  logger.info("process.task.class:{}", processTaskClass);
  logger.info("-------------------------------------------------------");
  LOAD_STATE = SUC_LOAD;
}
/** Applies the configured "worker-threads" count; non-positive values are ignored. */
private void setWorkerThreads(final Builder builder, final Config config) {
  final int threads = config.getInt("worker-threads");
  if (threads <= 0) {
    return;
  }
  log.info(" > worker-threads: " + threads);
  builder.setWorkerThreads(threads);
}
/**
 * Returns the configured "cpuSetSize", or 1 (after logging an error) when the key is absent.
 */
public int getCpuSetSize() {
  if (!vampires.hasPath("cpuSetSize")) {
    LOG.error("missing executor cpuSetSize");
    return 1;
  }
  return vampires.getInt("cpuSetSize");
}
/**
 * Boots the host-routing HTTP server: binds it per "http.route.bind.*", registers one
 * route per "http.route.map" entry, then starts serving.
 */
public static void main(String[] args) throws Exception {
  // Shared queue wiring both the server and client configurators.
  Queue queue = new Queue();
  Address bindAddress =
      new Address(CONFIG.getString("http.route.bind.host"), CONFIG.getInt("http.route.bind.port"));
  HostRouteHttpServer server =
      new HostRouteHttpServer(
          new HttpServerConfigurator(queue).withAddress(bindAddress),
          new HttpClientConfigurator(queue).withTrust(new Trust()));
  // One route per mapping entry: a host list forwarded to a destination host/port/path.
  for (Config route : CONFIG.getConfigList("http.route.map")) {
    Address destination = new Address(route.getString("to.host"), route.getInt("to.port"));
    server.route(route.getStringList("hosts"), destination, route.getString("to.path"));
  }
  server.start();
}
/**
 * Reads LSH tuning (sample ratio, hash count) from the default config, plus the
 * optional custom candidate-filter class name (null when not configured).
 */
public CandidateFilterFactory() {
  Config defaults = ConfigUtils.getDefaultConfig();
  lshSampleRatio = defaults.getDouble("model.lsh.sample-ratio");
  numHashes = defaults.getInt("model.lsh.num-hashes");
  if (defaults.hasPath("serving-layer.candidate-filter-class")) {
    candidateFilterClassName = defaults.getString("serving-layer.candidate-filter-class");
  } else {
    candidateFilterClassName = null;
  }
}
/**
 * Builds (trains and writes) the named SR metric according to the current {@code mode}.
 *
 * <p>Type-specific handling: word2vec metrics are initialized first; ensemble and
 * milnewitten metrics have submetric training disabled here (handled separately);
 * sparsevector.mostsimilarconcepts always throws (see inline note).
 *
 * @param name component metric name
 * @throws ConfigurationException on bad metric configuration
 * @throws DaoException on data-access failure during training
 * @throws IOException on I/O failure (e.g. reading the valid-id file)
 */
public void buildMetric(String name) throws ConfigurationException, DaoException, IOException {
  LOG.info("building component metric " + name);
  String type = getMetricType(name);
  if (type.equals("densevector.word2vec")) {
    initWord2Vec(name);
  }
  SRMetric metric = getMetric(name);
  if (type.equals("ensemble")) {
    ((EnsembleMetric) metric).setTrainSubmetrics(false); // Do it by hand
  } else if (type.equals("sparsevector.mostsimilarconcepts")) {
    if (mode == Mode.SIMILARITY) {
      LOG.warn("metric " + name + " of type " + type + " requires mostSimilar... training BOTH");
      mode = Mode.BOTH; // NOTE(review): this mutation survives the throw below, affecting later calls
    }
    // Deliberate marker in the original: this type is unsupported at this point in the flow.
    throw new UnsupportedOperationException("This block needs to occur earlier.");
  } else if (type.equals("milnewitten")) {
    ((MilneWittenMetric) metric).setTrainSubmetrics(false);
  }
  if (metric instanceof BaseSRMetric) {
    ((BaseSRMetric) metric).setBuildMostSimilarCache(buildCosimilarity);
  }
  Dataset ds = getDataset();
  // Similarity training (skipped when already trained and skipBuiltMetrics is set).
  if (mode == Mode.SIMILARITY || mode == Mode.BOTH) {
    if (skipBuiltMetrics && metric.similarityIsTrained()) {
      LOG.info("metric " + name + " similarity() is already trained... skipping");
    } else {
      metric.trainSimilarity(ds);
    }
  }
  // MostSimilar training, with optional per-metric overrides for result count and valid ids.
  if (mode == Mode.MOSTSIMILAR || mode == Mode.BOTH) {
    if (skipBuiltMetrics && metric.mostSimilarIsTrained()) {
      LOG.info("metric " + name + " mostSimilar() is already trained... skipping");
    } else {
      Config config = getMetricConfig(name);
      int n = maxResults * EnsembleMetric.SEARCH_MULTIPLIER;
      TIntSet validIds = validMostSimilarIds;
      if (config.hasPath("maxResults")) {
        n = config.getInt("maxResults");
      }
      if (config.hasPath("mostSimilarConcepts")) {
        // Per-language concept file: <dir>/<langCode>.txt
        String path =
            String.format(
                "%s/%s.txt",
                config.getString("mostSimilarConcepts"), metric.getLanguage().getLangCode());
        validIds = readIds(path);
      }
      metric.trainMostSimilar(ds, n, validIds);
    }
  }
  metric.write();
}
/**
 * Builds a ZooKeeper connection string (comma-separated list of host:port) from a
 * parsed zoo.cfg: one entry per "server.*" key, each paired with the shared clientPort.
 *
 * @param zookeeper_config parsed zoo.cfg contents
 * @return the connection string
 */
public static String buildConnectionString(final Config zookeeper_config) {
  final int port = zookeeper_config.getInt("clientPort");
  final Config servers = zookeeper_config.getConfig("server");
  return servers.root().entrySet().stream()
      .map(
          entry -> {
            // Server values look like "host:peerPort[:...]" — only the host part is needed.
            final String host = entry.getValue().unwrapped().toString().split(":")[0];
            return host + ":" + port;
          })
      .collect(Collectors.joining(","));
}
/**
 * Looks up an int config value, falling back to {@code defaultValue} (with a warning)
 * when the lookup fails for any reason.
 *
 * @param key          config key to read
 * @param defaultValue value returned when the key cannot be read as an int
 * @return the configured value, or the default
 */
public int getInt(String key, int defaultValue) {
  try {
    return config.getInt(key);
  } catch (Exception e) {
    // Best-effort lookup: any failure (missing key, wrong type) falls back to the default.
    log.warn("key {} is not found in config", key);
    return defaultValue;
  }
}
/**
 * Configures the ALS update from the "als.*" config subtree: iteration count
 * (must be positive), implicit-feedback flag, hyperparameter search ranges
 * (features, lambda, alpha — in that order), and the no-known-items flag.
 */
public ALSUpdate(Config config) {
  super(config);
  iterations = config.getInt("als.hyperparams.iterations");
  implicit = config.getBoolean("als.hyperparams.implicit");
  // Validate after reading; a non-positive iteration count aborts construction.
  Preconditions.checkArgument(iterations > 0);
  hyperParamRanges =
      Arrays.asList(
          HyperParamRanges.fromConfig(config, "als.hyperparams.features"),
          HyperParamRanges.fromConfig(config, "als.hyperparams.lambda"),
          HyperParamRanges.fromConfig(config, "als.hyperparams.alpha"));
  noKnownItems = config.getBoolean("als.no-known-items");
}
/**
 * Selects the rating partition algorithm from config, preferring "holdout", then
 * "holdoutFraction", then "retain"; logs a warning when lower-priority keys are
 * also present. Defaults to a 10-item holdout when none is configured.
 */
@Nonnull
private PartitionAlgorithm<Rating> getRatingPartitionAlgorithm(Config cfg) {
  if (cfg.hasPath("holdout")) {
    if (cfg.hasPath("holdoutFraction")) {
      logger.warn("holdout and holdoutFraction specified, using holdout");
    }
    if (cfg.hasPath("retain")) {
      logger.warn("holdout and retain specified, using holdout");
    }
    return new HoldoutNPartition<>(cfg.getInt("holdout"));
  }
  if (cfg.hasPath("holdoutFraction")) {
    if (cfg.hasPath("retain")) {
      logger.warn("holdoutFraction and retain specified, using holdout");
    }
    return new FractionPartition<>(cfg.getDouble("holdoutFraction"));
  }
  if (cfg.hasPath("retain")) {
    return new RetainNPartition<>(cfg.getInt("retain"));
  }
  // No partition key configured: fall back to a 10-rating holdout.
  return new HoldoutNPartition<>(10);
}
/**
 * Exercises the config service end to end: refresh steps, URL resolution, validity
 * checks, and spot-checks of values in the master config.
 */
@Test
public void test() throws Exception {
  {
    // Refresh chain; each trailing "//" keeps the formatter from collapsing the lines.
    final boolean isReady = true //
        && configService.updateIdentity() //
        && configService.updateVersion() //
        && configService.updateMaster() //
    ;
    assertTrue(isReady);
  }
  {
    // Resolve a config-scheme URL (logged for manual inspection only).
    final URL confURL = new URL("config:/instance/application.conf");
    log.info("### confURL : " + confURL);
  }
  {
    assertTrue(configService.isIdentityValid());
    assertTrue(configService.isVersionValid());
    assertTrue(configService.isMasterValid());
  }
  final Config config = configService.getMasterConfig();
  log.info("### config : " + config);
  log.info("### identity : {}", configService.getIdentity());
  log.info("### version root : {}", configService.getVersionRoot());
  log.info("### version instance : {}", configService.getVersionInstance());
  log.info("### master root : {}", configService.getMasterRoot());
  log.info("### master instance : {}", configService.getMasterInstance());
  // Expected values match the test fixture's application.conf.
  assertEquals(config.getString("main.name"), "app name");
  assertEquals(config.getString("main.version"), "1.0.6");
  assertEquals(config.getInt("main.size"), 123);
}
/**
 * Applies the configured "io-threads" count; non-positive values are ignored.
 *
 * <p>Now logs the applied value and uses braces, matching the sibling setters
 * (setWorkerThreads/setBufferSize) in this class.
 */
private void setIOThreads(final Builder builder, final Config config) {
  val ioThreads = config.getInt("io-threads");
  if (ioThreads > 0) {
    log.info(" > io-threads: " + ioThreads);
    builder.setIoThreads(ioThreads);
  }
}
/**
 * Runs the plog UDP stress test: reads all tuning from "plog.stress", precomputes
 * message hashes for each distinct size, then spawns {@code threads} sender threads
 * that emit fragmented messages at a shared global rate, recording metrics throughout.
 */
@SuppressWarnings("OverlyLongMethod")
private void run(Config config) {
  // ASCII-art banner on stderr.
  System.err.println(
      " _\n"
          + " _ __| |___ __ _\n"
          + "| '_ \\ / _ \\/ _` |\n"
          + "| .__/_\\___/\\__, |\n"
          + "|_| |___/ stress");
  final Config stressConfig = config.getConfig("plog.stress");
  final int threadCount = stressConfig.getInt("threads");
  log.info("Using {} threads", threadCount);
  // Global messages-per-second budget shared by all sender threads.
  final int rate = stressConfig.getInt("rate");
  final RateLimiter rateLimiter = RateLimiter.create(rate);
  // A sender reopens its datagram socket every socketRenewRate messages.
  final int socketRenewRate = stressConfig.getInt("renew_rate");
  // Message sizes range over [minSize, maxSize) in sizeIncrements steps.
  final int minSize = stressConfig.getInt("min_size");
  final int maxSize = stressConfig.getInt("max_size");
  final int sizeIncrements = stressConfig.getInt("size_increments");
  final double sizeExponent = stressConfig.getDouble("size_exponent");
  final int sizeDelta = maxSize - minSize;
  final int differentSizes = sizeDelta / sizeIncrements;
  if (differentSizes == 0) {
    throw new RuntimeException("No sizes! Decrease plog.stress.size_increments");
  }
  final int stopAfter = stressConfig.getInt("stop_after");
  final int packetSize = stressConfig.getInt("udp.size");
  final int bufferSize = stressConfig.getInt("udp.SO_SNDBUF");
  final Fragmenter fragmenter = new Fragmenter(packetSize);
  // Seeded PRNG: the same payload bytes and size choices for a given seed.
  final Random random = new Random(stressConfig.getLong("seed"));
  final byte[] randomBytes = new byte[maxSize];
  random.nextBytes(randomBytes);
  final ByteBuf randomMessage = Unpooled.wrappedBuffer(randomBytes);
  log.info("Generating {} different hashes", differentSizes);
  // One Murmur3 hash per distinct message size, computed up front.
  final int[] precomputedHashes = new int[differentSizes];
  for (int i = 0; i < differentSizes; i++) {
    precomputedHashes[i] = Murmur3.hash32(randomMessage, 0, minSize + sizeIncrements * i, 0);
  }
  final ByteBufAllocator allocator = new PooledByteBufAllocator();
  // Probability of deliberately dropping a fragment instead of sending it.
  final double packetLoss = stressConfig.getDouble("udp.loss");
  final Meter socketMeter = registry.meter("Sockets used");
  final Meter messageMeter = registry.meter("Messages sent");
  final Meter packetMeter = registry.meter("Packets sent");
  final Meter sendFailureMeter = registry.meter("Send failures");
  final Meter lossMeter = registry.meter("Packets dropped");
  final Histogram messageSizeHistogram = registry.histogram("Message size");
  final Histogram packetSizeHistogram = registry.histogram("Packet size");
  final InetSocketAddress target =
      new InetSocketAddress(stressConfig.getString("host"), stressConfig.getInt("port"));
  log.info("Starting with config {}", config);
  final long consoleRate = stressConfig.getDuration("console.interval", TimeUnit.MILLISECONDS);
  ConsoleReporter.forRegistry(registry).build().start(consoleRate, TimeUnit.MILLISECONDS);
  for (int i = 0; i < threadCount; i++) {
    new Thread("stress_" + i) {
      private DatagramChannel channel = null;

      @Override
      public void run() {
        try {
          for (int sent = 0; sent < stopAfter; sent++, messageMeter.mark()) {
            // Periodically recycle the socket to exercise socket setup cost.
            if (sent % socketRenewRate == 0) {
              if (channel != null) {
                channel.close();
              }
              channel = DatagramChannel.open();
              channel.socket().setSendBufferSize(bufferSize);
              socketMeter.mark();
            }
            // global rate limiting
            rateLimiter.acquire();
            // Skewed size selection: sizeExponent biases toward small (or large) sizes.
            final int sizeIndex =
                (int) (Math.pow(random.nextDouble(), sizeExponent) * differentSizes);
            final int messageSize = minSize + sizeIncrements * sizeIndex;
            final int hash = precomputedHashes[sizeIndex];
            messageSizeHistogram.update(messageSize);
            final ByteBuf[] fragments =
                fragmenter.fragment(allocator, randomMessage, null, sent, messageSize, hash);
            for (ByteBuf fragment : fragments) {
              if (random.nextDouble() < packetLoss) {
                // Simulated loss: count it but never send.
                lossMeter.mark();
              } else {
                // NOTE: shadows the outer packetSize (the configured udp.size) on purpose.
                final int packetSize = fragment.readableBytes();
                final ByteBuffer buffer = fragment.nioBuffer();
                try {
                  channel.send(buffer, target);
                  packetSizeHistogram.update(packetSize);
                  packetMeter.mark();
                } catch (SocketException e) {
                  sendFailureMeter.mark();
                }
              }
              fragment.release();
            }
          }
        } catch (Throwable t) {
          // Any unexpected failure kills the whole stress run.
          t.printStackTrace();
          System.exit(1);
        }
      }
    }.start();
  }
}
private void initWord2Vec(String name) throws ConfigurationException, IOException, DaoException { Config config = getMetricConfig(name).getConfig("generator"); File model = Word2VecGenerator.getModelFile(config.getString("modelDir"), language); if (skipBuiltMetrics && model.isFile()) { return; } if (config.hasPath("prebuilt") && config.getBoolean("prebuilt")) { if (model.isFile()) { return; } File downloadPath = new File(config.getString("binfile")); if (!downloadPath.isFile()) { throw new ConfigurationException( "word2vec model " + downloadPath.getAbsolutePath() + " cannot be found." + " You must download it from " + config.getString("url") + " into to the wikibrain download directory."); } if (!config.getStringList("languages").contains(language.getLangCode())) { throw new ConfigurationException( "word2vec model " + downloadPath + " does not support language" + language); } if (downloadPath.toString().toLowerCase().endsWith("gz")) { LOG.info("decompressing " + downloadPath + " to " + model); File tmp = File.createTempFile("word2vec", "bin"); try { FileUtils.deleteQuietly(tmp); GZIPInputStream gz = new GZIPInputStream(new FileInputStream(downloadPath)); FileUtils.copyInputStreamToFile(gz, tmp); gz.close(); model.getParentFile().mkdirs(); FileUtils.moveFile(tmp, model); } finally { FileUtils.deleteQuietly(tmp); } } else { FileUtils.copyFile(downloadPath, model); } return; } LinkProbabilityDao lpd = env.getConfigurator().get(LinkProbabilityDao.class); lpd.useCache(true); if (!lpd.isBuilt()) { lpd.build(); } String corpusName = config.getString("corpus"); Corpus corpus = null; if (!corpusName.equals("NONE")) { corpus = env.getConfigurator() .get(Corpus.class, config.getString("corpus"), "language", language.getLangCode()); if (!corpus.exists()) { corpus.create(); } } if (model.isFile() && (corpus == null || model.lastModified() > corpus.getCorpusFile().lastModified())) { return; } if (corpus == null) { throw new ConfigurationException( "word2vec metric " + name + " 
cannot build or find model!" + "configuration has no corpus, but model not found at " + model + "."); } Word2VecTrainer trainer = new Word2VecTrainer(env.getConfigurator().get(LocalPageDao.class), language); if (config.hasPath("dimensions")) { LOG.info("set number of dimensions to " + config.getInt("dimensions")); trainer.setLayer1Size(config.getInt("dimensions")); } if (config.hasPath("maxWords")) { LOG.info("set maxWords to " + config.getInt("maxWords")); trainer.setMaxWords(config.getInt("maxWords")); } if (config.hasPath("window")) { LOG.info("set window to " + config.getInt("maxWords")); trainer.setWindow(config.getInt("window")); } trainer.setKeepAllArticles(true); trainer.train(corpus.getDirectory()); trainer.save(model); }
/**
 * Builds a {@link Crossfolder} from a specification config: name, input source,
 * partition count, crossfold method, timestamp/isolation flags, and output settings.
 *
 * <p>Fix: the "xz" output format previously mapped to {@code OutputFormat.CSV_GZIP}
 * (a copy-paste of the "gzip" case); it now maps to {@code OutputFormat.CSV_XZ}.
 *
 * @throws SpecificationException on an unrecognized method or output format
 */
@Override
public Crossfolder buildFromSpec(SpecificationContext context, Config cfg)
    throws SpecificationException {
  Crossfolder cf = new Crossfolder();
  if (cfg.hasPath("name")) {
    cf.setName(cfg.getString("name"));
  }
  cf.setSource(context.build(DataSource.class, cfg.getConfig("input")));
  if (cfg.hasPath("partitions")) {
    cf.setPartitionCount(cfg.getInt("partitions"));
  }
  // Method defaults to partition-users when unspecified.
  String method = cfg.hasPath("method") ? cfg.getString("method") : "partition-users";
  switch (method) {
    case "partition-users":
      {
        PartitionAlgorithm<Rating> partition = getRatingPartitionAlgorithm(cfg);
        Order<Rating> order = getRatingOrder(cfg);
        cf.setMethod(CrossfoldMethods.partitionUsers(order, partition));
        break;
      }
    case "sample-users":
      {
        PartitionAlgorithm<Rating> partition = getRatingPartitionAlgorithm(cfg);
        Order<Rating> order = getRatingOrder(cfg);
        // Sample size defaults to 1000 users.
        int sampleSize = cfg.hasPath("sampleSize") ? cfg.getInt("sampleSize") : 1000;
        cf.setMethod(CrossfoldMethods.sampleUsers(order, partition, sampleSize));
        break;
      }
    case "partition-ratings":
      cf.setMethod(CrossfoldMethods.partitionRatings());
      break;
    default:
      throw new SpecificationException("invalid crossfold method " + method);
  }
  if (cfg.hasPath("includeTimestamps")) {
    cf.setWriteTimestamps(cfg.getBoolean("includeTimestamps"));
  }
  if (cfg.hasPath("outputDir")) {
    cf.setOutputDir(cfg.getString("outputDir"));
  } else {
    logger.warn("no output directory specified for crossfold {}", cf.getName());
  }
  if (cfg.hasPath("outputFormat")) {
    switch (cfg.getString("outputFormat")) {
      case "pack":
        cf.setOutputFormat(OutputFormat.PACK);
        break;
      case "gzip":
        cf.setOutputFormat(OutputFormat.CSV_GZIP);
        break;
      case "xz":
        // BUGFIX: was OutputFormat.CSV_GZIP, silently producing gzip output for "xz".
        cf.setOutputFormat(OutputFormat.CSV_XZ);
        break;
      default:
        throw new SpecificationException("invalid output format " + cfg.getString("outputFormat"));
    }
  }
  if (cfg.hasPath("isolate")) {
    cf.setIsolate(cfg.getBoolean("isolate"));
  }
  return cf;
}
/**
 * Builds the actor system for this node: starts an embedded redis server (config-driven
 * port/log/pid settings), creates a redis client, then wires the recording, trace-log,
 * analysis and HTTP-routing actors on top of a Camel-enabled ActorSystem.
 *
 * <p>NOTE(review): the "\\redis.log" / "\\redis.pid" defaults and the TEMP fallback are
 * Windows-specific path conventions.
 */
@Override
public ActorSystem buildActorSystem(Config config) throws Exception {
  // start redis
  final int redisPort =
      (config.hasPath("components.redis.port"))
          ? config.getInt("components.redis.port")
          : RedisURI.DEFAULT_REDIS_PORT;
  final String redisLogLevel =
      config.hasPath("components.redis.log-level")
          ? config.getString("components.redis.log-level")
          : "verbose";
  // Log/pid files default under LOG_BASE, falling back to TEMP when LOG_BASE is unset.
  String logBase = System.getenv("LOG_BASE");
  if (StringUtils.isBlank(logBase)) logBase = System.getenv("TEMP");
  final String redisLogFile =
      config.hasPath("components.redis.log-file")
          ? config.getString("components.redis.log-file")
          : logBase + "\\redis.log";
  final String redisPidFile =
      config.hasPath("components.redis.pid-file")
          ? config.getString("components.redis.pid-file")
          : logBase + "\\redis.pid";
  try {
    this.redis =
        RedisServer.builder()
            .redisExecProvider(RedisExecProvider.defaultProvider())
            .port(redisPort)
            .setting("loglevel " + redisLogLevel)
            .setting("logfile " + redisLogFile)
            .setting("pidfile " + redisPidFile)
            .build();
  } catch (Exception ex) {
    this.logger.error("Fail to build redis server.", ex);
    throw new IllegalStateException("Fail to build redis server.", ex);
  }
  // redis.start() blocks, so it runs on its own thread; failures are only logged.
  new Thread() {
    @Override
    public void run() {
      try {
        redis.start();
        logger.info("Started redis server on {} port", redisPort);
      } catch (Exception ex) {
        logger.error("Fail to start redis server.", ex);
        // @TODO Use future to stop the actor system at this point.
      }
    }
  }.start();
  // create redis client
  String redisUri = "redis://" + this.getAddress().getHostAddress() + ":" + redisPort + "/0";
  this.redisClient = new RedisClient(RedisURI.create(redisUri));
  ActorSystem system = ActorSystem.create(this.getClusterName(), config);
  Camel camel = CamelExtension.get(system);
  this.baseUrl =
      "http://"
          + this.getAddress().getHostAddress()
          + ":"
          + this.getHttpPort()
          + "/"
          + this.getApplicationName();
  String uri = "jetty:" + this.baseUrl;
  // Actor wiring: recorder -> trace log -> analysis, then the HTTP-facing clerk.
  String recorderKeyBase = this.getClusterName() + ":" + "words";
  ActorRef recordingService =
      system.actorOf(
          Props.create(RecordingService.class, recorderKeyBase, this.redisClient),
          "recorderService");
  String tracerKey = this.getClusterName() + ":trace:node:1";
  ActorRef traceLogService =
      system.actorOf(
          Props.create(TraceLogService.class, tracerKey, this.redisClient, this.jacksonMapper),
          "traceLogService");
  ActorRef analysisService =
      system.actorOf(
          Props.create(AnalysisService.class, recordingService, traceLogService),
          "analysisService");
  // Static routing to the analysisService on the two known cluster ports.
  String pathBase =
      "akka.tcp://" + this.getClusterName() + "@" + this.getAddress().getHostAddress() + ":";
  SimpleRoutingMap<String> routingMap = new SimpleRoutingMap<String>();
  routingMap.putPath(new Key<String>("2551"), pathBase + "2551/user/analysisService");
  routingMap.putPath(new Key<String>("2552"), pathBase + "2552/user/analysisService");
  ActorRef httpClerk =
      system.actorOf(Props.create(WebService.class, uri, routingMap), "httpClerk");
  // Camel endpoint activation is awaited elsewhere; the future result is unused here.
  Future<ActorRef> activationFuture =
      camel.activationFutureFor(
          httpClerk, new Timeout(Duration.create(10, TimeUnit.SECONDS)), system.dispatcher());
  return system;
}
/**
 * Reads every forest parameter from the given config and delegates to the main
 * constructor. Argument order must match the target constructor exactly.
 */
ForestConfig(Config config) {
  this(
      // Block types used when generating the forest.
      config.getString("wood-block"),
      config.getString("leaves-block"),
      config.getString("thin-leaves-block"),
      config.getString("sapling-block"),
      config.getString("grows-on"),
      config.getString("seeds-on"),
      config.getInt("max-seed-height-difference"),
      // Generation limits.
      config.getInt("max-generations"),
      config.getInt("min-generations"),
      // Tree geometry: trunk and crown dimensions.
      config.getInt("trunk-radi"),
      config.getInt("trunk-height"),
      config.getInt("crown-radi"),
      config.getInt("crown-height"),
      config.getString("initial-state"),
      // Growth timing and seeding behavior.
      config.getInt("min-growth-delay"),
      config.getInt("random-growth-delay"),
      config.getInt("max-seeds-per-generation"),
      config.getInt("seed-every-generation"),
      config.getInt("random-growth"),
      config.getInt("leaf-decay-delay"),
      config.getInt("leave-sapling-probability"));
}
/**
 * Parses the "inbound" config section into an {@link InboundSettings}.
 *
 * <p>Column names come from "column-names" or are synthesized as "0".."n-1" from
 * "num-columns". Exactly one of "categorical-columns" / "numeric-columns" must be set;
 * the other group is derived as its complement. Id and ignored columns are excluded
 * from both groups. The optional target column must end up in one of the two groups.
 *
 * @throws IllegalArgumentException if neither categorical nor numeric columns are set
 */
public static InboundSettings create(Config config) {
  Config inbound = config.getConfig("inbound");
  List<String> columnNames;
  if (inbound.hasPath("column-names")) {
    columnNames = inbound.getStringList("column-names");
  } else {
    // No explicit names: synthesize "0".."numColumns-1".
    int numColumns = inbound.getInt("num-columns");
    columnNames = new ArrayList<>(numColumns);
    for (int i = 0; i < numColumns; i++) {
      columnNames.add(String.valueOf(i));
    }
  }
  // lookup maps a column reference (name or index) to its integer position.
  Function<Object, Integer> lookup = new LookupFunction(columnNames);
  Collection<Integer> allColumns = Collections2.transform(columnNames, lookup);
  Collection<Integer> idColumns;
  if (inbound.hasPath("id-columns")) {
    idColumns =
        ImmutableSet.copyOf(Collections2.transform(inbound.getAnyRefList("id-columns"), lookup));
  } else {
    idColumns = ImmutableSet.of();
  }
  Collection<Integer> ignoredColumns;
  if (inbound.hasPath("ignored-columns")) {
    ignoredColumns =
        ImmutableSet.copyOf(
            Collections2.transform(inbound.getAnyRefList("ignored-columns"), lookup));
  } else {
    ignoredColumns = ImmutableSet.of();
  }
  // Exactly one of the two column-type lists may be configured; the other is derived.
  Collection<Integer> categoricalColumns;
  Collection<Integer> numericColumns;
  if (inbound.hasPath("categorical-columns")) {
    Preconditions.checkState(!inbound.hasPath("numeric-columns"));
    categoricalColumns =
        new HashSet<>(
            Collections2.transform(inbound.getAnyRefList("categorical-columns"), lookup));
    numericColumns = new HashSet<>(allColumns);
    numericColumns.removeAll(categoricalColumns);
  } else if (inbound.hasPath("numeric-columns")) {
    Preconditions.checkState(!inbound.hasPath("categorical-columns"));
    numericColumns =
        new HashSet<>(Collections2.transform(inbound.getAnyRefList("numeric-columns"), lookup));
    categoricalColumns = new HashSet<>(allColumns);
    categoricalColumns.removeAll(numericColumns);
  } else {
    throw new IllegalArgumentException("No categorical-columns or numeric-columns set");
  }
  // Id/ignored columns belong to neither type group; remove AFTER deriving complements.
  numericColumns.removeAll(idColumns);
  numericColumns.removeAll(ignoredColumns);
  categoricalColumns.removeAll(idColumns);
  categoricalColumns.removeAll(ignoredColumns);
  Integer targetColumn = null;
  if (inbound.hasPath("target-column")) {
    targetColumn = lookup.apply(inbound.getAnyRef("target-column"));
    Preconditions.checkState(
        categoricalColumns.contains(targetColumn) || numericColumns.contains(targetColumn),
        "Target column not specified as numeric or categorical");
  }
  return new InboundSettings(
      columnNames, idColumns, categoricalColumns, numericColumns, ignoredColumns, targetColumn);
}
/**
 * Applies the configured "buffer-size"; non-positive values are ignored.
 *
 * <p>Fix: previously the value was passed to the builder unconditionally; the
 * {@code > 0} guard now matches the sibling setters (setWorkerThreads/setIOThreads),
 * so a zero/negative config value no longer reaches Undertow.
 */
private void setBufferSize(final Builder builder, final Config config) {
  val bufferSize = config.getInt("buffer-size");
  if (bufferSize > 0) {
    log.info(" > buffer-size: " + bufferSize);
    builder.setBufferSize(bufferSize);
  }
}
/**
 * Loads the default application config and caches the learner timeout.
 */
public JobProcessingActor() {
  // ConfigFactory.load() with no argument reads application.conf from the classpath.
  conf = ConfigFactory.load();
  // Timeout from "learner.default.timeout"; units are not visible here — TODO confirm.
  learnerTimeout = conf.getInt("learner.default.timeout");
}