GossipedMasterAckManager(List<Pair<String, Integer>> peers, int port, long maxAgeMs) {
  group = new Group();
  for (Pair<String, Integer> p : peers) {
    group.addNode(new TCPNodeId(p.getLeft(), p.getRight()));
  }
  this.port = port;
  // TODO(henry) choose the interface from config
  gossip = new GossipMulticast(group, new TCPNodeId("0.0.0.0", port), maxAgeMs);
}
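// A minimal usage sketch for the constructor above, not code from the project:
// the hostnames, port 57890, and the one-minute max age are illustrative
// assumptions. Pair is Flume's Pair utility class, as used elsewhere in this file.
List<Pair<String, Integer>> peers = new ArrayList<Pair<String, Integer>>();
peers.add(new Pair<String, Integer>("master1.example.com", 57890));
peers.add(new Pair<String, Integer>("master2.example.com", 57890));
GossipedMasterAckManager acks = new GossipedMasterAckManager(peers, 57890, 60 * 1000);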
@Test
public void testProbabilitySamplerSink() throws IOException, InterruptedException {
  System.out.println("Probability sampler");
  long historyLen = 300;
  ScheduledHistoryReporter<CounterSink> hist =
      new ScheduledHistoryReporter<CounterSink>("test", longwait, historyLen, new DumbTagger()) {
        @Override
        public CounterSink newSink(Tagger format) throws IOException {
          CounterSink count = new CounterSink("count") {
            public void append(Event e) throws IOException {
              super.append(e);
              System.out.println(e); // just add a printf to the counts.
            }
          };
          return count;
        }

        @Override
        public ReportEvent getReport() {
          return null;
        }

        @Override
        public void getReports(String namePrefix, Map<String, ReportEvent> reports) {
          reports.put(namePrefix + getName(), getReport());
        }
      };
  ProbabilitySampler<ScheduledHistoryReporter<CounterSink>> sample =
      new ProbabilitySampler<ScheduledHistoryReporter<CounterSink>>(hist, .10, 1337);
  sample.open();

  // do a few epochs, sending in different numbers of events.
  int[] events = { 15, 4, 234, 20 };
  for (int i = 0; i < events.length; i++) {
    for (int j = 0; j < events[i]; j++) {
      String s = "test " + i + "," + j;
      sample.append(new EventImpl(s.getBytes()));
    }
    hist.forcedRotate();
  }

  // should match at 0, 10, 20, 30, etc..
  int[] ans = { 1, 0, 20, 4 };
  List<Pair<Long, CounterSink>> h = hist.getHistory();
  int i = 0;
  for (Pair<Long, CounterSink> p : h) {
    long count = p.getRight().getCount();
    System.out.println(p.getLeft() + " :: " + count);
    Assert.assertEquals(ans[i], count);
    i++;
  }
}
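// A hedged sketch of the sampler outside the test harness: wrap a plain
// CounterSink so that roughly 10% of appended events reach it. The sink name,
// payloads, seed, and event count are illustrative assumptions, and close()
// assumes the sampler behaves as a standard sink decorator. The calling
// context is assumed to declare IOException/InterruptedException.
CounterSink counts = new CounterSink("sampled");
ProbabilitySampler<CounterSink> sampler =
    new ProbabilitySampler<CounterSink>(counts, .10, 1337);
sampler.open();
for (int i = 0; i < 100; i++) {
  sampler.append(new EventImpl(("msg " + i).getBytes()));
}
sampler.close();
System.out.println("events passed through: " + counts.getCount());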
@Override
public ReportEvent getReport() {
  ArrayList<Pair<Long, Long>> list =
      new ArrayList<Pair<Long, Long>>(getHistory().size());
  for (Pair<Long, CounterSink> p : getHistory()) {
    list.add(new Pair<Long, Long>(p.getLeft(), p.getRight().getCount()));
  }
  String report = ChartPackage.createTimelineGen().generate(list);
  return ReportEvent.createLegacyHtmlReport(name, report);
}
@Override
public ReportEvent getMetrics() {
  ReportEvent rpt = new ReportEvent(getName());
  StringWriter sw = new StringWriter();
  PrintWriter out = new PrintWriter(sw);
  StringWriter csvsw = new StringWriter();
  PrintWriter csvpw = new PrintWriter(csvsw);
  for (Pair<Benchmark, StringWriter> e : benchmarks.values()) {
    out.print(e.getRight().getBuffer().toString());
    e.getLeft().printCsvLog(csvpw);
  }
  out.close();
  csvpw.close();
  Attributes.setString(rpt, A_BENCHMARK_RPT, sw.toString());
  Attributes.setString(rpt, A_BENCHMARK_CSV, csvsw.toString());
  return rpt;
}
/**
 * Returns a Flume Node with settings from specified command line parameters. (See usage for
 * instructions)
 *
 * @param argv command line arguments
 * @return the started FlumeNode, or null if help/version info was requested or parsing failed
 * @throws IOException
 */
public static FlumeNode setup(String[] argv) throws IOException {
  logVersion(LOG);
  logEnvironment(LOG);
  // Make sure the Java version is not older than 1.6
  if (!CheckJavaVersion.isVersionOk()) {
    LOG.error("Exiting because of an old Java version or Java version in bad format");
    System.exit(-1);
  }
  LOG.info("Starting flume agent on: " + NetUtils.localhost());
  LOG.info(" Working directory is: " + new File(".").getAbsolutePath());

  FlumeConfiguration.hardExitLoadConfig(); // will exit if conf file is bad.

  CommandLine cmd = null;
  Options options = new Options();
  options.addOption("c", true, "Load initial config from cmdline arg");
  options.addOption("n", true, "Set node name");
  options.addOption("s", false, "Do not start local flume status server on node");
  options.addOption("1", false, "Make flume node one shot (if closes or errors, exits)");
  options.addOption("m", false, "Have flume hard exit if in likely GC thrash situation");
  options.addOption("h", false, "Print help information");
  options.addOption("v", false, "Print version information");

  try {
    CommandLineParser parser = new PosixParser();
    cmd = parser.parse(options, argv);
  } catch (ParseException e) {
    HelpFormatter fmt = new HelpFormatter();
    fmt.printHelp("FlumeNode", options, true);
    return null;
  }

  // dump version info only
  if (cmd != null && cmd.hasOption("v")) {
    return null;
  }

  // dump help info.
  if (cmd != null && cmd.hasOption("h")) {
    HelpFormatter fmt = new HelpFormatter();
    fmt.printHelp("FlumeNode", options, true);
    return null;
  }

  // Check FlumeConfiguration file for settings that may cause node to fail.
  nodeConfigChecksOk();

  String nodename = NetUtils.localhost(); // default to local host name.
  if (cmd != null && cmd.hasOption("n")) {
    // select a different name, allow for multiple processes configured
    // differently on same node.
    nodename = cmd.getOptionValue("n");
  }

  boolean startHttp = false;
  if (cmd != null && !cmd.hasOption("s")) {
    // no -s option, start the local status server
    startHttp = true;
  }

  boolean oneshot = false;
  if (cmd != null && cmd.hasOption("1")) {
    oneshot = true;
  }

  FormatFactory.loadOutputFormatPlugins();

  // Instantiate the flume node.
  FlumeConfiguration conf = FlumeConfiguration.get();
  FlumeNode flume = new FlumeNode(nodename, conf, startHttp, oneshot);
  flume.start();

  // load an initial configuration from command line
  if (cmd != null && cmd.hasOption("c")) {
    String spec = cmd.getOptionValue("c");
    LOG.info("Loading spec from command line: '" + spec + "'");
    try {
      // node name is the default logical and physical name.
      Context ctx = new LogicalNodeContext(nodename, nodename);
      Map<String, Pair<String, String>> cfgs = FlumeBuilder.parseConf(ctx, spec);
      Pair<String, String> node = cfgs.get(nodename);
      FlumeConfigData fcd = new FlumeConfigData(0, node.getLeft(), node.getRight(), 0, 0, null);
      flume.nodesMan.spawn(ctx, nodename, fcd);
    } catch (Exception e) {
      LOG.warn("Caught exception loading node:" + e.getMessage());
      LOG.debug("Exception: ", e);
      if (oneshot) {
        System.exit(0); // exit cleanly
      }
    }
  } else {
    try {
      // default to null configurations.
      Context ctx = new LogicalNodeContext(nodename, nodename);
      FlumeConfigData fcd = new FlumeConfigData(0, "null", "null", 0, 0, null);
      flume.nodesMan.spawn(ctx, nodename, fcd);
    } catch (Exception e) {
      LOG.error("Caught exception loading node", e);
    }
  }

  if (cmd != null && cmd.hasOption("m")) {
    // setup memory use monitor
    LOG.info("Setup hard exit on memory exhaustion");
    MemoryMonitor.setupHardExitMemMonitor(FlumeConfiguration.get().getAgentMemoryThreshold());
  }

  try {
    tryKerberosLogin();
  } catch (IOException ioe) {
    LOG.error("Failed to kerberos login.", ioe);
  }

  // caller hangs out, waiting for other agent threads to exit.
  return flume;
}
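// A hypothetical main() showing one way setup() above could be driven; the
// sleep loop that keeps the JVM alive is an assumption for illustration, not
// the project's actual entry point.
public static void main(String[] argv) throws IOException {
  FlumeNode node = setup(argv);
  if (node == null) {
    return; // -h/-v was requested or option parsing failed; nothing to run.
  }
  // The node's own threads do the work; just keep the process alive.
  while (true) {
    try {
      Thread.sleep(1000);
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();
      return;
    }
  }
}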