/** Histograms are sampled; we just merge the raw points here. */
public void mergeHistograms(
        MetricInfo metricInfo,
        String meta,
        Map<Integer, MetricSnapshot> data,
        Map<String, Integer> metaCounters,
        Map<String, Map<Integer, Histogram>> histograms) {
    Map<Integer, MetricSnapshot> existing = metricInfo.get_metrics().get(meta);
    if (existing == null) {
        metricInfo.put_to_metrics(meta, data);
        Map<Integer, Histogram> histogramMap = new HashMap<>();
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Histogram histogram = MetricUtils.metricSnapshot2Histogram(dataEntry.getValue());
            histogramMap.put(dataEntry.getKey(), histogram);
        }
        histograms.put(meta, histogramMap);
    } else {
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Integer win = dataEntry.getKey();
            MetricSnapshot snapshot = dataEntry.getValue();
            MetricSnapshot old = existing.get(win);
            if (old == null) {
                existing.put(win, snapshot);
                histograms.get(meta).put(win, MetricUtils.metricSnapshot2Histogram(snapshot));
            } else if (snapshot.get_ts() >= old.get_ts()) {
                old.set_ts(snapshot.get_ts());
                // merge the raw points into the existing histogram
                MetricUtils.updateHistogramPoints(histograms.get(meta).get(win), snapshot.get_points());
            }
        }
    }
    updateMetricCounters(meta, metaCounters);
}
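/*
 * For clarity, a minimal sketch of what the point merge above amounts to,
 * assuming the sampled points are raw long values (the actual encoding of
 * MetricSnapshot.get_points() may differ; this helper is illustrative only
 * and not part of MetricUtils):
 */
private static void replayPoints(Histogram histogram, java.util.List<Long> points) {
    if (points == null) {
        return;
    }
    for (Long point : points) {
        // feeding each raw sample back into the reservoir effectively
        // merges the two sampled distributions
        histogram.update(point);
    }
}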
/** Meters are not sampled; rate fields are summed directly. */
public void mergeMeters(
        MetricInfo metricInfo,
        String meta,
        Map<Integer, MetricSnapshot> data,
        Map<String, Integer> metaCounters) {
    Map<Integer, MetricSnapshot> existing = metricInfo.get_metrics().get(meta);
    if (existing == null) {
        metricInfo.put_to_metrics(meta, data);
    } else {
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Integer win = dataEntry.getKey();
            MetricSnapshot snapshot = dataEntry.getValue();
            MetricSnapshot old = existing.get(win);
            if (old == null) {
                existing.put(win, snapshot);
            } else if (snapshot.get_ts() >= old.get_ts()) {
                old.set_ts(snapshot.get_ts());
                old.set_mean(old.get_mean() + snapshot.get_mean());
                old.set_m1(old.get_m1() + snapshot.get_m1());
                old.set_m5(old.get_m5() + snapshot.get_m5());
                old.set_m15(old.get_m15() + snapshot.get_m15());
            }
        }
    }
    updateMetricCounters(meta, metaCounters);
}
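/*
 * updateMetricCounters (called above) tracks how many workers reported each
 * metric name, presumably so that summed fields such as mean can be averaged
 * later in adjustHistogramTimerMetrics. A minimal sketch of that bookkeeping,
 * assuming this simple counting semantic (the real method may differ):
 */
private static void countReportingWorker(String meta, Map<String, Integer> metaCounters) {
    Integer count = metaCounters.get(meta);
    metaCounters.put(meta, count == null ? 1 : count + 1);
}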
public void mergeGauges(
        TopologyMetric tpMetric, MetaType metaType, String meta, Map<Integer, MetricSnapshot> data) {
    MetricInfo metricInfo = getMetricInfoByType(tpMetric, metaType);
    Map<Integer, MetricSnapshot> existing = metricInfo.get_metrics().get(meta);
    if (existing == null) {
        metricInfo.put_to_metrics(meta, data);
    } else {
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Integer win = dataEntry.getKey();
            MetricSnapshot snapshot = dataEntry.getValue();
            MetricSnapshot old = existing.get(win);
            if (old == null) {
                existing.put(win, snapshot);
            } else if (snapshot.get_ts() >= old.get_ts()) {
                old.set_ts(snapshot.get_ts());
                if (metaType != MetaType.TOPOLOGY) {
                    // non-topology gauges: the newer value wins
                    old.set_doubleValue(snapshot.get_doubleValue());
                } else {
                    // topology-level gauges may be additive, e.g., CPU, memory, etc.
                    old.set_doubleValue(old.get_doubleValue() + snapshot.get_doubleValue());
                }
            }
        }
    }
}
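/*
 * Semantics sketch: for non-topology metas the newer gauge value simply wins,
 * while topology-level gauges are summed across workers, e.g. two workers
 * reporting CPU usage 0.5 and 0.7 yield a topology-level gauge of 1.2
 * (hypothetical numbers). A condensed restatement of the branch above:
 */
private static double mergedGaugeValue(MetaType metaType, double oldValue, double newValue) {
    return metaType == MetaType.TOPOLOGY ? oldValue + newValue : newValue;
}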
/** Counters report delta values, so merging is plain addition across workers. */
public void mergeCounters(
        TopologyMetric tpMetric, MetaType metaType, String meta, Map<Integer, MetricSnapshot> data) {
    MetricInfo metricInfo = getMetricInfoByType(tpMetric, metaType);
    Map<Integer, MetricSnapshot> existing = metricInfo.get_metrics().get(meta);
    if (existing == null) {
        metricInfo.put_to_metrics(meta, data);
    } else {
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Integer win = dataEntry.getKey();
            MetricSnapshot snapshot = dataEntry.getValue();
            MetricSnapshot old = existing.get(win);
            if (old == null) {
                existing.put(win, snapshot);
            } else {
                old.set_ts(snapshot.get_ts());
                old.set_longValue(old.get_longValue() + snapshot.get_longValue());
            }
        }
    }
}
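// Example: if worker A reports a delta of emitted=100 and worker B reports
// emitted=250 for the same window, the merged topology-level counter for that
// window is 350. (Values are hypothetical.)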
/** Timers are sampled; rate fields are summed and the raw points are merged. */
public void mergeTimers(
        MetricInfo metricInfo,
        String meta,
        Map<Integer, MetricSnapshot> data,
        Map<String, Integer> metaCounters,
        Map<String, Map<Integer, Timer>> timers) {
    Map<Integer, MetricSnapshot> existing = metricInfo.get_metrics().get(meta);
    if (existing == null) {
        metricInfo.put_to_metrics(meta, data);
        Map<Integer, Timer> timerMap = new HashMap<>();
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Timer timer = MetricUtils.metricSnapshot2Timer(dataEntry.getValue());
            timerMap.put(dataEntry.getKey(), timer);
        }
        timers.put(meta, timerMap);
    } else {
        for (Map.Entry<Integer, MetricSnapshot> dataEntry : data.entrySet()) {
            Integer win = dataEntry.getKey();
            MetricSnapshot snapshot = dataEntry.getValue();
            MetricSnapshot old = existing.get(win);
            if (old == null) {
                existing.put(win, snapshot);
                timers.get(meta).put(win, MetricUtils.metricSnapshot2Timer(snapshot));
            } else if (snapshot.get_ts() >= old.get_ts()) {
                old.set_ts(snapshot.get_ts());
                old.set_m1(old.get_m1() + snapshot.get_m1());
                old.set_m5(old.get_m5() + snapshot.get_m5());
                old.set_m15(old.get_m15() + snapshot.get_m15());
                // merge the raw points into the existing timer
                MetricUtils.updateTimerPoints(timers.get(meta).get(win), snapshot.get_points());
            }
        }
    }
    updateMetricCounters(meta, metaCounters);
}
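/*
 * A codahale Timer is essentially a Meter plus a Histogram of durations, which
 * is why the merge above both sums the rate fields and replays raw points.
 * An illustrative sketch of the point replay, assuming points are durations
 * recorded in milliseconds (both assumptions; the real
 * MetricUtils.updateTimerPoints may use a different unit or encoding):
 */
private static void replayTimerPoints(Timer timer, java.util.List<Long> points) {
    for (Long point : points) {
        timer.update(point, java.util.concurrent.TimeUnit.MILLISECONDS);
    }
}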
public TopologyMetric mergeMetrics() {
    long start = System.currentTimeMillis();
    if (getMemCache().isEmpty()) {
        // LOG.info("topology:{}, metric size is 0, skip...", topologyId);
        return null;
    }
    if (isMerging()) {
        LOG.info("topology {} is already merging, skip...", topologyId);
        return null;
    }
    setMerging(true);

    try {
        Map<String, MetricInfo> workerMetricMap = this.memCache;
        // reset mem cache
        this.memCache = new ConcurrentHashMap<>();

        MetricInfo topologyMetrics = MetricUtils.mkMetricInfo();
        MetricInfo componentMetrics = MetricUtils.mkMetricInfo();
        MetricInfo taskMetrics = MetricUtils.mkMetricInfo();
        MetricInfo streamMetrics = MetricUtils.mkMetricInfo();
        MetricInfo workerMetrics = MetricUtils.mkMetricInfo();
        MetricInfo nettyMetrics = MetricUtils.mkMetricInfo();
        TopologyMetric tpMetric = new TopologyMetric(
                topologyMetrics, componentMetrics, workerMetrics,
                taskMetrics, streamMetrics, nettyMetrics);

        // metric name => number of workers that reported it
        Map<String, Integer> metricNameCounters = new HashMap<>();
        // special for histograms & timers: we merge the raw points to build new snapshot data
        Map<String, Map<Integer, Histogram>> histograms = new HashMap<>();
        Map<String, Map<Integer, Timer>> timers = new HashMap<>();

        // iterate over the metrics of all workers within the same topology
        for (Map.Entry<String, MetricInfo> metricEntry : workerMetricMap.entrySet()) {
            MetricInfo metricInfo = metricEntry.getValue();
            Map<String, Map<Integer, MetricSnapshot>> metrics = metricInfo.get_metrics();
            for (Map.Entry<String, Map<Integer, MetricSnapshot>> metric : metrics.entrySet()) {
                String metricName = metric.getKey();
                Map<Integer, MetricSnapshot> data = metric.getValue();
                MetaType metaType = MetricUtils.metaType(metricName);
                MetricType metricType = MetricUtils.metricType(metricName);
                // Note: incoming metrics for new windows are added as-is, while metrics for
                // existing windows are merged; the same applies to all metric types below.
                if (metricType == MetricType.COUNTER) {
                    // merge counters: add old and new values
                    mergeCounters(tpMetric, metaType, metricName, data);
                } else if (metricType == MetricType.GAUGE) {
                    mergeGauges(tpMetric, metaType, metricName, data);
                } else if (metricType == MetricType.METER) {
                    mergeMeters(getMetricInfoByType(tpMetric, metaType), metricName, data,
                            metricNameCounters);
                } else if (metricType == MetricType.HISTOGRAM) {
                    mergeHistograms(getMetricInfoByType(tpMetric, metaType), metricName, data,
                            metricNameCounters, histograms);
                } else if (metricType == MetricType.TIMER) {
                    mergeTimers(getMetricInfoByType(tpMetric, metaType), metricName, data,
                            metricNameCounters, timers);
                }
            }
        }
        adjustHistogramTimerMetrics(tpMetric, metricNameCounters, histograms, timers);
        // for counters, we only report delta data every time, need to sum with old data
        // adjustCounterMetrics(tpMetric, oldTpMetric);

        LOG.info("merge topology metrics:{}, cost:{}", topologyId,
                System.currentTimeMillis() - start);
        // debug logs
        // MetricUtils.printMetricWinSize(componentMetrics);
        return tpMetric;
    } finally {
        setMerging(false);
    }
}
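/*
 * Usage sketch (hypothetical caller, not part of this class): the merge is
 * typically driven by a periodic task, with the result uploaded elsewhere:
 *
 *   ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
 *   scheduler.scheduleAtFixedRate(() -> {
 *       TopologyMetric merged = context.mergeMetrics();
 *       if (merged != null) {
 *           uploadToNimbus(merged); // hypothetical uploader
 *       }
 *   }, 60, 60, TimeUnit.SECONDS);
 */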