@Test
public void testHistogramDisabled() {
  // Create a histogram with id "testHistogramDisabled", register it with the metric registry,
  // and record a value.
  Histogram h = MetricsConfigurator.createHistogram5Min(metrics, "testHistogramDisabled", PIPELINE_NAME, REVISION);
  h.update(1000);

  MetricsRuleDefinition metricsRuleDefinition = new MetricsRuleDefinition(
      "testHistogramDisabled",
      "testHistogramDisabled",
      "testHistogramDisabled",
      MetricType.HISTOGRAM,
      MetricElement.HISTOGRAM_COUNT,
      "${value()==1}",
      false,
      false);
  MetricRuleEvaluator metricRuleEvaluator = new MetricRuleEvaluator(
      metricsRuleDefinition,
      metrics,
      new AlertManager(PIPELINE_NAME, REVISION, null, metrics, runtimeInfo, new EventListenerManager()),
      Collections.<String>emptyList());
  metricRuleEvaluator.checkForAlerts();

  // Look up the alert gauge: none should exist because the rule is disabled.
  Gauge<Object> gauge = MetricsConfigurator.getGauge(metrics, AlertsUtil.getAlertGaugeName(metricsRuleDefinition.getId()));
  Assert.assertNull(gauge);
}
private void assertMetricCount(String sourceId, String metricName, int expectedCount) {
  String key = sourceId + "." + metricName;
  SourceMetric sourceMetric = sourceMetrics.metrics.get(key);
  if (sourceMetric.isHistogram()) {
    Histogram histogram = (Histogram) sourceMetric.getMetric();
    assertThat(histogram.getCount(), is((long) expectedCount));
  } else {
    Meter meter = (Meter) sourceMetric.getMetric();
    assertThat(meter.getCount(), is((long) expectedCount));
  }
}
@Override
public void run() {
  samplerLock.lock();
  try {
    for (Histogram histogram : schedulerHistogramList) {
      Timer timer = histogramTimerMap.get(histogram);
      histogram.update((int) timer.getSnapshot().getMean());
    }
  } finally {
    samplerLock.unlock();
  }
}
private void updateRequestSizeHistogram(String topic, int length) {
  requestSizeHistogramGlobal.update(length);
  if (!requestSizeHistogramByTopic.containsKey(topic)) {
    Histogram requestSizeHistogram = HermesMetricsRegistry.getMetricRegistry()
        .histogram(MetricRegistry.name(SubscriptionPushService.class, "SubscriptionPushService", topic, "BodySize"));
    requestSizeHistogramByTopic.put(topic, requestSizeHistogram);
  }
  Histogram requestSizeHistogram = requestSizeHistogramByTopic.get(topic);
  requestSizeHistogram.update(length);
}
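// A minimal sketch (not from the original codebase) of the same per-topic lazy registration,
// using ConcurrentHashMap.computeIfAbsent to avoid the check-then-act race in the
// containsKey/put sequence above when requests for a new topic arrive concurrently.
// The map field below and the method name are assumptions for illustration; the registry
// lookup mirrors the snippet above. Requires java.util.concurrent.ConcurrentHashMap/ConcurrentMap.
private final ConcurrentMap<String, Histogram> requestSizeHistogramByTopic = new ConcurrentHashMap<>();

private void updateRequestSizeHistogramConcurrent(String topic, int length) {
  Histogram histogram = requestSizeHistogramByTopic.computeIfAbsent(
      topic,
      t -> HermesMetricsRegistry.getMetricRegistry()
          .histogram(MetricRegistry.name(SubscriptionPushService.class, "SubscriptionPushService", t, "BodySize")));
  histogram.update(length);
}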
/** @return a summary of {@code hist}. */
public static String getHistogramReport(final Histogram hist) {
  Snapshot sn = hist.getSnapshot();
  return ", mean=" + DOUBLE_FORMAT.format(sn.getMean())
      + ", min=" + DOUBLE_FORMAT.format(sn.getMin())
      + ", max=" + DOUBLE_FORMAT.format(sn.getMax())
      + ", stdDev=" + DOUBLE_FORMAT.format(sn.getStdDev())
      + ", 50th=" + DOUBLE_FORMAT.format(sn.getMedian())
      + ", 75th=" + DOUBLE_FORMAT.format(sn.get75thPercentile())
      + ", 95th=" + DOUBLE_FORMAT.format(sn.get95thPercentile())
      + ", 99th=" + DOUBLE_FORMAT.format(sn.get99thPercentile())
      + ", 99.9th=" + DOUBLE_FORMAT.format(sn.get999thPercentile())
      + ", 99.99th=" + DOUBLE_FORMAT.format(sn.getValue(0.9999))
      + ", 99.999th=" + DOUBLE_FORMAT.format(sn.getValue(0.99999));
}
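// A small usage sketch (assumed, not part of the original class): build a Dropwizard Metrics
// Histogram backed by an ExponentiallyDecayingReservoir, record some values, and print the
// percentile summary produced by getHistogramReport. DOUBLE_FORMAT is assumed to be a
// java.text.DecimalFormat such as new DecimalFormat("0.00").
Histogram latencies = new Histogram(new ExponentiallyDecayingReservoir());
for (int i = 1; i <= 1000; i++) {
  latencies.update(i);
}
// The report starts with ", mean=..." so it can be appended directly after a metric name.
System.out.println("latencies" + getHistogramReport(latencies));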
private void serializeEnum(BluefloodEnumRollup rollup, byte[] buf) throws IOException {
  CodedOutputStream out = CodedOutputStream.newInstance(buf);
  enumRollupSize.update(buf.length);
  out.writeRawByte(Constants.VERSION_1_ENUM_ROLLUP);
  out.writeRawVarint32(rollup.getCount());
  Map<Long, Long> enValues = rollup.getHashedEnumValuesWithCounts();
  for (Long i : enValues.keySet()) {
    out.writeRawVarint64(i);
    out.writeRawVarint64(enValues.get(i));
  }
}
@Test
public void testHistogram() {
  final Histogram histogram = registry.histogram(name("foo", "bar"));
  histogram.update(20);
  histogram.update(40);
  reportAndRefresh();

  SearchResponse searchResponse =
      client().prepareSearch(indexWithDate).setTypes("histogram").execute().actionGet();
  org.assertj.core.api.Assertions.assertThat(searchResponse.getHits().totalHits()).isEqualTo(1L);

  Map<String, Object> hit = searchResponse.getHits().getAt(0).sourceAsMap();
  assertTimestamp(hit);
  assertKey(hit, "name", prefix + ".foo.bar");
  assertKey(hit, "count", 2);
  assertKey(hit, "max", 40);
  assertKey(hit, "min", 20);
  assertKey(hit, "mean", 30.0);
  assertKey(hit, "host", "localhost");
}
private void collectHistogramReports(
    List<DBObject> docs, SortedMap<String, Histogram> histograms, Date timestamp) {
  if (histograms.isEmpty()) return;

  for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
    final BasicDBObject report = getBasicDBObject(timestamp, entry.getKey(), "histogram");
    final Histogram histogram = entry.getValue();
    final Snapshot s = histogram.getSnapshot();
    // Note: Snapshot.size() is the number of samples currently in the reservoir,
    // which can differ from the histogram's total update count (Histogram.getCount()).
    report.put("count", s.size());
    report.put("75th_percentile", s.get75thPercentile());
    report.put("95th_percentile", s.get95thPercentile());
    report.put("98th_percentile", s.get98thPercentile());
    report.put("99th_percentile", s.get99thPercentile());
    report.put("999th_percentile", s.get999thPercentile());
    report.put("max", s.getMax());
    report.put("min", s.getMin());
    report.put("mean", s.getMean());
    report.put("median", s.getMedian());
    report.put("std_dev", s.getStdDev());
    docs.add(report);
  }
}
@Test
public void testHistogram() {
  System.out.println("******************************* HISTOGRAM *******************************");
  histogram = registry.histogram("histogram");
  try {
    for (int i = 0; i < ITER_COUNT; i++) {
      histogram.update(i);
      Thread.sleep(SLEEP_MS);
    }
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }
}
@Override
public final AttributeSet resolve(ResolverContext context) throws Exception {
  checkArgument(context.getDescriptor().getId().equals(descriptor.getId()));
  if (log.isDebugEnabled()) {
    log.debug(
        "Retrieving attributes via resolver id=\"{}\" name=\"{}\"",
        descriptor.getId(),
        descriptor.getName());
  }
  Timer.Context timerCtx = timer.time();
  try {
    return AttributeSet.builder(descriptor)
        .attributes(doResolve(context))
        .ticker(context.getTicker())
        .build();
  } catch (Exception e) {
    if (log.isDebugEnabled()) {
      log.debug(e.getMessage(), e);
    }
    throw e;
  } finally {
    // Timer.Context.stop() returns the elapsed time in nanoseconds; record it in the histogram too.
    histogram.update(timerCtx.stop());
  }
}
/** Submit results to index and return the queue messages to be ack'd */
private List<QueueMessage> submitToIndex(List<IndexEventResult> indexEventResults) {
  // if nothing came back then return empty list
  if (indexEventResults == null) {
    return new ArrayList<>(0);
  }

  IndexOperationMessage combined = new IndexOperationMessage();
  List<QueueMessage> queueMessages =
      indexEventResults
          .stream()
          // filter out messages that are not present, they were not processed and put into the results
          .filter(result -> result.getQueueMessage().isPresent())
          .map(
              indexEventResult -> {
                // record the cycle time
                messageCycle.update(System.currentTimeMillis() - indexEventResult.getCreationTime());
                // ingest each index op into our combined, single index op for the index producer
                if (indexEventResult.getIndexOperationMessage().isPresent()) {
                  combined.ingest(indexEventResult.getIndexOperationMessage().get());
                }
                return indexEventResult.getQueueMessage().get();
              })
          // collect into a list of QueueMessages that can be ack'd later
          .collect(Collectors.toList());

  queueIndexOperationMessage(combined);
  return queueMessages;
}
protected Map<String, Object> buildHistogramMap(Histogram h) {
  Map<String, Object> metrics = Maps.newHashMap();
  if (h == null) {
    return metrics;
  }
  Map<String, Object> time = Maps.newHashMap();
  time.put("max", h.getSnapshot().getMax());
  time.put("min", h.getSnapshot().getMin());
  time.put("mean", (long) h.getSnapshot().getMean());
  time.put("95th_percentile", (long) h.getSnapshot().get95thPercentile());
  time.put("98th_percentile", (long) h.getSnapshot().get98thPercentile());
  time.put("99th_percentile", (long) h.getSnapshot().get99thPercentile());
  time.put("std_dev", (long) h.getSnapshot().getStdDev());
  metrics.put("time", time);
  metrics.put("count", h.getCount());
  return metrics;
}
private void adjustMetrics(
    Map<String, Map<Integer, MetricSnapshot>> metrics,
    Map<String, Integer> metaCounters,
    Map<String, Map<Integer, Histogram>> histograms,
    Map<String, Map<Integer, Timer>> timers) {
  for (Map.Entry<String, Map<Integer, MetricSnapshot>> metricEntry : metrics.entrySet()) {
    String meta = metricEntry.getKey();
    MetricType metricType = MetricUtils.metricType(meta);
    MetaType metaType = MetricUtils.metaType(meta);
    Map<Integer, MetricSnapshot> winData = metricEntry.getValue();

    if (metricType == MetricType.HISTOGRAM) {
      for (Map.Entry<Integer, MetricSnapshot> dataEntry : winData.entrySet()) {
        MetricSnapshot snapshot = dataEntry.getValue();
        Integer cnt = metaCounters.get(meta);
        Histogram histogram = histograms.get(meta).get(dataEntry.getKey());
        if (cnt != null && cnt > 1) {
          Snapshot snapshot1 = histogram.getSnapshot();
          snapshot.set_mean(snapshot1.getMean());
          snapshot.set_p50(snapshot1.getMedian());
          snapshot.set_p75(snapshot1.get75thPercentile());
          snapshot.set_p95(snapshot1.get95thPercentile());
          snapshot.set_p98(snapshot1.get98thPercentile());
          snapshot.set_p99(snapshot1.get99thPercentile());
          snapshot.set_p999(snapshot1.get999thPercentile());
          snapshot.set_stddev(snapshot1.getStdDev());
          snapshot.set_min(snapshot1.getMin());
          snapshot.set_max(snapshot1.getMax());
          if (metaType == MetaType.TOPOLOGY) {
            snapshot.set_points(Arrays.asList(ArrayUtils.toObject(snapshot1.getValues())));
          }
        }
        if (metaType != MetaType.TOPOLOGY) {
          snapshot.set_points(new ArrayList<Long>(0));
        }
      }
    } else if (metricType == MetricType.TIMER) {
      for (Map.Entry<Integer, MetricSnapshot> dataEntry : winData.entrySet()) {
        MetricSnapshot snapshot = dataEntry.getValue();
        Integer cnt = metaCounters.get(meta);
        if (cnt != null && cnt > 1) {
          Timer timer = timers.get(meta).get(dataEntry.getKey());
          Snapshot snapshot1 = timer.getSnapshot();
          snapshot.set_p50(snapshot1.getMedian());
          snapshot.set_p75(snapshot1.get75thPercentile());
          snapshot.set_p95(snapshot1.get95thPercentile());
          snapshot.set_p98(snapshot1.get98thPercentile());
          snapshot.set_p99(snapshot1.get99thPercentile());
          snapshot.set_p999(snapshot1.get999thPercentile());
          snapshot.set_stddev(snapshot1.getStdDev());
          snapshot.set_min(snapshot1.getMin());
          snapshot.set_max(snapshot1.getMax());
        }
        snapshot.set_points(new ArrayList<Long>(0));
      }
    }
  }
}
public void update(long value) {
  if (isEnable) {
    instance.update(value);
  }
}
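// A hedged sketch of the pattern the method above suggests: a histogram wrapper that can be
// switched off so instrumentation costs nothing when disabled. The class name below is an
// assumption for illustration; only the field names isEnable and instance come from the snippet.
public class ToggleableHistogram {
  private final boolean isEnable;
  private final com.codahale.metrics.Histogram instance;

  public ToggleableHistogram(boolean isEnable, com.codahale.metrics.Histogram instance) {
    this.isEnable = isEnable;
    this.instance = instance;
  }

  public void update(long value) {
    if (isEnable) {
      instance.update(value);
    }
  }
}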
public String generateRealTimeTrackingMetrics() {
  // JVM memory
  double jvmFreeMemoryGB, jvmMaxMemoryGB, jvmTotalMemoryGB;
  if (jvmFreeMemoryGauge == null && metrics.getGauges().containsKey("variable.jvm.free.memory")) {
    jvmFreeMemoryGauge = metrics.getGauges().get("variable.jvm.free.memory");
  }
  if (jvmMaxMemoryGauge == null && metrics.getGauges().containsKey("variable.jvm.max.memory")) {
    jvmMaxMemoryGauge = metrics.getGauges().get("variable.jvm.max.memory");
  }
  if (jvmTotalMemoryGauge == null && metrics.getGauges().containsKey("variable.jvm.total.memory")) {
    jvmTotalMemoryGauge = metrics.getGauges().get("variable.jvm.total.memory");
  }
  jvmFreeMemoryGB = jvmFreeMemoryGauge == null
      ? 0 : Double.parseDouble(jvmFreeMemoryGauge.getValue().toString()) / 1024 / 1024 / 1024;
  jvmMaxMemoryGB = jvmMaxMemoryGauge == null
      ? 0 : Double.parseDouble(jvmMaxMemoryGauge.getValue().toString()) / 1024 / 1024 / 1024;
  jvmTotalMemoryGB = jvmTotalMemoryGauge == null
      ? 0 : Double.parseDouble(jvmTotalMemoryGauge.getValue().toString()) / 1024 / 1024 / 1024;

  // number of running applications/containers
  String numRunningApps, numRunningContainers;
  if (numRunningAppsGauge == null && metrics.getGauges().containsKey("variable.running.application")) {
    numRunningAppsGauge = metrics.getGauges().get("variable.running.application");
  }
  if (numRunningContainersGauge == null && metrics.getGauges().containsKey("variable.running.container")) {
    numRunningContainersGauge = metrics.getGauges().get("variable.running.container");
  }
  numRunningApps = numRunningAppsGauge == null ? "0" : numRunningAppsGauge.getValue().toString();
  numRunningContainers = numRunningContainersGauge == null ? "0" : numRunningContainersGauge.getValue().toString();

  // cluster allocated/available resources
  double allocatedMemoryGB, allocatedVCoresGB, availableMemoryGB, availableVCoresGB;
  if (allocatedMemoryGauge == null && metrics.getGauges().containsKey("variable.cluster.allocated.memory")) {
    allocatedMemoryGauge = metrics.getGauges().get("variable.cluster.allocated.memory");
  }
  if (allocatedVCoresGauge == null && metrics.getGauges().containsKey("variable.cluster.allocated.vcores")) {
    allocatedVCoresGauge = metrics.getGauges().get("variable.cluster.allocated.vcores");
  }
  if (availableMemoryGauge == null && metrics.getGauges().containsKey("variable.cluster.available.memory")) {
    availableMemoryGauge = metrics.getGauges().get("variable.cluster.available.memory");
  }
  if (availableVCoresGauge == null && metrics.getGauges().containsKey("variable.cluster.available.vcores")) {
    availableVCoresGauge = metrics.getGauges().get("variable.cluster.available.vcores");
  }
  allocatedMemoryGB = allocatedMemoryGauge == null
      ? 0 : Double.parseDouble(allocatedMemoryGauge.getValue().toString()) / 1024;
  allocatedVCoresGB = allocatedVCoresGauge == null
      ? 0 : Double.parseDouble(allocatedVCoresGauge.getValue().toString());
  availableMemoryGB = availableMemoryGauge == null
      ? 0 : Double.parseDouble(availableMemoryGauge.getValue().toString()) / 1024;
  availableVCoresGB = availableVCoresGauge == null
      ? 0 : Double.parseDouble(availableVCoresGauge.getValue().toString());

  // scheduler operation time cost
  double allocateTimecost, handleTimecost;
  if (allocateTimecostHistogram == null
      && metrics.getHistograms().containsKey("sampler.scheduler.operation.allocate.timecost")) {
    allocateTimecostHistogram = metrics.getHistograms().get("sampler.scheduler.operation.allocate.timecost");
  }
  if (handleTimecostHistogram == null
      && metrics.getHistograms().containsKey("sampler.scheduler.operation.handle.timecost")) {
    handleTimecostHistogram = metrics.getHistograms().get("sampler.scheduler.operation.handle.timecost");
  }
  allocateTimecost = allocateTimecostHistogram == null
      ? 0.0 : allocateTimecostHistogram.getSnapshot().getMean() / 1000000;
  handleTimecost = handleTimecostHistogram == null
      ? 0.0 : handleTimecostHistogram.getSnapshot().getMean() / 1000000;

  // time cost of each handle operation type
  Map<SchedulerEventType, Double> handleOperTimecostMap = new HashMap<SchedulerEventType, Double>();
  for (SchedulerEventType e : SchedulerEventType.values()) {
    String key = "sampler.scheduler.operation.handle." + e + ".timecost";
    if (!handleOperTimecostHistogramMap.containsKey(e) && metrics.getHistograms().containsKey(key)) {
      handleOperTimecostHistogramMap.put(e, metrics.getHistograms().get(key));
    }
    double timecost = handleOperTimecostHistogramMap.containsKey(e)
        ? handleOperTimecostHistogramMap.get(e).getSnapshot().getMean() / 1000000 : 0;
    handleOperTimecostMap.put(e, timecost);
  }

  // allocated resources for each queue
  Map<String, Double> queueAllocatedMemoryMap = new HashMap<String, Double>();
  Map<String, Long> queueAllocatedVCoresMap = new HashMap<String, Long>();
  for (String queue : wrapper.getQueueSet()) {
    // memory
    String key = "counter.queue." + queue + ".allocated.memory";
    if (!queueAllocatedMemoryCounterMap.containsKey(queue) && metrics.getCounters().containsKey(key)) {
      queueAllocatedMemoryCounterMap.put(queue, metrics.getCounters().get(key));
    }
    double queueAllocatedMemoryGB = queueAllocatedMemoryCounterMap.containsKey(queue)
        ? queueAllocatedMemoryCounterMap.get(queue).getCount() / 1024.0 : 0;
    queueAllocatedMemoryMap.put(queue, queueAllocatedMemoryGB);
    // vCores
    key = "counter.queue." + queue + ".allocated.cores";
    if (!queueAllocatedVCoresCounterMap.containsKey(queue) && metrics.getCounters().containsKey(key)) {
      queueAllocatedVCoresCounterMap.put(queue, metrics.getCounters().get(key));
    }
    long queueAllocatedVCores = queueAllocatedVCoresCounterMap.containsKey(queue)
        ? queueAllocatedVCoresCounterMap.get(queue).getCount() : 0;
    queueAllocatedVCoresMap.put(queue, queueAllocatedVCores);
  }

  // package results
  StringBuilder sb = new StringBuilder();
  sb.append("{");
  sb.append("\"time\":").append(System.currentTimeMillis())
      .append(",\"jvm.free.memory\":").append(jvmFreeMemoryGB)
      .append(",\"jvm.max.memory\":").append(jvmMaxMemoryGB)
      .append(",\"jvm.total.memory\":").append(jvmTotalMemoryGB)
      .append(",\"running.applications\":").append(numRunningApps)
      .append(",\"running.containers\":").append(numRunningContainers)
      .append(",\"cluster.allocated.memory\":").append(allocatedMemoryGB)
      .append(",\"cluster.allocated.vcores\":").append(allocatedVCoresGB)
      .append(",\"cluster.available.memory\":").append(availableMemoryGB)
      .append(",\"cluster.available.vcores\":").append(availableVCoresGB);
  for (String queue : wrapper.getQueueSet()) {
    sb.append(",\"queue.").append(queue).append(".allocated.memory\":")
        .append(queueAllocatedMemoryMap.get(queue));
    sb.append(",\"queue.").append(queue).append(".allocated.vcores\":")
        .append(queueAllocatedVCoresMap.get(queue));
  }
  // scheduler allocate & handle
  sb.append(",\"scheduler.allocate.timecost\":").append(allocateTimecost);
  sb.append(",\"scheduler.handle.timecost\":").append(handleTimecost);
  for (SchedulerEventType e : SchedulerEventType.values()) {
    sb.append(",\"scheduler.handle-").append(e).append(".timecost\":")
        .append(handleOperTimecostMap.get(e));
  }
  sb.append("}");
  return sb.toString();
}
/** {@inheritDoc} */
@Override
public void recordConnectionUsageMillis(final long elapsedBorrowedMillis) {
  connectionUsage.update(elapsedBorrowedMillis);
}
public void addHistogram(String name, Histogram histogram) {
  final Long countDelta = deltaTracker.getDelta(name, histogram.getCount());
  maybeAdd(COUNT, name, countDelta);
  final boolean convertDurations = false;
  addSampling(name, histogram, convertDurations);
}
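// A minimal sketch of the delta-tracking idea used above, assuming a reporter that only wants
// to emit the change in a monotonically increasing count since the previous report. This
// DeltaTracker is an illustrative stand-in, not the original implementation referenced by the
// snippet.
class DeltaTracker {
  private final java.util.concurrent.ConcurrentMap<String, Long> lastValues =
      new java.util.concurrent.ConcurrentHashMap<>();

  /** Returns the change since the last reported value for this metric name. */
  Long getDelta(String name, long currentValue) {
    Long previous = lastValues.put(name, currentValue);
    return previous == null ? currentValue : currentValue - previous;
  }
}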