@Test
public void testTimerMatch() {
  // Create a timer with id "testTimerMatch", register it with the metric registry and
  // record three samples so its count becomes 3.
  Timer t = MetricsConfigurator.createTimer(metrics, "testTimerMatch", PIPELINE_NAME, REVISION);
  t.update(1000, TimeUnit.MILLISECONDS);
  t.update(2000, TimeUnit.MILLISECONDS);
  t.update(3000, TimeUnit.MILLISECONDS);

  MetricsRuleDefinition metricsRuleDefinition =
      new MetricsRuleDefinition(
          "testTimerMatch",
          "testTimerMatch",
          "testTimerMatch",
          MetricType.TIMER,
          MetricElement.TIMER_COUNT,
          "${value()>2}",
          false,
          true);
  MetricRuleEvaluator metricRuleEvaluator =
      new MetricRuleEvaluator(
          metricsRuleDefinition,
          metrics,
          new AlertManager(
              PIPELINE_NAME, REVISION, null, metrics, runtimeInfo, new EventListenerManager()),
          Collections.<String>emptyList());
  metricRuleEvaluator.checkForAlerts();

  // The rule "${value()>2}" matches (the timer count is 3), so an alert gauge must exist.
  Gauge<Object> gauge =
      MetricsConfigurator.getGauge(
          metrics, AlertsUtil.getAlertGaugeName(metricsRuleDefinition.getId()));
  Assert.assertNotNull(gauge);
  Assert.assertEquals((long) 3, ((Map<String, Object>) gauge.getValue()).get("currentValue"));
}
private static void testInsertions(
    int tests, int perTestCount, float testKeyRatio, int modificationBatchSize, boolean quickEquality)
    throws ExecutionException, InterruptedException {
  int batchesPerTest = perTestCount / modificationBatchSize;
  int testKeyRange = (int) (perTestCount * testKeyRatio);
  long totalCount = (long) perTestCount * tests;
  log(
      "Performing %d tests of %d operations, with %.2f max size/key-range ratio in batches of ~%d ops",
      tests, perTestCount, 1 / testKeyRatio, modificationBatchSize);

  // if we're not doing quick-equality, we can spam with garbage for all the checks we perform,
  // so we'll split the work into smaller chunks
  int chunkSize = quickEquality ? tests : (int) (100000 / Math.pow(perTestCount, 2));
  for (int chunk = 0; chunk < tests; chunk += chunkSize) {
    final List<ListenableFutureTask<List<ListenableFuture<?>>>> outer = new ArrayList<>();
    for (int i = 0; i < chunkSize; i++) {
      int maxRunLength =
          modificationBatchSize == 1 ? 1 : ThreadLocalRandom.current().nextInt(1, modificationBatchSize);
      outer.add(
          doOneTestInsertions(testKeyRange, maxRunLength, modificationBatchSize, batchesPerTest, quickEquality));
    }

    final List<ListenableFuture<?>> inner = new ArrayList<>();
    long complete = 0;
    int reportInterval = Math.max(1000, (int) (totalCount / 10000));
    long lastReportAt = 0;
    for (ListenableFutureTask<List<ListenableFuture<?>>> f : outer) {
      inner.addAll(f.get());
      complete += perTestCount;
      if (complete - lastReportAt >= reportInterval) {
        long done = (chunk * perTestCount) + complete;
        float ratio = done / (float) totalCount;
        log("Completed %.1f%% (%d of %d operations)", ratio * 100, done, totalCount);
        lastReportAt = complete;
      }
    }
    Futures.allAsList(inner).get();
  }

  Snapshot snap = BTREE_TIMER.getSnapshot();
  log("btree: %.2fns, %.2fns, %.2fns", snap.getMedian(), snap.get95thPercentile(), snap.get999thPercentile());
  snap = TREE_TIMER.getSnapshot();
  log("java: %.2fns, %.2fns, %.2fns", snap.getMedian(), snap.get95thPercentile(), snap.get999thPercentile());
  log("Done");
}
private PerfContext start(MetricRegistry metrics, String name) {
  Timer timer = metrics.timer(name);
  final Timer.Context ctx = timer.time();
  return new PerfContext() {
    @Override
    public void end() {
      ctx.stop();
    }
  };
}
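// Hypothetical usage of the start(...) helper above (a sketch; the metric name and
// doWork() are illustrative, not taken from the original source). The returned
// PerfContext wraps a Timer.Context, so end() records the elapsed time into the timer.
PerfContext perf = start(metrics, "query.latency");
try {
  doWork();
} finally {
  perf.end();
}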
@Override
public void run() {
  samplerLock.lock();
  try {
    // Sample the current mean of each scheduler timer into its corresponding histogram.
    for (Histogram histogram : schedulerHistogramList) {
      Timer timer = histogramTimerMap.get(histogram);
      histogram.update((int) timer.getSnapshot().getMean());
    }
  } finally {
    samplerLock.unlock();
  }
}
@Override
public Allocation allocate(
    ApplicationAttemptId attemptId,
    List<ResourceRequest> resourceRequests,
    List<ContainerId> containerIds,
    List<String> strings,
    List<String> strings2) {
  if (metricsON) {
    final Timer.Context context = schedulerAllocateTimer.time();
    Allocation allocation = null;
    try {
      allocation = scheduler.allocate(attemptId, resourceRequests, containerIds, strings, strings2);
      return allocation;
    } finally {
      context.stop();
      schedulerAllocateCounter.inc();
      try {
        updateQueueWithAllocateRequest(allocation, attemptId, resourceRequests, containerIds);
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  } else {
    return scheduler.allocate(attemptId, resourceRequests, containerIds, strings, strings2);
  }
}
@Override
protected void onComplete(Request request, Response response) {
  activeRequestsCounter.dec();
  Long duration =
      computeDurationMillis(START_TIMES_BY_CORRELATION_ID.remove(request.getCorrelationId()));
  if (duration != null && duration.longValue() > 0) {
    allTimesTimer.update(duration, TimeUnit.MILLISECONDS);
    String name = getRouteName(request);
    if (name == null || name.isEmpty()) return;
    ROUTE_TIMERS.get(name).update(duration, TimeUnit.MILLISECONDS);
  }

  Counter responseCounter = COUNTERS_BY_RESPONSE.get(response.getResponseStatus().getCode());
  if (responseCounter == null) {
    responseCounter = metrics.counter(getResponseCounterName(response.getResponseStatus()));
    COUNTERS_BY_RESPONSE.putIfAbsent(response.getResponseStatus().getCode(), responseCounter);
  }
  responseCounter.inc();
  publish(request, response, duration);
}
@Override
public void run() {
  Timer.Context rollupTimerContext = rollupTimer.time();
  try {
    Rollup.Type rollupComputer =
        RollupRunnable.getRollupComputer(RollupType.BF_BASIC, Granularity.FULL);
    Rollup rollup = rollupComputer.compute(points);
    writer.enqueueRollupForWrite(
        new SingleRollupWriteContext(
            rollup, loc, Granularity.MIN_5, CassandraModel.CF_METRICS_5M, range.getStart()));
    log.info("Calculated and queued rollup for " + loc + " within range " + range);
  } catch (Exception e) {
    // I want to be very harsh with exceptions encountered while validating and computing
    // rollups. Just stop everything.
    log.error("Error encountered while validating and calculating rollups", e);
    rollupValidationAndComputeFailed.inc();
    RollupGenerator.rollupExecutors.shutdownNow();
    OutOFBandRollup.getRollupGeneratorThread().interrupt();
    // Stop the monitoring thread
    OutOFBandRollup.getMonitoringThread().interrupt();
    // Stop the file handler thread pool from sending data to buildstore
    FileHandler.handlerThreadPool.shutdownNow();
    throw new RuntimeException(e);
  } finally {
    rollupTimerContext.stop();
  }
}
@Override
protected boolean doProcess(Record record) {
  Timer.Context timerContext = elapsedTime.time();
  try {
    XContentBuilder documentBuilder = jsonBuilder().startObject();
    Map<String, Collection<Object>> map = record.getFields().asMap();
    for (Map.Entry<String, Collection<Object>> entry : map.entrySet()) {
      String key = entry.getKey();
      Iterator<Object> iterator = entry.getValue().iterator();
      while (iterator.hasNext()) {
        documentBuilder.field(key, iterator.next());
      }
    }
    documentBuilder.endObject();
    loader.addDocument(documentBuilder.bytes(), indexName, indexType, ttl);
  } catch (Exception e) {
    throw new MorphlineRuntimeException(e);
  } finally {
    timerContext.stop();
  }

  // pass record to next command in chain:
  return super.doProcess(record);
}
private void test(DataSource dataSource) throws SQLException {
  for (int i = 0; i < callCount; i++) {
    long startNanos = System.nanoTime();
    // Acquire and immediately close a connection; the acquisition time is what gets recorded.
    try (Connection connection = dataSource.getConnection()) {}
    timer.update(System.nanoTime() - startNanos, TimeUnit.NANOSECONDS);
  }
  logReporter.report();
}
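// One way the logReporter used above might be constructed (an assumption, not taken from
// the original source): a Dropwizard Metrics Slf4jReporter bound to the same registry,
// converting durations to milliseconds and rates to events per second.
Slf4jReporter logReporter =
    Slf4jReporter.forRegistry(metricRegistry)
        .outputTo(LoggerFactory.getLogger("connection-pool-benchmark"))
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .convertRatesTo(TimeUnit.SECONDS)
        .build();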
@Override
public void handle(
    String path, Request request, HttpServletRequest httpRequest, HttpServletResponse httpResponse)
    throws IOException, ServletException {
  activeDispatches.inc();

  final long start;
  final HttpChannelState state = request.getHttpChannelState();
  if (state.isInitial()) {
    // new request
    activeRequests.inc();
    start = request.getTimeStamp();
  } else {
    // resumed request
    start = System.currentTimeMillis();
    activeSuspended.dec();
    if (state.getState() == State.DISPATCHED) {
      asyncDispatches.mark();
    }
  }

  try {
    super.handle(path, request, httpRequest, httpResponse);
  } finally {
    final long now = System.currentTimeMillis();
    final long dispatched = now - start;

    activeDispatches.dec();
    dispatches.update(dispatched, TimeUnit.MILLISECONDS);

    if (state.isSuspended()) {
      if (state.isInitial()) {
        state.addListener(listener);
      }
      activeSuspended.inc();
    } else if (state.isInitial()) {
      requests.update(dispatched, TimeUnit.MILLISECONDS);
      updateResponses(request);
    }
    // else onCompletion will handle it.
  }
}
@Test
public void callLambdaExpressionTimedStaticMethodOnce() {
  // Call the timed static method and assert it's been timed once
  TimedStaticMethodWithNameFromElExpression.lambdaExpressionStaticTimedMethod();
  assertThat(
      "Shared metric registry is not created",
      SharedMetricRegistries.names(),
      hasItem(REGISTRY_NAME));
  MetricRegistry registry = SharedMetricRegistries.getOrCreate(REGISTRY_NAME);
  assertThat("Timer is not registered correctly", registry.getTimers(), hasKey(TIMER_NAME));
  Timer timer = registry.getTimers().get(TIMER_NAME);
  assertThat(
      "Timer count is incorrect", timer.getCount(), is(equalTo(TIMER_COUNT.incrementAndGet())));
}
private void collectTimerReports(
    List<DBObject> docs, SortedMap<String, Timer> timers, Date timestamp) {
  if (timers.isEmpty()) return;
  for (Map.Entry<String, Timer> entry : timers.entrySet()) {
    final BasicDBObject report = getBasicDBObject(timestamp, entry.getKey(), "timer");
    final Timer v = entry.getValue();
    final Snapshot s = v.getSnapshot();
    // meter part
    report.put("count", v.getCount());
    report.put("rate-unit", getRateUnit());
    report.put("1-minute-rate", convertRate(v.getOneMinuteRate()));
    report.put("5-minute-rate", convertRate(v.getFiveMinuteRate()));
    report.put("15-minute-rate", convertRate(v.getFifteenMinuteRate()));
    report.put("mean-rate", convertRate(v.getMeanRate()));
    // histogram part
    report.put("duration-unit", getDurationUnit());
    report.put("75-percentile", convertDuration(s.get75thPercentile()));
    report.put("95-percentile", convertDuration(s.get95thPercentile()));
    report.put("98-percentile", convertDuration(s.get98thPercentile()));
    report.put("99-percentile", convertDuration(s.get99thPercentile()));
    report.put("999-percentile", convertDuration(s.get999thPercentile()));
    report.put("max", convertDuration(s.getMax()));
    report.put("min", convertDuration(s.getMin()));
    report.put("mean", convertDuration(s.getMean()));
    report.put("median", convertDuration(s.getMedian()));
    report.put("stddev", convertDuration(s.getStdDev()));
    docs.add(report);
  }
}
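// A sketch of how the collected reports might then be flushed (an assumption; the original
// reporter's write path is not shown here). It uses the legacy MongoDB Java driver's
// DBCollection API, which accepts a list of DBObjects in a single insert; the collection
// name "metrics" is illustrative.
List<DBObject> docs = new ArrayList<>();
collectTimerReports(docs, registry.getTimers(), new Date());
if (!docs.isEmpty()) {
  db.getCollection("metrics").insert(docs);
}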
private void sendMessage(byte[] encodedMessage) {
  Timer.Context sendMessageTimer = messageSendingTimer.time();
  messageProducer.produce(encodedMessage);
  sendMessageTimer.stop();
  if (log.isDebugEnabled()) {
    log.debug("Completed send of message: " + new String(encodedMessage));
  }
}
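// A slightly more defensive variant of sendMessage (a sketch, not the original code):
// stopping the context in a finally block records a sample even when produce() throws.
private void sendMessageRecordingFailures(byte[] encodedMessage) {
  final Timer.Context sendMessageTimer = messageSendingTimer.time();
  try {
    messageProducer.produce(encodedMessage);
  } finally {
    sendMessageTimer.stop();
  }
}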
@Before
public void setUp() throws Exception {
  producer = new TimerProducer(endpoint);
  inOrder = Mockito.inOrder(endpoint, exchange, registry, timer, context, in);
  when(endpoint.getRegistry()).thenReturn(registry);
  when(registry.timer(METRICS_NAME)).thenReturn(timer);
  when(timer.time()).thenReturn(context);
  when(exchange.getIn()).thenReturn(in);
}
public void printMetrics() {
  logger.info("Metrics");
  Metrics metrics = session.getCluster().getMetrics();
  Gauge<Integer> gauge = metrics.getConnectedToHosts();
  Integer numberOfHosts = gauge.getValue();
  logger.info("Number of hosts: " + numberOfHosts);

  Metrics.Errors errors = metrics.getErrorMetrics();
  Counter counter = errors.getReadTimeouts();
  logger.info("Number of read timeouts: " + counter.getCount());

  com.codahale.metrics.Timer timer = metrics.getRequestsTimer();
  Timer.Context context = timer.time();
  try {
    long numberUserRequests = timer.getCount();
    logger.info("Number of user requests: " + numberUserRequests);
  } finally {
    context.stop();
  }
}
@Test
@SuppressWarnings("squid:S2925")
public void testTimer() throws Exception {
  final Timer timer = registry.timer(name("foo", "bar"));
  final Timer.Context timerContext = timer.time();
  Thread.sleep(200);
  timerContext.stop();
  reportAndRefresh();

  SearchResponse searchResponse =
      client().prepareSearch(indexWithDate).setTypes("timer").execute().actionGet();
  org.assertj.core.api.Assertions.assertThat(searchResponse.getHits().totalHits()).isEqualTo(1L);

  Map<String, Object> hit = searchResponse.getHits().getAt(0).sourceAsMap();
  assertTimestamp(hit);
  assertKey(hit, "name", prefix + ".foo.bar");
  assertKey(hit, "count", 1);
  assertKey(hit, "host", "localhost");
}
private void updateResponses(Request request) {
  final int response = request.getResponse().getStatus() / 100;
  if (response >= 1 && response <= 5) {
    responses[response - 1].mark();
  }
  activeRequests.dec();
  final long elapsedTime = System.currentTimeMillis() - request.getTimeStamp();
  requests.update(elapsedTime, TimeUnit.MILLISECONDS);
  requestTimer(request.getMethod()).update(elapsedTime, TimeUnit.MILLISECONDS);
}
private byte[] encodeMessage(Tx tx, Op op) {
  Timer.Context encodingTimer = messageEncodingTimer.time();
  byte[] encodedMessage = messageEncoder.encode(tx, op);
  encodingTimer.stop();
  if (log.isTraceEnabled()) {
    log.trace("Result of message encoding is = " + new String(encodedMessage));
  }
  return encodedMessage;
}
/** Tests GetAllMetrics method. */
@Test
public void testGetAllMetrics() {
  Counter onosCounter = new Counter();
  onosCounter.inc();

  Meter onosMeter = new Meter();
  onosMeter.mark();

  Timer onosTimer = new Timer();
  onosTimer.update(1, TimeUnit.MILLISECONDS);

  ImmutableMap<String, Metric> metrics =
      new ImmutableMap.Builder<String, Metric>()
          .put("onosCounter", onosCounter)
          .put("onosMeter", onosMeter)
          .put("onosTimer", onosTimer)
          .build();

  expect(mockMetricsService.getMetrics()).andReturn(metrics).anyTimes();
  replay(mockMetricsService);

  WebTarget wt = target();
  String response = wt.path("metrics").request().get(String.class);
  assertThat(response, containsString("{\"metrics\":["));

  JsonObject result = Json.parse(response).asObject();
  assertThat(result, notNullValue());

  JsonArray jsonMetrics = result.get("metrics").asArray();
  assertThat(jsonMetrics, notNullValue());
  assertThat(jsonMetrics.size(), is(3));

  assertTrue(
      matchesMetric(metrics.get("onosCounter")).matchesSafely(jsonMetrics.get(0).asObject()));
  assertTrue(
      matchesMetric(metrics.get("onosMeter")).matchesSafely(jsonMetrics.get(1).asObject()));
  assertTrue(
      matchesMetric(metrics.get("onosTimer")).matchesSafely(jsonMetrics.get(2).asObject()));
}
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
    throws IOException, ServletException {
  final StatusExposingServletResponse wrappedResponse =
      new StatusExposingServletResponse((HttpServletResponse) response);
  activeRequests.inc();
  final Timer.Context context = requestTimer.time();
  try {
    chain.doFilter(request, wrappedResponse);
  } finally {
    context.stop();
    activeRequests.dec();
    markMeterForStatusCode(wrappedResponse.getStatus());
  }
}
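// One way the collaborators used by doFilter above might be wired up (a sketch; the
// constructor, field names and metric names are illustrative and not taken from the
// original filter implementation).
public MetricsServletFilter(MetricRegistry registry) {
  this.activeRequests = registry.counter("http.active-requests");
  this.requestTimer = registry.timer("http.requests");
}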
private void callConnectionMethods(Timer timer) throws SQLException {
  long startNanos = System.nanoTime();
  try {
    connectionDecorator.getMetaData();
    String javaVersion = System.getProperty("java.version");
    if (javaVersion.contains("1.7") || javaVersion.contains("1.8")) {
      connectionDecorator.setSchema("schema");
      connectionDecorator.abort(null);
    }
  } finally {
    long endNanos = System.nanoTime();
    timer.update((endNanos - startNanos), TimeUnit.NANOSECONDS);
  }
}
@Test
public void testTimer() {
  System.out.println("******************************* TIMER *******************************");
  timer = registry.timer("timer");
  try {
    for (int i = 0; i < ITER_COUNT; i++) {
      final Timer.Context context = timer.time();
      Thread.sleep(SLEEP_MS);
      context.stop();
    }
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }
}
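// A possible follow-up assertion for the test above (a sketch; it assumes the ITER_COUNT
// and SLEEP_MS constants used by the test and that no interruption occurred). Timer
// snapshot values are reported in nanoseconds, so the mean is converted before comparing.
Assert.assertEquals(ITER_COUNT, timer.getCount());
Assert.assertTrue(
    TimeUnit.NANOSECONDS.toMillis((long) timer.getSnapshot().getMean()) >= SLEEP_MS);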
private Status processOperation(Tx tx, Op op) {
  Timer.Context timer = operationProcessingTimer.time();
  Status status = Status.OK;
  try {
    encodeAndSend(tx, op);
  } catch (RuntimeException re) {
    operationProcessingErrorMeter.mark();
    log.error("Error processing operation: " + op.toString(), re);
    status = Status.ABEND;
  }
  timer.stop();
  return status;
}
@Override
public String toString() {
  StringWriter sw = new StringWriter();
  Locale locale = Locale.ROOT;
  try (PrintWriter output = new PrintWriter(sw)) {
    final Snapshot snapshot = timer.getSnapshot();
    output.printf(locale, "Benchmark Results%n");
    output.printf(locale, " count = %d%n", timer.getCount());
    output.printf(locale, " mean rate = %2.2f calls/%s%n", timer.getMeanRate(), "s");
    output.printf(locale, " min = %d %s%n", TimeUnit.NANOSECONDS.toMillis(snapshot.getMin()), "ms");
    output.printf(locale, " max = %d %s%n", TimeUnit.NANOSECONDS.toMillis(snapshot.getMax()), "ms");
    output.printf(locale, " mean = %2.2f %s%n", snapshot.getMean() / 1000000, "ms");
    output.printf(locale, " stddev = %2.2f %s%n", snapshot.getStdDev() / 1000000, "ms");
    output.printf(locale, " median = %2.2f %s%n", snapshot.getMedian() / 1000000, "ms");
    output.printf(locale, " 75%% <= %2.2f %s%n", snapshot.get75thPercentile() / 1000000, "ms");
    output.printf(locale, " 95%% <= %2.2f %s%n", snapshot.get95thPercentile() / 1000000, "ms");
    output.printf(locale, " 99.9%% <= %2.2f %s%n", snapshot.get999thPercentile() / 1000000, "ms");
  }
  return sw.toString();
}
/**
 * Check to see if a plaintext input matches a hash
 *
 * @param input the input
 * @param hashed the hash
 * @return <code>true</code> if it matches, <code>false</code> if not
 */
public static boolean matches(final String input, final String hashed) {
  checkNotNull(hashed, "Cannot compare NULL");
  LOOKUPS.mark();
  final Timer.Context context = GETS.time();
  try {
    boolean result = false;
    try {
      result = CACHE.get(new TwoTuple<>(input, hashed));
    } catch (ExecutionException e) {
      LOGGER.error("Failed to hash input password", e);
    }
    return result;
  } finally {
    context.stop();
  }
}
@Override
public ServiceResults postCollection(ServiceContext context) throws Exception {
  logger.info("NotificationService: start request.");
  Timer.Context timer = postTimer.time();
  postMeter.mark();
  try {
    validate(null, context.getPayload());
    Notification.PathTokens pathTokens =
        getPathTokens(context.getRequest().getOriginalParameters());
    context.getProperties().put("state", Notification.State.CREATED);
    context.getProperties().put("pathQuery", pathTokens);
    context.setOwner(sm.getApplication());
    ServiceResults results = super.postCollection(context);
    Notification notification = (Notification) results.getEntity();

    // update Notification properties
    if (notification.getStarted() == null || notification.getStarted() == 0) {
      long now = System.currentTimeMillis();
      notification.setStarted(System.currentTimeMillis());
      Map<String, Object> properties = new HashMap<String, Object>(2);
      properties.put("started", notification.getStarted());
      properties.put("state", notification.getState());
      notification.addProperties(properties);
      logger.info(
          "ApplicationQueueMessage: notification {} properties updated in duration {} ms",
          notification.getUuid(),
          System.currentTimeMillis() - now);
    }

    long now = System.currentTimeMillis();
    notificationQueueManager.queueNotification(notification, null);
    logger.info(
        "NotificationService: notification {} post queue duration {} ms ",
        notification.getUuid(),
        System.currentTimeMillis() - now);

    // future: somehow return 202?
    return results;
  } catch (Exception e) {
    logger.error("serialization failed", e);
    throw e;
  } finally {
    timer.stop();
  }
}
protected ExecuteResult flushBufferedDocs(DocBuffer b) {
  int numDocsInBatch = b.buffer.size();
  if (numDocsInBatch == 0) {
    b.reset();
    return ExecuteResult.ACK;
  }

  Timer.Context timer = (sendBatchToSolr != null) ? sendBatchToSolr.time() : null;
  try {
    sendBatchToSolr(b);
  } finally {
    if (timer != null) timer.stop();
    if (indexedCounter != null) indexedCounter.inc(numDocsInBatch);
    b.reset();
  }
  return ExecuteResult.ACK;
}
@Override
public final AttributeSet resolve(ResolverContext context) throws Exception {
  checkArgument(context.getDescriptor().getId().equals(descriptor.getId()));
  if (log.isDebugEnabled()) {
    log.debug(
        "Retrieving attributes via resolver id=\"{}\" name=\"{}\"",
        descriptor.getId(),
        descriptor.getName());
  }
  Timer.Context timerCtx = timer.time();
  try {
    return AttributeSet.builder(descriptor)
        .attributes(doResolve(context))
        .ticker(context.getTicker())
        .build();
  } catch (Exception e) {
    if (log.isDebugEnabled()) {
      log.debug(e.getMessage(), e);
    }
    throw e;
  } finally {
    // Timer.Context.stop() returns the elapsed time in nanoseconds, which is also
    // recorded into the resolution-time histogram.
    histogram.update(timerCtx.stop());
  }
}
private void adjustMetrics(
    Map<String, Map<Integer, MetricSnapshot>> metrics,
    Map<String, Integer> metaCounters,
    Map<String, Map<Integer, Histogram>> histograms,
    Map<String, Map<Integer, Timer>> timers) {
  for (Map.Entry<String, Map<Integer, MetricSnapshot>> metricEntry : metrics.entrySet()) {
    String meta = metricEntry.getKey();
    MetricType metricType = MetricUtils.metricType(meta);
    MetaType metaType = MetricUtils.metaType(meta);
    Map<Integer, MetricSnapshot> winData = metricEntry.getValue();

    if (metricType == MetricType.HISTOGRAM) {
      for (Map.Entry<Integer, MetricSnapshot> dataEntry : winData.entrySet()) {
        MetricSnapshot snapshot = dataEntry.getValue();
        Integer cnt = metaCounters.get(meta);
        Histogram histogram = histograms.get(meta).get(dataEntry.getKey());
        if (cnt != null && cnt > 1) {
          Snapshot snapshot1 = histogram.getSnapshot();
          snapshot.set_mean(snapshot1.getMean());
          snapshot.set_p50(snapshot1.getMedian());
          snapshot.set_p75(snapshot1.get75thPercentile());
          snapshot.set_p95(snapshot1.get95thPercentile());
          snapshot.set_p98(snapshot1.get98thPercentile());
          snapshot.set_p99(snapshot1.get99thPercentile());
          snapshot.set_p999(snapshot1.get999thPercentile());
          snapshot.set_stddev(snapshot1.getStdDev());
          snapshot.set_min(snapshot1.getMin());
          snapshot.set_max(snapshot1.getMax());

          if (metaType == MetaType.TOPOLOGY) {
            snapshot.set_points(Arrays.asList(ArrayUtils.toObject(snapshot1.getValues())));
          }
        }
        if (metaType != MetaType.TOPOLOGY) {
          snapshot.set_points(new ArrayList<Long>(0));
        }
      }
    } else if (metricType == MetricType.TIMER) {
      for (Map.Entry<Integer, MetricSnapshot> dataEntry : winData.entrySet()) {
        MetricSnapshot snapshot = dataEntry.getValue();
        Integer cnt = metaCounters.get(meta);
        if (cnt != null && cnt > 1) {
          Timer timer = timers.get(meta).get(dataEntry.getKey());
          Snapshot snapshot1 = timer.getSnapshot();
          snapshot.set_p50(snapshot1.getMedian());
          snapshot.set_p75(snapshot1.get75thPercentile());
          snapshot.set_p95(snapshot1.get95thPercentile());
          snapshot.set_p98(snapshot1.get98thPercentile());
          snapshot.set_p99(snapshot1.get99thPercentile());
          snapshot.set_p999(snapshot1.get999thPercentile());
          snapshot.set_stddev(snapshot1.getStdDev());
          snapshot.set_min(snapshot1.getMin());
          snapshot.set_max(snapshot1.getMax());
        }
        snapshot.set_points(new ArrayList<Long>(0));
      }
    }
  }
}
@Override
public boolean matchesSafely(JsonObject jsonObject) {
  JsonObject jsonMetric = jsonObject.get("metric").asObject();
  JsonObject jsonCounter;
  JsonObject jsonMeter;
  JsonObject jsonTimer;
  Counter counter;
  Meter meter;
  Timer timer;

  // check counter metric
  if (jsonMetric.get("counter") != null) {
    jsonCounter = jsonMetric.get("counter").asObject();
    counter = (Counter) metric;
    if (jsonCounter.get("counter").asLong() != counter.getCount()) {
      reason = "counter " + counter.getCount();
      return false;
    }
  }

  // check meter metric
  if (jsonMetric.get("meter") != null) {
    jsonMeter = jsonMetric.get("meter").asObject();
    meter = (Meter) metric;
    if (jsonMeter.get("counter").asLong() != meter.getCount()) {
      reason = "counter " + meter.getCount();
      return false;
    }
    if (jsonMeter.get("1_min_rate").asDouble() != meter.getOneMinuteRate()) {
      reason = "1 minute rate " + meter.getOneMinuteRate();
      return false;
    }
    if (jsonMeter.get("5_min_rate").asDouble() != meter.getFiveMinuteRate()) {
      reason = "5 minute rate " + meter.getFiveMinuteRate();
      return false;
    }
    if (jsonMeter.get("15_min_rate").asDouble() != meter.getFifteenMinuteRate()) {
      reason = "15 minute rate " + meter.getFifteenMinuteRate();
      return false;
    }
  }

  // check timer metric
  if (jsonMetric.get("timer") != null) {
    jsonTimer = jsonMetric.get("timer").asObject();
    timer = (Timer) metric;
    if (jsonTimer.get("counter").asLong() != timer.getCount()) {
      reason = "counter " + timer.getCount();
      return false;
    }
    if (jsonTimer.get("1_min_rate").asDouble() != timer.getOneMinuteRate()) {
      reason = "1 minute rate " + timer.getOneMinuteRate();
      return false;
    }
    if (jsonTimer.get("5_min_rate").asDouble() != timer.getFiveMinuteRate()) {
      reason = "5 minute rate " + timer.getFiveMinuteRate();
      return false;
    }
    if (jsonTimer.get("15_min_rate").asDouble() != timer.getFifteenMinuteRate()) {
      reason = "15 minute rate " + timer.getFifteenMinuteRate();
      return false;
    }
    if (jsonTimer.get("mean").asDouble() != nanoToMs(timer.getSnapshot().getMean())) {
      reason = "mean " + timer.getSnapshot().getMean();
      return false;
    }
    if (jsonTimer.get("min").asDouble() != nanoToMs(timer.getSnapshot().getMin())) {
      reason = "min " + timer.getSnapshot().getMin();
      return false;
    }
    if (jsonTimer.get("max").asDouble() != nanoToMs(timer.getSnapshot().getMax())) {
      reason = "max " + timer.getSnapshot().getMax();
      return false;
    }
    if (jsonTimer.get("stddev").asDouble() != nanoToMs(timer.getSnapshot().getStdDev())) {
      reason = "stddev " + timer.getSnapshot().getStdDev();
      return false;
    }
  }
  return true;
}