private List<String> getCurrentServerIds(boolean nag, boolean lagged) {
    try (Jedis jedis = pool.getResource()) {
        long time = getRedisTime(jedis.time());
        int nagTime = 0;
        if (nag) {
            // Throttle the "server is behind" warning: only nag once every ten calls.
            nagTime = nagAboutServers.decrementAndGet();
            if (nagTime <= 0) {
                nagAboutServers.set(10);
            }
        }
        ImmutableList.Builder<String> servers = ImmutableList.builder();
        Map<String, String> heartbeats = jedis.hgetAll("heartbeats");
        for (Map.Entry<String, String> entry : heartbeats.entrySet()) {
            try {
                long stamp = Long.parseLong(entry.getValue());
                // A server counts as live if its heartbeat is at most 30 seconds old;
                // with lagged == true, collect the stale servers instead.
                if (lagged ? time >= stamp + 30 : time <= stamp + 30) {
                    servers.add(entry.getKey());
                } else if (nag && nagTime <= 0) {
                    getLogger().severe(entry.getKey() + " is " + (time - stamp)
                            + " seconds behind! (Time not synchronized or server down?)");
                }
            } catch (NumberFormatException ignored) {
                // Skip heartbeat entries that don't hold a valid timestamp.
            }
        }
        return servers.build();
    } catch (JedisConnectionException e) {
        // If Redis is unreachable, fall back to this server being the only known one.
        getLogger().log(Level.SEVERE, "Unable to fetch server IDs", e);
        return Collections.singletonList(configuration.getServerId());
    }
}
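// A minimal sketch of the producer side of this heartbeat protocol. This class is an
// illustrative assumption, not part of the source above, but it shows the contract
// getCurrentServerIds relies on: each server periodically writes its Redis-clock
// timestamp (in seconds) under its own ID in the "heartbeats" hash, so peers can tell
// live servers (heartbeat at most 30 seconds old) from lagged ones.
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;

public final class HeartbeatPublisher {
    private final JedisPool pool;
    private final String serverId;

    public HeartbeatPublisher(JedisPool pool, String serverId) {
        this.pool = pool;
        this.serverId = serverId;
    }

    // Intended to run on a repeating schedule, e.g. once per second.
    public void beat() {
        try (Jedis jedis = pool.getResource()) {
            // Redis TIME returns [seconds, microseconds]; using the Redis server's clock
            // rather than the local one avoids the skew that the warning above nags about.
            long seconds = Long.parseLong(jedis.time().get(0));
            jedis.hset("heartbeats", serverId, String.valueOf(seconds));
        }
    }
}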
/**
 * Gets the exponentially-decayed approximate counts of values in multiple buckets. The elements
 * in the provided list denote the upper bound of each of the buckets and must be sorted in
 * ascending order.
 *
 * <p>The approximate count in each bucket is guaranteed to be within 2 * totalCount * maxError
 * of the real count.
 */
public List<Bucket> getHistogram(List<Long> bucketUpperBounds) {
    checkArgument(
            Ordering.natural().isOrdered(bucketUpperBounds),
            "buckets must be sorted in increasing order");

    final ImmutableList.Builder<Bucket> builder = ImmutableList.builder();
    final PeekingIterator<Long> iterator = Iterators.peekingIterator(bucketUpperBounds.iterator());

    final AtomicDouble sum = new AtomicDouble();
    final AtomicDouble lastSum = new AtomicDouble();

    // for computing the weighted average of values in each bucket
    final AtomicDouble bucketWeightedSum = new AtomicDouble();

    final double normalizationFactor = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read()));

    postOrderTraversal(
            root,
            new Callback() {
                @Override
                public boolean process(Node node) {
                    while (iterator.hasNext() && iterator.peek() <= node.getUpperBound()) {
                        double bucketCount = sum.get() - lastSum.get();

                        Bucket bucket = new Bucket(
                                bucketCount / normalizationFactor,
                                bucketWeightedSum.get() / bucketCount);

                        builder.add(bucket);
                        lastSum.set(sum.get());
                        bucketWeightedSum.set(0);
                        iterator.next();
                    }

                    bucketWeightedSum.addAndGet(node.getMiddle() * node.weightedCount);
                    sum.addAndGet(node.weightedCount);
                    return iterator.hasNext();
                }
            });

    // flush any remaining buckets whose upper bounds exceed all values in the digest
    while (iterator.hasNext()) {
        double bucketCount = sum.get() - lastSum.get();
        Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);

        builder.add(bucket);
        iterator.next();
    }

    return builder.build();
}
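// Usage sketch for getHistogram. The method above matches the shape of
// io.airlift.stats.QuantileDigest; the digest construction below, including the
// 0.01 maxError and the sample values, is an assumption for illustration rather
// than something defined in this file.
import java.util.List;
import com.google.common.collect.ImmutableList;
import io.airlift.stats.QuantileDigest;

public final class HistogramExample {
    public static void main(String[] args) {
        QuantileDigest digest = new QuantileDigest(0.01); // 1% max error
        for (long latencyMillis : new long[] {3, 12, 57, 400, 980}) {
            digest.add(latencyMillis);
        }

        // Buckets (-inf, 10], (10, 100], (100, 1000]; upper bounds must be ascending.
        List<QuantileDigest.Bucket> histogram =
                digest.getHistogram(ImmutableList.of(10L, 100L, 1000L));
        for (QuantileDigest.Bucket bucket : histogram) {
            // Each count is within 2 * totalCount * maxError of the true decayed count.
            System.out.println(bucket.getCount() + " values, mean " + bucket.getMean());
        }
    }
}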
/**
 * Gets the values at the specified quantiles +/- maxError. The list of quantiles must be sorted
 * in increasing order, and each value must be in the range [0, 1].
 */
public List<Long> getQuantiles(List<Double> quantiles) {
    checkArgument(
            Ordering.natural().isOrdered(quantiles),
            "quantiles must be sorted in increasing order");
    for (double quantile : quantiles) {
        checkArgument(quantile >= 0 && quantile <= 1, "quantile must be between [0,1]");
    }

    final ImmutableList.Builder<Long> builder = ImmutableList.builder();
    final PeekingIterator<Double> iterator = Iterators.peekingIterator(quantiles.iterator());

    postOrderTraversal(
            root,
            new Callback() {
                private double sum = 0;

                @Override
                public boolean process(Node node) {
                    sum += node.weightedCount;

                    while (iterator.hasNext() && sum > iterator.peek() * weightedCount) {
                        iterator.next();

                        // we know the max value ever seen, so cap the percentile to provide
                        // better error bounds in this case
                        long value = Math.min(node.getUpperBound(), max);
                        builder.add(value);
                    }

                    return iterator.hasNext();
                }
            });

    // we finished the traversal without consuming all quantiles, which means the remaining
    // quantiles correspond to the max known value
    while (iterator.hasNext()) {
        builder.add(max);
        iterator.next();
    }

    return builder.build();
}
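// Companion sketch for getQuantiles, under the same QuantileDigest assumption as the
// histogram example above: query the median and tail values in one call. The returned
// values are accurate to within the digest's configured maxError in rank, and any
// quantile past the largest observation is capped at the max, as the traversal shows.
import java.util.List;
import com.google.common.collect.ImmutableList;
import io.airlift.stats.QuantileDigest;

public final class QuantilesExample {
    public static void main(String[] args) {
        QuantileDigest digest = new QuantileDigest(0.01);
        for (long value = 1; value <= 1000; value++) {
            digest.add(value);
        }

        // Quantiles must be sorted ascending and lie in [0, 1].
        List<Long> quantiles = digest.getQuantiles(ImmutableList.of(0.5, 0.9, 0.99));
        System.out.println("p50=" + quantiles.get(0)
                + " p90=" + quantiles.get(1)
                + " p99=" + quantiles.get(2));
    }
}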