@VisibleForTesting
QuantileDigest(double maxError, double alpha, Ticker ticker, boolean compressAutomatically)
{
    checkArgument(maxError >= 0 && maxError <= 1, "maxError must be in range [0, 1]");
    checkArgument(alpha >= 0 && alpha < 1, "alpha must be in range [0, 1)");

    this.maxError = maxError;
    this.alpha = alpha;
    this.ticker = ticker;
    this.compressAutomatically = compressAutomatically;

    landmarkInSeconds = TimeUnit.NANOSECONDS.toSeconds(ticker.read());
}
/*
 * Get the exponentially-decayed approximate counts of values in multiple buckets. The elements in
 * the provided list denote the upper bound of each of the buckets and must be sorted in ascending
 * order.
 *
 * The approximate count in each bucket is guaranteed to be within 2 * totalCount * maxError of
 * the real count.
 */
public List<Bucket> getHistogram(List<Long> bucketUpperBounds)
{
    checkArgument(Ordering.natural().isOrdered(bucketUpperBounds), "buckets must be sorted in increasing order");

    final ImmutableList.Builder<Bucket> builder = ImmutableList.builder();
    final PeekingIterator<Long> iterator = Iterators.peekingIterator(bucketUpperBounds.iterator());

    final AtomicDouble sum = new AtomicDouble();
    final AtomicDouble lastSum = new AtomicDouble();

    // for computing weighted average of values in bucket
    final AtomicDouble bucketWeightedSum = new AtomicDouble();

    final double normalizationFactor = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read()));

    postOrderTraversal(root, new Callback()
    {
        @Override
        public boolean process(Node node)
        {
            while (iterator.hasNext() && iterator.peek() <= node.getUpperBound()) {
                double bucketCount = sum.get() - lastSum.get();

                Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);

                builder.add(bucket);
                lastSum.set(sum.get());
                bucketWeightedSum.set(0);
                iterator.next();
            }

            bucketWeightedSum.addAndGet(node.getMiddle() * node.weightedCount);
            sum.addAndGet(node.weightedCount);
            return iterator.hasNext();
        }
    });

    while (iterator.hasNext()) {
        double bucketCount = sum.get() - lastSum.get();
        Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);

        builder.add(bucket);

        iterator.next();
    }

    return builder.build();
}
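// Illustrative sketch (not part of the original class): one way getHistogram might be
// used to bucket recorded values. The bucket bounds below are arbitrary example values;
// the only requirement is that they are sorted in ascending order.
@VisibleForTesting
static List<Bucket> exampleHistogram(QuantileDigest digest)
{
    // upper bounds of the buckets, in ascending order as getHistogram requires
    List<Long> upperBounds = ImmutableList.of(10L, 100L, 1_000L, 10_000L);
    return digest.getHistogram(upperBounds);
}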
/**
 * Adds a value to this digest. The value must be {@code >= 0}
 */
public void add(long value, long count)
{
    checkArgument(count > 0, "count must be > 0");

    long nowInSeconds = TimeUnit.NANOSECONDS.toSeconds(ticker.read());

    int maxExpectedNodeCount = 3 * calculateCompressionFactor();
    if (nowInSeconds - landmarkInSeconds >= RESCALE_THRESHOLD_SECONDS) {
        rescale(nowInSeconds);
        compress(); // need to compress to get rid of nodes that may have decayed to ~ 0
    }
    else if (nonZeroNodeCount > MAX_SIZE_FACTOR * maxExpectedNodeCount && compressAutomatically) {
        // The size (number of non-zero nodes) of the digest is at most 3 * compression factor
        // If we're over MAX_SIZE_FACTOR of the expected size, compress
        // Note: we don't compress as soon as we go over expectedNodeCount to avoid unnecessarily
        // running a compression for every new added element when we're close to boundary
        compress();
    }

    double weight = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read())) * count;

    max = Math.max(max, value);
    min = Math.min(min, value);

    insert(longToBits(value), weight);
}
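// Illustrative sketch (not part of the original class): recording values with an explicit
// count. Passing count > 1 is roughly equivalent to adding the same value that many times
// at the same instant, since the decay weight is computed once and multiplied by count.
@VisibleForTesting
static void exampleAdd(QuantileDigest digest)
{
    digest.add(42, 1);   // a single observation of the value 42
    digest.add(100, 5);  // five observations of the value 100
}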
private void rescaleToCommonLandmark(QuantileDigest one, QuantileDigest two)
{
    long nowInSeconds = TimeUnit.NANOSECONDS.toSeconds(ticker.read());

    // 1. rescale this and other to common landmark
    long targetLandmark = Math.max(one.landmarkInSeconds, two.landmarkInSeconds);

    if (nowInSeconds - targetLandmark >= RESCALE_THRESHOLD_SECONDS) {
        targetLandmark = nowInSeconds;
    }

    if (targetLandmark != one.landmarkInSeconds) {
        one.rescale(targetLandmark);
    }

    if (targetLandmark != two.landmarkInSeconds) {
        two.rescale(targetLandmark);
    }
}
/**
 * Create a QuantileDigest with a maximum error guarantee of "maxError" and exponential decay with
 * factor "alpha".
 *
 * @param maxError the max error tolerance
 * @param alpha the exponential decay factor
 */
public QuantileDigest(double maxError, double alpha)
{
    this(maxError, alpha, Ticker.systemTicker(), true);
}
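// Illustrative sketch (not part of the original class): constructing digests with and
// without decay. Assuming the usual exponential weight exp(alpha * elapsedSeconds),
// alpha = 0 keeps the weight constant, so counts never decay; a positive alpha makes
// older values count for progressively less.
@VisibleForTesting
static void exampleConstruction()
{
    QuantileDigest nonDecaying = new QuantileDigest(0.01, 0.0);  // 1% max error, no decay
    QuantileDigest decaying = new QuantileDigest(0.01, 0.001);   // 1% max error, exponential decay
    nonDecaying.add(123, 1);
    decaying.add(123, 1);
}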
/**
 * Returns the exponentially decayed number of elements added to this quantile digest
 */
public double getCount()
{
    return weightedCount / weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read()));
}
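// Illustrative sketch (not part of the original class): using the @VisibleForTesting
// constructor with a controllable ticker to observe decay in getCount(). Assumes Guava's
// testlib FakeTicker (com.google.common.testing.FakeTicker) is on the classpath and
// imported; exact decayed values depend on alpha and the weight function, so the numbers
// below only show the shape of such a test.
@VisibleForTesting
static double exampleDecayedCount()
{
    FakeTicker ticker = new FakeTicker();
    QuantileDigest digest = new QuantileDigest(0.01, 0.1, ticker, true);

    digest.add(1000, 1);
    ticker.advance(60, TimeUnit.SECONDS); // older values now carry less weight

    return digest.getCount(); // decayed count; below 1.0 for alpha > 0
}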