/**
 * Gets the exponentially-decayed approximate counts of values in multiple buckets. The elements in
 * the provided list denote the upper bound of each of the buckets and must be sorted in ascending
 * order.
 *
 * The approximate count in each bucket is guaranteed to be within 2 * totalCount * maxError of
 * the real count.
 */
public List<Bucket> getHistogram(List<Long> bucketUpperBounds) {
    checkArgument(Ordering.natural().isOrdered(bucketUpperBounds), "buckets must be sorted in increasing order");

    final ImmutableList.Builder<Bucket> builder = ImmutableList.builder();
    final PeekingIterator<Long> iterator = Iterators.peekingIterator(bucketUpperBounds.iterator());

    final AtomicDouble sum = new AtomicDouble();
    final AtomicDouble lastSum = new AtomicDouble();

    // for computing the weighted average of the values in each bucket
    final AtomicDouble bucketWeightedSum = new AtomicDouble();

    final double normalizationFactor = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read()));

    postOrderTraversal(root, new Callback() {
        @Override
        public boolean process(Node node) {
            // close every bucket whose upper bound falls at or below this node's upper bound
            while (iterator.hasNext() && iterator.peek() <= node.getUpperBound()) {
                double bucketCount = sum.get() - lastSum.get();

                Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);

                builder.add(bucket);
                lastSum.set(sum.get());
                bucketWeightedSum.set(0);
                iterator.next();
            }

            bucketWeightedSum.addAndGet(node.getMiddle() * node.weightedCount);
            sum.addAndGet(node.weightedCount);
            return iterator.hasNext();
        }
    });

    // the traversal visited every node but some bucket bounds remain; emit them here,
    // resetting the bookkeeping as in the traversal loop so trailing buckets are not double-counted
    while (iterator.hasNext()) {
        double bucketCount = sum.get() - lastSum.get();

        Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);

        builder.add(bucket);
        lastSum.set(sum.get());
        bucketWeightedSum.set(0);
        iterator.next();
    }

    return builder.build();
}
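// Illustrative usage sketch (not part of the original source): assuming a populated digest of
// latencies held in a variable named "digest", a three-bucket histogram could be requested as
// below. The "digest" variable and the Bucket accessors getCount()/getMean() are assumptions
// made for illustration only.
//
//   List<Bucket> buckets = digest.getHistogram(ImmutableList.of(10L, 100L, 1000L));
//   double fastCount = buckets.get(0).getCount(); // approximate decayed count of values <= 10
//   double fastMean = buckets.get(0).getMean();   // weighted mean of the values in that bucket
//   // buckets.get(1) covers (10, 100], buckets.get(2) covers (100, 1000]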
/**
 * Gets the values at the specified quantiles +/- maxError. The list of quantiles must be sorted
 * in increasing order, and each value must be in the range [0, 1].
 */
public List<Long> getQuantiles(List<Double> quantiles) {
    checkArgument(Ordering.natural().isOrdered(quantiles), "quantiles must be sorted in increasing order");
    for (double quantile : quantiles) {
        checkArgument(quantile >= 0 && quantile <= 1, "quantile must be in the range [0, 1]");
    }

    final ImmutableList.Builder<Long> builder = ImmutableList.builder();
    final PeekingIterator<Double> iterator = Iterators.peekingIterator(quantiles.iterator());

    postOrderTraversal(root, new Callback() {
        private double sum = 0;

        @Override
        public boolean process(Node node) {
            sum += node.weightedCount;

            while (iterator.hasNext() && sum > iterator.peek() * weightedCount) {
                iterator.next();

                // we know the max value ever seen, so cap the percentile to provide better error
                // bounds in this case
                long value = Math.min(node.getUpperBound(), max);

                builder.add(value);
            }

            return iterator.hasNext();
        }
    });

    // we finished the traversal without consuming all quantiles. The remaining quantiles
    // correspond to the max value ever seen
    while (iterator.hasNext()) {
        builder.add(max);
        iterator.next();
    }

    return builder.build();
}
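// Illustrative usage sketch (assumes a populated digest instance named "digest"): the returned
// list is positionally aligned with the requested quantiles, so specific percentiles can be read
// back by index.
//
//   List<Long> values = digest.getQuantiles(ImmutableList.of(0.5, 0.95, 0.99));
//   long median = values.get(0);
//   long p95 = values.get(1);
//   long p99 = values.get(2);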
@Override
public Map<String, Object> getValuesDeep() {
    final Tag tag = findLastTag(this.path, false);
    if (!(tag instanceof CompoundTag)) {
        return Collections.emptyMap();
    }

    // breadth-first traversal of nested compound tags, starting at this node's tag
    final Queue<Node> queue = new ArrayDeque<>(ImmutableList.of(new Node(tag)));
    final Map<String, Object> values = Maps.newHashMap();
    while (!queue.isEmpty()) {
        final Node current = queue.poll();
        for (final Map.Entry<String, Tag> entry : current.values.entrySet()) {
            final String key = createRelativeKey(current.parent, entry.getKey());
            if (entry.getValue() instanceof CompoundTag) {
                // nested compound tag: enqueue it so its children are flattened as well
                queue.add(new Node(key, entry.getValue()));
            } else {
                values.put(key, entry.getValue().getValue());
            }
        }
    }
    return values;
}
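// Illustrative sketch (hypothetical data, receiver name, and key separator): for a compound tag
// shaped like { "spawn": { "x": 10, "y": 64 }, "name": "world" }, the returned map is flat and
// keyed by relative paths, assuming here that createRelativeKey joins segments with ".":
//
//   Map<String, Object> values = storage.getValuesDeep();
//   // values.get("name")    -> "world"
//   // values.get("spawn.x") -> 10
//   // values.get("spawn.y") -> 64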
/**
 * Gets the value at the specified quantile +/- maxError. The quantile must be in the range [0, 1].
 */
public long getQuantile(double quantile) {
    return getQuantiles(ImmutableList.of(quantile)).get(0);
}
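// Illustrative usage sketch (assumes a populated digest instance named "digest"): a single
// quantile query simply delegates to getQuantiles, so
//
//   long p95 = digest.getQuantile(0.95);
//
// is equivalent to digest.getQuantiles(ImmutableList.of(0.95)).get(0).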