public void merge(QuantileDigest other)
{
    // 1. rescale this and other to a common landmark so their weights are comparable
    rescaleToCommonLandmark(this, other);

    // 2. merge other into this (don't modify other)
    root = merge(root, other.root);

    max = Math.max(max, other.max);
    min = Math.min(min, other.min);

    // 3. compress to remove unnecessary nodes
    compress();
}
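// Usage sketch (not part of the original source): merging two digests built with the same
// error bound. The QuantileDigest(double maxError) constructor and add(long) overload used
// below are assumptions about the surrounding public API; adjust if your copy differs.
//
//   QuantileDigest a = new QuantileDigest(0.01);
//   QuantileDigest b = new QuantileDigest(0.01);
//   a.add(10);
//   b.add(20);
//   a.merge(b); // "a" now summarizes both streams; "b" is left unmodified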
/**
 * Adds a value to this digest. The value must be {@code >= 0}
 */
public void add(long value, long count)
{
    checkArgument(count > 0, "count must be > 0");

    long nowInSeconds = TimeUnit.NANOSECONDS.toSeconds(ticker.read());

    int maxExpectedNodeCount = 3 * calculateCompressionFactor();
    if (nowInSeconds - landmarkInSeconds >= RESCALE_THRESHOLD_SECONDS) {
        rescale(nowInSeconds);
        compress(); // need to compress to get rid of nodes that may have decayed to ~ 0
    }
    else if (nonZeroNodeCount > MAX_SIZE_FACTOR * maxExpectedNodeCount && compressAutomatically) {
        // The size (number of non-zero nodes) of the digest is at most 3 * compression factor.
        // If we're over MAX_SIZE_FACTOR of the expected size, compress.
        // Note: we don't compress as soon as we go over maxExpectedNodeCount, to avoid
        // unnecessarily running a compression for every newly added element when we're
        // close to the boundary.
        compress();
    }

    double weight = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read())) * count;

    max = Math.max(max, value);
    min = Math.min(min, value);
    insert(longToBits(value), weight);
}
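// Decay sketch (an assumption, not quoted from this source): the landmark/rescale machinery
// above suggests that weight(t) applies an exponential time decay, roughly
//
//   private double weight(long timestampInSeconds)
//   {
//       return Math.exp(alpha * (timestampInSeconds - landmarkInSeconds));
//   }
//
// Under that reading, later values receive exponentially larger raw weights, and rescale()
// periodically advances landmarkInSeconds (renormalizing existing node weights) so the
// exponent stays small enough to avoid overflow; RESCALE_THRESHOLD_SECONDS bounds how far
// the exponent can drift between rescales.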