/** {@inheritDoc} */ @Override public void accumulate(NamedList stv) { if (computeCount) { count += (Long) stv.get("count"); } if (computeMissing) { missing += (Long) stv.get("missing"); } if (computeCalcDistinct) { distinctValues.addAll((Collection<T>) stv.get("distinctValues")); countDistinct = distinctValues.size(); } if (computeMinOrMax) { updateMinMax((T) stv.get("min"), (T) stv.get("max")); } if (computeCardinality) { byte[] data = (byte[]) stv.get("cardinality"); HLL other = HLL.fromBytes(data); if (hll.getType().equals(HLLType.EMPTY)) { // The HLL.union method goes out of it's way not to modify the "other" HLL. // Which means in the case of merging into an "EMPTY" HLL (garunteed to happen at // least once in every coordination of shard requests) it always clones all // of the internal storage -- but since we're going to throw "other" away after // the merge, this just means a short term doubling of RAM that we can skip. hll = other; } else { hll.union(other); } } updateTypeSpecificStats(stv); NamedList f = (NamedList) stv.get(FACETS); if (f == null) { return; } for (int i = 0; i < f.size(); i++) { String field = f.getName(i); NamedList vals = (NamedList) f.getVal(i); Map<String, StatsValues> addTo = facets.get(field); if (addTo == null) { addTo = new HashMap<>(); facets.put(field, addTo); } for (int j = 0; j < vals.size(); j++) { String val = vals.getName(j); StatsValues vvals = addTo.get(val); if (vvals == null) { vvals = StatsValuesFactory.createStatsValues(statsField); addTo.put(val, vvals); } vvals.accumulate((NamedList) vals.getVal(j)); } } }
/** {@inheritDoc} */ @Override public NamedList<?> getStatsValues() { NamedList<Object> res = new SimpleOrderedMap<>(); if (statsField.includeInResponse(Stat.min)) { res.add("min", min); } if (statsField.includeInResponse(Stat.max)) { res.add("max", max); } if (statsField.includeInResponse(Stat.count)) { res.add("count", count); } if (statsField.includeInResponse(Stat.missing)) { res.add("missing", missing); } if (statsField.includeInResponse(Stat.distinctValues)) { res.add("distinctValues", distinctValues); } if (statsField.includeInResponse(Stat.countDistinct)) { res.add("countDistinct", countDistinct); } if (statsField.includeInResponse(Stat.cardinality)) { if (statsField.getIsShard()) { res.add("cardinality", hll.toBytes()); } else { res.add("cardinality", hll.cardinality()); } } addTypeSpecificStats(res); if (!facets.isEmpty()) { // add the facet stats NamedList<NamedList<?>> nl = new SimpleOrderedMap<>(); for (Map.Entry<String, Map<String, StatsValues>> entry : facets.entrySet()) { NamedList<NamedList<?>> nl2 = new SimpleOrderedMap<>(); nl.add(entry.getKey(), nl2); for (Map.Entry<String, StatsValues> e2 : entry.getValue().entrySet()) { nl2.add(e2.getKey(), e2.getValue().getStatsValues()); } } res.add(FACETS, nl); } return res; }
/**
 * Accumulates a single non-null value, weighted by {@code count} occurrences.
 * Only the stats enabled by the compute* flags are updated.
 */
public void accumulate(T value, int count) {
  assert null != value : "Can't accumulate null";
  if (computeCount) {
    this.count += count;
  }
  if (computeCalcDistinct) {
    distinctValues.add(value);
    countDistinct = distinctValues.size();
  }
  if (computeMinOrMax) {
    updateMinMax(value, value);
  }
  if (computeCardinality) {
    if (hasher != null) {
      hll.addRaw(hash(value));
    } else {
      // No hasher configured: values are expected to arrive pre-hashed as longs.
      assert value instanceof Number : "pre-hashed value support only works with numeric longs";
      hll.addRaw(((Number) value).longValue());
    }
  }
  updateTypeSpecificStats(value, count);
}