/** * Adds given data to cluster. It can work in two different ways: * * <ul> * <li>It can modify this cluster's centroid as new voxels are added. * <li>The centroid can remain static and the news voxels are added independently, forming a new * TAC. * </ul> * * The second way is the standard way of creating a new cluster, where the centroid serves as the * reference for the distances. In any case, as there are some approaches in which the centroid is * built along the way, the {@code Cluster} constructor will cause this cluster to modify its * centroid if the coordinates for the first voxel are provided. This method always uses the * default behavior used at creation time. * * @param data Dynamic data to be added * @param x X-coordinate for added TAC. * @param y Y-coordinate for added TAC. * @param slice Slice (1-based) for added TAC. */ public void add(double[] data, int x, int y, int slice) { if (!isEmpty()) { // This is not the first voxel. double[] temp = new double[data.length]; // Re-normalize the centroid: multiply it by the number of clusters // before this addition and then divide by one plus that amount. double[] d = modify_centroid ? centroid : cluster_tac; for (int i = 0; i < data.length; i++) { temp[i] = (d[i] * size + data[i]) / (double) (size + 1); } if (modify_centroid) centroid = temp; else cluster_tac = temp; } else { if (modify_centroid) centroid = data; else cluster_tac = data; } peak_stats.addValue(StatUtils.max(data)); size++; coordinates.add(new Integer[] {x, y, slice}); // Also update the spatial centroid double old_x = centroid_spatial[0] * size; centroid_spatial[0] = (old_x + x) / (size + 1); double old_y = centroid_spatial[1] * size; centroid_spatial[1] = (old_y + y) / (size + 1); double old_slice = centroid_spatial[2] * size; centroid_spatial[2] = (old_slice + slice) / (size + 1); }
private void logMemoryConsumption() { double memoryConsumption = (((double) (Runtime.getRuntime().totalMemory() / 1024) / 1024) - ((double) (Runtime.getRuntime().freeMemory() / 1024) / 1024)); // filter NaNs if (!Double.isNaN(memoryConsumption)) { memStats.addValue(memoryConsumption); } }
/**
 * Parses the incoming message body as a JSON array of doubles, computes the sample
 * variance of the values, and writes it (as a string) to the outgoing message body.
 *
 * <p>Fix: the original parsed the message twice — once into a typed {@code List<Double>}
 * that was never used, and once into a raw {@code List} whose elements Jackson may
 * deserialize as {@code Integer} for whole-number JSON values, which would throw a
 * {@code ClassCastException} in the loop. The single typed parse below avoids both issues.
 *
 * @param exchange Camel exchange whose IN body holds the JSON array.
 * @throws Exception if the body cannot be parsed as a list of doubles.
 */
@Override
public void process(Exchange exchange) throws Exception {
  String message = exchange.getIn().getBody(String.class);
  ObjectMapper objectMapper = new ObjectMapper();
  TypeFactory typeFactory = objectMapper.getTypeFactory();
  List<Double> values =
      objectMapper.readValue(
          message, typeFactory.constructCollectionType(List.class, Double.class));
  SummaryStatistics summaryStatistics = new SummaryStatistics();
  for (Double value : values) {
    summaryStatistics.addValue(value);
  }
  exchange.getOut().setBody(Double.toString(summaryStatistics.getVariance()));
}
/**
 * Timing.
 *
 * @param repeatChunk Each timing measurement will be done for that number of repeats of the
 *     code.
 * @param repeatStat Timing will be averaged over that number of runs.
 * @param runGC Call {@code System.gc()} between each timed block. When set to {@code true}, the
 *     test will run much slower.
 * @param methods Codes being timed.
 * @return for each of the given {@code methods}, a {@link StatisticalSummary} of the average
 *     times (in milliseconds) taken by a single call to the {@code call} method (i.e. the time
 *     taken by each timed block divided by {@code repeatChunk}).
 */
public static StatisticalSummary[] time(
    int repeatChunk, int repeatStat, boolean runGC, Callable<Double>... methods) {
  // Index [method][run][0] holds the per-call average time for that run.
  final double[][][] times = timesAndResults(repeatChunk, repeatStat, runGC, methods);
  final StatisticalSummary[] stats = new StatisticalSummary[methods.length];
  for (int m = 0; m < methods.length; m++) {
    final SummaryStatistics summary = new SummaryStatistics();
    for (int run = 0; run < repeatStat; run++) {
      summary.addValue(times[m][run][0]);
    }
    stats[m] = summary.getSummary();
  }
  return stats;
}
/** * Cars Per Hour model works by creating 24 normal distributions (1 for each hour of the day). * After the model is generated, you can sample it with the sampleCarsPerHourModel function */ public void generateCarsPerHourModel() { // Get statistical data for counts at each hour Map<Integer, SummaryStatistics> summaries = new HashMap<>(); for (Map.Entry<String, Integer> count : hourlyTrafficCountData.entrySet()) { // Java stores times in milliseconds since epoch, hence the *1000. String key = count.getKey(); long timestamp = Long.parseLong(key) * 1000; int cars_per_hour = count.getValue(); DateTime time = new DateTime(timestamp, DateTimeZone.UTC); int hour = time.getHourOfDay(); SummaryStatistics stats; if (summaries.containsKey(hour)) { // Started gathering statistics, add to it stats = summaries.get(hour); } else { // Will now start gathering statistics stats = new SummaryStatistics(); summaries.put(hour, stats); } stats.addValue(cars_per_hour); } // Make model based on statistics gathered carsPerHour = new HashMap<>(); for (Map.Entry<Integer, SummaryStatistics> per_hour_stats : summaries.entrySet()) { int hour = per_hour_stats.getKey(); SummaryStatistics stats = per_hour_stats.getValue(); // Create distribution based on stats NormalDistribution distribution; try { distribution = new NormalDistribution(stats.getMean(), stats.getStandardDeviation()); } catch (Exception ignored) { // No distribution made, as all counts had the same number (no deviation). // We'll assume our counts are pretty close to historical values. distribution = new NormalDistribution(stats.getMean(), 10 / stats.getN()); } carsPerHour.put(hour, distribution); } }
/**
 * Public constructor with a pre-defined centroid and its coordinates. When this constructor is
 * used, the centroid for this cluster is assumed to be modified with each new TAC that is added
 * to this object.
 *
 * @param centroid Centroid vector.
 * @param x X-coordinate for the centroid TAC.
 * @param y Y-coordinate for the centroid TAC.
 * @param slice Slice (1-based) for the centroid TAC.
 */
public Cluster(double[] centroid, int x, int y, int slice) {
  this();
  this.centroid = centroid;
  this.modify_centroid = true;
  // The centroid itself counts as the cluster's first element, so seed the
  // peak statistics and bump the size accordingly.
  peak_stats.addValue(StatUtils.max(centroid));
  size++;
  // Record the centroid voxel's coordinates.
  coordinates.add(new Integer[] {x, y, slice});
  // The spatial centroid starts at the centroid voxel's own location.
  centroid_spatial[0] = x;
  centroid_spatial[1] = y;
  centroid_spatial[2] = slice;
}
/**
 * Add the stats contained by {@code stats} to their corresponding {@code SummaryStatistics}
 * aggregators of this object. Each metric of the given snapshot is appended, one value per
 * metric, to the matching accumulator so that aggregate statistics (mean, min, max, ...) can be
 * computed over many streams.
 *
 * @param stats the stats of a stream that will be added.
 */
public void add(MediaStreamStats stats) { downloadJitterMs.addValue(stats.getDownloadJitterMs()); downloadPercentLoss.addValue(stats.getDownloadPercentLoss()); downloadRateKiloBitPerSec.addValue(stats.getDownloadRateKiloBitPerSec()); jitterBufferDelayMs.addValue(stats.getJitterBufferDelayMs()); jitterBufferDelayPackets.addValue(stats.getJitterBufferDelayPackets()); nbDiscarded.addValue(stats.getNbDiscarded()); nbDiscardedFull.addValue(stats.getNbDiscardedFull()); nbDiscardedLate.addValue(stats.getNbDiscardedLate()); nbDiscardedReset.addValue(stats.getNbDiscardedReset()); nbDiscardedShrink.addValue(stats.getNbDiscardedShrink()); nbFec.addValue(stats.getNbFec()); nbPackets.addValue(stats.getNbPackets()); nbPacketsLost.addValue(stats.getNbPacketsLost()); nbReceivedBytes.addValue(stats.getNbReceivedBytes()); nbSentBytes.addValue(stats.getNbSentBytes()); packetQueueCountPackets.addValue(stats.getPacketQueueCountPackets()); packetQueueSize.addValue(stats.getPacketQueueSize()); percentDiscarded.addValue(stats.getPercentDiscarded()); rttMs.addValue(stats.getRttMs()); uploadJitterMs.addValue(stats.getUploadJitterMs()); uploadPercentLoss.addValue(stats.getUploadPercentLoss()); uploadRateKiloBitPerSec.addValue(stats.getUploadRateKiloBitPerSec()); }
/**
 * Records the availability of the given result into {@code stats} as a binary
 * observation: 1 when available, 0 otherwise.
 *
 * @param stats accumulator receiving the observation.
 * @param res result whose availability flag is recorded.
 */
private void update(SummaryStatistics stats, AResult res) {
  stats.addValue(res.getIsAvailable() ? 1 : 0);
}