/**
 * Center the values around the median per column.
 *
 * @param result the input and output matrix where the first dimension is rows and the second
 *     is columns.
 */
private static void centerAroundColumnMedian(final double[][] result) {
  for (int i = 0; i < result[0].length; i++) {
    final DescriptiveStatistics stats = new DescriptiveStatistics();
    for (final double[] aResult : result) {
      stats.addValue(aResult[i]);
    }
    final double median = stats.getPercentile(50);
    for (int j = 0; j < result.length; j++) {
      result[j][i] -= median;
    }
  }
}
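// A minimal usage sketch for the method above (hypothetical data; since the
// method is private, this would live in the same class): each column is
// shifted so that its median becomes 0.
private static void centerAroundColumnMedianDemo() {
  double[][] matrix = {
    {1.0, 10.0},
    {2.0, 20.0},
    {3.0, 30.0}
  };
  centerAroundColumnMedian(matrix);
  // the column medians were 2.0 and 20.0, so the rows are now
  // {-1.0, -10.0}, {0.0, 0.0}, {1.0, 10.0}
}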
protected void autoCorrelation() {
  DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics(simulations);
  double mean = descriptiveStatistics.getMean();
  double variance = descriptiveStatistics.getVariance();
  int numberOfAutocorrelations = simulations.length / 2;
  double[] autoCorrelations = new double[numberOfAutocorrelations];
  for (int i = 1; i <= numberOfAutocorrelations; ++i) {
    autoCorrelations[i - 1] = autoCovariance(i, mean) / variance;
  }
  plot("Auto Correlations", autoCorrelations);
}
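// The autoCovariance helper called above is not shown. A minimal sketch of
// what it presumably computes, the sample autocovariance of the `simulations`
// array at the given lag with the usual biased (divide-by-n) estimator; the
// actual implementation may differ.
private double autoCovariance(int lag, double mean) {
  double sum = 0;
  for (int i = 0; i < simulations.length - lag; i++) {
    sum += (simulations[i] - mean) * (simulations[i + lag] - mean);
  }
  return sum / simulations.length;
}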
/**
 * @param f names of the result directories to aggregate
 * @throws IOException
 */
public void results(String[] f) throws IOException {
  String[] indicators = {"EPSILON", "IGD", "SPREAD", "HV"};
  String[] operators = {
    "BLX", "SBX"
    // , "DEX"
  };
  int runs = 15;
  Vector<PrintWriter> pw = new Vector<PrintWriter>();
  DescriptiveStatistics d = new DescriptiveStatistics();

  for (int i = 0; i < operators.length; i++) {
    for (int j = 0; j < indicators.length; j++) {
      pw.add(
          new PrintWriter(
              "C:\\Users\\9dgonzalezg\\Desktop\\workspace\\GA\\GA"
                  + "\\resultsMyNSGAIIExperiment\\data\\"
                  + f[i] + "_" + indicators[j] + ".res"));
      for (int k = 0; k < runs; k++) {
        BufferedReader br =
            new BufferedReader(
                new FileReader(
                    "C:\\Users\\9dgonzalezg"
                        + "\\Desktop\\workspace\\GA\\GA\\resultsMyNSGAIIExperiment\\"
                        + "data\\" + f[i] + "\\" + k + "\\" + indicators[j]));
        String str = br.readLine();
        while (str != null) {
          d.addValue(Double.parseDouble(str));
          str = br.readLine();
        }
        br.close();
        // write the mean of this run's indicator values
        pw.get(j).println(d.getMean());
        d.clear();
      }
      pw.get(j).close();
    }
    pw.clear();
  }
}
@Override
public void execute() {

  double util = 0;
  double power = 0;
  double prevSlavWork;
  double prevWork;

  // store current work and SLA violated work values
  prevSlavWork = totalSlavWork;
  prevWork = totalWork;

  // reset total work values
  totalSlavWork = 0;
  totalWork = 0;
  totalPower = 0;

  for (Host host : dc.getHosts()) {

    // store host CPU utilization
    if (!hostUtil.containsKey(host)) {
      hostUtil.put(host, new DescriptiveStatistics());
    }
    hostUtil.get(host).addValue(host.getCpuManager().getCpuInUse());

    util += host.getCpuManager().getCpuInUse();

    // get VM SLA values
    for (VMAllocation vmAlloc : host.getVMAllocations()) {
      totalSlavWork += vmAlloc.getVm().getApplication().getTotalSLAViolatedWork();
      // NOTE: This ONLY works with SINGLE TIERED applications. For multi-tiered
      // applications, this will count incoming work multiple times!!
      totalWork += vmAlloc.getVm().getApplication().getTotalIncomingWork();
    }

    // get power consumption
    power += host.getCurrentPowerConsumption();
    totalPower += host.getPowerConsumed();
  }

  dcUtil.addValue(util);

  dcPower.addValue(power);
  dcPowerEfficiency.addValue(util / power);

  double optimalPowerConsumption = calculateOptimalPowerConsumption(util);
  dcOptimalPower.addValue(optimalPowerConsumption);
  dcOptimalPowerEfficiency.addValue(util / optimalPowerConsumption);
  dcOptimalPowerRatio.addValue((util / optimalPowerConsumption) / (util / power));

  // records the total fraction of SLA violated incoming work since the last time interval
  dcSla.addValue((totalSlavWork - prevSlavWork) / (totalWork - prevWork));
}
private double calcSTDev(
    HashMap<Integer, String> singleGeneCaseValueMap, String groupType, String profileStableId) {

  // select the sample set for the requested group
  Iterable<Integer> sampleIds;
  switch (groupType) {
    case "altered":
      sampleIds = alteredSampleIds;
      break;
    case "unaltered":
      sampleIds = unalteredSampleIds;
      break;
    default:
      return Double.NaN; // error: unknown group type
  }

  DescriptiveStatistics stats = new DescriptiveStatistics();
  boolean isRnaSeq = profileStableId.contains("rna_seq");
  for (Integer sampleId : sampleIds) {
    if (!singleGeneCaseValueMap.containsKey(sampleId)) {
      continue;
    }
    try {
      double value = Double.parseDouble(singleGeneCaseValueMap.get(sampleId));
      // RNA-seq values are log2-transformed before computing the deviation
      stats.addValue(isRnaSeq ? Math.log(value) / Math.log(2) : value);
    } catch (NumberFormatException e) {
      e.printStackTrace();
    }
  }
  return stats.getStandardDeviation();
}
@Test
public void testDataJsonConsumption() throws Exception {
  JsonArray ginzburg = createJson("ginzburg", 1, 10);
  assertEquals(ginzburg.size(), 1);

  BasicSampleExtractor extractor = new BasicSampleExtractor();
  JsonObject jsonObject = ginzburg.getJsonObject(0);
  Optional<SampleData> sampleData = extractor.extractSample(jsonObject);
  assertTrue(sampleData.isPresent());
  assertEquals(sampleData.get().getPublishId(), "ginzburg");
  assertFalse(sampleData.get().getTime().isEmpty());

  double[] readings = extractor.extractReadings(jsonObject.getString(READINGS));
  assertEquals(readings.length, 10);

  DescriptiveStatistics stats = new DescriptiveStatistics(readings);
  double median = stats.getPercentile(50);
  assertEquals(sampleData.get().getMedian(), median);
}
/**
 * Normalize (standardize) the sample, so it has a mean of 0 and a standard deviation of 1.
 *
 * @param sample Sample to normalize.
 * @return normalized (standardized) sample.
 * @since 2.2
 */
public static double[] normalize(final double[] sample) {
  DescriptiveStatistics stats = new DescriptiveStatistics();

  // Add the data from the series to stats
  for (int i = 0; i < sample.length; i++) {
    stats.addValue(sample[i]);
  }

  // Compute mean and standard deviation
  double mean = stats.getMean();
  double standardDeviation = stats.getStandardDeviation();

  // initialize the standardizedSample, which has the same length as the sample
  double[] standardizedSample = new double[sample.length];

  for (int i = 0; i < sample.length; i++) {
    // z = (x - mean) / standardDeviation
    standardizedSample[i] = (sample[i] - mean) / standardDeviation;
  }
  return standardizedSample;
}
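// A quick usage sketch (hypothetical values): after normalization the sample
// has mean 0 and unit standard deviation. Note that a constant sample would
// have a standard deviation of 0 and produce NaN values.
public static void normalizeDemo() {
  double[] sample = {2.0, 4.0, 6.0, 8.0};
  double[] standardized = normalize(sample);
  DescriptiveStatistics check = new DescriptiveStatistics(standardized);
  System.out.println(check.getMean());              // ~0.0
  System.out.println(check.getStandardDeviation()); // ~1.0
}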
public Vector<double[]> getMeanSd(svm_node[][] node) {
  DescriptiveStatistics statistics = new DescriptiveStatistics();
  int nAttr = node[0].length;
  int nSample = node.length;
  double[] meanValues = new double[nAttr];
  double[] sdValues = new double[nAttr];
  for (int i = 0; i < nAttr; i++) {
    statistics.clear();
    for (int j = 0; j < nSample; j++) {
      statistics.addValue(node[j][i].value);
    }
    // compute the mean and standard deviation for this attribute
    meanValues[i] = statistics.getMean();
    sdValues[i] = statistics.getStandardDeviation();
  }
  Vector<double[]> meanSd = new Vector<double[]>();
  meanSd.add(meanValues);
  meanSd.add(sdValues);
  return meanSd;
}
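// A minimal usage sketch, assuming libsvm's svm_node (a class with public
// `index` and `value` fields): two samples with two attributes each.
public void getMeanSdDemo() {
  svm_node[][] nodes = new svm_node[2][2];
  for (int j = 0; j < 2; j++) {
    for (int i = 0; i < 2; i++) {
      nodes[j][i] = new svm_node();
      nodes[j][i].index = i + 1;
      nodes[j][i].value = (j + 1) * (i + 1); // attribute values 1,2 and 2,4
    }
  }
  Vector<double[]> meanSd = getMeanSd(nodes);
  // meanSd.get(0) holds the per-attribute means {1.5, 3.0};
  // meanSd.get(1) holds the per-attribute sample standard deviations
}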
/**
 * Construct a new DCUtilizationMonitor.
 *
 * @param simulation the simulation this monitor runs in
 * @param frequency The frequency in milliseconds to run this monitor
 * @param windowSize The number of historical values to use in calculations
 * @param dc the data centre to monitor
 */
public DCUtilizationMonitor(Simulation simulation, long frequency, int windowSize, DataCentre dc) {
  super(simulation, frequency);
  this.windowSize = windowSize;
  this.dc = dc;

  dcUtil.setWindowSize(windowSize);
  dcSla.setWindowSize(windowSize);
  dcPower.setWindowSize(windowSize);
  dcOptimalPower.setWindowSize(windowSize);
  dcPowerEfficiency.setWindowSize(windowSize);
  dcOptimalPowerEfficiency.setWindowSize(windowSize);
  dcOptimalPowerRatio.setWindowSize(windowSize);

  // initialize host values
  for (Host host : dc.getHosts()) {
    hostUtil.put(host, new DescriptiveStatistics(windowSize));
  }
}
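// The monitor above relies on DescriptiveStatistics' rolling window: once
// windowSize values have been added, each new value evicts the oldest one,
// so all derived statistics cover only the most recent windowSize
// observations. A small self-contained sketch of that behavior:
public static void windowDemo() {
  DescriptiveStatistics window = new DescriptiveStatistics(3);
  for (int v = 1; v <= 5; v++) {
    window.addValue(v);
  }
  // only {3, 4, 5} remain in the window
  System.out.println(window.getN());    // 3
  System.out.println(window.getMean()); // 4.0
}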
/**
 * Returns summary statistics for all attributes.
 *
 * @param listwiseDeletion A flag enabling list-wise deletion
 * @return a map from attribute name to its summary statistics
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public <T> Map<String, StatisticsSummary<?>> getSummaryStatistics(boolean listwiseDeletion) {

  Map<String, DescriptiveStatistics> statistics = new HashMap<String, DescriptiveStatistics>();
  Map<String, StatisticsSummaryOrdinal> ordinal = new HashMap<String, StatisticsSummaryOrdinal>();
  Map<String, DataScale> scales = new HashMap<String, DataScale>();

  // Detect scales
  for (int col = 0; col < handle.getNumColumns(); col++) {

    // Meta
    String attribute = handle.getAttributeName(col);
    DataType<?> type = handle.getDataType(attribute);

    // Scale
    DataScale scale = type.getDescription().getScale();

    // Try to replace nominal scale with ordinal scale based on base data type
    if (scale == DataScale.NOMINAL && handle.getGeneralization(attribute) != 0) {
      if (!(handle.getBaseDataType(attribute) instanceof ARXString)
          && getHierarchy(col, true) != null) {
        scale = DataScale.ORDINAL;
      }
    }

    // Store
    scales.put(attribute, scale);
    statistics.put(attribute, new DescriptiveStatistics());
    ordinal.put(
        attribute,
        getSummaryStatisticsOrdinal(
            handle.getGeneralization(attribute),
            handle.getDataType(attribute),
            handle.getBaseDataType(attribute),
            getHierarchy(col, true)));
  }

  // Compute summary statistics
  for (int row = 0; row < handle.getNumRows(); row++) {

    // Check, if we should include this row
    boolean include = true;
    if (listwiseDeletion) {
      for (int col = 0; col < handle.getNumColumns(); col++) {
        if (handle.isSuppressed(row) || DataType.isNull(handle.getValue(row, col))) {
          include = false;
          break;
        }
      }
    }

    // Check
    checkInterrupt();

    // If yes, add
    if (include) {

      // For each column
      for (int col = 0; col < handle.getNumColumns(); col++) {

        // Meta
        String value = handle.getValue(row, col);
        String attribute = handle.getAttributeName(col);
        DataType<?> type = handle.getDataType(attribute);

        // Analyze
        if (!value.equals(handle.getSuppressionString()) && !DataType.isNull(value)) {
          ordinal.get(attribute).addValue(value);
          if (type instanceof DataTypeWithRatioScale) {
            statistics
                .get(attribute)
                .addValue(((DataTypeWithRatioScale) type).toDouble(type.parse(value)));
          }
        }
      }
    }
  }

  // Convert
  Map<String, StatisticsSummary<?>> result = new HashMap<String, StatisticsSummary<?>>();
  for (int col = 0; col < handle.getNumColumns(); col++) {

    // Check
    checkInterrupt();

    // Depending on scale
    String attribute = handle.getAttributeName(col);
    DataScale scale = scales.get(attribute);
    DataType<T> type = (DataType<T>) handle.getDataType(attribute);
    ordinal.get(attribute).analyze();

    if (scale == DataScale.NOMINAL) {
      StatisticsSummaryOrdinal stats = ordinal.get(attribute);
      result.put(
          attribute,
          new StatisticsSummary<T>(
              DataScale.NOMINAL,
              stats.getNumberOfMeasures(),
              stats.getMode(),
              type.parse(stats.getMode())));
    } else if (scale == DataScale.ORDINAL) {
      StatisticsSummaryOrdinal stats = ordinal.get(attribute);
      result.put(
          attribute,
          new StatisticsSummary<T>(
              DataScale.ORDINAL,
              stats.getNumberOfMeasures(),
              stats.getMode(),
              type.parse(stats.getMode()),
              stats.getMedian(),
              type.parse(stats.getMedian()),
              stats.getMin(),
              type.parse(stats.getMin()),
              stats.getMax(),
              type.parse(stats.getMax())));
    } else if (scale == DataScale.INTERVAL) {
      StatisticsSummaryOrdinal stats = ordinal.get(attribute);
      DescriptiveStatistics stats2 = statistics.get(attribute);
      boolean isPeriod = type.getDescription().getWrappedClass() == Date.class;

      // TODO: Something is wrong with commons math's kurtosis
      double kurtosis = stats2.getKurtosis();
      kurtosis = kurtosis < 0d ? Double.NaN : kurtosis;
      double range = stats2.getMax() - stats2.getMin();
      double stddev = Math.sqrt(stats2.getVariance());

      result.put(
          attribute,
          new StatisticsSummary<T>(
              DataScale.INTERVAL,
              stats.getNumberOfMeasures(),
              stats.getMode(),
              type.parse(stats.getMode()),
              stats.getMedian(),
              type.parse(stats.getMedian()),
              stats.getMin(),
              type.parse(stats.getMin()),
              stats.getMax(),
              type.parse(stats.getMax()),
              toString(type, stats2.getMean(), false, false),
              toValue(type, stats2.getMean()),
              stats2.getMean(),
              toString(type, stats2.getVariance(), isPeriod, true),
              toValue(type, stats2.getVariance()),
              stats2.getVariance(),
              toString(type, stats2.getPopulationVariance(), isPeriod, true),
              toValue(type, stats2.getPopulationVariance()),
              stats2.getPopulationVariance(),
              toString(type, stddev, isPeriod, false),
              toValue(type, stddev),
              stddev,
              toString(type, range, isPeriod, false),
              toValue(type, range),
              range,
              toString(type, kurtosis, isPeriod, false),
              toValue(type, kurtosis),
              kurtosis));
    } else if (scale == DataScale.RATIO) {
      StatisticsSummaryOrdinal stats = ordinal.get(attribute);
      DescriptiveStatistics stats2 = statistics.get(attribute);

      // TODO: Something is wrong with commons math's kurtosis
      double kurtosis = stats2.getKurtosis();
      kurtosis = kurtosis < 0d ? Double.NaN : kurtosis;
      double range = stats2.getMax() - stats2.getMin();
      double stddev = Math.sqrt(stats2.getVariance());

      result.put(
          attribute,
          new StatisticsSummary<T>(
              DataScale.RATIO,
              stats.getNumberOfMeasures(),
              stats.getMode(),
              type.parse(stats.getMode()),
              stats.getMedian(),
              type.parse(stats.getMedian()),
              stats.getMin(),
              type.parse(stats.getMin()),
              stats.getMax(),
              type.parse(stats.getMax()),
              toString(type, stats2.getMean(), false, false),
              toValue(type, stats2.getMean()),
              stats2.getMean(),
              toString(type, stats2.getVariance(), false, false),
              toValue(type, stats2.getVariance()),
              stats2.getVariance(),
              toString(type, stats2.getPopulationVariance(), false, false),
              toValue(type, stats2.getPopulationVariance()),
              stats2.getPopulationVariance(),
              toString(type, stddev, false, false),
              toValue(type, stddev),
              stddev,
              toString(type, range, false, false),
              toValue(type, range),
              range,
              toString(type, kurtosis, false, false),
              toValue(type, kurtosis),
              kurtosis,
              toString(type, stats2.getGeometricMean(), false, false),
              toValue(type, stats2.getGeometricMean()),
              stats2.getGeometricMean()));
    }
  }
  return result;
}
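// Regarding the kurtosis TODO above: commons-math's getKurtosis() returns the
// sample excess kurtosis, which is legitimately negative for flat
// (platykurtic) samples, which may be why negative values appear. A small
// illustration (the printed value is approximate):
public static void kurtosisDemo() {
  DescriptiveStatistics stats = new DescriptiveStatistics(new double[] {1, 2, 3, 4, 5, 6});
  System.out.println(stats.getKurtosis()); // negative (roughly -1.2) for this flat sample
}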
public static void main(String[] args) {
  // TEST MEASURE
  // Point p1 = new Point(-1d, -1d);
  // Point p2 = new Point(2d, 3d);
  // System.out.println(measure.d(p1, p2));
  // System.out.println(measure.s(p1, p2));
  // return;

  Double[][] data = FileHandler.readFile(fileName);

  // cannot display points if dimension is > 2
  if (data[0].length != 2) canDisplay = false;

  // build graphic points from the coordinate array
  buildPointsFromData(data);
  Config.computeBoundingRect(points);

  // init display
  if (canDisplay) {
    disp = new Display();
    disp.setVisible(true);
    for (Point p : points) {
      disp.addObject(p);
    }
  }

  testResults = new double[nbTests];
  for (int t = 0; t < nbTests; ++t) {

    // define K clusters and K temporary centres
    clusters = new ArrayList<Cluster>();
    for (int i = 0; i < K; ++i) {
      clusters.add(new Cluster());
    }
    setRandomCenters();
    for (Cluster c : clusters) {
      System.out.println("center for cluster " + c + ": " + c.getCenter());
    }
    if (canDisplay) pause(1000);

    // variables used in the loops below
    double minDist, currDist, diff;
    Double[] prevCoords, newCoords;
    Cluster alloc;
    Point newCenter;

    for (int i = 0; i < maxIter; ++i) {
      if (canDisplay) {
        disp.setLabel("[ iteration #" + (i + 1) + " ]");
      } else {
        System.out.println("------> iteration #" + (i + 1));
      }

      // allocate each point to the cluster whose center is closest
      for (Point p : points) {
        minDist = Config.MAX;
        alloc = clusters.get(0); // default initialization
        for (Cluster c : clusters) {
          currDist = measure.d(p, c.getCenter());
          if (currDist < minDist) {
            minDist = currDist;
            alloc = c;
          }
        }
        alloc.addPoint(p);
      }

      // recenter: calculate gravity centers for the formed groups
      diff = 0;
      prevCoords = null;
      for (Cluster c : clusters) {
        // delete the previous center if it is not a Point of the Cluster
        if (canDisplay && !c.getPoints().contains(c.getCenter())) {
          disp.removeObject(c.getCenter());
        }
        if (stopOnConverge) {
          prevCoords = c.getCenter().getCoords();
        }
        newCenter = c.makeGravityCenter();
        if (stopOnConverge) {
          newCoords = c.getCenter().getCoords();
          for (int k = 0; k < prevCoords.length; ++k) {
            diff += Math.abs(prevCoords[k] - newCoords[k]);
          }
        }
        if (canDisplay) {
          disp.addObject(newCenter);
        } else {
          // System.out.println("\tcenter for " + c + ": " + c.getCenter());
          System.out.println(c.getCenter());
        }
      } // loop over clusters

      if (canDisplay) {
        disp.repaint();
      }

      // if the clusters' centers no longer change, stop (the algorithm converged)
      if (diff == 0 && stopOnConverge) {
        testResults[t] = (double) i;
        if (canDisplay) {
          disp.setLabel("[ Converged at iteration #" + i + " ]");
          disp.repaint();
        } else {
          System.out.println("[ Converged at iteration #" + i + " ]");
        }
        break;
      }
      pause(100);
    } // loop over iterations

    if (testResults[t] == 0) {
      System.out.println("!!!!!!!!!! Test #" + t + " did not converge.");
      if (stopOnConverge) return;
    }

    // reset display
    if (canDisplay && t + 1 < nbTests) {
      for (Point p : points) p.setCluster(null);
      for (Cluster c : clusters) disp.removeObject(c.getCenter());
    }
  } // loop over tests

  // display test results and compute the mean and standard deviation
  DescriptiveStatistics stats = new DescriptiveStatistics(testResults);
  System.out.println("=========> Results:");
  for (int t = 0; t < nbTests; ++t) {
    System.out.println("--> [ " + testResults[t] + " ]");
  }
  System.out.println("=========> Mean: " + stats.getMean());
  System.out.println("=========> Std dev: " + stats.getStandardDeviation());
}