public void addPSM(PSM aPSM) {
    if (aPSM.getSequence().equalsIgnoreCase(getSequence())) {
        psms.add(aPSM);
        confidences.addValue(aPSM.getConfidence());
        mass_errors.addValue(aPSM.getMass_error());
        // putIfAbsent never updates an existing key, so the per-assay count
        // would be stuck at 1 after the first PSM; merge() increments it on
        // every call.
        assayCount.merge(aPSM.getAssay(), 1, Integer::sum);
    } else {
        throw new IllegalArgumentException("Sequences of the PSM and the peptide do not match!");
    }
}
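// A minimal, self-contained sketch (hypothetical "assay-1" key, plain JDK
// maps, not part of the original class) of why merge() is needed above:
// putIfAbsent only writes when the key is missing, so repeated additions
// would never raise the count past 1.
import java.util.HashMap;
import java.util.Map;

class AssayCountDemo {
    public static void main(String[] args) {
        Map<String, Integer> viaPutIfAbsent = new HashMap<>();
        Map<String, Integer> viaMerge = new HashMap<>();
        for (int i = 0; i < 3; i++) {
            viaPutIfAbsent.putIfAbsent("assay-1", viaPutIfAbsent.getOrDefault("assay-1", 0) + 1);
            viaMerge.merge("assay-1", 1, Integer::sum);
        }
        System.out.println(viaPutIfAbsent.get("assay-1")); // 1 -- updates silently dropped
        System.out.println(viaMerge.get("assay-1"));       // 3
    }
}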
private Integer getSkeletonCategoryFromPercentiles(Integer value, DescriptiveStatistics windowStats) {
    Integer skeletonCategory = 0;
    // Calculate skeleton category
    if (value == (int) windowStats.getMin()) {
        skeletonCategory = 10;
    } else if (value < windowStats.getPercentile(10)) {
        skeletonCategory = 9;
    } else if (value < windowStats.getPercentile(15)) {
        skeletonCategory = 8;
    } else if (value < windowStats.getPercentile(20)) {
        skeletonCategory = 7;
    } else if (value < windowStats.getPercentile(25)) {
        skeletonCategory = 6;
    } else if (value < windowStats.getPercentile(30)) {
        skeletonCategory = 5;
    } else if (value < windowStats.getPercentile(35)) {
        skeletonCategory = 4;
    } else if (value < windowStats.getPercentile(40)) {
        skeletonCategory = 3;
    } else if (value < windowStats.getPercentile(45)) {
        skeletonCategory = 2;
    } else if (value < windowStats.getPercentile(50)) {
        skeletonCategory = 1;
    }
    return skeletonCategory;
}
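// A behavior-equivalent sketch of the ladder above, assuming the same
// Commons Math DescriptiveStatistics windowStats: the cut-offs are the
// 10th..50th percentiles in steps of 5, mapped to categories 9 down to 1.
private Integer getSkeletonCategoryFromPercentilesLooped(Integer value, DescriptiveStatistics windowStats) {
    if (value == (int) windowStats.getMin()) {
        return 10;
    }
    for (int p = 10, category = 9; p <= 50; p += 5, category--) {
        if (value < windowStats.getPercentile(p)) {
            return category;
        }
    }
    return 0;
}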
private void writeErrorFile(DescriptiveStatistics error, String file, boolean append) {
    // try-with-resources ensures the writer is closed even if a write fails
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file, append))) {
        if (!append) {
            // write header; the third column is the standard deviation
            writer.write("mean\tvar\tstdev\tmin\tmax");
            writer.newLine();
        }
        writer.write(String.valueOf(error.getMean()));
        writer.write("\t");
        writer.write(String.valueOf(error.getVariance()));
        writer.write("\t");
        writer.write(String.valueOf(error.getStandardDeviation()));
        writer.write("\t");
        writer.write(String.valueOf(error.getMin()));
        writer.write("\t");
        writer.write(String.valueOf(error.getMax()));
        writer.newLine();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
private void runTest(AbstractTest test, String name, byte[] conf) {
    // Create the repository directory
    File dir = new File(new File("target", "repository"), name + "-" + test);
    dir.mkdirs();
    try {
        // Copy the configuration file into the repository directory
        File xml = new File(dir, "repository.xml");
        try (OutputStream output = FileUtils.openOutputStream(xml)) {
            output.write(conf, 0, conf.length);
        }
        // Create the repository
        RepositoryImpl repository = createRepository(dir, xml);
        try {
            // Run the test
            DescriptiveStatistics statistics = runTest(test, repository);
            if (statistics.getN() > 0) {
                writeReport(test.toString(), name, statistics);
            }
        } finally {
            repository.shutdown();
        }
    } catch (Throwable t) {
        System.out.println("Unable to run " + test + ": " + t.getMessage());
        t.printStackTrace();
    } finally {
        // FileUtils.deleteQuietly(dir);
    }
}
@Override
public float execute_float(ProgramChromosome c, int n, Object[] args) {
    int size = size();
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (int i = 0; i < size; i++) {
        stats.addValue(c.execute_float(n, i, args));
    }
    return (float) stats.getSkewness();
}
/**
 * Applies a Gaussian filter to the given list. The filter parameter is
 * 1/(2 * standard deviation^2), where the standard deviation is estimated
 * from the data itself.
 *
 * @param listToFilter the list to filter
 * @return the Gaussian-filtered list
 */
public static List<Double> gaussianFilter(final List<Double> listToFilter) {
    final DescriptiveStatistics stats = new DescriptiveStatistics();
    // Add the data from the list
    for (final Double value : listToFilter) {
        stats.addValue(value);
    }
    final double std = stats.getStandardDeviation();
    final double parameter = 1.0 / (std * std * 2);
    return gaussianFilter(listToFilter, parameter);
}
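// A small numeric check of the parameter formula above (Commons Math, as in
// the surrounding code; the demo method name is hypothetical): the sample
// {2.0, 4.0, 6.0} has a bias-corrected standard deviation of exactly 2.0,
// so the Gaussian parameter is 1 / (2 * 2.0^2) = 0.125.
static double demoGaussianParameter() {
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (double v : new double[] {2.0, 4.0, 6.0}) {
        stats.addValue(v);
    }
    return 1.0 / (2 * Math.pow(stats.getStandardDeviation(), 2)); // 0.125
}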
public MarketRecord(Market market) {
    this.t = market.t;
    this.p_f_t = market.p_f_t;
    this.p_t = market.p_t[market.t];
    this.spread = market.a_q_t - market.b_q_t;
    this.p_lt = market.p_lt[market.lt];
    this.hftWealth = new DescriptiveStatistics();
    this.hftFreq = new DescriptiveStatistics();
    for (HftAgent agent : market.marketMakers) {
        hftWealth.addValue(agent.getWealth());
        hftFreq.addValue(agent.lambda_m);
    }
}
public String toString() {
    String format = "%d,%.4f,%.4f,%.4f,"
            + "%.6f,%.6f,%.4f,"
            + "%.4f,%.4f,"
            + "%.4f,%.4f\n";
    return String.format(format, t, p_f_t, p_t, p_lt, r_log_t, r_abs_t, spread,
            hftWealth.getMean(), hftWealth.getVariance(),
            hftFreq.getMean(), hftFreq.getVariance());
}
public Void call() throws Exception {
    List<String> urls = Lists.newArrayList();
    final BufferedReader in;
    if (urlFile != null) {
        in = new BufferedReader(new InputStreamReader(new FileInputStream(urlFile)));
    } else {
        in = new BufferedReader(new InputStreamReader(System.in));
    }
    for (String line = in.readLine(); line != null; line = in.readLine()) {
        // a negative maxRequests means "no limit"
        if (maxRequests == 0) {
            break;
        } else if (maxRequests > 0) {
            maxRequests--;
        }
        urls.add(line);
    }
    Optional<StepFunction> of = mvel(stepFunction);
    if (!of.isPresent()) {
        of = clojure(stepFunction);
    }
    if (!of.isPresent()) {
        System.err.printf("'%s' is an invalid step function\n", stepFunction);
        return null;
    }
    StepFunction step = of.get();
    if (labels) {
        System.out.printf("clients\ttp%.1f\tmean\treqs/sec\n", percentile);
    }
    int concurrency = start;
    do {
        DescriptiveStatistics stats = new Fight(concurrency, urls).call();
        System.out.printf("%d\t%.2f\t%.2f\t%.2f\n",
                concurrency,
                stats.getPercentile(percentile),
                stats.getMean(),
                (1000 / stats.getMean()) * concurrency);
        concurrency = step.step(concurrency);
    } while (concurrency < limit);
    return null;
}
@Override
public String toString() {
    NumberFormat fmt = NumberFormat.getNumberInstance();
    fmt.setMaximumFractionDigits(1);
    return description + ":"
            + " avg=" + fmt.format(stats.getMean())
            + " stdev=" + fmt.format(stats.getStandardDeviation())
            + " " + units
            + " (" + stats.getN() + " samples)";
}
/**
 * Calculates the average, min and max throughput time out of the throughput
 * times of all traces in piList. Next to this, the arrival rate is
 * calculated. All metrics are based on the process instances in piList only.
 *
 * @param piList ArrayList: the process instances used
 * @param fitOption int: the fit option used (how to deal with non-conformance)
 * @throws Exception
 */
public void calculateMetrics(ArrayList piList, int fitOption) throws Exception {
    properFrequency = 0;
    timeStats.clear();
    arrivalStats.clear();
    ArrayList arrivalDates = new ArrayList();
    ListIterator lit = piList.listIterator();
    while (lit.hasNext()) {
        ExtendedLogTrace currentTrace = (ExtendedLogTrace) lit.next();
        if (currentTrace.hasProperlyTerminated() && currentTrace.hasSuccessfullyExecuted()) {
            properFrequency++;
        }
        try {
            long tp = currentTrace.getEndDate().getTime() - currentTrace.getBeginDate().getTime();
            if (fitOption == 0) {
                // timeStats based on all traces
                timeStats.addValue(tp);
                arrivalDates.add(currentTrace.getBeginDate());
            }
            if (fitOption == 1
                    && currentTrace.hasProperlyTerminated()
                    && currentTrace.hasSuccessfullyExecuted()) {
                // timeStats based on fitting traces only
                timeStats.addValue(tp);
                arrivalDates.add(currentTrace.getBeginDate());
            }
        } catch (NullPointerException ex) {
            ex.printStackTrace();
        }
    }
    Date[] arrivals = (Date[]) arrivalDates.toArray(new Date[0]);
    // make sure arrival dates are sorted
    Arrays.sort(arrivals);
    if (arrivals.length > 1) {
        for (int i = 1; i < arrivals.length; i++) {
            long iat = arrivals[i].getTime() - arrivals[i - 1].getTime();
            if (iat >= 0) {
                arrivalStats.addValue(iat);
            }
        }
    }
}
@Override
protected DescriptiveStatistics statistics(Collection<? extends Person> persons, String purpose, String mode) {
    DescriptiveStatistics stats = super.statistics(persons, purpose, mode);
    if (threshold > 0) {
        DescriptiveStatistics newStats = new DescriptiveStatistics();
        for (int i = 0; i < stats.getN(); i++) {
            double val = stats.getElement(i);
            if (val >= threshold) {
                newStats.addValue(val);
            }
        }
        return newStats;
    } else {
        return stats;
    }
}
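// A behavior-equivalent sketch of the thresholding step above using streams
// and the DescriptiveStatistics(double[]) constructor (assuming Apache
// Commons Math 3; the same constructor and getValues() accessor also exist
// in the older org.apache.commons.math package).
import java.util.Arrays;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

class ThresholdFilter {
    // keep only stored values >= threshold and rebuild the statistics object
    static DescriptiveStatistics filterBelow(DescriptiveStatistics stats, double threshold) {
        double[] kept = Arrays.stream(stats.getValues())
                .filter(v -> v >= threshold)
                .toArray();
        return new DescriptiveStatistics(kept);
    }
}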
public void test() {
    SparseGraphBuilder builder = new SparseGraphBuilder();
    SparseGraph graph = builder.createGraph();
    SparseVertex v1 = builder.addVertex(graph);
    SparseVertex v2 = builder.addVertex(graph);
    SparseVertex v3 = builder.addVertex(graph);
    SparseVertex v4 = builder.addVertex(graph);
    SparseVertex v5 = builder.addVertex(graph);
    SparseVertex v6 = builder.addVertex(graph);
    builder.addEdge(graph, v1, v2);
    builder.addEdge(graph, v2, v3);
    builder.addEdge(graph, v3, v4);
    builder.addEdge(graph, v4, v5);
    builder.addEdge(graph, v5, v6);
    builder.addEdge(graph, v6, v1);
    builder.addEdge(graph, v2, v5);
    Degree degree = Degree.getInstance();
    DescriptiveStatistics stats = degree.statistics(graph.getVertices());
    assertEquals(2.33, stats.getMean(), 0.01);
    assertEquals(2.0, stats.getMin());
    assertEquals(3.0, stats.getMax());
    TObjectDoubleHashMap<? extends Vertex> values = degree.values(graph.getVertices());
    TObjectDoubleIterator<? extends Vertex> it = values.iterator();
    int count2 = 0;
    int count3 = 0;
    for (int i = 0; i < values.size(); i++) {
        it.advance();
        if (it.value() == 2) {
            count2++;
        } else if (it.value() == 3) {
            count3++;
        }
    }
    assertEquals(4, count2);
    assertEquals(2, count3);
    assertEquals(-0.166, degree.assortativity(graph), 0.001);
}
private void populateWell(Integer plateColumn, Integer plateRow, Well zStackWell) {
    List<String> readouts = TdsUtils.flattenReadoutNames(curPlateSelection);
    for (String readoutName : readouts) {
        DescriptiveStatistics descStats = new DescriptiveStatistics();
        for (Plate plate : curPlateSelection) {
            Well curWell = plate.getWell(plateColumn, plateRow);
            if (curWell == null) {
                continue;
            }
            Double readoutValue = curWell.getReadout(readoutName);
            if (readoutValue != null) {
                descStats.addValue(readoutValue);
            }
        }
        zStackWell.getWellStatistics().put(readoutName, descStats.getMean());
    }
}
@Override
public String getSummaryReport() {
    final StringBuilder outBuffer = new StringBuilder();
    final DescriptiveStatistics ds = computeStats();
    outBuffer.append("Aggregate P@" + N + " Statistics:\n");
    outBuffer.append(String.format("%-15s\t%6d\n", "num_q", ds.getN()));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "min", ds.getMin()));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "max", ds.getMax()));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "mean", ds.getMean()));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "std dev", ds.getStandardDeviation()));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "median", ds.getPercentile(50)));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "skewness", ds.getSkewness()));
    outBuffer.append(String.format("%-15s\t%6.4f\n", "kurtosis", ds.getKurtosis()));
    return outBuffer.toString();
}
@Override
public double evaluate(Trajectory trajectory) {
    double score = 0;
    // iterate over every second element; assumes the transitions list holds
    // one more entry than the last index accessed here
    for (int i = 1; i < trajectory.getElements().size(); i += 2) {
        double t = trajectory.getTransitions().get(i + 1) - trajectory.getTransitions().get(i);
        score += beta * t;
    }
    if (isLogging) {
        stats.addValue(score);
    }
    return score;
}
public static void main(String args[]) {
    DescriptiveStatistics stats = DescriptiveStatistics.newInstance();
    /*
    stats.addValue(2);
    stats.addValue(23);
    stats.addValue(28);
    stats.addValue(69);
    stats.addValue(87);
    stats.addValue(111);
    stats.addValue(125);
    */
    stats.addValue(2);
    stats.addValue(4);
    stats.addValue(6);
    double p25 = stats.getPercentile(25);
    double p50 = stats.getPercentile(50);
    double p75 = stats.getPercentile(75);
    // System.out.println(stats.toString());
    double delta = p75 - p25;
    double center = p25 + (delta / 2);
    double spread = delta / 2;
    System.out.println("p25:" + p25 + " - p50:" + p50 + " - p75:" + p75);
    System.out.println("center: " + center + " - spread: " + spread);
}
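// For reference, a hedged walk-through of the run above, assuming the default
// Commons Math percentile estimation (position = p * (n + 1) / 100, clamped
// to the sample): for {2, 4, 6}, p25 lands on the first value, p50 on the
// second, and p75 is clamped to the maximum, so the program should print
// approximately:
//
//   p25:2.0 - p50:4.0 - p75:6.0
//   center: 4.0 - spread: 2.0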
private Integer getSkeletonCategoryFromCropper1979(
        Integer value, DescriptiveStatistics windowStats, Double criticalLevel) {
    Integer skeletonCategory = 0;
    if (criticalLevel == null) {
        criticalLevel = 0.5;
    }
    double mean = windowStats.getMean();
    double stdev = windowStats.getStandardDeviation();
    double smallRingThreshold = mean - (stdev * criticalLevel);
    int min = (int) windowStats.getMin();
    if (value == min) {
        skeletonCategory = 10;
    } else if (value > smallRingThreshold) {
        skeletonCategory = 0;
    } else {
        // divide the [min, smallRingThreshold] range into ten category steps;
        // clamp the step size to at least 1 so a narrow range (< 10 units)
        // cannot cause a division by zero below
        int range = (int) (smallRingThreshold - min);
        int categoryStepSize = Math.max(range / 10, 1);
        skeletonCategory = (int) (0 - ((value - smallRingThreshold) / categoryStepSize));
    }
    return skeletonCategory;
}
private DescriptiveStatistics runTest(AbstractTest test, Repository repository) throws Exception {
    DescriptiveStatistics statistics = new DescriptiveStatistics();
    test.setUp(repository, credentials);
    try {
        // Run a few iterations to warm up the system
        long warmupEnd = System.currentTimeMillis() + warmup * 1000;
        while (System.currentTimeMillis() < warmupEnd) {
            test.execute();
        }
        // Run test iterations, and capture the execution times
        long runtimeEnd = System.currentTimeMillis() + runtime * 1000;
        while (System.currentTimeMillis() < runtimeEnd) {
            statistics.addValue(test.execute());
        }
    } finally {
        test.tearDown();
    }
    return statistics;
}
private void writeReport(String test, String name, DescriptiveStatistics statistics)
        throws IOException {
    File report = new File("target", test + ".txt");
    boolean needsPrefix = !report.exists();
    PrintWriter writer = new PrintWriter(new FileWriterWithEncoding(report, "UTF-8", true));
    try {
        if (needsPrefix) {
            writer.format("# %-34.34s min 10%% 50%% 90%% max%n", test);
        }
        writer.format("%-36.36s %6.0f %6.0f %6.0f %6.0f %6.0f%n",
                name,
                statistics.getMin(),
                statistics.getPercentile(10.0),
                statistics.getPercentile(50.0),
                statistics.getPercentile(90.0),
                statistics.getMax());
    } finally {
        writer.close();
    }
}
/**
 * Calculates the average of the (fastestPercentage) fast traces, the
 * (slowestPercentage) slow traces and the (100% - fastestPercentage -
 * slowestPercentage) normal speed traces and returns these averages in an
 * array, where [0]: avg fast throughput time, [1]: avg slow throughput time,
 * [2]: avg middle throughput time.
 *
 * @param fastestPercentage double: the percentage of measurements that is to be counted as fast
 * @param slowestPercentage double: the percentage of measurements that is to be counted as slow
 * @return double[]
 */
public double[] getAverageTimes(double fastestPercentage, double slowestPercentage) {
    // initialize arrays
    double[] timeList = timeStats.getSortedValues();
    double[] avgTimes = new double[3];
    long total = 0;
    // obtain the number of fast, slow, normal traces
    int[] sizes = getSizes(fastestPercentage, slowestPercentage);
    int fastSize = sizes[0], slowSize = sizes[1], middleSize = sizes[2];
    for (int i = 0; i < fastSize; i++) {
        total += timeList[i];
    }
    // calculate average of the fastest traces
    double avgFastestTime = 0.0;
    if (fastSize != 0) {
        avgFastestTime = (total * 1.0) / fastSize;
    }
    // calculate average of the slowest traces
    int upperSize = timeList.length - slowSize;
    total = 0;
    for (int i = upperSize; i < timeList.length; i++) {
        total += timeList[i];
    }
    double avgSlowestTime = 0.0;
    if (slowSize > 0) {
        avgSlowestTime = (total * 1.0) / slowSize;
    }
    // calculate average of the middle/normal-speed traces
    total = 0;
    for (int i = fastSize; i < upperSize; i++) {
        total += timeList[i];
    }
    double avgMiddleTime = 0.0;
    if (middleSize > 0) {
        avgMiddleTime = (total * 1.0) / middleSize;
    }
    avgTimes[0] = avgFastestTime;
    avgTimes[1] = avgSlowestTime;
    avgTimes[2] = avgMiddleTime;
    return avgTimes;
}
/**
 * Returns an array containing the number of process instances that are
 * considered to be fast, i.e. have a low throughput time (place 0 in array),
 * the number of process instances that are slow (place 1 in array) and the
 * number of process instances that are considered to be of normal speed
 * (place 2 in array). Based on fastestPercentage, slowestPercentage and
 * timeList (thus method calculateMetrics() should be called before this one).
 *
 * @param fastestPercentage double: the percentage of measurements that is to be counted as fast
 * @param slowestPercentage double: the percentage of measurements that is to be counted as slow
 * @return int[]
 */
public int[] getSizes(double fastestPercentage, double slowestPercentage) {
    int[] sizes = new int[3];
    int length = timeStats.getValues().length;
    sizes[0] = (int) Math.round((length * fastestPercentage) / 100.0);
    if (sizes[0] != length) {
        sizes[1] = (int) Math.round((length * slowestPercentage) / 100.0);
        if ((sizes[0] + sizes[1]) > length) {
            // Make sure that sizes[0] + sizes[1] does not exceed the number
            // of measurements in timeList (rounding could mess this up)
            sizes[1] = length - sizes[0];
        }
    } else {
        sizes[1] = 0;
    }
    sizes[2] = length - sizes[0] - sizes[1];
    return sizes;
}
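// A hypothetical usage sketch of the rounding guard above ("result" is an
// assumed instance holding 7 throughput times, not from the original code):
// fastestPercentage = 30 and slowestPercentage = 80 round to bucket sizes 2
// and 6, which would overshoot the sample, so the guard clamps the slow
// bucket to 5 and leaves no "normal" traces.
int[] sizes = result.getSizes(30.0, 80.0);
// sizes[0] == 2, sizes[1] == 5, sizes[2] == 0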
public double getBestConfidence() {
    // confidences above 1 are assumed to be on a 0-100 scale and are
    // normalized to a fraction
    if (confidences.getMax() > 1) {
        return confidences.getMax() / 100;
    }
    return confidences.getMax();
}
/**
 * Contains all the performance results obtained during log replay analysis.
 * Can be used to retrieve values for the performance metrics and to get
 * extended visualizations.
 *
 * @see PerformanceMeasurer
 * @author Peter T.G. Hornix ([email protected])
 */
public class PerformanceLogReplayResult extends LogReplayAnalysisResult {

    // DescriptiveStatistics object in which throughput times can be stored
    private DescriptiveStatistics timeStats = DescriptiveStatistics.newInstance();

    // SummaryStatistics to obtain mean inter-arrival times
    private SummaryStatistics arrivalStats = SummaryStatistics.newInstance();

    // number of log traces that can be replayed normally
    private int properFrequency;

    public PerformanceLogReplayResult(AnalysisConfiguration analysisOptions, PetriNet net,
            LogReader log, LogReplayAnalysisMethod method) {
        // call the constructor of the superclass
        super(analysisOptions, net, log, method);
    }

    /**
     * Initializes the diagnostic data structures needed to store the
     * measurements taken during the log replay analysis.
     */
    protected void initDiagnosticDataStructures() {
        replayedLog = new ExtendedLogReader(inputLogReader);
        replayedPetriNet = new ExtendedPetriNet(inputPetriNet, replayedLog.getLogTraceIDs());
    }

    // //////////////////////////// METRICS-RELATED METHODS ///////////////////////////

    /**
     * Calculates the average, min and max throughput time out of the throughput
     * times of all traces in piList. Next to this, the arrival rate is
     * calculated. All metrics are based on the process instances in piList only.
     *
     * @param piList ArrayList: the process instances used
     * @param fitOption int: the fit option used (how to deal with non-conformance)
     * @throws Exception
     */
    public void calculateMetrics(ArrayList piList, int fitOption) throws Exception {
        properFrequency = 0;
        timeStats.clear();
        arrivalStats.clear();
        ArrayList arrivalDates = new ArrayList();
        ListIterator lit = piList.listIterator();
        while (lit.hasNext()) {
            ExtendedLogTrace currentTrace = (ExtendedLogTrace) lit.next();
            if (currentTrace.hasProperlyTerminated() && currentTrace.hasSuccessfullyExecuted()) {
                properFrequency++;
            }
            try {
                long tp = currentTrace.getEndDate().getTime() - currentTrace.getBeginDate().getTime();
                if (fitOption == 0) {
                    // timeStats based on all traces
                    timeStats.addValue(tp);
                    arrivalDates.add(currentTrace.getBeginDate());
                }
                if (fitOption == 1
                        && currentTrace.hasProperlyTerminated()
                        && currentTrace.hasSuccessfullyExecuted()) {
                    // timeStats based on fitting traces only
                    timeStats.addValue(tp);
                    arrivalDates.add(currentTrace.getBeginDate());
                }
            } catch (NullPointerException ex) {
                ex.printStackTrace();
            }
        }
        Date[] arrivals = (Date[]) arrivalDates.toArray(new Date[0]);
        // make sure arrival dates are sorted
        Arrays.sort(arrivals);
        if (arrivals.length > 1) {
            for (int i = 1; i < arrivals.length; i++) {
                long iat = arrivals[i].getTime() - arrivals[i - 1].getTime();
                if (iat >= 0) {
                    arrivalStats.addValue(iat);
                }
            }
        }
    }

    /**
     * Exports the throughput times of all process instances in piList to a
     * comma-separated text file.
     *
     * @param piList ArrayList: the process instances used
     * @param file File: the file to which the times are exported
     * @param divider long: the time divider used
     * @param sort String: the time sort used
     * @param fitOption int: the fit option used (how to deal with non-conformance)
     * @throws IOException
     */
    public void exportToFile(ArrayList piList, File file, long divider, String sort, int fitOption)
            throws IOException {
        Writer output = new BufferedWriter(new FileWriter(file));
        String line = "Log Trace,Throughput time (" + sort + ")\n";
        output.write(line);
        ListIterator lit = piList.listIterator();
        while (lit.hasNext()) {
            ExtendedLogTrace currentTrace = (ExtendedLogTrace) lit.next();
            try {
                double tp = (currentTrace.getEndDate().getTime()
                        - currentTrace.getBeginDate().getTime()) * 1.0 / divider;
                if (fitOption == 0) {
                    // times based on all traces
                    line = currentTrace.getName() + "," + tp + "\n";
                    // write line to the file
                    output.write(line);
                }
                if (fitOption == 1 && currentTrace.hasProperlyTerminated()
                        && currentTrace.hasSuccessfullyExecuted()) {
                    // times based on fitting traces only
                    line = currentTrace.getName() + "," + tp + "\n";
                    // write line to the file
                    output.write(line);
                }
            } catch (NullPointerException npe) {
                // skip traces that lack a begin or end date
            }
        }
        // close the file
        output.close();
    }

    // //////////////////////////// GET METHODS ///////////////////////////////////////

    /**
     * Calculates and returns the stdev in throughput time out of the throughput
     * times in timeStats. (Make sure calculateMetrics() is called before this
     * method.)
     *
     * @return double
     */
    public double getStdevThroughputTime() {
        return timeStats.getStandardDeviation();
    }

    /**
     * Calculates the average of the (fastestPercentage) fast traces, the
     * (slowestPercentage) slow traces and the (100% - fastestPercentage -
     * slowestPercentage) normal speed traces and returns these averages in an
     * array, where [0]: avg fast throughput time, [1]: avg slow throughput
     * time, [2]: avg middle throughput time.
     *
     * @param fastestPercentage double: the percentage of measurements that is to be counted as fast
     * @param slowestPercentage double: the percentage of measurements that is to be counted as slow
     * @return double[]
     */
    public double[] getAverageTimes(double fastestPercentage, double slowestPercentage) {
        // initialize arrays
        double[] timeList = timeStats.getSortedValues();
        double[] avgTimes = new double[3];
        long total = 0;
        // obtain the number of fast, slow, normal traces
        int[] sizes = getSizes(fastestPercentage, slowestPercentage);
        int fastSize = sizes[0], slowSize = sizes[1], middleSize = sizes[2];
        for (int i = 0; i < fastSize; i++) {
            total += timeList[i];
        }
        // calculate average of the fastest traces
        double avgFastestTime = 0.0;
        if (fastSize != 0) {
            avgFastestTime = (total * 1.0) / fastSize;
        }
        // calculate average of the slowest traces
        int upperSize = timeList.length - slowSize;
        total = 0;
        for (int i = upperSize; i < timeList.length; i++) {
            total += timeList[i];
        }
        double avgSlowestTime = 0.0;
        if (slowSize > 0) {
            avgSlowestTime = (total * 1.0) / slowSize;
        }
        // calculate average of the middle/normal-speed traces
        total = 0;
        for (int i = fastSize; i < upperSize; i++) {
            total += timeList[i];
        }
        double avgMiddleTime = 0.0;
        if (middleSize > 0) {
            avgMiddleTime = (total * 1.0) / middleSize;
        }
        avgTimes[0] = avgFastestTime;
        avgTimes[1] = avgSlowestTime;
        avgTimes[2] = avgMiddleTime;
        return avgTimes;
    }

    /**
     * Returns an array containing the number of process instances that are
     * considered to be fast, i.e. have a low throughput time (place 0 in
     * array), the number of process instances that are slow (place 1 in array)
     * and the number of process instances that are considered to be of normal
     * speed (place 2 in array). Based on fastestPercentage, slowestPercentage
     * and timeList (thus method calculateMetrics() should be called before
     * this one).
     *
     * @param fastestPercentage double: the percentage of measurements that is to be counted as fast
     * @param slowestPercentage double: the percentage of measurements that is to be counted as slow
     * @return int[]
     */
    public int[] getSizes(double fastestPercentage, double slowestPercentage) {
        int[] sizes = new int[3];
        int length = timeStats.getValues().length;
        sizes[0] = (int) Math.round((length * fastestPercentage) / 100.0);
        if (sizes[0] != length) {
            sizes[1] = (int) Math.round((length * slowestPercentage) / 100.0);
            if ((sizes[0] + sizes[1]) > length) {
                // Make sure that sizes[0] + sizes[1] does not exceed the number
                // of measurements in timeList (rounding could mess this up)
                sizes[1] = length - sizes[0];
            }
        } else {
            sizes[1] = 0;
        }
        sizes[2] = length - sizes[0] - sizes[1];
        return sizes;
    }

    /**
     * Calculates and returns the arrival rate of the traces in piList
     *
     * @return double
     */
    public double getArrivalRate() {
        double arrivalRate = 0;
        if (arrivalStats.getN() > 0 && arrivalStats.getMean() != 0) {
            // the arrival rate is 1 divided by the mean of the inter-arrival times
            arrivalRate = 1 / arrivalStats.getMean();
        }
        return arrivalRate;
    }

    /**
     * Returns the arrival stats of the traces in piList
     *
     * @return SummaryStatistics
     */
    public SummaryStatistics getArrivalStats() {
        return arrivalStats;
    }

    /**
     * Returns the mean throughput time
     *
     * @return double
     */
    public double getMeanThroughputTime() {
        return timeStats.getMean();
    }

    /**
     * Returns the minimal throughput time. Note that method calculateMetrics()
     * should be called before this method.
     *
     * @return double
     */
    public double getMinThroughputTime() {
        return timeStats.getMin();
    }

    /**
     * Returns the maximal throughput time
     *
     * @return double
     */
    public double getMaxThroughputTime() {
        return timeStats.getMax();
    }

    /**
     * Returns the number of cases that execute successfully and complete properly
     *
     * @return int
     */
    public int getProperFrequency() {
        return properFrequency;
    }

    // //////////////////////////// GRAPPA-RELATED METHODS ///////////////////////////

    /**
     * Creates a visualization of the performance analysis results. Note that a
     * change of the display state by the user will have no effect before
     * calling this method. This is intended to prevent unnecessary cloning of
     * the extended Petri net, which actually delivers the custom visualization
     * of the performance analysis results.
     *
     * @param selectedInstances The process instances that have been selected
     *     for updating the visualization.
     * @return The visualization wrapped in a ModelGraphPanel.
     */
    public ModelGraphPanel getVisualization(ArrayList selectedInstances) {
        // sets the currentlySelectedInstances attribute, which is necessary
        // because the writeToDot() method has a fixed interface, though the
        // visualization should be able to take them into account
        ((ExtendedPetriNet) replayedPetriNet).currentlySelectedInstances = selectedInstances;
        ModelGraphPanel myResultVisualization;
        myResultVisualization = ((ExtendedPetriNet) replayedPetriNet).getGrappaVisualization();
        return myResultVisualization;
    }
}
/**
 * Returns the maximal throughput time
 *
 * @return double
 */
public double getMaxThroughputTime() {
    return timeStats.getMax();
}
/**
 * Returns the minimal throughput time. Note that method calculateMetrics()
 * should be called before this method.
 *
 * @return double
 */
public double getMinThroughputTime() {
    return timeStats.getMin();
}
/**
 * Calculates and returns the stdev in throughput time out of the throughput
 * times in timeStats. (Make sure calculateMetrics() is called before this
 * method.)
 *
 * @return double
 */
public double getStdevThroughputTime() {
    return timeStats.getStandardDeviation();
}
void addValue(double d) { stats.addValue(d); }
@Override
public void notifyAfterMobsim(AfterMobsimEvent event) {
    Network network = event.getServices().getScenario().getNetwork();

    DescriptiveStatistics error = new DescriptiveStatistics();
    DescriptiveStatistics errorAbs = new DescriptiveStatistics();
    DescriptivePiStatistics errorWeighted = new WSMStatsFactory().newInstance();

    TDoubleArrayList errorVals = new TDoubleArrayList();
    TDoubleArrayList caps = new TDoubleArrayList();
    TDoubleArrayList speeds = new TDoubleArrayList();

    for (Count count : counts.getCounts().values()) {
        if (!count.getId().toString().startsWith(ODCalibrator.VIRTUAL_ID_PREFIX)) {
            // sum the observed volumes over the 24 hourly slots
            double obsVal = 0;
            for (int i = 1; i < 25; i++) {
                obsVal += count.getVolume(i).getValue();
            }
            if (obsVal > 0) {
                double simVal = calculator.getOccupancy(count.getId());
                simVal *= factor;
                double err = (simVal - obsVal) / obsVal;
                error.addValue(err);
                errorAbs.addValue(Math.abs(err));
                errorWeighted.addValue(Math.abs(err), 1 / obsVal);
                Link link = network.getLinks().get(count.getId());
                errorVals.add(Math.abs(err));
                caps.add(link.getCapacity());
                speeds.add(link.getFreespeed());
            }
        }
    }

    logger.info(String.format(
            "Relative counts error: mean = %s, var = %s, stdev = %s, min = %s, max = %s",
            error.getMean(), error.getVariance(), error.getStandardDeviation(),
            error.getMin(), error.getMax()));
    logger.info(String.format(
            "Absolute relative counts error: mean = %s, var = %s, stdev = %s, min = %s, max = %s",
            errorAbs.getMean(), errorAbs.getVariance(), errorAbs.getStandardDeviation(),
            errorAbs.getMin(), errorAbs.getMax()));
    logger.info(String.format(
            "Absolute weighted relative counts error: mean = %s, var = %s, stdev = %s, min = %s, max = %s",
            errorWeighted.getMean(), errorWeighted.getVariance(),
            errorWeighted.getStandardDeviation(), errorWeighted.getMin(), errorWeighted.getMax()));

    String outdir = event.getServices().getControlerIO().getIterationPath(event.getIteration());
    try {
        TDoubleDoubleHashMap map = Correlations.mean(caps.toArray(), errorVals.toArray());
        StatsWriter.writeHistogram(map, "capacity", "counts",
                String.format("%s/countsError.capacity.txt", outdir));

        map = Correlations.mean(speeds.toArray(), errorVals.toArray());
        StatsWriter.writeHistogram(map, "speed", "counts",
                String.format("%s/countsError.speed.txt", outdir));

        StatsWriter.writeHistogram(
                Histogram.createHistogram(error, new LinearDiscretizer(0.1), false),
                "Error", "Frequency",
                String.format("%s/countsError.hist.txt", outdir));
        StatsWriter.writeHistogram(
                Histogram.createHistogram(errorAbs, new LinearDiscretizer(0.1), false),
                "Error (absolute)", "Frequency",
                String.format("%s/countsErrorAbs.hist.txt", outdir));
        StatsWriter.writeHistogram(
                Histogram.createHistogram(errorWeighted, new LinearDiscretizer(0.1), true),
                "Error (weighted)", "Frequency",
                String.format("%s/countsErrorWeighted.hist.txt", outdir));

        CountsCompare2GeoJSON.write(calculator, counts, factor, network, outdir);
        NetworkLoad2GeoJSON.write(event.getServices().getScenario().getNetwork(), calculator,
                factor, outdir + "/network.json");
    } catch (Exception e) {
        e.printStackTrace();
    }

    String rootOutDir = event.getServices().getControlerIO().getOutputPath();
    boolean append = event.getIteration() > 0;
    writeErrorFile(error, String.format("%s/countsError.txt", rootOutDir), append);
    writeErrorFile(errorAbs, String.format("%s/countsAbsError.txt", rootOutDir), append);
}
public Void call() throws Exception {
    List<String> urls = Lists.newArrayList();
    final BufferedReader in;
    if (urlFile != null) {
        in = new BufferedReader(new InputStreamReader(new FileInputStream(urlFile)));
    } else {
        in = new BufferedReader(new InputStreamReader(System.in));
    }
    for (String line = in.readLine(); line != null; line = in.readLine()) {
        // a negative maxRequests means "no limit"
        if (maxRequests == 0) {
            break;
        } else if (maxRequests > 0) {
            maxRequests--;
        }
        urls.add(line);
    }
    if (labels) {
        System.out.printf("clients\ttp%.1f\tmean\treqs/sec\n", percentile);
    }

    int bestConcurrency = start;
    int concurrency = start;
    DescriptiveStatistics result;
    DescriptiveStatistics bestResult = null;
    double reqsPerSec;
    double res;

    // Phase 1: double the concurrency until the target percentile is exceeded.
    while ((result = new Fight(concurrency, urls).call()).getPercentile(percentile) < target) {
        res = result.getPercentile(percentile);
        reqsPerSec = (1000 / result.getMean()) * concurrency;
        System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", concurrency, res, result.getMean(), reqsPerSec);
        bestConcurrency = concurrency;
        bestResult = result;
        concurrency = concurrency * 2;
    }
    reqsPerSec = (1000 / result.getMean()) * concurrency;
    System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", concurrency,
            result.getPercentile(percentile), result.getMean(), reqsPerSec);

    // Phase 2: back off to the last passing level and advance in coarse
    // (sqrt-sized) steps.
    int increment = (int) Math.sqrt(concurrency);
    concurrency = concurrency / 2;
    while ((result = new Fight(concurrency, urls).call()).getPercentile(percentile) < target) {
        res = result.getPercentile(percentile);
        reqsPerSec = (1000 / result.getMean()) * concurrency;
        System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", concurrency, res, result.getMean(), reqsPerSec);
        bestConcurrency = concurrency;
        bestResult = result;
        concurrency += increment;
    }
    reqsPerSec = (1000 / result.getMean()) * concurrency;
    System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", concurrency,
            result.getPercentile(percentile), result.getMean(), reqsPerSec);

    // Phase 3: step back two coarse increments and refine with a finer step.
    increment = (int) Math.sqrt(Math.sqrt(concurrency));
    concurrency = concurrency - (2 * increment);
    while ((result = new Fight(concurrency, urls).call()).getPercentile(percentile) < target) {
        res = result.getPercentile(percentile);
        reqsPerSec = (1000 / result.getMean()) * concurrency;
        System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", concurrency, res, result.getMean(), reqsPerSec);
        bestConcurrency = concurrency;
        bestResult = result;
        concurrency += increment;
    }
    reqsPerSec = (1000 / result.getMean()) * concurrency;
    System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", concurrency,
            result.getPercentile(percentile), result.getMean(), reqsPerSec);

    // Report the best (highest passing) concurrency level; this assumes at
    // least the very first measurement stayed under the target, otherwise
    // bestResult is still null here.
    assert bestResult != null;
    reqsPerSec = (1000 / bestResult.getMean()) * bestConcurrency;
    System.out.printf("%d\t%.2f\t%.2f\t%.2f\n", bestConcurrency,
            bestResult.getPercentile(percentile), bestResult.getMean(), reqsPerSec);
    return null;
}
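// The three ramp phases above repeat the same measure-and-report step. A
// refactoring sketch that factors it into a helper ("Fight", "percentile"
// and the tab-separated output format are taken from the method above; the
// helper name itself is hypothetical, not the original code's structure):
private DescriptiveStatistics measure(int concurrency, List<String> urls) throws Exception {
    DescriptiveStatistics result = new Fight(concurrency, urls).call();
    double reqsPerSec = (1000 / result.getMean()) * concurrency;
    System.out.printf("%d\t%.2f\t%.2f\t%.2f\n",
            concurrency, result.getPercentile(percentile), result.getMean(), reqsPerSec);
    return result;
}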