/**
 * Renders this record as one comma-separated line terminated by '\n'.
 *
 * <p>Columns: t, then three prices (4 dp), two returns (6 dp), spread (4 dp),
 * and mean/variance of HFT wealth and HFT frequency (4 dp each).
 * NOTE(review): field semantics inferred from names — confirm against the
 * declaring class.
 */
public String toString() {
  return String.format(
      "%d,%.4f,%.4f,%.4f,%.6f,%.6f,%.4f,%.4f,%.4f,%.4f,%.4f\n",
      t,
      p_f_t,
      p_t,
      p_lt,
      r_log_t,
      r_abs_t,
      spread,
      hftWealth.getMean(),
      hftWealth.getVariance(),
      hftFreq.getMean(),
      hftFreq.getVariance());
}
  /**
   * Writes one tab-separated line of summary statistics (mean, variance,
   * standard deviation, min, max) to {@code file}.
   *
   * @param error the statistics to dump
   * @param file output file path
   * @param append if false the file is truncated and a header line is written first
   */
  private void writeErrorFile(DescriptiveStatistics error, String file, boolean append) {
    // try-with-resources: the previous version leaked the writer if any write threw.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file, append))) {
      if (!append) {
        // write header
        writer.write("mean\tvar\tstderr\tmin\tmax");
        writer.newLine();
      }

      writer.write(
          error.getMean()
              + "\t"
              + error.getVariance()
              + "\t"
              + error.getStandardDeviation()
              + "\t"
              + error.getMin()
              + "\t"
              + error.getMax());
      writer.newLine();
    } catch (IOException e) {
      // Best-effort output: log and continue, matching the original behavior.
      e.printStackTrace();
    }
  }
Example #3
0
File: Step.java  Project: ning/pummel
  /**
   * Reads the target URLs (from {@code urlFile} or stdin, capped at
   * {@code maxRequests} when non-negative), resolves the configured step
   * function (MVEL first, then Clojure), and ramps client concurrency from
   * {@code start} until {@code limit}, printing one stats line per round.
   *
   * @return always {@code null} ({@code Callable<Void>} contract)
   * @throws Exception if reading the URLs or running a round fails
   */
  public Void call() throws Exception {
    List<String> urls = Lists.newArrayList();
    final BufferedReader in;
    if (urlFile != null) {
      in = new BufferedReader(new InputStreamReader(new FileInputStream(urlFile)));
    } else {
      in = new BufferedReader(new InputStreamReader(System.in));
    }
    try {
      for (String line = in.readLine(); line != null; line = in.readLine()) {
        if (maxRequests >= 0) {
          if (maxRequests == 0) {
            break;
          }
          maxRequests--; // known to be > 0 here
        }
        urls.add(line);
      }
    } finally {
      // Close the reader we opened; never close System.in (previously leaked the file).
      if (urlFile != null) {
        in.close();
      }
    }

    // Resolve the step function: MVEL expression first, Clojure as fallback.
    Optional<StepFunction> of = mvel(stepFunction);
    if (!of.isPresent()) {
      of = clojure(stepFunction);
    }

    if (!of.isPresent()) {
      System.err.printf("'%s' is an invalid step function\n", stepFunction);
      return null;
    }

    StepFunction step = of.get();

    if (labels) {
      System.out.printf("clients\ttp%.1f\tmean\treqs/sec\n", percentile);
    }
    int concurrency = start;
    do {
      // One load-test round at the current concurrency; print tp, mean, throughput.
      DescriptiveStatistics stats = new Fight(concurrency, urls).call();
      System.out.printf(
          "%d\t%.2f\t%.2f\t%.2f\n",
          concurrency,
          stats.getPercentile(percentile),
          stats.getMean(),
          (1000 / stats.getMean()) * concurrency);
      concurrency = step.step(concurrency);
    } while (concurrency < limit);
    return null;
  }
    /**
     * Renders this sample set as
     * {@code "<description>: avg=<mean> stdev=<sd> <units> (<n> samples)"},
     * with at most one fractional digit on the formatted values.
     */
    @Override
    public String toString() {
      NumberFormat fmt = NumberFormat.getNumberInstance();
      fmt.setMaximumFractionDigits(1);

      StringBuilder sb = new StringBuilder();
      sb.append(description)
          .append(": avg=")
          .append(fmt.format(stats.getMean()))
          .append(" stdev=")
          .append(fmt.format(stats.getStandardDeviation()))
          .append(' ')
          .append(units)
          .append(" (")
          .append(stats.getN())
          .append(" samples)");
      return sb.toString();
    }
Example #5
0
  /**
   * Builds a multi-line, tab-separated summary of the aggregate P@N
   * statistics (count, min, max, mean, std dev, median, skewness, kurtosis).
   *
   * @return the formatted summary report
   */
  @Override
  public String getSummaryReport() {
    final DescriptiveStatistics ds = computeStats();

    // All value rows share the same "label<TAB>value" layout.
    final String row = "%-15s\t%6.4f\n";

    final StringBuilder report = new StringBuilder();
    report.append("Aggregate P@" + N + " Statistics:\n");
    report.append(String.format("%-15s\t%6d\n", "num_q", ds.getN()));
    report.append(String.format(row, "min", ds.getMin()));
    report.append(String.format(row, "max", ds.getMax()));
    report.append(String.format(row, "mean", ds.getMean()));
    report.append(String.format(row, "std dev", ds.getStandardDeviation()));
    report.append(String.format(row, "median", ds.getPercentile(50)));
    report.append(String.format(row, "skewness", ds.getSkewness()));
    report.append(String.format(row, "kurtosis", ds.getKurtosis()));

    return report.toString();
  }
Example #6
0
  /**
   * Builds a 6-vertex ring with one chord (v2-v5 in 1-based numbering) and
   * verifies degree statistics, the per-vertex degree counts, and the graph's
   * assortativity.
   */
  public void test() {
    SparseGraphBuilder builder = new SparseGraphBuilder();
    SparseGraph graph = builder.createGraph();

    SparseVertex[] v = new SparseVertex[6];
    for (int i = 0; i < v.length; i++) {
      v[i] = builder.addVertex(graph);
    }

    // Ring edges in order, then the single chord {1,4} (= v2-v5).
    int[][] edges = {{0, 1}, {1, 2}, {2, 3}, {3, 4}, {4, 5}, {5, 0}, {1, 4}};
    for (int[] e : edges) {
      builder.addEdge(graph, v[e[0]], v[e[1]]);
    }

    Degree degree = Degree.getInstance();

    // Four vertices have degree 2; the chord endpoints have degree 3.
    DescriptiveStatistics stats = degree.statistics(graph.getVertices());
    assertEquals(2.33, stats.getMean(), 0.01);
    assertEquals(2.0, stats.getMin());
    assertEquals(3.0, stats.getMax());

    TObjectDoubleHashMap<? extends Vertex> values = degree.values(graph.getVertices());
    TObjectDoubleIterator<? extends Vertex> it = values.iterator();

    int degreeTwo = 0;
    int degreeThree = 0;
    int remaining = values.size();
    while (remaining-- > 0) {
      it.advance();
      if (it.value() == 2) {
        degreeTwo++;
      } else if (it.value() == 3) {
        degreeThree++;
      }
    }

    assertEquals(4, degreeTwo);
    assertEquals(2, degreeThree);

    assertEquals(-0.166, degree.assortativity(graph), 0.001);
  }
  /**
   * Fills the z-stack well's statistics map with, for every readout name, the
   * mean of that readout across the wells at the same (column, row) position
   * in all currently selected plates. Wells or readouts that are missing are
   * skipped; a readout with no values at all yields NaN (DescriptiveStatistics
   * mean of an empty set).
   */
  private void populateWell(Integer plateColumn, Integer plateRow, Well zStackWell) {
    for (String readoutName : TdsUtils.flattenReadoutNames(curPlateSelection)) {
      DescriptiveStatistics stats = new DescriptiveStatistics();

      for (Plate plate : curPlateSelection) {
        Well well = plate.getWell(plateColumn, plateRow);
        if (well == null) {
          continue;
        }
        Double value = well.getReadout(readoutName);
        if (value != null) {
          stats.addValue(value);
        }
      }

      zStackWell.getWellStatistics().put(readoutName, stats.getMean());
    }
  }
  /**
   * Maps a ring-width value to a skeleton-plot category following Cropper
   * (1979): 0 for rings above the small-ring threshold
   * (mean - stdev * criticalLevel), 10 for the narrowest ring in the window,
   * and a linear scale in between.
   *
   * @param value the ring width to categorize
   * @param windowStats statistics over the surrounding window of ring widths
   * @param criticalLevel sensitivity multiplier; defaults to 0.5 when null
   * @return a category in the range [0, 10]
   */
  private Integer getSkeletonCategoryFromCropper1979(
      Integer value, DescriptiveStatistics windowStats, Double criticalLevel) {
    if (criticalLevel == null) criticalLevel = 0.5;
    double mean = windowStats.getMean();
    double stdev = windowStats.getStandardDeviation();
    double smallRingThreshold = mean - (stdev * criticalLevel);
    int min = (int) windowStats.getMin();

    if (value == min) {
      return 10; // the narrowest ring always gets the strongest mark
    }
    if (value > smallRingThreshold) {
      return 0; // not a small ring
    }

    // Scale [min, threshold] linearly onto categories 0..10. Use a double
    // step: the previous integer step (range / 10) truncated to 0 whenever
    // (threshold - min) < 10, making the division below blow up to Infinity
    // and the cast return Integer.MAX_VALUE.
    double categoryStepSize = (smallRingThreshold - min) / 10.0;
    if (categoryStepSize <= 0) {
      // Degenerate window (threshold at or below min): anything that reached
      // this branch is an extreme small ring.
      return 10;
    }
    int skeletonCategory = (int) ((smallRingThreshold - value) / categoryStepSize);
    // Clamp to the valid category range to guard against rounding at the edges.
    return Math.min(10, Math.max(0, skeletonCategory));
  }
 /**
  * Returns the mean throughput time over all recorded samples.
  *
  * @return the arithmetic mean of the recorded throughput times
  */
 public double getMeanThroughputTime() {
   final double meanThroughput = timeStats.getMean();
   return meanThroughput;
 }
Example #10
0
File: Limit.java  Project: ning/pummel
  /**
   * Finds the highest client concurrency whose latency percentile stays below
   * {@code target}. Ramps in three phases with decreasing step sizes —
   * doubling, then sqrt-sized steps from half the failing level, then
   * sqrt(sqrt)-sized steps — printing one stats line per round, and finally
   * re-prints the best passing round.
   *
   * @return always {@code null} ({@code Callable<Void>} contract)
   * @throws Exception if reading the URL list or running a round fails
   */
  public Void call() throws Exception {
    List<String> urls = Lists.newArrayList();
    final BufferedReader in;
    if (urlFile != null) {
      in = new BufferedReader(new InputStreamReader(new FileInputStream(urlFile)));
    } else {
      in = new BufferedReader(new InputStreamReader(System.in));
    }
    try {
      for (String line = in.readLine(); line != null; line = in.readLine()) {
        if (maxRequests >= 0) {
          if (maxRequests == 0) {
            break;
          }
          maxRequests--; // known to be > 0 here
        }
        urls.add(line);
      }
    } finally {
      // Close the reader we opened; never close System.in (previously leaked the file).
      if (urlFile != null) {
        in.close();
      }
    }

    if (labels) {
      System.out.printf("clients\ttp%.1f\tmean\treqs/sec\n", percentile);
    }

    int bestConcurrency = start;
    int concurrency = start;
    DescriptiveStatistics result;
    DescriptiveStatistics bestResult = null;

    // Phase 1: double the concurrency until the percentile exceeds the target.
    while ((result = new Fight(concurrency, urls).call()).getPercentile(percentile) < target) {
      printRound(concurrency, result);
      bestConcurrency = concurrency;
      bestResult = result;
      concurrency = concurrency * 2;
    }
    printRound(concurrency, result); // first failing round

    // Phase 2: back off to the last passing level, advance in sqrt-sized steps.
    int increment = (int) Math.sqrt(concurrency);
    concurrency = concurrency / 2;
    while ((result = new Fight(concurrency, urls).call()).getPercentile(percentile) < target) {
      printRound(concurrency, result);
      bestConcurrency = concurrency;
      bestResult = result;
      concurrency += increment;
    }
    printRound(concurrency, result);

    // Phase 3: refine around the limit with even smaller steps.
    increment = (int) Math.sqrt(Math.sqrt(concurrency));
    concurrency = concurrency - (2 * increment);
    while ((result = new Fight(concurrency, urls).call()).getPercentile(percentile) < target) {
      printRound(concurrency, result);
      bestConcurrency = concurrency;
      bestResult = result;
      concurrency += increment;
    }
    printRound(concurrency, result);

    // Re-print the best round that still met the target.
    assert bestResult != null;
    printRound(bestConcurrency, bestResult);

    return null;
  }

  /** Prints one result line: concurrency, percentile latency, mean latency, requests/sec. */
  private void printRound(int concurrency, DescriptiveStatistics result) {
    double reqsPerSec = (1000 / result.getMean()) * concurrency;
    System.out.printf(
        "%d\t%.2f\t%.2f\t%.2f\n",
        concurrency, result.getPercentile(percentile), result.getMean(), reqsPerSec);
  }
  /**
   * After each mobsim run, compares simulated link volumes against observed
   * counting-station data, logs error statistics, and writes histogram /
   * correlation / GeoJSON output files.
   *
   * <p>Per station, the relative error is (sim - obs) / obs, where obs is the
   * sum of observed volumes over hours 1..24 and sim is the simulated
   * occupancy scaled by {@code factor}. Stations with zero observed volume and
   * virtual stations created by the OD calibrator are skipped.
   */
  @Override
  public void notifyAfterMobsim(AfterMobsimEvent event) {
    Network network = event.getServices().getScenario().getNetwork();
    DescriptiveStatistics error = new DescriptiveStatistics();
    DescriptiveStatistics errorAbs = new DescriptiveStatistics();
    DescriptivePiStatistics errorWeighted = new WSMStatsFactory().newInstance();

    // Parallel arrays: per-station absolute error plus the link's capacity and
    // free speed, used below for the error-vs-capacity/speed correlations.
    TDoubleArrayList errorVals = new TDoubleArrayList();
    TDoubleArrayList caps = new TDoubleArrayList();
    TDoubleArrayList speeds = new TDoubleArrayList();

    for (Count count : counts.getCounts().values()) {
      // Skip virtual counting stations introduced by the OD calibrator.
      if (!count.getId().toString().startsWith(ODCalibrator.VIRTUAL_ID_PREFIX)) {
        double obsVal = 0;
        // Sum observed volumes over hours 1..24.
        for (int i = 1; i < 25; i++) {
          obsVal += count.getVolume(i).getValue();
        }

        if (obsVal > 0) {
          double simVal = calculator.getOccupancy(count.getId());
          simVal *= factor;

          double err = (simVal - obsVal) / obsVal;

          error.addValue(err);
          errorAbs.addValue(Math.abs(err));
          // Weight by 1/obs so heavily-loaded stations do not dominate.
          errorWeighted.addValue(Math.abs(err), 1 / obsVal);

          Link link = network.getLinks().get(count.getId());
          errorVals.add(Math.abs(err));
          caps.add(link.getCapacity());
          speeds.add(link.getFreespeed());
        }
      }
    }

    // Log summary statistics for the signed, absolute, and weighted errors.
    logger.info(
        String.format(
            "Relative counts error: mean = %s, var = %s, stderr = %s, min = %s, max = %s",
            error.getMean(),
            error.getVariance(),
            error.getStandardDeviation(),
            error.getMin(),
            error.getMax()));
    logger.info(
        String.format(
            "Absolute relative counts error: mean = %s, var = %s, stderr = %s, min = %s, max = %s",
            errorAbs.getMean(),
            errorAbs.getVariance(),
            errorAbs.getStandardDeviation(),
            errorAbs.getMin(),
            errorAbs.getMax()));
    logger.info(
        String.format(
            "Absolute weigthed relative counts error: mean = %s, var = %s, stderr = %s, min = %s, max = %s",
            errorWeighted.getMean(),
            errorWeighted.getVariance(),
            errorWeighted.getStandardDeviation(),
            errorWeighted.getMin(),
            errorWeighted.getMax()));

    String outdir = event.getServices().getControlerIO().getIterationPath(event.getIteration());

    try {
      // Error vs. link capacity, then error vs. link free speed.
      TDoubleDoubleHashMap map = Correlations.mean(caps.toArray(), errorVals.toArray());
      StatsWriter.writeHistogram(
          map, "capacity", "counts", String.format("%s/countsError.capacity.txt", outdir));

      map = Correlations.mean(speeds.toArray(), errorVals.toArray());
      StatsWriter.writeHistogram(
          map, "speed", "counts", String.format("%s/countsError.speed.txt", outdir));

      // Error distributions, binned at 0.1.
      StatsWriter.writeHistogram(
          Histogram.createHistogram(error, new LinearDiscretizer(0.1), false),
          "Error",
          "Frequency",
          String.format("%s/countsError.hist.txt", outdir));
      StatsWriter.writeHistogram(
          Histogram.createHistogram(errorAbs, new LinearDiscretizer(0.1), false),
          "Error (absolute)",
          "Frequency",
          String.format("%s/countsErrorAbs.hist.txt", outdir));
      StatsWriter.writeHistogram(
          Histogram.createHistogram(errorWeighted, new LinearDiscretizer(0.1), true),
          "Error (weighted)",
          "Frequency",
          String.format("%s/countsErrorWeighted.hist.txt", outdir));

      CountsCompare2GeoJSON.write(calculator, counts, factor, network, outdir);
      NetworkLoad2GeoJSON.write(
          event.getServices().getScenario().getNetwork(),
          calculator,
          factor,
          outdir + "/network.json");
    } catch (Exception e) {
      // Output files are diagnostic only; log and keep the iteration alive.
      e.printStackTrace();
    }

    // Append to the run-level error files from the second iteration onward.
    String rootOutDir = event.getServices().getControlerIO().getOutputPath();
    boolean append = false;
    if (event.getIteration() > 0) {
      append = true;
    }
    writeErrorFile(error, String.format("%s/countsError.txt", rootOutDir), append);
    writeErrorFile(errorAbs, String.format("%s/countsAbsError.txt", rootOutDir), append);
  }
Example #12
0
  /**
   * Summarizes the simulation results: for every simulated point set, writes
   * one space-separated row per statistic (additive logP, -log10(1-p) of the
   * LN chi-square, LN, Wald, F logP, dominance logP) to its own output file,
   * and prints the LRT/Wald means, standard deviations, and their regression
   * correlation to stdout.
   */
  public void SummuarySimulation() {
    calculatePower();
    ChiSquaredDistribution chi = new ChiSquaredDistributionImpl(weight.length);

    // One output stream per statistic, in a fixed order matching the prints below.
    final String[] outputFiles = {"LogAP.txt", "LNP.txt", "LN.txt", "Wald.txt", "F.txt", "LogDP.txt"};
    final PrintStream[] outs = new PrintStream[outputFiles.length];
    try {
      for (int i = 0; i < outputFiles.length; i++) {
        outs[i] = new PrintStream(new BufferedOutputStream(new FileOutputStream(outputFiles[i])));
      }
    } catch (Exception E) {
      E.printStackTrace(System.err);
      // Cannot write results without the output files; bail out instead of
      // dereferencing a null stream below (the previous code fell through and NPE'd).
      for (PrintStream out : outs) {
        if (out != null) out.close();
      }
      return;
    }

    try {
      for (int i = 0; i < SimulationResults.size(); i++) {
        ArrayList PointStatistics = (ArrayList) SimulationResults.get(i);
        DescriptiveStatistics dsLRT = new DescriptiveStatisticsImpl();
        DescriptiveStatistics dsWALD = new DescriptiveStatisticsImpl();
        SimpleRegression sr = new SimpleRegression();
        for (int j = 0; j < PointStatistics.size(); j++) {
          PointMappingStatistic pms = (PointMappingStatistic) PointStatistics.get(j);
          double ln = pms.get_LN();
          double p = 0;
          try {
            // p-value of the LN statistic under the chi-square null.
            p = chi.cumulativeProbability(ln);
          } catch (Exception E) {
            E.printStackTrace(System.err);
          }
          double logLOD = -1 * Math.log10(1 - p);
          outs[0].print(pms.get_logP_additive() + " ");
          outs[1].print(logLOD + " ");
          outs[2].print(pms.get_LN() + " ");
          outs[3].print(pms.get_wald() + " ");
          outs[4].print(pms.get_logP_F() + " ");
          outs[5].print(pms.get_logP_dominance() + " ");

          dsLRT.addValue(pms.get_LN());
          dsWALD.addValue(pms.get_wald());
          sr.addData(pms.get_LN(), pms.get_wald());
        }
        // Console summary for this point set. The statistics objects are
        // created fresh per iteration, so no clear() calls are needed.
        System.out.println(
            dsLRT.getMean()
                + " +- "
                + dsLRT.getStandardDeviation()
                + " "
                + dsWALD.getMean()
                + " +- "
                + dsWALD.getStandardDeviation()
                + " cor "
                + sr.getR());
        for (PrintStream out : outs) {
          out.println();
        }
      }
    } finally {
      // Close (and flush) every stream even if an iteration throws.
      for (PrintStream out : outs) {
        out.close();
      }
    }
  }
Example #13
0
  /**
   * Runs every WebSee test case listed in {@code basePath}/description.txt
   * (tab-separated; column 1 = id, column 7 = xpath, column 8 = test folder),
   * logging per-case progress and final aggregate rank/size/distance/time
   * statistics. Cases marked "no seeding" are skipped; a failing case is
   * logged and does not abort the batch.
   *
   * @param basePath root directory containing description.txt and the test folders
   * @throws IOException if the description file cannot be read
   */
  public void run(String basePath) throws IOException {
    Logger webSeeLog =
        Util.getNewLogger(
            basePath + File.separatorChar + "WebSee_statistics.txt",
            "WebSee_statistics" + System.currentTimeMillis());
    webSeeLog.info("");
    webSeeLog.info("--------------------------------------------------------------------------");
    webSeeLog.info("Test case" + "\t" + "Rank" + "\t" + "Result set size" + "\t" + "Hit/Miss");

    // try-with-resources: the previous version leaked the reader when an
    // iteration threw before reaching br.close().
    try (BufferedReader br = new BufferedReader(new FileReader(basePath + "/description.txt"))) {
      String line;
      while ((line = br.readLine()) != null) {
        String[] lineSplit = line.split("\t");
        String id = lineSplit[1];
        if (id.equals("no seeding")) {
          System.out.println("skip");
          continue;
        }
        String testFolder = lineSplit[8];
        String xpath = lineSplit[7];
        try {
          WebSeeTestCase wstc = new WebSeeTestCase(basePath + "/" + testFolder);
          wstc.run(xpath, webSeeLog, this);
          wstc.cleanIntermediateFiles();
        } catch (Exception e) {
          // A failing test case is reported but the batch continues.
          System.err.println(testFolder);
          System.err.println(e.getMessage());
          e.printStackTrace();
        }
        // Interim WebSee rank statistics after each case.
        double mean = webSeeRankStats.getMean();
        double std = webSeeRankStats.getStandardDeviation();
        double median = webSeeRankStats.getPercentile(50);

        webSeeLog.info("");
        webSeeLog.info("\t\t\t\t\tResults so far");
        webSeeLog.info("\t\t\t\t\tMean rank = " + mean);
        webSeeLog.info("\t\t\t\t\tMedian rank = " + median);
        webSeeLog.info("\t\t\t\t\tStandard deviation rank = " + std);
      }
    }

    // Final aggregate WebSee statistics.
    webSeeLog.info("");
    webSeeLog.info("-------- FINAL RESULTS --------");
    webSeeLog.info("");
    webSeeLog.info("Mean rank = " + webSeeRankStats.getMean());
    webSeeLog.info("Median rank = " + webSeeRankStats.getPercentile(50));
    webSeeLog.info("Standard deviation rank = " + webSeeRankStats.getStandardDeviation());
    webSeeLog.info("");
    webSeeLog.info("Mean result set size = " + webSeeResultSetSizeStats.getMean());
    webSeeLog.info("Median result set size = " + webSeeResultSetSizeStats.getPercentile(50));
    webSeeLog.info(
        "Standard deviation result set size = " + webSeeResultSetSizeStats.getStandardDeviation());
    webSeeLog.info("");
    webSeeLog.info("Mean distance = " + webSeeDistanceStats.getMean());
    webSeeLog.info("Median distance = " + webSeeDistanceStats.getPercentile(50));
    webSeeLog.info("Standard deviation distance = " + webSeeDistanceStats.getStandardDeviation());
    webSeeLog.info("");
    webSeeLog.info("Mean P1 time = " + webSeeP1TimeStats.getMean());
    webSeeLog.info("Mean P2 time = " + webSeeP2TimeStats.getMean());
    webSeeLog.info("Mean P3 time = " + webSeeP3TimeStats.getMean());
    webSeeLog.info("Mean P4 time = " + webSeeP4TimeStats.getMean());
    webSeeLog.info("Mean P5 time = " + webSeeP5TimeStats.getMean());
    webSeeLog.info("Mean Total time = " + webSeeTotalTimeStats.getMean());
  }
 /**
  * Get the mean of x values.
  *
  * @return the arithmetic mean of the recorded x values
  */
 public double getXMean() {
   final double xMean = xData.getMean();
   return xMean;
 }
 /**
  * Get the mean of y values.
  *
  * @return the arithmetic mean of the recorded y values
  */
 public double getYMean() {
   final double yMean = yData.getMean();
   return yMean;
 }
Example #16
0
  /**
   * Augments the sampled social graph with clique ("sociogram") edges reported
   * by egos and writes a series of diagnostic histograms and correlation files.
   *
   * <p>For every ego record, each reported clique is resolved to known alters;
   * edges between two unsampled clique members are inserted with probability
   * 0.62 (via {@code Math.random()}, so results are nondeterministic).
   * Afterwards, clique-count and clique-size distributions plus several
   * degree correlations are written to hard-coded output paths.
   *
   * <p>NOTE(review): output paths are absolute and user-specific — consider
   * parameterizing them.
   */
  private void loadSociogramData(Collection<VertexRecord> records, SQLDumpReader sqlData) {
    logger.info("Loading sociogram data...");
    Map<String, VertexRecord> map = sqlData.getFullAlterKeyMappping(records);

    // Snapshot of projection-vertex degrees before any new edges are inserted.
    TObjectIntHashMap<Vertex> rawDegrees = new TObjectIntHashMap<Vertex>();
    for (Vertex v : proj.getVertices()) {
      rawDegrees.put(v, v.getNeighbours().size());
    }

    int edgecnt = 0; // edges successfully inserted
    int doublecnt = 0; // edges rejected because they already existed
    int egoEdge = 0; // see NOTE(review) below — appears to stay 0

    Set<Vertex> notOkVertices = new HashSet<Vertex>();
    Set<Vertex> okVertices = new HashSet<Vertex>();
    DescriptiveStatistics notOkStats = new DescriptiveStatistics();
    DescriptiveStatistics okStats = new DescriptiveStatistics();

    // Distributions of cliques-per-ego and clique sizes.
    DescriptiveStatistics numDistr = new DescriptiveStatistics();
    DescriptiveStatistics numDistrNoZero = new DescriptiveStatistics();
    DescriptiveStatistics sizeDistr = new DescriptiveStatistics();

    // Parallel value lists for the correlation outputs below.
    TDoubleArrayList sizeValues = new TDoubleArrayList();
    TDoubleArrayList kSizeValues = new TDoubleArrayList();
    TDoubleArrayList numValues = new TDoubleArrayList();
    TDoubleArrayList numValues2 = new TDoubleArrayList();
    TDoubleArrayList kNumValues = new TDoubleArrayList();

    for (VertexRecord record : records) {
      if (record.isEgo) {
        List<Set<String>> cliques = sqlData.getCliques(record);
        numDistr.addValue(cliques.size());

        Vertex v = idMap.get(record.id);
        numValues.add(cliques.size());
        kNumValues.add(v.getNeighbours().size());

        if (!cliques.isEmpty()) numDistrNoZero.addValue(cliques.size());

        for (Set<String> clique : cliques) {
          sizeDistr.addValue(clique.size());
          sizeValues.add(clique.size());
          kSizeValues.add(rawDegrees.get(projMap.get(v)));
          numValues2.add(cliques.size());
          // Resolve each alter name in the clique to a known vertex.
          List<SocialSparseVertex> vertices = new ArrayList<SocialSparseVertex>(clique.size());
          for (String alter : clique) {
            VertexRecord r = map.get(record.egoSQLId + alter);
            if (r != null) {
              SocialSparseVertex vertex = idMap.get(r.id);
              if (vertex != null) {
                vertices.add(vertex);
              } else {
                logger.warn("Vertex not found.");
              }
            } else {
              logger.warn("Record not found.");
            }
          }

          // Try to connect every unsampled pair within the clique.
          for (int i = 0; i < vertices.size(); i++) {
            for (int j = i + 1; j < vertices.size(); j++) {
              SampledVertexDecorator<SocialSparseVertex> vProj1 = projMap.get(vertices.get(i));
              SampledVertexDecorator<SocialSparseVertex> vProj2 = projMap.get(vertices.get(j));
              if (!vProj1.isSampled() && !vProj2.isSampled()) {

                // NOTE(review): 0.62 is a hard-coded insertion probability —
                // presumably an empirically estimated edge-report rate; confirm.
                if (Math.random() < 0.62) {
                  SocialSparseEdge socialEdge =
                      builder.addEdge(graph, vertices.get(i), vertices.get(j));
                  if (socialEdge != null) {
                    projBuilder.addEdge(proj, vProj1, vProj2, socialEdge);
                    edgecnt++;

                    // NOTE(review): this branch looks unreachable — the
                    // enclosing condition requires both endpoints unsampled,
                    // so egoEdge/notOkVertices stay empty unless addEdge()
                    // changes the sampled flag. Verify intent.
                    if (vProj1.isSampled() || vProj2.isSampled()) {
                      egoEdge++;
                      if (vProj1.isSampled()) notOkVertices.add(vProj1);
                      else notOkVertices.add(vProj2);
                    }

                  } else {
                    doublecnt++;
                    if (vProj1.isSampled()) okVertices.add(vProj1);
                    else if (vProj2.isSampled()) okVertices.add(vProj2);
                  }
                }
              }
            }
          }
        }
      }
    }

    for (Vertex v : okVertices) okStats.addValue(rawDegrees.get(v));

    for (Vertex v : notOkVertices) notOkStats.addValue(rawDegrees.get(v));
    try {

      // Degree histograms of the "ok" and "not ok" vertex sets, plus their ratio.
      TDoubleDoubleHashMap hist =
          Histogram.createHistogram(okStats, new LinearDiscretizer(1), false);
      StatsWriter.writeHistogram(
          hist,
          "k",
          "n",
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/k_ok.txt");

      TDoubleDoubleHashMap hist2 =
          Histogram.createHistogram(notOkStats, new LinearDiscretizer(1), false);
      StatsWriter.writeHistogram(
          hist2,
          "k",
          "n",
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/k_notok.txt");

      TDoubleDoubleHashMap ratio = new TDoubleDoubleHashMap();
      double[] keys = hist.keys();
      for (double k : keys) {
        double val1 = hist2.get(k);
        double val2 = hist.get(k);

        ratio.put(k, val1 / (val2 + val1));
      }
      StatsWriter.writeHistogram(
          ratio,
          "k",
          "p",
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/k_ratio.txt");

      logger.info("Mean num of cliques: " + numDistrNoZero.getMean());
      logger.info("Mean size: " + sizeDistr.getMean());
      logger.info("Median num of cliques: " + StatUtils.percentile(numDistrNoZero.getValues(), 50));
      logger.info("Median size: " + StatUtils.percentile(sizeDistr.getValues(), 50));

      // Normalized histograms of cliques-per-ego and clique size.
      TDoubleDoubleHashMap histNum =
          Histogram.createHistogram(
              numDistrNoZero,
              FixedSampleSizeDiscretizer.create(numDistrNoZero.getValues(), 2, 20),
              true);
      Histogram.normalize(histNum);
      StatsWriter.writeHistogram(
          histNum,
          "num",
          "freq",
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/numCliques.txt");

      TDoubleDoubleHashMap histSize =
          Histogram.createHistogram(
              sizeDistr, FixedSampleSizeDiscretizer.create(sizeDistr.getValues(), 2, 20), true);
      Histogram.normalize(histSize);
      StatsWriter.writeHistogram(
          histSize,
          "size",
          "freq",
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/numPersons.txt");

      // Clique size vs. degree, discretized into equal-sample bins.
      Discretizer discretizer =
          FixedSampleSizeDiscretizer.create(kSizeValues.toNativeArray(), 20, 20);
      TDoubleArrayList valuesX = new TDoubleArrayList();
      for (int i = 0; i < kSizeValues.size(); i++) {
        valuesX.add(discretizer.discretize(kSizeValues.get(i)));
      }

      Correlations.writeToFile(
          Correlations.mean(valuesX.toNativeArray(), sizeValues.toNativeArray()),
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/size_k.txt",
          "k",
          "size");

      // Clique count vs. degree.
      discretizer = FixedSampleSizeDiscretizer.create(kNumValues.toNativeArray(), 20, 20);
      valuesX = new TDoubleArrayList();
      for (int i = 0; i < kNumValues.size(); i++) {
        valuesX.add(discretizer.discretize(kNumValues.get(i)));
      }

      Correlations.writeToFile(
          Correlations.mean(valuesX.toNativeArray(), numValues.toNativeArray()),
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/num_k.txt",
          "k",
          "n");

      // Clique size vs. clique count.
      Correlations.writeToFile(
          Correlations.mean(numValues2.toNativeArray(), sizeValues.toNativeArray()),
          "/Users/jillenberger/Work/socialnets/data/ivt2009/11-2011/augmented/size_num.txt",
          "num",
          "size");
    } catch (FileNotFoundException e) {
      // Diagnostic output is best-effort; log and continue.
      e.printStackTrace();
    } catch (IOException e) {
      // Diagnostic output is best-effort; log and continue.
      e.printStackTrace();
    }
    logger.info(
        String.format("Inserted %1$s edges, %2$s edges already present.", edgecnt, doublecnt));
    logger.info(String.format("Inserted %1$s edges between at least one ego.", egoEdge));
  }