/**
 * Returns whether any (non-null) aggregation in any report provides a representation of the
 * given class for the named operation.
 *
 * @param operation name of the operation whose stats are inspected
 * @param reportAggregationMap aggregations grouped by report; null entries are skipped
 * @param representationClass representation type requested from the operation stats
 * @param representationArgs extra arguments forwarded to {@code getRepresentation}
 * @return true if at least one aggregation has stats for {@code operation} that yield a
 *     non-null representation of {@code representationClass}
 */
protected static boolean hasRepresentation(
    final String operation,
    Map<Report, List<Aggregation>> reportAggregationMap,
    final Class<?> representationClass,
    final Object... representationArgs) {
  // Condition on a single aggregation: does it carry the requested representation?
  final Projections.Condition<Aggregation> hasIt =
      new Projections.Condition<Aggregation>() {
        @Override
        public boolean accept(Aggregation aggregation) {
          OperationStats stats = aggregation.totalStats.getOperationsStats().get(operation);
          if (stats == null) {
            return false;
          }
          return stats.getRepresentation(representationClass, representationArgs) != null;
        }
      };
  // Outer pass: any aggregation list (skipping nulls) containing such an aggregation.
  return Projections.any(
      Projections.notNull(reportAggregationMap.values()),
      new Projections.Condition<List<Aggregation>>() {
        @Override
        public boolean accept(List<Aggregation> aggregations) {
          return Projections.any(Projections.notNull(aggregations), hasIt);
        }
      });
}
 /**
  * Returns the maximum number of per-thread statistics recorded for the given slave across
  * all aggregations' iterations, or 0 if there are none.
  *
  * @param aggregations aggregations to scan
  * @param slaveIndex index of the slave whose statistics are counted
  * @return the largest statistics-list size observed, or 0 when empty/absent
  */
 public int getMaxThreads(List<Aggregation> aggregations, final int slaveIndex) {
   // Project each aggregation to the number of statistics entries for this slave
   // (0 when the slave has no statistics in that iteration).
   Projections.Func<Aggregation, Integer> threadCount =
       new Projections.Func<Aggregation, Integer>() {
         @Override
         public Integer project(Aggregation aggregation) {
           List<Statistics> perSlave = aggregation.iteration.getStatistics(slaveIndex);
           if (perSlave == null) {
             return 0;
           }
           return perSlave.size();
         }
       };
   Integer max = Projections.max(Projections.project(aggregations, threadCount));
   return max == null ? 0 : max;
 }
  /**
   * Aggregates {@link DataInsertAck}s from all slaves after the random-data load: logs a
   * summary of what was written (per node and cluster-wide), registers per-node results
   * (key counts, bytes written, optional target memory use and word-in-data counts) under a
   * "Random_Data_Stage" test in the master report, and stores the total number of bytes
   * written in the master state under {@code RANDOMDATA_TOTALBYTES_KEY}.
   *
   * @param acks acks gathered from the slaves; entries that are not {@link DataInsertAck}
   *     are ignored
   * @return the superclass result if it is an error, otherwise {@link StageResult#SUCCESS}
   */
  @Override
  public StageResult processAckOnMaster(List<DistStageAck> acks) {
    StageResult result = super.processAckOnMaster(acks);
    if (result.isError()) return result;

    log.info("--------------------");
    // Describe how the cache was sized: by heap percentage, by absolute memory target,
    // or by an explicit value count.
    if (ramPercentage > 0) {
      String dataKind = stringData ? "String objects" : "byte arrays";
      log.info(
          "Filled cache with "
              + dataKind
              + " totaling "
              + Math.round(ramPercentage * 100)
              + "% of the Java heap");
    }
    if (ramPercentage < 0 && targetMemoryUse > 0) {
      String dataKind = stringData ? "String objects" : "byte arrays";
      log.info("Filled cache with " + dataKind + " totaling " + Utils.kbString(targetMemoryUse));
    }
    if (valueCount > 0) {
      if (stringData) {
        // valueSize * 2: Java chars are two bytes each, so a String of valueSize
        // characters occupies roughly twice as many bytes.
        log.info(
            "Filled cache with "
                + Utils.kbString((valueSize * 2) * valueCount)
                + " of "
                + valueSize
                + " character String objects");
      } else {
        log.info(
            "Filled cache with "
                + Utils.kbString(valueSize * valueCount)
                + " of "
                + Utils.kbString(valueSize)
                + " byte arrays");
      }
    }

    Report report = masterState.getReport();
    Report.Test test = report.createTest("Random_Data_Stage", null, true);
    int testIteration = test.getIterations().size();

    // Per-slave result maps, keyed by slave index.
    Map<Integer, Report.SlaveResult> nodeKeyCountsResult =
        new HashMap<Integer, Report.SlaveResult>();
    Map<Integer, Report.SlaveResult> nodeTargetMemoryUseResult =
        new HashMap<Integer, Report.SlaveResult>();
    Map<Integer, Report.SlaveResult> nodeCountOfWordsInDataResult =
        new HashMap<Integer, Report.SlaveResult>();
    Map<Integer, Report.SlaveResult> nodeBytesWritten = new HashMap<Integer, Report.SlaveResult>();

    long totalValues = 0;
    long totalBytes = 0;
    long totalNodeWordCount = 0;
    // TreeMap keeps the cluster-wide word histogram sorted for the log output below.
    Map<String, Integer> clusterWordCount = new TreeMap<String, Integer>();
    for (DataInsertAck ack : Projections.instancesOf(acks, DataInsertAck.class)) {
      if (ack.wordCount != null) {
        // Merge this node's word histogram into the cluster-wide histogram.
        for (Map.Entry<String, Integer> entry : ack.wordCount.entrySet()) {
          Integer current = clusterWordCount.get(entry.getKey());
          clusterWordCount.put(
              entry.getKey(), current == null ? entry.getValue() : current + entry.getValue());
        }
      }

      nodeKeyCountsResult.put(
          ack.getSlaveIndex(), new Report.SlaveResult(Long.toString(ack.nodeKeyCount), false));
      nodeBytesWritten.put(
          ack.getSlaveIndex(), new Report.SlaveResult(Long.toString(ack.bytesWritten), false));
      test.addStatistics(
          testIteration, ack.getSlaveIndex(), Collections.singletonList(ack.nodePutStats));

      totalValues += ack.nodePutCount;
      totalBytes += ack.bytesWritten;
      String logInfo =
          "Slave "
              + ack.getSlaveIndex()
              + " wrote "
              + ack.nodePutCount
              + " values to the cache with a total size of "
              + Utils.kbString(ack.bytesWritten);
      if (ramPercentage > 0) {
        logInfo += "; targetMemoryUse = " + Utils.kbString(ack.targetMemoryUse);
        nodeTargetMemoryUseResult.put(
            ack.getSlaveIndex(), new Report.SlaveResult(Long.toString(ack.targetMemoryUse), false));
      }
      if (stringData) {
        // FIX: accumulate per-node word counts. Previously totalNodeWordCount was never
        // updated, so the "Count of words in data per node" aggregate below always read "0".
        totalNodeWordCount += ack.countOfWordsInData;
        logInfo += "; countOfWordsInData = " + ack.countOfWordsInData;
        nodeCountOfWordsInDataResult.put(
            ack.getSlaveIndex(),
            new Report.SlaveResult(Long.toString(ack.countOfWordsInData), false));
      }
      log.info(logInfo);
    }
    log.info(
        "The cache contains "
            + totalValues
            + " values with a total size of "
            + Utils.kbString(totalBytes));
    if (limitWordCount) {
      // Without word sharing, each slave generates its own pool of up to maxWordCount words.
      int totalWordCount = shareWords ? maxWordCount : maxWordCount * slaveState.getClusterSize();
      log.info(
          "Up to "
              + totalWordCount
              + " words were generated with a maximum length of "
              + maxWordLength
              + " characters");
    }
    if (!clusterWordCount.isEmpty()) {
      // FIX: added the missing space before "words" (previously logged e.g. "42words ...").
      log.info(clusterWordCount.size() + " words were actually generated");
      log.info("--------------------");
      log.info("Cluster wide word count:");
      for (Map.Entry<String, Integer> entry : clusterWordCount.entrySet()) {
        log.info("word: " + entry.getKey() + "; count: " + entry.getValue());
      }
      // TODO Will this take too much memory?
      //         masterState.put(RANDOMDATA_CLUSTER_WORDCOUNT_KEY, clusterWordCount);
    }
    log.info("--------------------");

    masterState.put(RANDOMDATA_TOTALBYTES_KEY, totalBytes);
    test.addResult(
        testIteration,
        new Report.TestResult(
            "Kilobytes written per node", nodeBytesWritten, Utils.kbString(totalBytes), false));

    test.addResult(
        testIteration, new Report.TestResult("Key count per node", nodeKeyCountsResult, "", false));
    if (!nodeTargetMemoryUseResult.isEmpty()) {
      test.addResult(
          testIteration,
          new Report.TestResult(
              "Target memory use per node",
              nodeTargetMemoryUseResult,
              Utils.kbString(totalBytes),
              false));
    }
    if (!nodeCountOfWordsInDataResult.isEmpty()) {
      test.addResult(
          testIteration,
          new Report.TestResult(
              "Count of words in data per node",
              nodeCountOfWordsInDataResult,
              Long.toString(totalNodeWordCount),
              false));
    }

    return StageResult.SUCCESS;
  }
  /**
   * Adds one data point to the comparison chart for every (report, aggregation) pair in the
   * map, for the named operation and the requested chart type.
   *
   * <p>Category name is the report's configuration name, optionally suffixed with
   * {@code subCategory}, and with the cluster size appended when the same configuration
   * appears in more than one report (multiple clusters). The sub-category axis is the
   * iteration id when there are multiple iterations, otherwise the cluster size.
   *
   * @param chart target chart; values are added via {@code chart.addValue}
   * @param subCategory optional extra label appended to the category name; may be null
   * @param operation name of the operation whose stats are charted
   * @param chartType which representation to extract and plot
   * @param reportAggregationMap aggregations grouped by report
   * @return true if a value was added for every aggregation; false as soon as any
   *     aggregation lacks stats for the operation or the required representation
   */
  protected boolean addToChart(
      ComparisonChart chart,
      String subCategory,
      String operation,
      ChartType chartType,
      Map<Report, List<Aggregation>> reportAggregationMap) {
    // Group reports by configuration name so we can detect when one configuration was
    // run on several clusters (and must be disambiguated by cluster size below).
    Map<String, List<Report>> byConfiguration =
        Projections.groupBy(
            reportAggregationMap.keySet(),
            new Projections.Func<Report, String>() {
              @Override
              public String project(Report report) {
                return report.getConfiguration().name;
              }
            });
    for (Map.Entry<Report, List<Aggregation>> entry : reportAggregationMap.entrySet()) {
      for (Aggregation aggregation : entry.getValue()) {
        OperationStats operationStats = aggregation.totalStats.getOperationsStats().get(operation);
        // Missing stats for any aggregation aborts the whole chart (partial charts are
        // not drawn).
        if (operationStats == null) return false;

        String categoryName = entry.getKey().getConfiguration().name;
        if (subCategory != null) {
          categoryName = String.format("%s, %s", categoryName, subCategory);
        }
        // if there are multiple reports for the same configuration (multiple clusters), use cluster
        // size in category
        if (byConfiguration.get(entry.getKey().getConfiguration().name).size() > 1) {
          categoryName =
              String.format("%s, size %d", categoryName, entry.getKey().getCluster().getSize());
        }

        // X-axis value: iteration id when iterating, otherwise cluster size.
        double subCategoryNumeric;
        String subCategoryValue;
        if (maxIterations > 1) {
          subCategoryNumeric = aggregation.iteration.id;
          subCategoryValue =
              aggregation.iteration.getValue() != null
                  ? aggregation.iteration.getValue()
                  : String.valueOf(aggregation.iteration.id);
        } else {
          subCategoryNumeric = entry.getKey().getCluster().getSize();
          subCategoryValue = String.format("Size %.0f", subCategoryNumeric);
        }
        switch (chartType) {
          case MEAN_AND_DEV:
            {
              // Mean response time with deviation as the error bar; toMillis presumably
              // converts from nanoseconds — TODO confirm against toMillis' definition.
              MeanAndDev meanAndDev = operationStats.getRepresentation(MeanAndDev.class);
              if (meanAndDev == null) return false;
              chart.addValue(
                  toMillis(meanAndDev.mean),
                  toMillis(meanAndDev.dev),
                  categoryName,
                  subCategoryNumeric,
                  subCategoryValue);
              break;
            }
          case OPERATION_THROUGHPUT_GROSS:
            {
              // getEnd()/getBegin() appear to be in milliseconds, converted to the
              // nanosecond duration the representation expects — verify against
              // Statistics' time-unit contract.
              OperationThroughput throughput =
                  operationStats.getRepresentation(
                      OperationThroughput.class,
                      TimeUnit.MILLISECONDS.toNanos(
                          aggregation.totalStats.getEnd() - aggregation.totalStats.getBegin()));
              if (throughput == null) return false;
              chart.addValue(
                  throughput.gross, 0, categoryName, subCategoryNumeric, subCategoryValue);
              break;
            }
          case OPERATION_THROUGHPUT_NET:
            {
              // Same duration computation as the gross case; only the plotted field differs.
              OperationThroughput throughput =
                  operationStats.getRepresentation(
                      OperationThroughput.class,
                      TimeUnit.MILLISECONDS.toNanos(
                          aggregation.totalStats.getEnd() - aggregation.totalStats.getBegin()));
              if (throughput == null) return false;
              chart.addValue(throughput.net, 0, categoryName, subCategoryNumeric, subCategoryValue);
              break;
            }
          case DATA_THROUGHPUT:
            {
              // NOTE(review): unlike the operation-throughput cases, the duration here is
              // passed without a toNanos conversion — confirm DataThroughput expects ms.
              DataThroughput dataThroughput =
                  operationStats.getRepresentation(
                      DataThroughput.class,
                      aggregation.totalThreads,
                      aggregation.totalStats.getEnd() - aggregation.totalStats.getBegin());
              if (dataThroughput == null) return false;
              // Scale bytes/unit to MB for readability (1024 * 1024).
              chart.addValue(
                  dataThroughput.meanThroughput / (1024.0 * 1024.0),
                  dataThroughput.deviation / (1024.0 * 1024.0),
                  categoryName,
                  subCategoryNumeric,
                  subCategoryValue);
              break;
            }
        }
      }
    }
    return true;
  }