private int aggregateTimelineCandidates(
      final List<TimelineChunk> timelineChunkCandidates,
      final int aggregationLevel,
      final int chunksToAggregate) {
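    // The candidates are assumed to share a single (sourceId, metricId) pair; the first chunk
    // supplies the ids used in log messages.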
    final TimelineChunk firstCandidate = timelineChunkCandidates.get(0);
    final int sourceId = firstCandidate.getSourceId();
    final int metricId = firstCandidate.getMetricId();
    log.debug(
        "For sourceId {}, metricId {}, looking to aggregate {} candidates in groups of {} chunks",
        new Object[] {sourceId, metricId, timelineChunkCandidates.size(), chunksToAggregate});
    int aggregatesCreated = 0;
    int chunkIndex = 0;
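    // Slide over the candidate list in windows of chunksToAggregate; a trailing remainder
    // smaller than one full window is left for a later aggregation pass.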
    while (timelineChunkCandidates.size() >= chunkIndex + chunksToAggregate) {
      final List<TimelineChunk> chunkCandidates =
          timelineChunkCandidates.subList(chunkIndex, chunkIndex + chunksToAggregate);
      chunkIndex += chunksToAggregate;
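      // Track how many source chunks have been combined.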
      timelineChunksCombined.addAndGet(chunksToAggregate);
      try {
        aggregateHostSampleChunks(chunkCandidates, aggregationLevel);
      } catch (IOException e) {
        log.error(
            String.format(
                "IOException aggregating sourceId %d, metricId %d, looking to aggregate %d candidates in groups of %d chunks",
                sourceId, metricId, timelineChunkCandidates.size(), chunksToAggregate),
            e);
      }
      aggregatesCreated++;
    }

    return aggregatesCreated;
  }
  /**
   * The sequence of events is:
   *
   * <ul>
   *   <li>Build the aggregated TimelineChunk object and save it, setting not_valid to true and
   *       aggregation_level to one more than the level of the chunks being aggregated. This means
   *       it won't be noticed by any of the dashboard queries. The save operation returns the new
   *       timeline_times_id.
   *   <li>Then, in a single transaction, update the aggregated TimelineChunk object to have
   *       not_valid = 0, delete the TimelineChunk objects that were the basis of the aggregation,
   *       and flush any of those TimelineChunks that happen to be in the cache.
   * </ul>
   *
   * @param timelineChunks the TimelineChunks to be aggregated
   * @param aggregationLevel the aggregation level of the chunks to be aggregated
   */
  private void aggregateHostSampleChunks(
      final List<TimelineChunk> timelineChunks, final int aggregationLevel) throws IOException {
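    // The aggregated chunk covers the full span from the first candidate's start time to the
    // last candidate's end time.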
    final TimelineChunk firstTimesChunk = timelineChunks.get(0);
    final TimelineChunk lastTimesChunk = timelineChunks.get(timelineChunks.size() - 1);
    final int chunkCount = timelineChunks.size();
    final int sourceId = firstTimesChunk.getSourceId();
    final DateTime startTime = firstTimesChunk.getStartTime();
    final DateTime endTime = lastTimesChunk.getEndTime();
    final List<byte[]> timeParts = new ArrayList<byte[]>(chunkCount);
    try {
      final List<byte[]> sampleParts = new ArrayList<byte[]>(chunkCount);
      final List<Long> timelineChunkIds = new ArrayList<Long>(chunkCount);
      int sampleCount = 0;
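      // Gather the encoded time bytes, sample bytes, and chunk ids from each source chunk.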
      for (final TimelineChunk timelineChunk : timelineChunks) {
        timeParts.add(timelineChunk.getTimeBytesAndSampleBytes().getTimeBytes());
        sampleParts.add(timelineChunk.getTimeBytesAndSampleBytes().getSampleBytes());
        sampleCount += timelineChunk.getSampleCount();
        timelineChunkIds.add(timelineChunk.getChunkId());
      }
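      // Merge the per-chunk encodings into one combined time stream and one combined sample
      // stream.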
      final byte[] combinedTimeBytes = timelineCoder.combineTimelines(timeParts, sampleCount);
      final byte[] combinedSampleBytes = sampleCoder.combineSampleBytes(sampleParts);
      final int timeBytesLength = combinedTimeBytes.length;
      final int totalSize = 4 + timeBytesLength + combinedSampleBytes.length;
      log.debug(
          "For sourceId {}, aggregationLevel {}, aggregating {} timelines ({} bytes, {} samples)",
          new Object[] {
            firstTimesChunk.getSourceId(),
            firstTimesChunk.getAggregationLevel(),
            timelineChunks.size(),
            totalSize,
            sampleCount
          });
      timelineChunksBytesCreated.addAndGet(totalSize);
      final int totalSampleCount = sampleCount;
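      // Build the aggregated chunk one aggregation level above its sources; the leading 0 is the
      // chunk id, presumably assigned when the chunk is persisted.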
      final TimelineChunk chunk =
          new TimelineChunk(
              0,
              sourceId,
              firstTimesChunk.getMetricId(),
              startTime,
              endTime,
              combinedTimeBytes,
              combinedSampleBytes,
              totalSampleCount,
              aggregationLevel + 1,
              false,
              false);
      chunksToWrite.add(chunk);
      chunkIdsToInvalidateOrDelete.addAll(timelineChunkIds);
      timelineChunksQueuedForCreation.incrementAndGet();

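      // Flush in batches: once enough chunk ids are queued for invalidation or deletion, write
      // everything out in one pass.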
      if (chunkIdsToInvalidateOrDelete.size() >= config.getMaxChunkIdsToInvalidateOrDelete()) {
        performWrites();
      }
    } catch (Exception e) {
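      // Log and swallow, so that a failure aggregating one group does not abort the entire
      // aggregation pass.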
      log.error(
          String.format(
              "Exception aggregating level %d, sourceId %d, metricId %d, startTime %s, endTime %s",
              aggregationLevel, sourceId, firstTimesChunk.getMetricId(), startTime, endTime),
          e);
    }
  }