/**
 * Maps a named interval to an approximate fixed-length step duration.
 * Months, quarters, and years are approximated as 31, 93, and 365 days.
 *
 * @param interval one of "minute", "hour", "day", "week", "month", "quarter", "year"
 * @return the step duration for that interval
 * @throws IllegalArgumentException if the interval name is not recognized
 */
private Duration estimateIntervalStep(String interval) {
   switch (interval) {
     case "minute":
       return Minutes.ONE.toStandardDuration();
     case "hour":
       return Hours.ONE.toStandardDuration();
     case "day":
       return Days.ONE.toStandardDuration();
     case "week":
       return Weeks.ONE.toStandardDuration();
     case "month":
       // Approximation: longest month.
       return Days.days(31).toStandardDuration();
     case "quarter":
       // Approximation: three 31-day months.
       return Days.days(31 * 3).toStandardDuration();
     case "year":
       // Approximation: non-leap year.
       return Days.days(365).toStandardDuration();
     default:
       throw new IllegalArgumentException("Invalid duration specified: " + interval);
   }
 }
  /**
   * Parses a period token of the form "[number][magnitude]" (e.g. "1d") into a
   * Joda-Time period. Supported magnitudes: 's' seconds, 'm' minutes, 'h' hours,
   * 'd' days, 'M' months, 'y' years. An unrecognized magnitude is logged and
   * treated as seconds.
   *
   * @param token token of form "[number][magnitude]" (ex. "1d")
   * @return the period corresponding to the token's value and magnitude
   */
  public static ReadablePeriod getTimePeriod(String token) {
    // Split the token into its numeric prefix and the trailing magnitude character.
    String valString = token.substring(0, token.length() - 1);
    int value = Integer.parseInt(valString);
    char mag = token.charAt(token.length() - 1);

    switch (mag) {
      case 's':
        return Seconds.seconds(value);
      case 'm':
        return Minutes.minutes(value);
      case 'h':
        return Hours.hours(value);
      case 'd':
        return Days.days(value);
      case 'M':
        return Months.months(value);
      case 'y':
        return Years.years(value);
      default:
        // Lenient fallback: warn and interpret the value as seconds.
        logger.warn("Invalid date magnitude: {}. Defaulting to seconds.", mag);
        return Seconds.seconds(value);
    }
  }
  // Test step: finds the single job due within the given number of days from now
  // and executes it, asserting that such a job exists.
  private void when_all_jobs_within_X_days_are_executed(int days) {
    Date dueBefore = DateTime.now().plus(Days.days(days)).toDate();
    Job dueJob = managementService.createJobQuery().duedateLowerThan(dueBefore).singleResult();

    assertThat(dueJob, notNullValue());
    managementService.executeJob(dueJob.getId());
  }
Example #4
0
 /**
  * Sets the raw data age limit in days.
  *
  * @param rawDataAgeLimit the new limit, in days
  * @throws IllegalArgumentException if the limit exceeds RAW_DATA_AGE_LIMIT_MAX
  */
 public void setRawDataAgeLimit(int rawDataAgeLimit) {
   if (rawDataAgeLimit > RAW_DATA_AGE_LIMIT_MAX) {
     String message =
         "The requested limit, " + rawDataAgeLimit + ", exceeds the max age limit of "
             + RAW_DATA_AGE_LIMIT_MAX;
     throw new IllegalArgumentException(message);
   }
   this.rawDataAgeLimit = Days.days(rawDataAgeLimit);
 }
Example #5
0
  /**
   * Translates the configured Mifos working days into their Joda-Time
   * day-of-week equivalents, wrapped as {@link Days} values.
   *
   * @return one Days entry per configured working day, in configuration order
   */
  public List<Days> getWorkingDaysAsJodaTimeDays() {
    List<Days> result = new ArrayList<Days>();

    for (WeekDay workingDay : getWorkingDays()) {
      int jodaDayOfWeek = WeekDay.getJodaDayOfWeekThatMatchesMifosWeekDay(workingDay.getValue());
      result.add(Days.days(jodaDayOfWeek));
    }

    return result;
  }
Example #6
0
 /** Converts the raw integer read from storage into a Joda-Time {@link Days} period. */
 @Override
 protected Days fromNumber(final int dayCount) throws ConversionException {
   return Days.days(dayCount);
 }
Example #7
0
 /** JAXB adapter hook: parses the XML string value as an integer day count. */
 @Override
 public Days unmarshal(String value) throws Exception {
   int dayCount = Integer.parseInt(value);
   return Days.days(dayCount);
 }
Example #8
0
/**
 * Entry point for the metrics subsystem: stores raw numeric measurement data, answers
 * queries over raw and pre-aggregated (1hr / 6hr / 24hr) metrics, and drives the
 * aggregation pipeline. Query methods choose the storage bucket based on how old the
 * requested begin time is, as reported by {@link DateTimeService}.
 *
 * @author John Sanda
 */
public class MetricsServer {

  private final Log log = LogFactory.getLog(MetricsServer.class);

  // Hard upper bound, in days, accepted by setRawDataAgeLimit.
  private static final int RAW_DATA_AGE_LIMIT_MAX = 5;

  private DateTimeService dateTimeService = new DateTimeService();

  private MetricsDAO dao;

  private MetricsConfiguration configuration;

  // Executor used for raw-insert completion callbacks so storage driver threads
  // are not blocked by client callback work.
  private ListeningExecutorService tasks =
      MoreExecutors.listeningDecorator(
          Executors.newCachedThreadPool(new StorageClientThreadFactory("MetricsServerTasks")));

  private InvalidMetricsManager invalidMetricsManager;

  private AggregationManager aggregationManager;

  // Raw data older than this many days is rejected by addNumericData.
  // NOTE(review): Math.min(3, ...) caps the "rhq.metrics.data.age-limit" system property
  // at 3 even though the setter allows values up to RAW_DATA_AGE_LIMIT_MAX (5) —
  // confirm the startup cap is intentional.
  private Days rawDataAgeLimit =
      Days.days(
          Math.min(3, Integer.parseInt(System.getProperty("rhq.metrics.data.age-limit", "3"))));

  public void setDAO(MetricsDAO dao) {
    this.dao = dao;
  }

  public void setConfiguration(MetricsConfiguration configuration) {
    this.configuration = configuration;
  }

  public void setDateTimeService(DateTimeService dateTimeService) {
    this.dateTimeService = dateTimeService;
  }

  /** @return the raw data age limit, in days */
  public int getRawDataAgeLimit() {
    return rawDataAgeLimit.getDays();
  }

  /**
   * Sets the raw data age limit in days.
   *
   * @throws IllegalArgumentException if the limit exceeds {@link #RAW_DATA_AGE_LIMIT_MAX}
   */
  public void setRawDataAgeLimit(int rawDataAgeLimit) {
    if (rawDataAgeLimit > RAW_DATA_AGE_LIMIT_MAX) {
      throw new IllegalArgumentException(
          "The requested limit, "
              + rawDataAgeLimit
              + ", exceeds the max age "
              + "limit of "
              + RAW_DATA_AGE_LIMIT_MAX);
    }
    this.rawDataAgeLimit = Days.days(rawDataAgeLimit);
  }

  public void setIndexPartitions(int indexPartitions) {
    configuration.setIndexPartitions(indexPartitions);
  }

  /** Initializes the aggregation and invalid-metrics managers. Call after the setters. */
  public void init() {
    aggregationManager = new AggregationManager(dao, dateTimeService, configuration);
    invalidMetricsManager = new InvalidMetricsManager(dateTimeService, dao);
  }

  /** A test hook */
  InvalidMetricsManager getInvalidMetricsManager() {
    return invalidMetricsManager;
  }

  public AggregationManager getAggregationManager() {
    return aggregationManager;
  }

  /** Shuts down the aggregation and invalid-metrics managers. */
  public void shutdown() {
    aggregationManager.shutdown();
    invalidMetricsManager.shutdown();
  }

  /** @return the most recently stored raw metric for the schedule */
  public RawNumericMetric findLatestValueForResource(int scheduleId) {
    log.debug("Querying for most recent raw metrics for [scheduleId: " + scheduleId + "]");
    return dao.findLatestRawMetric(scheduleId);
  }

  /**
   * Loads metric data for one schedule, distributed over numberOfBuckets time buckets.
   * The source (raw, 1hr, 6hr, or 24hr data) is chosen by how old beginTime is.
   *
   * @throws IllegalArgumentException if beginTime predates the 24hr data retention range
   */
  public Iterable<MeasurementDataNumericHighLowComposite> findDataForResource(
      int scheduleId, long beginTime, long endTime, int numberOfBuckets) {
    Stopwatch stopwatch = new Stopwatch().start();
    try {
      DateTime begin = new DateTime(beginTime);

      if (dateTimeService.isInRawDataRange(begin)) {
        Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleId, beginTime, endTime);
        return createRawComposites(metrics, beginTime, endTime, numberOfBuckets);
      }

      List<AggregateNumericMetric> metrics = null;
      if (dateTimeService.isIn1HourDataRange(begin)) {
        metrics = dao.findAggregateMetrics(scheduleId, Bucket.ONE_HOUR, beginTime, endTime);
        return createComposites(metrics, beginTime, endTime, numberOfBuckets);
      } else if (dateTimeService.isIn6HourDataRange(begin)) {
        metrics = dao.findAggregateMetrics(scheduleId, Bucket.SIX_HOUR, beginTime, endTime);
        return createComposites(metrics, beginTime, endTime, numberOfBuckets);
      } else if (dateTimeService.isIn24HourDataRange(begin)) {
        metrics = dao.findAggregateMetrics(scheduleId, Bucket.TWENTY_FOUR_HOUR, beginTime, endTime);
        return createComposites(metrics, beginTime, endTime, numberOfBuckets);
      } else {
        throw new IllegalArgumentException(
            "beginTime[" + beginTime + "] is outside the accepted range.");
      }
    } finally {
      stopwatch.stop();
      if (log.isDebugEnabled()) {
        log.debug(
            "Finished calculating resource summary aggregate in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS)
                + " ms");
      }
    }
  }

  /**
   * Loads metric data for a group of schedules, distributed over numberOfBuckets time
   * buckets. Like {@link #findDataForResource} but merges data across scheduleIds.
   */
  public List<MeasurementDataNumericHighLowComposite> findDataForGroup(
      List<Integer> scheduleIds, long beginTime, long endTime, int numberOfBuckets) {
    if (log.isDebugEnabled()) {
      log.debug(
          "Querying for metric data using parameters [scheduleIds: "
              + scheduleIds
              + ", beingTime: "
              + beginTime
              + ", endTime: "
              + endTime
              + ", numberOfBuckets: "
              + numberOfBuckets
              + "]");
    }

    DateTime begin = new DateTime(beginTime);
    if (dateTimeService.isInRawDataRange(begin)) {
      Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleIds, beginTime, endTime);
      return createRawComposites(metrics, beginTime, endTime, numberOfBuckets);
    }
    Bucket bucket = getBucket(begin);
    List<AggregateNumericMetric> metrics = loadMetrics(scheduleIds, beginTime, endTime, bucket);

    return createComposites(metrics, beginTime, endTime, numberOfBuckets);
  }

  /**
   * Computes a single min/max/avg aggregate over all data for one schedule in the
   * given time range. The source bucket is chosen by how old beginTime is.
   */
  public AggregateNumericMetric getSummaryAggregate(int scheduleId, long beginTime, long endTime) {
    Stopwatch stopwatch = new Stopwatch().start();
    try {
      DateTime begin = new DateTime(beginTime);

      if (dateTimeService.isInRawDataRange(begin)) {
        Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleId, beginTime, endTime);
        return calculateAggregatedRaw(metrics, beginTime);
      }

      Bucket bucket = getBucket(begin);
      List<AggregateNumericMetric> metrics =
          dao.findAggregateMetrics(scheduleId, bucket, beginTime, endTime);
      return calculateAggregate(metrics, beginTime, bucket);
    } finally {
      stopwatch.stop();
      if (log.isDebugEnabled()) {
        log.debug(
            "Finished calculating resource summary aggregate for [scheduleId: "
                + scheduleId
                + ", beginTime: "
                + beginTime
                + ", endTime: "
                + endTime
                + "] in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS)
                + " ms");
      }
    }
  }

  /**
   * Asynchronous variant of {@link #getSummaryAggregate(int, long, long)}: the
   * aggregate is computed by a transform applied when the storage query completes.
   */
  public ListenableFuture<AggregateNumericMetric> getSummaryAggregateAsync(
      int scheduleId, long beginTime, long endTime) {
    long start = System.currentTimeMillis();
    try {
      if (log.isDebugEnabled()) {
        log.debug(
            "Calculating resource summary aggregate (async) for [scheduleId: "
                + scheduleId
                + ", beginTime: "
                + beginTime
                + ", endTime: "
                + endTime
                + "]");
      }
      DateTime begin = new DateTime(beginTime);
      StorageResultSetFuture queryFuture;

      if (dateTimeService.isInRawDataRange(begin)) {
        queryFuture = dao.findRawMetricsAsync(scheduleId, beginTime, endTime);
        return Futures.transform(queryFuture, new ComputeRawAggregate(beginTime));
      }
      Bucket bucket = getBucket(begin);
      queryFuture = dao.findAggregateMetricsAsync(scheduleId, bucket, beginTime, endTime);

      return Futures.transform(queryFuture, new ComputeAggregate(beginTime, bucket));
    } finally {
      // NOTE: this only measures the synchronous setup, not the async query itself.
      long end = System.currentTimeMillis();
      if (log.isDebugEnabled()) {
        log.debug(
            "Finished calculating resource summary aggregate (async) in " + (end - start) + " ms");
      }
    }
  }

  /**
   * Computes a single min/max/avg aggregate over all data for a group of schedules
   * in the given time range.
   */
  public AggregateNumericMetric getSummaryAggregate(
      List<Integer> scheduleIds, long beginTime, long endTime) {
    Stopwatch stopwatch = new Stopwatch().start();
    try {
      DateTime begin = new DateTime(beginTime);

      if (dateTimeService.isInRawDataRange(new DateTime(beginTime))) {
        Iterable<RawNumericMetric> metrics = dao.findRawMetrics(scheduleIds, beginTime, endTime);
        return calculateAggregatedRaw(metrics, beginTime);
      }
      Bucket bucket = getBucket(begin);
      List<AggregateNumericMetric> metrics = loadMetrics(scheduleIds, beginTime, endTime, bucket);

      return calculateAggregate(metrics, beginTime, bucket);
    } finally {
      stopwatch.stop();
      if (log.isDebugEnabled()) {
        log.debug(
            "Finished calculating group summary aggregate for [scheduleIds: "
                + scheduleIds
                + ", beginTime: "
                + beginTime
                + ", endTime: "
                + endTime
                + "] in "
                + stopwatch.elapsed(TimeUnit.MILLISECONDS)
                + " ms");
      }
    }
  }

  /**
   * Queries the given bucket for each schedule id in parallel and concatenates the
   * results. On any failure the error is logged and an empty list is returned
   * (best-effort — callers get partial-to-no data rather than an exception).
   */
  private List<AggregateNumericMetric> loadMetrics(
      List<Integer> scheduleIds, long begin, long end, Bucket bucket) {
    List<StorageResultSetFuture> futures =
        new ArrayList<StorageResultSetFuture>(scheduleIds.size());
    for (Integer scheduleId : scheduleIds) {
      futures.add(dao.findAggregateMetricsAsync(scheduleId, bucket, begin, end));
    }
    ListenableFuture<List<ResultSet>> resultSetsFuture = Futures.successfulAsList(futures);
    try {
      List<ResultSet> resultSets = resultSetsFuture.get();
      AggregateNumericMetricMapper mapper = new AggregateNumericMetricMapper();
      List<AggregateNumericMetric> metrics = new ArrayList<AggregateNumericMetric>();
      for (ResultSet resultSet : resultSets) {
        metrics.addAll(mapper.mapAll(resultSet));
      }
      return metrics;
    } catch (Exception e) {
      log.warn(
          "There was an error while fetching "
              + bucket
              + " data for {scheduleIds: "
              + scheduleIds
              + ", beginTime: "
              + begin
              + ", endTime: "
              + end
              + "}",
          e);
      return Collections.emptyList();
    }
  }

  /**
   * Maps a begin time to the aggregate bucket whose retention range contains it.
   *
   * @throws IllegalArgumentException if begin predates the 24hr data retention range
   */
  protected Bucket getBucket(DateTime begin) {
    Bucket bucket;
    if (dateTimeService.isIn1HourDataRange(begin)) {
      bucket = Bucket.ONE_HOUR;
    } else if (dateTimeService.isIn6HourDataRange(begin)) {
      bucket = Bucket.SIX_HOUR;
    } else if (dateTimeService.isIn24HourDataRange(begin)) {
      bucket = Bucket.TWENTY_FOUR_HOUR;
    } else {
      throw new IllegalArgumentException(
          "beginTime[" + begin.getMillis() + "] is outside the accepted range.");
    }
    return bucket;
  }

  // Distributes raw metrics into numberOfBuckets equal time buckets and emits one
  // high/low/avg composite per bucket (each raw value is its own min, max, and avg).
  private List<MeasurementDataNumericHighLowComposite> createRawComposites(
      Iterable<RawNumericMetric> metrics, long beginTime, long endTime, int numberOfBuckets) {
    Buckets buckets = new Buckets(beginTime, endTime, numberOfBuckets);
    for (RawNumericMetric metric : metrics) {
      buckets.insert(
          metric.getTimestamp(), metric.getValue(), metric.getValue(), metric.getValue());
    }

    List<MeasurementDataNumericHighLowComposite> data =
        new ArrayList<MeasurementDataNumericHighLowComposite>();
    for (int i = 0; i < buckets.getNumDataPoints(); ++i) {
      Buckets.Bucket bucket = buckets.get(i);
      data.add(
          new MeasurementDataNumericHighLowComposite(
              bucket.getStartTime(), bucket.getAvg(), bucket.getMax(), bucket.getMin()));
    }
    return data;
  }

  // Distributes aggregate metrics into numberOfBuckets equal time buckets. Metrics
  // flagged invalid are excluded from the result and submitted for recomputation.
  private List<MeasurementDataNumericHighLowComposite> createComposites(
      Iterable<AggregateNumericMetric> metrics, long beginTime, long endTime, int numberOfBuckets) {

    Buckets buckets = new Buckets(beginTime, endTime, numberOfBuckets);
    for (AggregateNumericMetric metric : metrics) {
      if (invalidMetricsManager.isInvalidMetric(metric)) {
        log.warn(
            "The "
                + metric.getBucket()
                + " metric "
                + metric
                + " is invalid. It will be excluded from "
                + "the results sent to the client and we will attempt to recompute the metric.");
        invalidMetricsManager.submit(metric);
      } else {
        buckets.insert(metric.getTimestamp(), metric.getAvg(), metric.getMin(), metric.getMax());
      }
    }

    List<MeasurementDataNumericHighLowComposite> data =
        new ArrayList<MeasurementDataNumericHighLowComposite>();
    for (int i = 0; i < buckets.getNumDataPoints(); ++i) {
      Buckets.Bucket bucket = buckets.get(i);
      data.add(
          new MeasurementDataNumericHighLowComposite(
              bucket.getStartTime(), bucket.getAvg(), bucket.getMax(), bucket.getMin()));
    }
    return data;
  }

  /**
   * Asynchronously stores a batch of raw metrics. Data older than the raw data age
   * limit is skipped (logged, not stored). For each stored datum both the raw row
   * and the index entry are written; the callback fires per datum and once more via
   * onFinish after the last insert completes.
   */
  public void addNumericData(
      final Set<MeasurementDataNumeric> dataSet, final RawDataInsertedCallback callback) {
    if (log.isDebugEnabled()) {
      log.debug("Inserting " + dataSet.size() + " raw metrics");
    }
    final Stopwatch stopwatch = new Stopwatch().start();
    final AtomicInteger remainingInserts = new AtomicInteger(dataSet.size());

    for (final MeasurementDataNumeric data : dataSet) {
      DateTime collectionTimeSlice =
          dateTimeService.getTimeSlice(
              new DateTime(data.getTimestamp()), configuration.getRawTimeSliceDuration());
      Days days = Days.daysBetween(collectionTimeSlice, dateTimeService.now());

      if (days.isGreaterThan(rawDataAgeLimit)) {
        log.info(
            data
                + " is older than the raw data age limit of "
                + rawDataAgeLimit.getDays()
                + " days. It will not be stored.");
      } else {
        StorageResultSetFuture rawFuture = dao.insertRawData(data);
        StorageResultSetFuture indexFuture =
            dao.updateIndex(IndexBucket.RAW, collectionTimeSlice.getMillis(), data.getScheduleId());
        ListenableFuture<List<ResultSet>> insertsFuture =
            Futures.successfulAsList(rawFuture, indexFuture);
        Futures.addCallback(
            insertsFuture,
            new FutureCallback<List<ResultSet>>() {
              @Override
              public void onSuccess(List<ResultSet> result) {
                callback.onSuccess(data);
                // Last insert to complete reports the batch timing and finishes the callback.
                if (remainingInserts.decrementAndGet() == 0) {
                  stopwatch.stop();
                  if (log.isDebugEnabled()) {
                    log.debug(
                        "Finished inserting "
                            + dataSet.size()
                            + " raw metrics in "
                            + stopwatch.elapsed(TimeUnit.MILLISECONDS)
                            + " ms");
                  }
                  callback.onFinish();
                }
              }

              @Override
              public void onFailure(Throwable t) {
                if (log.isDebugEnabled()) {
                  log.debug(
                      "An error occurred while inserting raw data", ThrowableUtil.getRootCause(t));
                } else {
                  log.warn(
                      "An error occurred while inserting raw data: "
                          + ThrowableUtil.getRootMessage(t));
                }
                callback.onFailure(t);
              }
            },
            tasks);
      }
    }
  }

  /**
   * Computes and stores aggregates for all buckets that are ready to be aggregated. This includes
   * raw, 1hr, 6hr, and 24hr data.
   *
   * @return One hour aggregates. That is, any raw data that has been rolled up into one hour
   *     aggregates. The one hour aggregates are returned because they are needed for subsequently
   *     computing baselines.
   */
  public Iterable<AggregateNumericMetric> calculateAggregates() {
    return aggregationManager.run();
  }

  // Folds raw metrics into a single min/max/avg aggregate at the given timestamp.
  // Empty input yields NaN min/max and whatever the mean calculator reports for no samples.
  private AggregateNumericMetric calculateAggregatedRaw(
      Iterable<RawNumericMetric> rawMetrics, long timestamp) {
    double min = Double.NaN;
    double max = min;
    int count = 0;
    ArithmeticMeanCalculator mean = new ArithmeticMeanCalculator();
    double value;

    for (RawNumericMetric metric : rawMetrics) {
      value = metric.getValue();
      if (count == 0) {
        min = value;
        max = min;
      }
      if (value < min) {
        min = value;
      } else if (value > max) {
        max = value;
      }
      mean.add(value);
      ++count;
    }

    // We let the caller handle setting the schedule id because in some cases we do
    // not care about it.
    return new AggregateNumericMetric(
        0, Bucket.ONE_HOUR, mean.getArithmeticMean(), min, max, timestamp);
  }

  // Folds pre-aggregated metrics into one min/max/avg aggregate: overall min of the
  // mins, max of the maxes, and the arithmetic mean of the averages.
  private AggregateNumericMetric calculateAggregate(
      Iterable<AggregateNumericMetric> metrics, long timestamp, Bucket bucket) {
    double min = Double.NaN;
    double max = min;
    int count = 0;
    ArithmeticMeanCalculator mean = new ArithmeticMeanCalculator();

    for (AggregateNumericMetric metric : metrics) {
      if (count == 0) {
        min = metric.getMin();
        max = metric.getMax();
      }
      if (metric.getMin() < min) {
        min = metric.getMin();
      }
      if (metric.getMax() > max) {
        max = metric.getMax();
      }
      mean.add(metric.getAvg());
      ++count;
    }

    // We let the caller handle setting the schedule id because in some cases we do
    // not care about it.
    return new AggregateNumericMetric(0, bucket, mean.getArithmeticMean(), min, max, timestamp);
  }
}
Example #9
0
 /** Returns the date the given number of days before today, as a java.util.Date. */
 private Date daysAgo(final int days) {
   return new LocalDate().minusDays(days).toDate();
 }
 /** Second-level period granularity: one day. */
 @Override
 protected ReadablePeriod getPeriodSecondLevel() {
   return Days.ONE;
 }
 /** First-level period granularity: seven days (one week). */
 @Override
 protected ReadablePeriod getPeriodFirstLevel() {
   return Days.SEVEN;
 }
 /** Length of one interval period in milliseconds: one standard (24-hour) day. */
 private long createIntervalPeriod() {
   return Days.ONE.toStandardDuration().getMillis();
 }