public AnomalyDetectionJobManager(
      Scheduler quartzScheduler,
      AnomalyFunctionSpecDAO specDAO,
      AnomalyFunctionRelationDAO relationDAO,
      AnomalyResultDAO resultDAO,
      SessionFactory sessionFactory,
      MetricRegistry metricRegistry,
      AnomalyFunctionFactory anomalyFunctionFactory,
      FailureEmailConfiguration failureEmailConfig) {

    this.queryCache = CACHE_REGISTRY_INSTANCE.getQueryCache();

    timeSeriesHandler = new TimeSeriesHandler(queryCache);
    timeSeriesResponseConverter = TimeSeriesResponseConverter.getInstance();

    this.quartzScheduler = quartzScheduler;
    this.specDAO = specDAO;
    this.relationDAO = relationDAO;
    this.resultDAO = resultDAO;
    this.sessionFactory = sessionFactory;
    this.metricRegistry = metricRegistry;
    this.sync = new Object();
    this.scheduledJobKeys = new HashMap<>();
    this.anomalyFunctionFactory = anomalyFunctionFactory;
    this.failureEmailConfig = failureEmailConfig;
  }
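
  /**
   * Queries the time series handler for the given request, converts the response into a map of
   * dimension key to metric time series, and runs the configured anomaly function on each entry
   * between windowStart and windowEnd. Entries with fewer than two time windows are skipped.
   * Results for each dimension are passed to handleResults before known anomalies are removed,
   * and all newly detected anomalies are returned.
   */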
  private List<AnomalyResult> exploreCombination(TimeSeriesRequest request) throws Exception {
    LOG.info("Exploring {}", request);
    List<AnomalyResult> anomalyResults = new ArrayList<>();

    // Query server
    TimeSeriesResponse response;
    try {
      LOG.debug("Executing {}", request);
      response = timeSeriesHandler.handle(request);
    } catch (Exception e) {
      throw new JobExecutionException(e);
    }
    Map<DimensionKey, MetricTimeSeries> res =
        timeSeriesResponseConverter.toMap(response, collectionDimensions);

    for (Map.Entry<DimensionKey, MetricTimeSeries> entry : res.entrySet()) {
      if (entry.getValue().getTimeWindowSet().size() < 2) {
        LOG.warn("Insufficient data for {} to run anomaly detection function", entry.getKey());
        continue;
      }

      try {
        // Run algorithm
        DimensionKey dimensionKey = entry.getKey();
        MetricTimeSeries metricTimeSeries = entry.getValue();
        LOG.info(
            "Analyzing anomaly function with dimensionKey: {}, windowStart: {}, windowEnd: {}",
            dimensionKey,
            windowStart,
            windowEnd);

        List<AnomalyResult> results =
            anomalyFunction.analyze(
                dimensionKey, metricTimeSeries, windowStart, windowEnd, knownAnomalies);

        // Handle results
        handleResults(results);

        // Remove any known anomalies
        results.removeAll(knownAnomalies);

        LOG.info(
            "{} has {} anomalies in window {} to {}",
            entry.getKey(),
            results.size(),
            windowStart,
            windowEnd);
        anomalyCounter += results.size();
        anomalyResults.addAll(results);
      } catch (Exception e) {
        LOG.error("Could not compute for {}", entry.getKey(), e);
      }
    }
    return anomalyResults;
  }
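
  // Sketch only: handleResults is defined elsewhere in this class and is not shown in this
  // excerpt. Assuming it persists each detected anomaly through resultDAO inside a Hibernate
  // session obtained from sessionFactory, it could look roughly like the commented code below.
  // The DAO method name, the session/transaction handling, and the spec field copied onto each
  // result are assumptions for illustration, not the actual implementation.
  //
  //   private void handleResults(List<AnomalyResult> results) {
  //     Session session = sessionFactory.openSession();
  //     try {
  //       ManagedSessionContext.bind(session);
  //       Transaction transaction = session.beginTransaction();
  //       try {
  //         for (AnomalyResult result : results) {
  //           // Tie each detected anomaly back to the function that produced it (assumed field).
  //           result.setFunctionId(anomalyFunction.getSpec().getId());
  //           resultDAO.create(result);
  //         }
  //         transaction.commit();
  //       } catch (Exception e) {
  //         transaction.rollback();
  //         LOG.error("Exception in saving anomaly results", e);
  //       }
  //     } finally {
  //       session.close();
  //       ManagedSessionContext.unbind(sessionFactory);
  //     }
  //   }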

  public AnomalyDetectionTaskRunner() {
    queryCache = CACHE_REGISTRY_INSTANCE.getQueryCache();
    timeSeriesHandler = new TimeSeriesHandler(queryCache);
    timeSeriesResponseConverter = TimeSeriesResponseConverter.getInstance();
  }