public void runAdhocConfig(
      AnomalyFunctionSpec spec,
      String windowStartIsoString,
      String windowEndIsoString,
      String executionName)
      throws Exception {
    AnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(spec);

    String triggerKey = String.format("file-based_anomaly_function_trigger_%s", executionName);
    Trigger trigger = TriggerBuilder.newTrigger().withIdentity(triggerKey).startNow().build();

    String jobKey = String.format("file-based_anomaly_function_job_%s", executionName);
    buildAndScheduleJob(
        jobKey, trigger, anomalyFunction, spec, windowStartIsoString, windowEndIsoString);
  }
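
  /**
   * Runs a single ad hoc execution of the anomaly function with the given id:
   * looks up its spec, builds a Quartz trigger that fires immediately, and
   * schedules the job over the [windowStartIsoString, windowEndIsoString) window.
   */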
  public void runAdHoc(Long id, String windowStartIsoString, String windowEndIsoString)
      throws Exception {
    synchronized (sync) {
      AnomalyFunctionSpec spec = specDAO.findById(id);
      if (spec == null) {
        throw new IllegalArgumentException("No function with id " + id);
      }
      AnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(spec);

      String triggerKey = String.format("ad_hoc_anomaly_function_trigger_%d", spec.getId());
      Trigger trigger = TriggerBuilder.newTrigger().withIdentity(triggerKey).startNow().build();

      String jobKey = String.format("ad_hoc_anomaly_function_job_%d", spec.getId());
      buildAndScheduleJob(
          jobKey, trigger, anomalyFunction, spec, windowStartIsoString, windowEndIsoString);
    }
  }
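
  /**
   * Starts recurring execution of the anomaly function with the given id,
   * scheduling it with the cron expression from its spec and tracking the
   * job key in scheduledJobKeys.
   */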
  public void start(Long id) throws Exception {
    synchronized (sync) {
      AnomalyFunctionSpec spec = specDAO.findById(id);
      if (spec == null) {
        throw new IllegalArgumentException("No function with id " + id);
      }
      AnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(spec);

      String triggerKey = String.format("scheduled_anomaly_function_trigger_%d", spec.getId());
      CronTrigger trigger =
          TriggerBuilder.newTrigger()
              .withIdentity(triggerKey)
              .withSchedule(CronScheduleBuilder.cronSchedule(spec.getCron()))
              .build();

      String jobKey = String.format("scheduled_anomaly_function_job_%d", spec.getId());
      scheduledJobKeys.put(id, jobKey);

      // Pass null window bounds so the schedule time determines the detection start/end
      buildAndScheduleJob(jobKey, trigger, anomalyFunction, spec, null, null);
    }
  }
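
  // A minimal, hypothetical helper sketching how the entry points above might be driven
  // from inside the same class; the method name, function id, and ISO-8601 timestamps
  // are illustrative assumptions, not taken from the source.
  void runExample() throws Exception {
    Long functionId = 42L; // made-up id of an existing AnomalyFunctionSpec
    // One-off detection over an explicit window
    runAdHoc(functionId, "2016-01-01T00:00:00.000Z", "2016-01-02T00:00:00.000Z");
    // Recurring detection driven by the cron expression stored in the spec
    start(functionId);
  }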
Example #4
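
  /**
   * Runs one anomaly detection task: resolves the anomaly function and metric from
   * the task info, builds a top-level TimeSeriesRequest over the detection window
   * (with optional filters and a single group-by dimension), and explores it for
   * anomalies.
   */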
  public List<TaskResult> execute(TaskInfo taskInfo, TaskContext taskContext) throws Exception {

    List<TaskResult> taskResult = new ArrayList<>();
    LOG.info("Begin executing task {}", taskInfo);
    resultDAO = taskContext.getResultDAO();
    relationDAO = taskContext.getRelationDAO();
    sessionFactory = taskContext.getSessionFactory();
    anomalyFunctionFactory = taskContext.getAnomalyFunctionFactory();

    anomalyFunctionSpec = taskInfo.getAnomalyFunctionSpec();
    anomalyFunction = anomalyFunctionFactory.fromSpec(anomalyFunctionSpec);
    windowStart = taskInfo.getWindowStartTime();
    windowEnd = taskInfo.getWindowEndTime();

    // Compute metric function
    TimeGranularity timeGranularity =
        new TimeGranularity(
            anomalyFunctionSpec.getBucketSize(), anomalyFunctionSpec.getBucketUnit());
    // TODO put sum into the function config
    metricFunction = new MetricFunction(MetricAggFunction.SUM, anomalyFunctionSpec.getMetric());

    // Collection
    collection = anomalyFunctionSpec.getCollection();

    // Filters
    String filters = anomalyFunctionSpec.getFilters();

    LOG.info(
        "Running anomaly detection job with metricFunction: {}, collection: {}",
        metricFunction,
        collection);

    CollectionSchema collectionSchema;
    try {
      collectionSchema = CACHE_REGISTRY_INSTANCE.getCollectionSchemaCache().get(collection);
    } catch (Exception e) {
      // Fail fast: the dimension names below cannot be resolved without the schema
      LOG.error("Exception when reading collection schema cache", e);
      throw e;
    }
    collectionDimensions = collectionSchema.getDimensionNames();

    // Get existing anomalies for this time range
    knownAnomalies = getExistingAnomalies();

    // Seed request with top-level...
    TimeSeriesRequest topLevelRequest = new TimeSeriesRequest();
    topLevelRequest.setCollectionName(collection);
    List<MetricFunction> metricFunctions = Collections.singletonList(metricFunction);
    List<MetricExpression> metricExpressions = Utils.convertToMetricExpressions(metricFunctions);
    topLevelRequest.setMetricExpressions(metricExpressions);
    topLevelRequest.setAggregationTimeGranularity(timeGranularity);
    topLevelRequest.setStart(windowStart);
    topLevelRequest.setEnd(windowEnd);
    topLevelRequest.setEndDateInclusive(false);
    if (StringUtils.isNotBlank(filters)) {
      topLevelRequest.setFilterSet(ThirdEyeUtils.getFilterSet(filters));
    }
    String exploreDimension = taskInfo.getGroupByDimension();
    if (StringUtils.isNotBlank(exploreDimension)) {
      topLevelRequest.setGroupByDimensions(Collections.singletonList(exploreDimension));
    }

    LOG.info(
        "Running anomaly detection job with windowStart: {}, windowEnd: {}, metricExpressions: {}, timeGranularity: {}",
        windowStart,
        windowEnd,
        metricExpressions,
        timeGranularity);

    List<AnomalyResult> results = exploreCombination(topLevelRequest);
    LOG.info("{} anomalies found in total", anomalyCounter);

    return taskResult;
  }