private List<AnomalyResult> getExistingAnomalies() {
  List<AnomalyResult> results = new ArrayList<>();
  Session session = sessionFactory.openSession();
  try {
    ManagedSessionContext.bind(session);
    Transaction transaction = session.beginTransaction();
    try {
      // The ones for this function
      results.addAll(resultDAO.findAllByCollectionTimeAndFunction(collection, windowStart,
          windowEnd, anomalyFunction.getSpec().getId()));

      // The ones for any related functions
      List<AnomalyFunctionRelation> relations =
          relationDAO.findByParent(anomalyFunction.getSpec().getId());
      for (AnomalyFunctionRelation relation : relations) {
        results.addAll(resultDAO.findAllByCollectionTimeAndFunction(collection, windowStart,
            windowEnd, relation.getChildId()));
      }

      transaction.commit();
    } catch (Exception e) {
      transaction.rollback();
      // Log through the class logger rather than printing the stack trace; note that on
      // failure the caller still receives whatever results were loaded before the error.
      LOG.error("Error fetching existing anomalies for function {}",
          anomalyFunction.getSpec().getId(), e);
    }
  } finally {
    session.close();
    ManagedSessionContext.unbind(sessionFactory);
  }
  return results;
}
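// For reference, a minimal sketch of what resultDAO.findAllByCollectionTimeAndFunction
// could look like. This is an assumption, not the project's actual DAO: it presumes the
// DAO extends Dropwizard's io.dropwizard.hibernate.AbstractDAO<AnomalyResult>, that
// windowStart/windowEnd are Joda-Time DateTime values stored as epoch millis, and that a
// named HQL query with these parameter names exists; the real query name, parameter
// names, and time representation may differ.
public List<AnomalyResult> findAllByCollectionTimeAndFunction(String collection,
    DateTime windowStart, DateTime windowEnd, long functionId) {
  // namedQuery(...) and list(...) are inherited from AbstractDAO
  return list(namedQuery("AnomalyResult#findAllByCollectionTimeAndFunction")
      .setParameter("collection", collection)
      .setParameter("startTimeUtc", windowStart.getMillis())
      .setParameter("endTimeUtc", windowEnd.getMillis())
      .setParameter("functionId", functionId));
}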
private void handleResults(List<AnomalyResult> results) {
  Session session = sessionFactory.openSession();
  try {
    ManagedSessionContext.bind(session);
    Transaction transaction = session.beginTransaction();
    try {
      // Properties that always come from the function spec (invariant across results)
      AnomalyFunctionSpec spec = anomalyFunction.getSpec();
      for (AnomalyResult result : results) {
        result.setFunctionId(spec.getId());
        result.setFunctionType(spec.getType());
        result.setFunctionProperties(spec.getProperties());
        result.setCollection(spec.getCollection());
        result.setMetric(spec.getMetric());
        result.setFilters(spec.getFilters());

        // Make sure score and weight are valid numbers before persisting
        result.setScore(normalize(result.getScore()));
        result.setWeight(normalize(result.getWeight()));

        resultDAO.createOrUpdate(result);
      }
      transaction.commit();
    } catch (Exception e) {
      transaction.rollback();
      throw new RuntimeException(e);
    }
  } finally {
    session.close();
    ManagedSessionContext.unbind(sessionFactory);
  }
}
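// normalize(...) is called above but not shown in this section. A minimal sketch of one
// plausible implementation, assuming the intent is simply to replace non-finite doubles
// with a value the database can store; both the helper and its sentinel value are
// assumptions, not the original code.
private double normalize(double value) {
  if (Double.isNaN(value) || Double.isInfinite(value)) {
    return 0.0; // assumed sentinel for NaN/Infinity; the real code may use another value
  }
  return value;
}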
private List<AnomalyResult> exploreCombination(TimeSeriesRequest request) throws Exception {
  LOG.info("Exploring {}", request);
  List<AnomalyResult> allResults = new ArrayList<>();

  // Query server
  TimeSeriesResponse response;
  try {
    LOG.debug("Executing {}", request);
    response = timeSeriesHandler.handle(request);
  } catch (Exception e) {
    throw new JobExecutionException(e);
  }

  Map<DimensionKey, MetricTimeSeries> res =
      timeSeriesResponseConverter.toMap(response, collectionDimensions);
  for (Map.Entry<DimensionKey, MetricTimeSeries> entry : res.entrySet()) {
    if (entry.getValue().getTimeWindowSet().size() < 2) {
      LOG.warn("Insufficient data for {} to run anomaly detection function", entry.getKey());
      continue;
    }
    try {
      // Run algorithm
      DimensionKey dimensionKey = entry.getKey();
      MetricTimeSeries metricTimeSeries = entry.getValue();
      LOG.info("Analyzing anomaly function with dimensionKey: {}, windowStart: {}, windowEnd: {}",
          dimensionKey, windowStart, windowEnd);
      List<AnomalyResult> results = anomalyFunction.analyze(dimensionKey, metricTimeSeries,
          windowStart, windowEnd, knownAnomalies);

      // Handle results
      handleResults(results);

      // Remove any known anomalies so only newly detected ones are counted
      results.removeAll(knownAnomalies);
      LOG.info("{} has {} anomalies in window {} to {}", entry.getKey(), results.size(),
          windowStart, windowEnd);
      anomalyCounter += results.size();

      // Accumulate across all dimension combinations; previously only the last
      // iteration's results (or null) were returned
      allResults.addAll(results);
    } catch (Exception e) {
      LOG.error("Could not compute for {}", entry.getKey(), e);
    }
  }
  return allResults;
}
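// getExistingAnomalies() and handleResults(...) repeat the same session/transaction
// scaffolding. A hedged refactor sketch: a hypothetical helper (not present in the
// original code) that centralizes bind/commit/rollback/unbind so each call site only
// supplies the work to run inside the transaction. Shown to illustrate the design
// choice; it assumes Java 8's java.util.function.Function is available.
private <T> T runInTransaction(java.util.function.Function<Session, T> work) {
  Session session = sessionFactory.openSession();
  try {
    ManagedSessionContext.bind(session);
    Transaction transaction = session.beginTransaction();
    try {
      T result = work.apply(session);
      transaction.commit();
      return result;
    } catch (Exception e) {
      transaction.rollback();
      throw new RuntimeException(e);
    }
  } finally {
    session.close();
    ManagedSessionContext.unbind(sessionFactory);
  }
}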