@RequestMapping(value = "timeseries", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) public Response timeseries(@RequestParam String clusterName, @RequestParam String status) { Response response = new Response(); response.setSuccess(true); int level = SessionUtils.getLevel(); ApplicationContext applicationContext = ApplicationContextRegistry.getApplicationContext(); JdbcTemplate jdbcTemplate = applicationContext.getBean(JdbcTemplate.class); String query = null; if (level != 1) { // 일반 사용자의 경우 자기것만 보여줘야 함 if ("ALL".equals(status)) { query = "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE USERNAME = '******' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc"; } else if ("SUCCESS".equals(status)) { query = "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'SUCCESS' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE USERNAME = '******' AND STATUS = 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc"; } else { query = "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'FAILED' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE USERNAME = '******' AND STATUS <> 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc"; } } else { if ("ALL".equals(status)) { query = "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc"; } else if ("SUCCESS".equals(status)) { 
query = "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'SUCCESS' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE STATUS = 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc"; } else { query = "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'FAILED' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE AND STATUS <> 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc"; } } List<Map<String, Object>> list = jdbcTemplate.queryForList(MessageFormatter.format(query, clusterName).getMessage()); response.getList().addAll(list); return response; }
/**
 * Bootstraps this activity's state from the process instance, then delegates to
 * {@code doExecute}.
 *
 * <p>Resolves local/global variables into {@code params}, {@code workflowVariables} and
 * {@code mergedParams}; determines async vs. sync from the gateway parallel vectors; and,
 * when this task is a subflow task, loads and merges the subflow's variables.
 *
 * @param instance the running process instance; must carry "user" and "variable" entries
 * @throws Exception if this task has more than one incoming transition
 */
@Override
protected void executeActivity(final ProcessInstance instance) throws Exception {
    this.instance = instance;
    this.user = (User) instance.get("user");
    Map variable = (Map) instance.get("variable");
    String taskId = this.getTaskId();
    // Task-local variables keyed by task id; only adopted if present for this task.
    Map local = (Map) variable.get("local");
    if (local.containsKey(taskId)) {
        this.params = new TypedMap((Map) local.get(taskId));
    }
    Map global = (Map) variable.get("global");
    List parallelVectors = (List) variable.get("parallelVectors");
    this.parallelVectors = parallelVectors;
    this.workflowVariables = new TypedMap(global);
    // mergedParams = globals overridden by task-local params (local wins).
    this.mergedParams = new TypedMap();
    this.mergedParams.putAll(this.workflowVariables);
    this.mergedParams.putAll(this.params);
    // Determine from the gateway parallel-vector list whether this task is async or sync.
    // A service task must have exactly one incoming transition.
    if (getIncomingTransitions().size() > 1)
        throw new Exception(
                "Incoming transaction of " + getTaskId() + " must have one. Current "
                        + getIncomingTransitions().size() + " transactions occurred.");
    Transition transition = getIncomingTransitions().get(0);
    Activity sourceActivity = transition.getSourceActivity();
    String sourceId = sourceActivity.getTracingTag();
    // Find the edge (source -> this task) in the parallel vectors; its "parallel" flag
    // decides whether this task runs asynchronously.
    for (int i = 0; i < parallelVectors.size(); i++) {
        Map parallelVector = (Map) parallelVectors.get(i);
        String fromNode = parallelVector.get("fromNode").toString();
        String toNode = parallelVector.get("toNode").toString();
        boolean parallel = (boolean) parallelVector.get("parallel");
        if (fromNode.equals(sourceId) && toNode.equals(this.getTaskId())) {
            this.isAsync = parallel;
        }
    }
    /*
     * If this task is a subflow task, obtain the subflow's variables and build a merged
     * map in which non-protected variables are overwritten by the current workflow's
     * variables.
     */
    if (this.getClass().getName().equals(SubflowTask.class.getName())) {
        this.isSubflow = true;
        Long treeId = Long.parseLong(getParams().getString("treeId"));
        this.subflowTreeId = treeId;
        // Load the subflow's original global variables.
        ApplicationContext context = ApplicationContextRegistry.getApplicationContext();
        WorkflowService workflowService = context.getBean(WorkflowService.class);
        Workflow workflow = workflowService.getByTreeId(treeId);
        this.subflowName = workflow.getWorkflowName();
        Map subflowvars = getSubWorkflowVariables(workflow);
        Map subflowGlobal = (Map) subflowvars.get("global");
        // Convert the subflow variables entered in the UI into a mergeable form
        // (UI-entered values override the subflow's own globals).
        Map subflowmap = new HashMap();
        subflowmap.putAll(subflowGlobal);
        subflowmap.putAll(getGridSubflowVariables());
        this.subflowParams = new TypedMap(subflowmap);
        // Override with the current workflow's variables, but only for variables
        // that are not protected.
        Map subflowMergedmap = new HashMap();
        subflowMergedmap.putAll(subflowmap);
        Set<String> parentvarkeys = this.workflowVariables.keySet();
        for (String parentvarkey : parentvarkeys) {
            if (!isProtectedVariable(parentvarkey)) {
                subflowMergedmap.put(parentvarkey, this.workflowVariables.get(parentvarkey));
            }
        }
        this.subflowMergedParams = new TypedMap(subflowMergedmap);
    }
    // Debug-dump the resolved local and global variables.
    if (logger.isDebugEnabled()) {
        Set<String> names = params.keySet();
        for (String name : names) {
            logger.debug("[Local Variable] {} = {}", name, params.get(name));
        }
        Set keySet = workflowVariables.keySet();
        for (Object key : keySet) {
            logger.debug("[Global Parameter] {} = {}", key, workflowVariables.get(key));
        }
    }
    this.doExecute(instance, this.params);
}