@RequestMapping(value = "mrId", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response mrId(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") String identifier,
      @RequestParam(defaultValue = "") String type) {
    Response response = new Response();
    EngineService engineService = getEngineService(clusterName);

    // FIXME handle the case where type is "workflow"
    if ("task".equals(type)) {
      TaskHistoryRemoteService taskHistoryRemoteService =
          engineService.getTaskHistoryRemoteService();
      List<TaskHistory> taskHistory = taskHistoryRemoteService.selectByIdentifier(identifier);
      String[] idList =
          engineService
              .getDesignerRemoteService()
              .idList(taskHistory.get(0).getLogDirectory(), "hadoop.");

      if (idList != null && idList.length > 0) {
        for (String file : idList) {
          if (file.startsWith("hadoop.")) {
            Map<String, String> map = new HashMap<>();
            map.put("id", StringUtils.removePrefix(file, "hadoop.", true));
            response.getList().add(map);
          }
        }
      }
    }

    response.setSuccess(true);
    return response;
  }
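  /**
   * Lists the variables stored on HDFS through the visual service.
   *
   * @param params request parameters; must include "clusterName"
   * @return REST Response JAXB Object
   */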
  @RequestMapping(value = "/listVariablesHdfs", method = RequestMethod.POST)
  @ResponseStatus(HttpStatus.OK)
  public Response listVariablesHdfs(@RequestBody Map params) {

    Response response = new Response();
    try {
      String clusterName = params.get("clusterName").toString();
      EngineService engineService = this.getEngineService(clusterName);

      VisualService service = engineService.getVisualService();
      Map resultMap = service.listVariablesHdfs(params);

      if ((boolean) resultMap.get("success")) {
        response.setSuccess(true);
        response.getMap().putAll(resultMap);
      } else {
        response.setSuccess(false);
      }

    } catch (Exception ex) {
      response.setSuccess(false);
      response.getError().setMessage(ex.getMessage());
      if (ex.getCause() != null) response.getError().setCause(ex.getCause().getMessage());
      response.getError().setException(ExceptionUtils.getFullStackTrace(ex));
      logger.info(ex.toString());
    }
    return response;
  }
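  /**
   * Returns the script of the specified task.
   *
   * @param clusterName cluster name
   * @param identifier workflow execution identifier
   * @param taskId task ID
   * @return REST Response JAXB Object containing the script text
   */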
 @RequestMapping(value = "script", method = RequestMethod.GET)
 @ResponseStatus(HttpStatus.OK)
 @ResponseBody
 public Response getScript(
     @RequestParam(defaultValue = "") String clusterName,
     @RequestParam(defaultValue = "") String identifier,
     @RequestParam(defaultValue = "") String taskId) {
   Response response = new Response();
   EngineService engineService = getEngineService(clusterName);
   TaskHistoryRemoteService taskHistoryRemoteService = engineService.getTaskHistoryRemoteService();
   String script = taskHistoryRemoteService.getScript(identifier, taskId);
   response.setObject(script);
   response.setSuccess(true);
   return response;
 }
  /**
   * Uploads a file.
   *
   * @return REST Response JAXB Object
   */
  @RequestMapping(
      value = "/upload",
      method = RequestMethod.POST,
      consumes = {"multipart/form-data"})
  @ResponseStatus(HttpStatus.OK)
  public ResponseEntity<String> upload(HttpServletRequest req) throws IOException {
    Response response = new Response();

    if (!(req instanceof DefaultMultipartHttpServletRequest)) {
      response.setSuccess(false);
      response.getError().setCause("Invalid Request.");
      response.getError().setMessage("Invalid Request.");
      String json = new ObjectMapper().writeValueAsString(response);
      return new ResponseEntity<>(json, HttpStatus.BAD_REQUEST);
    }

    try {
      DefaultMultipartHttpServletRequest request = (DefaultMultipartHttpServletRequest) req;
      logger.debug(
          "Uploaded File >> Path : {}, Filename : {}, Size: {} bytes",
          new Object[] {
            request.getParameter("path"),
            request.getFile("file").getOriginalFilename(),
            request.getFile("file").getSize()
          });

      String clusterName = request.getParameter("clusterName");

      EngineService engineService = this.getEngineService(clusterName);
      VisualService service = engineService.getVisualService();
      Map resultMap = service.saveFile(request.getFile("file"), request.getParameter("options"));

      response.getMap().putAll(resultMap);
      response.setSuccess(true);
      String json = new ObjectMapper().writeValueAsString(response);
      HttpStatus statusCode = HttpStatus.OK;
      return new ResponseEntity<>(json, statusCode);
    } catch (Exception ex) {
      response.setSuccess(false);
      response.getError().setMessage(ex.getMessage());
      if (ex.getCause() != null) response.getError().setCause(ex.getCause().getMessage());
      response.getError().setException(ExceptionUtils.getFullStackTrace(ex));

      String json = new ObjectMapper().writeValueAsString(response);
      HttpStatus statusCode = HttpStatus.INTERNAL_SERVER_ERROR;

      logger.debug(ExceptionUtils.getFullStackTrace(ex));

      return new ResponseEntity<>(json, statusCode);
    }
  }
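  /**
   * Returns the task history matching the given identifier and task ID.
   *
   * @param clusterName cluster name
   * @param identifier workflow execution identifier
   * @param taskId task ID
   * @return REST Response JAXB Object containing the task history
   */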
 @RequestMapping(value = "/task/get", method = RequestMethod.GET)
 @ResponseStatus(HttpStatus.OK)
 @ResponseBody
 public Response getTask(
     @RequestParam(defaultValue = "") String clusterName,
     @RequestParam(defaultValue = "") String identifier,
     @RequestParam(defaultValue = "") String taskId) {
   Response response = new Response();
   EngineService engineService = getEngineService(clusterName);
   TaskHistoryRemoteService taskHistoryRemoteService = engineService.getTaskHistoryRemoteService();
   TaskHistory history = new TaskHistory();
   history.setIdentifier(identifier);
   history.setTaskId(taskId);
   TaskHistory taskHistory = taskHistoryRemoteService.selectByTaskIdAndIdentifier(history);
   response.setObject(taskHistory);
   response.setSuccess(true);
   return response;
 }
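  /**
   * Returns hourly workflow execution counts for the last 7 days, filtered by status. Regular
   * users (level != 1) only see their own history.
   *
   * @param clusterName cluster name
   * @param status workflow status filter (ALL, SUCCESS, or any other value for failures)
   * @return REST Response JAXB Object containing the time series rows
   */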
  @RequestMapping(value = "timeseries", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  public Response timeseries(@RequestParam String clusterName, @RequestParam String status) {
    Response response = new Response();
    response.setSuccess(true);
    int level = SessionUtils.getLevel();

    ApplicationContext applicationContext = ApplicationContextRegistry.getApplicationContext();
    JdbcTemplate jdbcTemplate = applicationContext.getBean(JdbcTemplate.class);
    String query = null;
    if (level != 1) { // Regular users should only see their own history
      if ("ALL".equals(status)) {
        query =
            "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE USERNAME = '******' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc";
      } else if ("SUCCESS".equals(status)) {
        query =
            "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'SUCCESS' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE USERNAME = '******' AND STATUS = 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc";
      } else {
        query =
            "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'FAILED' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE USERNAME = '******' AND STATUS <> 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc";
      }
    } else {
      if ("ALL".equals(status)) {
        query =
            "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc";
      } else if ("SUCCESS".equals(status)) {
        query =
            "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'SUCCESS' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE STATUS = 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc";
      } else {
        query =
            "select (@row:=@row+1) as num, count(*) as sum, DATE_FORMAT(MAX(START_DATE),'%Y-%m-%d %H') as time, 'FAILED' as type, START_DATE from FL_WORKFLOW_HISTORY, (SELECT @row := 0) r WHERE AND STATUS <> 'SUCCESS' AND START_DATE > DATE_ADD(now(), INTERVAL -7 DAY) GROUP BY DATE_FORMAT(START_DATE,'%Y-%m-%d %H') ORDER BY START_DATE asc";
      }
    }
    List<Map<String, Object>> list =
        jdbcTemplate.queryForList(MessageFormatter.format(query, clusterName).getMessage());
    response.getList().addAll(list);
    return response;
  }
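  /**
   * Kills a running task. When a YARN application ID ("app." file) is found in the task's log
   * directory, the application is killed through the ResourceManager; otherwise, if the task is
   * still RUNNING, its local process is killed.
   *
   * @param clusterName cluster name
   * @param identifier workflow execution identifier
   * @param type history type (only "task" is handled)
   * @return REST Response JAXB Object
   */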
  @RequestMapping(value = "kill", method = RequestMethod.POST)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response jobKill(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") String identifier,
      @RequestParam(defaultValue = "") String type) {
    Response response = new Response();
    EngineConfig engineConfig = getEngineConfig(clusterName);
    EngineService engineService = getEngineService(clusterName);

    // FIXME handle the case where type is "workflow"
    if ("task".equals(type)) {
      TaskHistoryRemoteService taskHistoryRemoteService =
          engineService.getTaskHistoryRemoteService();
      List<TaskHistory> taskHistory = taskHistoryRemoteService.selectByIdentifier(identifier);
      String[] idList =
          engineService
              .getDesignerRemoteService()
              .idList(taskHistory.get(0).getLogDirectory(), "app.");

      // If there is no applicationId, the workflow has not yet been submitted to Hadoop and can be
      // assumed to be a java, python, r, etc. module, so the RUNNING process can be killed.
      if (idList != null && idList.length > 0) {
        for (String file : idList) {
          if (file.startsWith("app.")) {
            ResourceManagerRemoteService service = engineService.getResourceManagerRemoteService();
            service.killApplication(StringUtils.removePrefix(file, "app.", true), engineConfig);
            taskHistory.get(0).setStatus(State.FAILED.toString());
            taskHistoryRemoteService.updateByTaskIdAndIdentifier(taskHistory.get(0));
          }
        }
      } else if ("RUNNING".equals(taskHistory.get(0).getStatus())) {
        engineService.getDesignerRemoteService().killProccess(taskHistory.get(0).getLogDirectory());
        taskHistory.get(0).setStatus(State.FAILED.toString());
        taskHistoryRemoteService.updateByTaskIdAndIdentifier(taskHistory.get(0));
      }
    }

    response.setSuccess(true);
    return response;
  }
  /**
   * Retrieves the workflow execution history matching the specified conditions.
   *
   * @param clusterName cluster name
   * @param identifier workflow execution identifier
   * @param status status code
   * @param sort column name to sort by
   * @param dir sort direction (ASC, DESC)
   * @param start start page
   * @param limit number of rows per page
   * @return list of workflow execution histories
   */
  @RequestMapping(value = "actions", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response getActions(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") String identifier,
      @RequestParam(defaultValue = "ALL") String status,
      @RequestParam(defaultValue = "ID") String sort,
      @RequestParam(defaultValue = "DESC") String dir,
      @RequestParam(defaultValue = "0") int start,
      @RequestParam(defaultValue = "16") int limit) {

    Response response = new Response();
    EngineService engineService = getEngineService(clusterName);
    List<TaskHistory> taskHistories =
        engineService.getTaskHistoryRemoteService().selectByIdentifier(identifier);
    response.getList().addAll(taskHistories);
    response.setSuccess(true);
    return response;
  }
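  /**
   * Returns the log, script, command, or error output of a task, depending on tabConditionKey.
   *
   * @param clusterName cluster name
   * @param identifier workflow execution identifier
   * @param taskId task ID
   * @param tabConditionKey one of "log", "script", "command", "error"
   * @return REST Response JAXB Object containing the requested text
   */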
  @RequestMapping(value = "logs", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response getLogs(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") String identifier,
      @RequestParam(defaultValue = "") String taskId,
      @RequestParam(defaultValue = "") String tabConditionKey) {
    Response response = new Response();
    EngineService engineService = getEngineService(clusterName);
    TaskHistoryRemoteService taskHistoryRemoteService = engineService.getTaskHistoryRemoteService();

    String log;
    String script;
    String command;
    String error;
    Map<String, Object> map = new HashMap<>();

    switch (tabConditionKey) {
      case "log":
        log = taskHistoryRemoteService.getTaskLog(identifier, taskId);
        map.put("log", log);
        break;
      case "script":
        script = taskHistoryRemoteService.getScript(identifier, taskId);
        map.put("script", script);
        break;
      case "command":
        command = taskHistoryRemoteService.getCommand(identifier, taskId);
        map.put("command", command);
        break;
      case "error":
        error = taskHistoryRemoteService.getError(identifier, taskId);
        map.put("error", error);
        break;
    }

    response.getMap().putAll(map);
    response.setSuccess(true);
    return response;
  }
  /**
   * Retrieves the Workflow Monitoring History list.
   *
   * @param clusterName cluster name
   * @param startDate start date
   * @param endDate end date
   * @param status workflow job status
   * @param workflowName workflow name
   * @param jobType workflow job type
   * @param page page number
   * @param start start page
   * @param limit maximum number of rows to return
   * @param node parent node information that the history list belongs to
   * @return Workflow History List
   */
  @RequestMapping(value = "/workflows", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  public Response getWorkflows(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") String startDate,
      @RequestParam(defaultValue = "") String endDate,
      @RequestParam(defaultValue = "") String status,
      @RequestParam(defaultValue = "") String workflowName,
      @RequestParam(defaultValue = "") String jobType,
      @RequestParam(defaultValue = "0") int page,
      @RequestParam(defaultValue = "0") int start,
      @RequestParam(defaultValue = "16") int limit,
      @RequestParam(defaultValue = "") String node) {

    Response response = new Response();
    EngineService engineService = getEngineService(clusterName);
    WorkflowHistoryRemoteService workflowHistoryRemoteService =
        engineService.getWorkflowHistoryRemoteService();
    int level = SessionUtils.getLevel();
    String username = level == 1 ? "" : SessionUtils.getUsername();

    ArrayList<Map> arrayList = new ArrayList<>();

    List<WorkflowHistory> workflowHistories =
        workflowHistoryRemoteService.selectByCondition(
            startDate, endDate, start, limit, username, workflowName, status, "");
    for (WorkflowHistory workflowHistory : workflowHistories) {
      Map map = getNodeForWorkflow(workflowHistory, node);
      arrayList.add(map);
    }
    int total =
        workflowHistoryRemoteService.selectTotalCountByUsername(
            startDate, endDate, start, limit, username, workflowName, status, "");
    response.setTotal(total);

    response.setLimit(arrayList.size());
    response.getList().addAll(arrayList);
    response.setSuccess(true);
    return response;
  }
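  /**
   * Returns the contents of a task's log file. When task.log exists but is empty, err.log is read
   * instead.
   *
   * @param clusterName cluster name
   * @param id task history ID
   * @return REST Response JAXB Object containing the log text
   */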
  @RequestMapping(value = "/task/log", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  public Response getTaskLog(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") Long id) {
    EngineService engineService = getEngineService(clusterName);

    Response response = new Response();
    try {
      TaskHistory taskHistories = engineService.getTaskHistoryRemoteService().select(id);
      String filename = null;
      String task = taskHistories.getLogDirectory() + "/task.log";
      if (new File(task).exists() && new File(task).length() == 0) {
        String err = taskHistories.getLogDirectory() + "/err.log";
        if (new File(err).exists()) {
          filename = err;
        }
      } else {
        filename = task;
      }

      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      FileUtils.copyFile(new File(filename), baos);
      response.getMap().put("text", new String(baos.toByteArray()));
      response.setSuccess(true);
    } catch (Exception ex) {
      // FIXME shouldn't this be changed to throw a WholeBodyException?
      response.setSuccess(false);
      response.getError().setMessage("Unable to load a log file.");
      response.getError().setException(ExceptionUtils.getFullStackTrace(ex));
      if (ex.getCause() != null) response.getError().setCause(ex.getCause().getMessage());
    }
    return response;
  }
  /**
   * Retrieves the top 10 entries matching the specified conditions.
   *
   * @param clusterName Hadoop cluster name
   * @param searchType search type
   * @param startDate start date
   * @param endDate end date
   * @return top 10 list
   */
  @RequestMapping(value = "top10", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response top10(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "ACT") String searchType,
      @RequestParam(defaultValue = "") String startDate,
      @RequestParam(defaultValue = "") String endDate) {
    EngineService engineService = this.getEngineService(clusterName);
    FileSystemAuditRemoteService service = engineService.getFileSystemAuditRemoteService();
    int level = getSessionUserLevel();
    String username = level == 1 ? "" : getSessionUsername();

    Response response = new Response();
    List<Top10> top10s = service.auditTop10(startDate, endDate, searchType, username);

    response.getList().addAll(top10s);
    response.setTotal(top10s.size());
    response.setSuccess(true);

    return response;
  }
  /**
   * Retrieves the file processing history matching the specified conditions.
   *
   * <p>auditConditionMap holds the audit history search conditions.
   *
   * @return list of file processing records
   */
  @RequestMapping(value = "list", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response getAuditHistories(
      @RequestParam String clusterName,
      @RequestParam(defaultValue = "") String startDate,
      @RequestParam(defaultValue = "") String endDate,
      @RequestParam(defaultValue = "") String path,
      @RequestParam(defaultValue = "ALL") String auditType,
      @RequestParam(defaultValue = "0") int nextPage,
      @RequestParam(defaultValue = "10") int limit) {

    EngineService engineService = this.getEngineService(clusterName);
    FileSystemAuditRemoteService service = engineService.getFileSystemAuditRemoteService();
    int level = getSessionUserLevel();
    String username = level == 1 ? "" : getSessionUsername();
    int totalRows =
        service.getTotalCountOfAuditHistories(startDate, endDate, path, auditType, username);

    Map auditConditionMap = new HashMap();

    auditConditionMap.put("level", level);
    auditConditionMap.put("username", username);
    auditConditionMap.put("startDate", startDate);
    auditConditionMap.put("endDate", endDate);
    auditConditionMap.put("path", path);
    auditConditionMap.put("auditType", auditType);
    auditConditionMap.put("nextPage", nextPage);
    auditConditionMap.put("limit", limit);

    List<AuditHistory> auditHistories = service.getAuditHistories(auditConditionMap);

    Response response = new Response();
    response.getList().addAll(auditHistories);
    response.setTotal(totalRows);
    response.setSuccess(true);

    return response;
  }
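  /**
   * Reloads data through the visual service.
   *
   * @param params request parameters; must include "clusterName"
   * @return REST Response JAXB Object
   */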
  @RequestMapping(value = "/reloadData", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  public Response reloadData(@RequestParam Map params) {
    Response response = new Response();
    try {
      String clusterName = params.get("clusterName").toString();
      EngineService engineService = this.getEngineService(clusterName);

      VisualService service = engineService.getVisualService();
      Map resultMap = service.reloadData(params);

      if ((boolean) resultMap.get("success")) {
        response.setSuccess(true);
        response.getMap().putAll(resultMap);
      } else {
        response.setSuccess(false);
      }

    } catch (IOException ex) {
      throw new ServiceException("You can not reload data.", ex);
    }
    return response;
  }
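  /**
   * Retrieves the task histories belonging to the specified workflow execution.
   *
   * @param clusterName cluster name
   * @param identifier workflow execution identifier
   * @return REST Response JAXB Object containing the task history list
   */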
  @RequestMapping(value = "/task/list", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  public Response getTasksOfJob(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "") String sort,
      @RequestParam(defaultValue = "DESC") String dir,
      @RequestParam(defaultValue = "0") int page,
      @RequestParam(defaultValue = "0") int start,
      @RequestParam(defaultValue = "16") int limit,
      @RequestParam(defaultValue = "") String orderby,
      @RequestParam(defaultValue = "") String identifier) {

    EngineService engineService = getEngineService(clusterName);

    Response response = new Response();
    List<TaskHistory> taskHistories =
        engineService.getTaskHistoryRemoteService().selectByIdentifier(identifier);

    response.setLimit(taskHistories.size());
    response.getList().addAll(taskHistories);
    response.setSuccess(true);
    return response;
  }
  /**
   * Retrieves the audit log for the selected start and end dates.
   *
   * @param clusterName Hadoop cluster name
   * @param searchType search type
   * @param startDate start date (yyyy-MM-dd HH)
   * @param endDate end date (yyyy-MM-dd HH)
   * @return trend list
   */
  @RequestMapping(value = "trend", method = RequestMethod.GET)
  @ResponseStatus(HttpStatus.OK)
  @ResponseBody
  public Response trend(
      @RequestParam(defaultValue = "") String clusterName,
      @RequestParam(defaultValue = "ACT") String searchType,
      @RequestParam(defaultValue = "") String startDate,
      @RequestParam(defaultValue = "") String endDate) {

    EngineService engineService = this.getEngineService(clusterName);
    FileSystemAuditRemoteService service = engineService.getFileSystemAuditRemoteService();
    int level = getSessionUserLevel();
    String username = level == 1 ? "" : getSessionUsername();

    SimpleDateFormat hoursFormat = new SimpleDateFormat("yyyy-MM-dd HH");
    SimpleDateFormat daysFormat = new SimpleDateFormat("yyyy-MM-dd");

    Calendar calendar = Calendar.getInstance();
    List<Trend> trendList;
    Date startTime;
    Date endTime;

    try {
      if ("".equals(startDate) && "".equals(endDate)) {
        calendar.setTime(new Date());
        calendar.add(Calendar.HOUR, -12);
        trendList = trends(hoursFormat, calendar, Calendar.HOUR, 12);
      } else if ("".equals(startDate) && !"".equals(endDate)) {
        calendar.setTime(daysFormat.parse(endDate));
        calendar.add(Calendar.HOUR, -1);
        trendList = trends(hoursFormat, calendar, Calendar.HOUR, 24);
      } else {
        // A start date was given: daily buckets from startDate to endDate (today if endDate is empty).
        startTime = daysFormat.parse(startDate);
        calendar.setTime(new Date());
        endTime =
            (endDate.equals(""))
                ? daysFormat.parse(daysFormat.format(calendar.getTime()))
                : daysFormat.parse(endDate);
        long difference = (endTime.getTime() - startTime.getTime()) / (1000 * 60 * 60 * 24);
        calendar.setTime(startTime);
        calendar.add(Calendar.DATE, -1);
        trendList = trends(daysFormat, calendar, Calendar.DATE, (int) difference + 1);
        calendar.add(Calendar.DATE, 1);
      }
    } catch (ParseException e) {
      throw new ServiceException("Unable to parse the date.", e);
    }

    List<Trends> trendsList = service.auditTrend(startDate, endDate, searchType, username);
    HashMap<String, String> trendTitle = new HashMap<>();

    for (Trends trends : trendsList) {
      String trendsSearchType = trends.getSearchType();
      if (!trendTitle.containsKey(trendsSearchType)) {
        trendTitle.put(trendsSearchType, "data" + (trendTitle.size() + 1));
      }

      // Add the count to the trend entries with the matching date
      for (Trend trend : trendList) {
        if (trend.getTime().equals(trends.getTime())) {
          Integer position =
              Integer.parseInt(trendTitle.get(trendsSearchType).replaceAll("[^\\d]", ""));
          trend.setData(position, trend.getData(position) + trends.getCount());
        }
      }
    }

    Response response = new Response();
    response.getMap().putAll(trendTitle);
    response.getList().addAll(trendList);
    response.setTotal(trendList.size());
    response.setSuccess(true);

    return response;
  }