/** 先通过pipeLineId和DataMediaPairId在数据库里查找对应的tableStat,如果有,则增量更新对应的Table统计状态, 如果没有则将该数据插入 */ public void updateTableStat(TableStat stat) { Assert.assertNotNull(stat); int affect = tableStatDao.modifyTableStat(tableStatModelToDo(stat)); if (affect == 0) { tableStatDao.insertTableStat(tableStatModelToDo(stat)); } if (stat.getStartTime() != null && stat.getEndTime() != null) { if (statUnit <= 0) { insertBehaviorHistory(stat); } else { synchronized (tableStats) { // 插入历史数据表 TableStat old = tableStats.get(stat.getDataMediaPairId()); if (old != null) { // 合并数据 old.setInsertCount(stat.getInsertCount() + old.getInsertCount()); old.setUpdateCount(stat.getUpdateCount() + old.getUpdateCount()); old.setDeleteCount(stat.getDeleteCount() + old.getDeleteCount()); old.setFileCount(stat.getFileCount() + old.getFileCount()); old.setFileSize(stat.getFileSize() + old.getFileSize()); if (stat.getEndTime().after(old.getEndTime())) { old.setEndTime(stat.getEndTime()); } if (stat.getStartTime().before(stat.getStartTime())) { stat.setStartTime(stat.getStartTime()); } } else { tableStats.put(stat.getDataMediaPairId(), stat); } } } } }
/** 列出pairId下,start-end时间段下的tableStat, 首先从数据库中取出这一段时间所有数据,该数据都是根据end_time倒排序的, 每隔1分钟将这些数据分组 */ public Map<Long, BehaviorHistoryInfo> listTimelineBehaviorHistory( TimelineBehaviorHistoryCondition condition) { Assert.assertNotNull(condition); Map<Long, BehaviorHistoryInfo> behaviorHistoryInfos = new LinkedHashMap<Long, BehaviorHistoryInfo>(); List<TableHistoryStatDO> tableHistoryStatDOs = tableHistoryStatDao.listTimelineTableStat(condition); int size = tableHistoryStatDOs.size(); int k = size - 1; for (Long i = condition.getStart().getTime(); i <= condition.getEnd().getTime(); i += 60 * 1000) { BehaviorHistoryInfo behaviorHistoryInfo = new BehaviorHistoryInfo(); List<TableStat> tableStat = new ArrayList<TableStat>(); // 取出每个时间点i以内的数据,k是一个游标,每次遍历时前面已经取过了的数据就不用再遍历了 for (int j = k; j >= 0; --j) { if ((i - tableHistoryStatDOs.get(j).getEndTime().getTime() <= 60 * 1000) && (i - tableHistoryStatDOs.get(j).getEndTime().getTime() >= 0)) { tableStat.add(tableHistoryStatDOToModel(tableHistoryStatDOs.get(j))); k = j - 1; } // 如果不满足if条件,则后面的数据也不用再遍历 else { break; } } if (tableStat.size() > 0) { behaviorHistoryInfo.setItems(tableStat); behaviorHistoryInfos.put(i, behaviorHistoryInfo); } } return behaviorHistoryInfos; }
/** Lists the table statistics recorded under the given synchronization pipeline. */
public List<TableStat> listTableStat(Long pipelineId) {
    Assert.assertNotNull(pipelineId);
    // fetch the DO rows for this pipeline and convert each one to its model form
    List<TableStat> result = new ArrayList<TableStat>();
    for (TableStatDO statDo : tableStatDao.listTableStatsByPipelineId(pipelineId)) {
        result.add(tableStatDOToModel(statDo));
    }
    return result;
}
/** Queries all pipeline-node relations bound to the given node id. */
public List<PipelineNodeRelationDO> listByNodeId(Long nodeId) {
    Assert.assertNotNull(nodeId);
    List<PipelineNodeRelationDO> relations =
        (List<PipelineNodeRelationDO>) getSqlMapClientTemplate().queryForList("listRelationsByNodeId", nodeId);
    return relations;
}
/** Queries all pipeline-node relations for the given pipeline ids. */
public List<PipelineNodeRelationDO> listByPipelineIds(Long... pipelineId) {
    Assert.assertNotNull(pipelineId);
    List<PipelineNodeRelationDO> relations =
        (List<PipelineNodeRelationDO>) getSqlMapClientTemplate().queryForList("listRelationsByPipelineIds", pipelineId);
    return relations;
}
/** Persists changes to an existing pipeline-node relation row. */
public void update(PipelineNodeRelationDO pipelineNodeRelationDo) {
    Assert.assertNotNull(pipelineNodeRelationDo);
    getSqlMapClientTemplate()
        .update("updatePipelineNodeRelation", pipelineNodeRelationDo);
}
/** Removes the pipeline-node relation row with the given id. */
public void delete(Long pipelineNodeRelationId) {
    Assert.assertNotNull(pipelineNodeRelationId);
    getSqlMapClientTemplate()
        .delete("deletePipelineNodeRelationById", pipelineNodeRelationId);
}
/**
 * Inserts a new pipeline-node relation row and returns the same DO
 * (iBATIS populates any generated key on it as a side effect of the insert).
 */
public PipelineNodeRelationDO insert(PipelineNodeRelationDO pipelineNodeRelationDo) {
    Assert.assertNotNull(pipelineNodeRelationDo);
    getSqlMapClientTemplate()
        .insert("insertPipelineNodeRelation", pipelineNodeRelationDo);
    return pipelineNodeRelationDo;
}