/**
 * Substitutes schedule-time placeholders in a job script.
 *
 * <p>Replaces {@code ${j_set}} with the history's statistic end-time string, and
 * {@code ${j_est}} with the same instant expressed as epoch seconds, parsed in the
 * history's timezone. If either the end time or the timezone is missing, the script
 * is returned untouched.
 *
 * @param history job-run metadata supplying the statistic end time and timezone
 * @param script  raw script text that may contain {@code ${j_set}} / {@code ${j_est}}
 * @return the script with placeholders substituted where possible
 */
private static String replaceScript(JobHistory history, String script) {
  if (StringUtils.isEmpty(history.getStatisEndTime()) || StringUtils.isEmpty(history.getTimezone())) {
    return script;
  }
  script = script.replace("${j_set}", history.getStatisEndTime());
  // Only parse the end time when the placeholder is actually present, so a
  // malformed date string cannot affect a script that never uses ${j_est}.
  if (script.contains("${j_est}")) {
    try {
      script = script.replace(
          "${j_est}",
          DateUtil.string2Timestamp(history.getStatisEndTime(), history.getTimezone()) / 1000 + "");
    } catch (ParseException e) {
      // Log with the cause instead of printStackTrace(), matching the
      // error-handling style of parseJobs(); ${j_est} is left unresolved.
      Log.warn("parse job end time to timestamp failed", e);
    }
  }
  return script;
}
/**
 * Builds the list of runnable {@link Job}s for one job execution from its configured
 * processer chain.
 *
 * <p>Two passes: first, every job property value has the {@code ${j_set}} /
 * {@code ${j_est}} time placeholders substituted (when the history carries an end time
 * and timezone) and is collected into a {@code ${key} -> value} substitution table;
 * second, each processer's config string has that table applied repeatedly until a
 * fixed point, the processer is re-parsed, and a concrete Job is created per processer
 * type. A nested {@code JobProcesser} recurses into {@code createJob} with a depth
 * guard of 2 to prevent unbounded recursion.
 *
 * @param jobContext         execution context; its properties feed the substitution table
 * @param applicationContext Spring context used to resolve beans and pass to jobs
 * @param jobBean            job descriptor (not read directly here; forwarded shape only —
 *                           NOTE(review): appears unused in this method, confirm)
 * @param ps                 ordered processer chain to translate into jobs
 * @param history            run metadata supplying statistic end time and timezone
 * @param workDir            working directory root; nested jobs get a subdirectory
 * @return one Job per recognized processer (ZooKeeper processers using the default
 *         config produce no job)
 */
private static List<Job> parseJobs(
    JobContext jobContext,
    ApplicationContext applicationContext,
    JobBean jobBean,
    List<Processer> ps,
    JobHistory history,
    String workDir) {
  List<Job> jobs = new ArrayList<Job>();
  Map<String, String> map = jobContext.getProperties().getAllProperties();
  // newmap: substitution table keyed by "${key}" for textual replacement in configs.
  Map<String, String> newmap = new HashMap<String, String>();
  try {
    for (String key : map.keySet()) {
      String value = map.get(key);
      if (value != null) {
        // Expand time placeholders inside property values before they are used
        // as substitutions; also written back into the live properties map.
        if (StringUtils.isNotEmpty(history.getStatisEndTime())
            && StringUtils.isNotEmpty(history.getTimezone())) {
          value = value.replace("${j_set}", history.getStatisEndTime());
          value =
              value.replace(
                  "${j_est}",
                  DateUtil.string2Timestamp(history.getStatisEndTime(), history.getTimezone())
                          / 1000
                      + "");
          map.put(key, value);
        }
        newmap.put("${" + key + "}", value);
      }
    }
  } catch (ParseException e) {
    // A bad end-time string aborts further substitution but not job creation.
    Log.warn("parse job end time to timestamp failed", e);
  }
  for (Processer p : ps) {
    String config = p.getConfig();
    if (config != null && !"".equals(config.trim())) {
      for (String key : newmap.keySet()) {
        String old = "";
        // Replace until a fixed point so values that themselves contain the
        // placeholder text are fully expanded.
        do {
          old = config;
          // Escape double quotes in the value so it stays valid inside the
          // (presumably quoted) config text — TODO confirm config format.
          String value = newmap.get(key).replace("\"", "\\\"");
          config = config.replace(key, value);
        } while (!old.equals(config));
      }
      p.parse(config);
    }
    // Map each processer type to its concrete Job implementation.
    if (p instanceof DownloadProcesser) {
      jobs.add(new DownloadJob(jobContext));
    } else if (p instanceof ZooKeeperProcesser) {
      ZooKeeperProcesser zkp = (ZooKeeperProcesser) p;
      // Default-configured ZooKeeper processers intentionally produce no job.
      if (!zkp.getUseDefault()) {
        jobs.add(new ZooKeeperJob(jobContext, (ZooKeeperProcesser) p, applicationContext));
      }
    } else if (p instanceof MailProcesser) {
      jobs.add(new MailJob(jobContext, (MailProcesser) p, applicationContext));
    } else if (p instanceof WangWangProcesser) {
      jobs.add(new WangWangJob(jobContext));
    } else if (p instanceof OutputCheckProcesser) {
      jobs.add(new OutputCheckJob(jobContext, (OutputCheckProcesser) p, applicationContext));
    } else if (p instanceof OutputCleanProcesser) {
      jobs.add(new OutputCleanJob(jobContext, (OutputCleanProcesser) p, applicationContext));
    } else if (p instanceof HiveProcesser) {
      jobs.add(new HiveProcesserJob(jobContext, (HiveProcesser) p, applicationContext));
    } else if (p instanceof JobProcesser) {
      // Nested job: recurse into the upstream JobBean, bounded by "depth" stored
      // in the job context.
      Integer depth = (Integer) jobContext.getData("depth");
      if (depth == null) {
        depth = 0;
      }
      if (depth < 2) { // recursion depth limit for nested jobs, prevents infinite recursion
        JobProcesser jobProcesser = (JobProcesser) p;
        GroupManager groupManager = (GroupManager) applicationContext.getBean("groupManager");
        JobBean jb = groupManager.getUpstreamJobBean(jobProcesser.getJobId());
        if (jb != null) {
          // Overlay the processer's key/value config onto the nested job's properties.
          for (String key : jobProcesser.getKvConfig().keySet()) {
            if (jobProcesser.getKvConfig().get(key) != null) {
              jb.getJobDescriptor().getProperties().put(key, jobProcesser.getKvConfig().get(key));
            }
          }
          // Dedicated working directory per nested job id.
          File direcotry =
              new File(workDir + File.separator + "job-processer-" + jobProcesser.getJobId());
          if (!direcotry.exists()) {
            direcotry.mkdirs();
          }
          JobContext sub = new JobContext(jobContext.getRunType());
          sub.putData("depth", ++depth);
          Job job = createJob(sub, jb, history, direcotry.getAbsolutePath(), applicationContext);
          jobs.add(job);
        }
      } else {
        // Log message (Chinese): recursion depth of nested JobProcesser too large, stopping.
        jobContext.getJobHistory().getLog().appendZeus("递归的JobProcesser处理单元深度过大,停止递归");
      }
    }
  }
  return jobs;
}