public static Job createJob(
        JobContext jobContext,
        JobBean jobBean,
        JobHistory history,
        String workDir,
        ApplicationContext applicationContext) {
    jobContext.setJobHistory(history);
    jobContext.setWorkDir(workDir);
    HierarchyProperties hp = jobBean.getHierarchyProperties();
    if (history.getProperties() != null && !history.getProperties().isEmpty()) {
        history.getLog().appendZeus("This job has instance configs:");
        for (String key : history.getProperties().keySet()) {
            hp.setProperty(key, history.getProperties().get(key));
            history.getLog().appendZeus(key + "=" + history.getProperties().get(key));
        }
    }
    jobContext.setProperties(new RenderHierarchyProperties(hp));
    List<Map<String, String>> resources = jobBean.getHierarchyResources();
    String script = jobBean.getJobDescriptor().getScript();

    // Update run date (2014-09-18): render the run date (first 16 characters of the job id) into the script.
    String dateStr = history.getJobId().substring(0, 16);
    System.out.println("Manual Job run date: " + dateStr);
    if (dateStr != null && dateStr.length() == 16) {
        script = RenderHierarchyProperties.render(script, dateStr);
        System.out.println("Manual Job script: " + script);
    }

    // Resolve resource-reference statements in the script (Shell and Hive jobs only).
    if (jobBean.getJobDescriptor().getJobType().equals(JobRunType.Shell)
            || jobBean.getJobDescriptor().getJobType().equals(JobRunType.Hive)) {
        script = resolvScriptResource(resources, script, applicationContext);
        jobBean.getJobDescriptor().setScript(script);
    }
    jobContext.setResources(resources);
    if (dateStr != null && dateStr.length() == 16) {
        script = replace(jobContext.getProperties().getAllProperties(dateStr), script);
    } else {
        script = replace(jobContext.getProperties().getAllProperties(), script);
    }
    System.out.println("Manual Job last script: " + script);
    script = replaceScript(history, script);
    hp.setProperty(PropertyKeys.JOB_SCRIPT, script);

    /*
    // Add the Zeus marker property for Yunti.
    hp.setProperty("hadoop.mapred.job.zues_id",
        "zeus_job_" + history.getJobId() + "_" + history.getId());
    */

    // Create the pre-processing jobs; a DownloadJob always runs first.
    List<Job> pres = parseJobs(
            jobContext,
            applicationContext,
            jobBean,
            jobBean.getJobDescriptor().getPreProcessers(),
            history,
            workDir);
    pres.add(0, new DownloadJob(jobContext));

    // Create the post-processing jobs; a ZooKeeperJob always runs last.
    List<Job> posts = parseJobs(
            jobContext,
            applicationContext,
            jobBean,
            jobBean.getJobDescriptor().getPostProcessers(),
            history,
            workDir);
    posts.add(new ZooKeeperJob(jobContext, null, applicationContext));

    // Create the core job according to the job type.
    Job core = null;
    if (jobBean.getJobDescriptor().getJobType() == JobRunType.MapReduce) {
        core = new MapReduceJob(jobContext);
    } else if (jobBean.getJobDescriptor().getJobType() == JobRunType.Shell) {
        core = new HadoopShellJob(jobContext);
    } else if (jobBean.getJobDescriptor().getJobType() == JobRunType.Hive) {
        core = new HiveJob(jobContext, applicationContext);
    }

    // Wrap the core job with its pre- and post-processors.
    Job job = new WithProcesserJob(jobContext, pres, posts, core, applicationContext);
    return job;
}
@Override
public Integer run() throws IOException {
    List<Job> jobs = new ArrayList<Job>();
    for (Map<String, String> map : jobContext.getResources()) {
        if (map.get("uri") != null) {
            String name = map.get("name");
            String uri = map.get("uri");
            if (uri.startsWith("hdfs://")) {
                // HDFS resource: download it into the working directory.
                jobs.add(new DownloadHdfsFileJob(
                        jobContext,
                        jobContext.getWorkDir() + File.separator + name,
                        uri.substring(7)));
            } else if (uri.startsWith("doc://")) {
                // Document resource: render the attached script and write it to a local file.
                String fileId = uri.substring(uri.lastIndexOf('/') + 1);
                String script = map.get("zeus-doc-" + fileId);
                script = RenderHierarchyProperties.render(script);
                File f = new File(jobContext.getWorkDir() + File.separator + name);
                if (f.exists()) {
                    log(name + " already exists; possibly a duplicate name or a circular reference");
                    continue;
                }
                FileWriter w = null;
                try {
                    w = new FileWriter(f);
                    w.write(script);
                    w.flush();
                } catch (Exception e) {
                    log(e);
                } finally {
                    if (w != null) {
                        w.close();
                    }
                }
            } else if (uri.startsWith("http://")) {
                // HTTP resource: only named .xml and .txt files may be downloaded.
                if (name == null || name.trim().isEmpty()) {
                    log("download from http error! name not specified!");
                    continue;
                } else if (!name.endsWith(".xml") && !name.endsWith(".txt")) {
                    log(name + " is not allowed to be downloaded");
                    continue;
                }
                HttpClient client = new HttpClient();
                GetMethod getMethod = new GetMethod(uri);
                int statusCode = client.executeMethod(getMethod);
                if (statusCode != HttpStatus.SC_OK) {
                    log("download from http error! code=" + statusCode);
                } else {
                    BufferedReader in = null;
                    try {
                        in = new BufferedReader(new InputStreamReader(
                                getMethod.getResponseBodyAsStream(), getMethod.getRequestCharSet()));
                        StringBuffer sb = new StringBuffer();
                        String inputLine = null;
                        while ((inputLine = in.readLine()) != null) {
                            sb.append(inputLine);
                        }
                        String script = sb.toString();
                        if (script == null || script.trim().isEmpty()) {
                            log(name + " is empty");
                            continue;
                        }
                        script = RenderHierarchyProperties.render(script);
                        File f = new File(jobContext.getWorkDir() + File.separator + name);
                        if (f.exists()) {
                            log(name + " already exists; possibly a duplicate name or a circular reference");
                            continue;
                        }
                        FileWriter w = null;
                        try {
                            w = new FileWriter(f);
                            w.write(script);
                            w.flush();
                        } catch (Exception e) {
                            log(e);
                        } finally {
                            if (w != null) {
                                w.close();
                            }
                        }
                    } catch (Exception e) {
                        log(e);
                    } finally {
                        if (in != null) {
                            in.close();
                        }
                    }
                }
            }
        }
    }

    // Run the collected download jobs; the exit code of the last job is returned.
    Integer exitCode = 0;
    for (Job job : jobs) {
        try {
            exitCode = job.run();
        } catch (Exception e) {
            jobContext.getJobHistory().getLog().appendZeusException(e);
        }
    }
    return exitCode;
}