Example 1: createDebugJob, which builds a debug Job from a DebugHistory
 public static Job createDebugJob(
     JobContext jobContext,
     DebugHistory history,
     String workDir,
     ApplicationContext applicationContext) {
   jobContext.setDebugHistory(history);
   jobContext.setWorkDir(workDir);
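   // Build the property hierarchy that carries the script and Hadoop settings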
   HierarchyProperties hp = new HierarchyProperties(new HashMap<String, String>());
   String script = history.getScript();
   List<Map<String, String>> resources = new ArrayList<Map<String, String>>();
   // Resolve resource-reference statements in the script
   script = resolvScriptResource(resources, script, applicationContext);
   jobContext.setResources(resources);
   hp.setProperty(PropertyKeys.JOB_SCRIPT, script);
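   // Apply the file owner's personal Hadoop configuration, if one exists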
   FileManager fileManager = (FileManager) applicationContext.getBean("fileManager");
   ProfileManager profileManager = (ProfileManager) applicationContext.getBean("profileManager");
   String owner = fileManager.getFile(history.getFileId()).getOwner();
   Profile profile = profileManager.findByUid(owner);
   if (profile != null && profile.getHadoopConf() != null) {
     for (String key : profile.getHadoopConf().keySet()) {
       hp.setProperty(key, profile.getHadoopConf().get(key));
     }
   }
   jobContext.setProperties(new RenderHierarchyProperties(hp));
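   // Tag the Hadoop job so it can be traced back to this debug run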
   hp.setProperty("hadoop.mapred.job.zeus_id", "zeus_debug_" + history.getId());
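   // A DownloadJob runs first to fetch the resources referenced by the script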
   List<Job> pres = new ArrayList<Job>(1);
   pres.add(new DownloadJob(jobContext));
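   // Pick the core job implementation from the run type recorded in the history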
   Job core = null;
   if (history.getJobRunType() == JobRunType.Hive) {
     core = new HiveJob(jobContext, applicationContext);
   } else if (history.getJobRunType() == JobRunType.Shell) {
     core = new HadoopShellJob(jobContext);
   }
   Job job =
       new WithProcesserJob(jobContext, pres, new ArrayList<Job>(), core, applicationContext);
   return job;
 }
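The helper resolvScriptResource is called in both examples but its source is not shown here. Purely as an illustration of what a resolver of this shape might do, the following self-contained sketch scans a script for resource directives, records each one in the resources list, and strips the directive from the script text. The {resource:...} syntax, the "name" map key, and the class name are invented for the sketch and are not zeus's actual format.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ResourceResolverSketch {

  // Hypothetical directive syntax: occurrences such as {resource:udf.jar}
  private static final Pattern REF = Pattern.compile("\\{resource:([^}]+)\\}");

  // Collects each referenced resource and strips the directive from the script.
  public static String resolve(List<Map<String, String>> resources, String script) {
    Matcher m = REF.matcher(script);
    StringBuffer cleaned = new StringBuffer();
    while (m.find()) {
      Map<String, String> res = new HashMap<String, String>();
      res.put("name", m.group(1)); // the "name" key is illustrative
      resources.add(res);
      m.appendReplacement(cleaned, ""); // drop the directive text
    }
    m.appendTail(cleaned);
    return cleaned.toString();
  }

  public static void main(String[] args) {
    List<Map<String, String>> resources = new ArrayList<Map<String, String>>();
    String script = "add jar {resource:udf.jar};\nselect 1;";
    System.out.println(resolve(resources, script)); // directive removed from script
    System.out.println(resources);                  // [{name=udf.jar}]
  }
}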
Example 2: createJob, which builds a complete Job from a JobBean and a JobHistory
  public static Job createJob(
      JobContext jobContext,
      JobBean jobBean,
      JobHistory history,
      String workDir,
      ApplicationContext applicationContext) {
    jobContext.setJobHistory(history);
    jobContext.setWorkDir(workDir);
    HierarchyProperties hp = jobBean.getHierarchyProperties();
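    // Instance-level properties from the history override the inherited configuration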
    if (history.getProperties() != null && !history.getProperties().isEmpty()) {
      history.getLog().appendZeus("This job has instance configs:");
      for (String key : history.getProperties().keySet()) {
        hp.setProperty(key, history.getProperties().get(key));
        history.getLog().appendZeus(key + "=" + history.getProperties().get(key));
      }
    }
    jobContext.setProperties(new RenderHierarchyProperties(hp));
    List<Map<String, String>> resources = jobBean.getHierarchyResources();
    String script = jobBean.getJobDescriptor().getScript();
    // Render the run date (parsed from the job id prefix) into the script (update 2014-09-18)
    String jobId = history.getJobId();
    // Guard against null or short job ids before taking the 16-character date prefix
    String dateStr = (jobId != null && jobId.length() >= 16) ? jobId.substring(0, 16) : null;
    System.out.println("Manual Job run date :" + dateStr);
    if (dateStr != null && dateStr.length() == 16) {
      script = RenderHierarchyProperties.render(script, dateStr);
      System.out.println("Manual Job script :" + script);
    }
    // Resolve resource-reference statements in the script
    if (jobBean.getJobDescriptor().getJobType().equals(JobRunType.Shell)
        || jobBean.getJobDescriptor().getJobType().equals(JobRunType.Hive)) {
      script = resolvScriptResource(resources, script, applicationContext);
      jobBean.getJobDescriptor().setScript(script);
    }
    jobContext.setResources(resources);
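    // Substitute property placeholders into the script, using date-scoped values when a run date is available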
    if (dateStr != null && dateStr.length() == 16) {
      script = replace(jobContext.getProperties().getAllProperties(dateStr), script);
    } else {
      script = replace(jobContext.getProperties().getAllProperties(), script);
    }
    System.out.println("Manual Job last script :" + script);
    script = replaceScript(history, script);
    hp.setProperty(PropertyKeys.JOB_SCRIPT, script);

    /*
    // Add the Zeus tag property (exposed to the Yunti Hadoop cluster)
    hp.setProperty("hadoop.mapred.job.zues_id",
        "zeus_job_" + history.getJobId() + "_" + history.getId());
    */
    // Create the pre-processing jobs
    List<Job> pres =
        parseJobs(
            jobContext,
            applicationContext,
            jobBean,
            jobBean.getJobDescriptor().getPreProcessers(),
            history,
            workDir);
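    // Insert the DownloadJob at the head of the list so resources are fetched first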
    pres.add(0, new DownloadJob(jobContext));
    // Create the post-processing jobs
    List<Job> posts =
        parseJobs(
            jobContext,
            applicationContext,
            jobBean,
            jobBean.getJobDescriptor().getPostProcessers(),
            history,
            workDir);
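    // Append a ZooKeeperJob as the final post-processing step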
    posts.add(new ZooKeeperJob(jobContext, null, applicationContext));
    // Create the core job based on the descriptor's run type
    Job core = null;
    if (jobBean.getJobDescriptor().getJobType() == JobRunType.MapReduce) {
      core = new MapReduceJob(jobContext);
    } else if (jobBean.getJobDescriptor().getJobType() == JobRunType.Shell) {
      core = new HadoopShellJob(jobContext);
    } else if (jobBean.getJobDescriptor().getJobType() == JobRunType.Hive) {
      core = new HiveJob(jobContext, applicationContext);
    }

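    // Compose pre-processors, core job, and post-processors into a single runnable job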
    Job job = new WithProcesserJob(jobContext, pres, posts, core, applicationContext);

    return job;
  }
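Both factories hand their job lists to WithProcesserJob, whose implementation is not shown here. As a minimal, self-contained sketch, the pipeline it is assumed to run looks like the following: pre-processors first, aborting on the first failure, then the core job, then the post-processors. SimpleJob, CompositeJob, and the exit-code convention are illustrative assumptions, not the zeus API.

import java.util.Arrays;
import java.util.List;

public class PipelineSketch {

  interface SimpleJob {
    int run(); // assumed convention: 0 = success, anything else = failure
  }

  static class CompositeJob implements SimpleJob {
    private final List<SimpleJob> pres;
    private final SimpleJob core;
    private final List<SimpleJob> posts;

    CompositeJob(List<SimpleJob> pres, SimpleJob core, List<SimpleJob> posts) {
      this.pres = pres;
      this.core = core;
      this.posts = posts;
    }

    @Override
    public int run() {
      for (SimpleJob pre : pres) {   // e.g. DownloadJob
        int exit = pre.run();
        if (exit != 0) {
          return exit;               // a failed pre-processor aborts the run
        }
      }
      int exit = core.run();         // e.g. MapReduceJob, HiveJob, HadoopShellJob
      for (SimpleJob post : posts) { // e.g. ZooKeeperJob
        post.run();                  // assumed to run even if the core job failed
      }
      return exit;
    }
  }

  public static void main(String[] args) {
    SimpleJob download = () -> { System.out.println("download resources"); return 0; };
    SimpleJob hive = () -> { System.out.println("run hive script"); return 0; };
    SimpleJob zk = () -> { System.out.println("notify zookeeper"); return 0; };
    int exit = new CompositeJob(Arrays.asList(download), hive, Arrays.asList(zk)).run();
    System.out.println("exit=" + exit);
  }
}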