Code example #1
File: MysqlGroupManager.java  Project: BestQu/zeus2
 @Override
 public void deleteJob(String user, String jobId) throws ZeusException {
   GroupBean root = getGlobeGroupBean();
   JobBean job = root.getAllSubJobBeans().get(jobId);
   if (job == null) {
     throw new ZeusException("Job " + jobId + " does not exist");
   }
   // Refuse to delete a job that other jobs still depend on
   if (!job.getDepender().isEmpty()) {
     List<String> deps = new ArrayList<String>();
     for (JobBean jb : job.getDepender()) {
       deps.add(jb.getJobDescriptor().getId());
     }
     throw new ZeusException(
         "This job cannot be deleted: it is depended on by jobs " + deps);
   }
   getHibernateTemplate()
       .delete(getHibernateTemplate().get(JobPersistence.class, Long.valueOf(jobId)));
 }
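A usage sketch (not from the project): fetching the manager from a Spring ApplicationContext assumed to be in scope, then handling the dependency error. The bean name "groupManager" is borrowed from the parseJobs snippet in example #4; the user and job id values are illustrative.

 GroupManager manager = (GroupManager) applicationContext.getBean("groupManager");
 try {
   manager.deleteJob("admin", "12345"); // illustrative user and job id
 } catch (ZeusException e) {
   // deleteJob refuses to remove a job that others still depend on
   System.err.println(e.getMessage());
 }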
Code example #2
File: MysqlGroupManager.java  Project: BestQu/zeus2
  @Override
  public GroupBean getDownstreamGroupBean(GroupBean parent) {
    if (parent.isDirectory()) {
      // Directory group: recurse into each child group and attach it to the parent
      List<GroupDescriptor> children = getChildrenGroup(parent.getGroupDescriptor().getId());
      for (GroupDescriptor child : children) {
        GroupBean childBean = new GroupBean(child);
        getDownstreamGroupBean(childBean); // populates childBean in place
        childBean.setParentGroupBean(parent);
        parent.getChildrenGroupBeans().add(childBean);
      }
    } else {
      // Leaf group: load its jobs (descriptor plus status) and index them by job id
      List<Tuple<JobDescriptor, JobStatus>> jobs =
          getChildrenJob(parent.getGroupDescriptor().getId());
      for (Tuple<JobDescriptor, JobStatus> tuple : jobs) {
        JobBean jobBean = new JobBean(tuple.getX(), tuple.getY());
        jobBean.setGroupBean(parent);
        parent.getJobBeans().put(tuple.getX().getId(), jobBean);
      }
    }

    return parent;
  }
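A minimal sketch, using only the accessors that appear above, of walking the tree that getDownstreamGroupBean builds: it prints each group id depth-first, then the job ids held by leaf groups.

  static void printTree(GroupBean bean, String indent) {
    System.out.println(indent + bean.getGroupDescriptor().getId());
    for (GroupBean child : bean.getChildrenGroupBeans()) {
      printTree(child, indent + "  "); // recurse into child groups
    }
    for (String jobId : bean.getJobBeans().keySet()) {
      System.out.println(indent + "  job: " + jobId); // jobs live on leaf groups
    }
  }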
Code example #3
  public static Job createJob(
      JobContext jobContext,
      JobBean jobBean,
      JobHistory history,
      String workDir,
      ApplicationContext applicationContext) {
    jobContext.setJobHistory(history);
    jobContext.setWorkDir(workDir);
    HierarchyProperties hp = jobBean.getHierarchyProperties();
    if (history.getProperties() != null && !history.getProperties().isEmpty()) {
      history.getLog().appendZeus("This job hava instance configs:");
      for (String key : history.getProperties().keySet()) {
        hp.setProperty(key, history.getProperties().get(key));
        history.getLog().appendZeus(key + "=" + history.getProperties().get(key));
      }
    }
    jobContext.setProperties(new RenderHierarchyProperties(hp));
    List<Map<String, String>> resources = jobBean.getHierarchyResources();
    String script = jobBean.getJobDescriptor().getScript();
    // Update run date (2014-09-18): the first 16 characters of a manual job id
    // encode the run date; guard against ids that are too short
    String manualJobId = history.getJobId();
    String dateStr =
        (manualJobId != null && manualJobId.length() >= 16) ? manualJobId.substring(0, 16) : null;
    System.out.println("Manual Job run date :" + dateStr);
    if (dateStr != null) {
      script = RenderHierarchyProperties.render(script, dateStr);
      System.out.println("Manual Job script :" + script);
    }
    // Resolve resource-reference statements in the script
    if (jobBean.getJobDescriptor().getJobType().equals(JobRunType.Shell)
        || jobBean.getJobDescriptor().getJobType().equals(JobRunType.Hive)) {
      script = resolvScriptResource(resources, script, applicationContext);
      jobBean.getJobDescriptor().setScript(script);
    }
    jobContext.setResources(resources);
    if (dateStr != null && dateStr.length() == 16) {
      script = replace(jobContext.getProperties().getAllProperties(dateStr), script);
    } else {
      script = replace(jobContext.getProperties().getAllProperties(), script);
    }
    System.out.println("Manual Job last script :" + script);
    script = replaceScript(history, script);
    hp.setProperty(PropertyKeys.JOB_SCRIPT, script);

    /* Add a Zeus marker property, provided to Yunti:
    hp.setProperty("hadoop.mapred.job.zues_id",
        "zeus_job_" + history.getJobId() + "_" + history.getId());
    */
    // Create the pre-processing jobs
    List<Job> pres =
        parseJobs(
            jobContext,
            applicationContext,
            jobBean,
            jobBean.getJobDescriptor().getPreProcessers(),
            history,
            workDir);
    pres.add(0, new DownloadJob(jobContext));
    // Create the post-processing jobs
    List<Job> posts =
        parseJobs(
            jobContext,
            applicationContext,
            jobBean,
            jobBean.getJobDescriptor().getPostProcessers(),
            history,
            workDir);
    posts.add(new ZooKeeperJob(jobContext, null, applicationContext));
    // Create the core job
    Job core = null;
    if (jobBean.getJobDescriptor().getJobType() == JobRunType.MapReduce) {
      core = new MapReduceJob(jobContext);
    } else if (jobBean.getJobDescriptor().getJobType() == JobRunType.Shell) {
      core = new HadoopShellJob(jobContext);
    } else if (jobBean.getJobDescriptor().getJobType() == JobRunType.Hive) {
      core = new HiveJob(jobContext, applicationContext);
    }

    return new WithProcesserJob(jobContext, pres, posts, core, applicationContext);
  }
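The core-job dispatch near the end of createJob maps a JobRunType to a concrete Job. A sketch of the same dispatch as a switch, assuming JobRunType is an enum with exactly the three constants used above; any other type leaves the core null, as in the original.

  Job core;
  switch (jobBean.getJobDescriptor().getJobType()) {
    case MapReduce:
      core = new MapReduceJob(jobContext);
      break;
    case Shell:
      core = new HadoopShellJob(jobContext);
      break;
    case Hive:
      core = new HiveJob(jobContext, applicationContext);
      break;
    default:
      core = null; // WithProcesserJob then wraps only the pre/post jobs
  }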
Code example #4
 private static List<Job> parseJobs(
     JobContext jobContext,
     ApplicationContext applicationContext,
     JobBean jobBean,
     List<Processer> ps,
     JobHistory history,
     String workDir) {
   List<Job> jobs = new ArrayList<Job>();
   Map<String, String> map = jobContext.getProperties().getAllProperties();
   Map<String, String> newmap = new HashMap<String, String>();
   try {
     for (String key : map.keySet()) {
       String value = map.get(key);
       if (value != null) {
         if (StringUtils.isNotEmpty(history.getStatisEndTime())
             && StringUtils.isNotEmpty(history.getTimezone())) {
           value = value.replace("${j_set}", history.getStatisEndTime());
           value =
               value.replace(
                   "${j_est}",
                   DateUtil.string2Timestamp(history.getStatisEndTime(), history.getTimezone())
                           / 1000
                       + "");
           map.put(key, value);
         }
         newmap.put("${" + key + "}", value);
       }
     }
   } catch (ParseException e) {
     Log.warn("parse job end time to timestamp failed", e);
   }
   for (Processer p : ps) {
     String config = p.getConfig();
     if (config != null && !"".equals(config.trim())) {
        for (String key : newmap.keySet()) {
          // Replace until the config stops changing; String.replace already
          // substitutes every occurrence, so the loop only matters when a
          // substituted value reintroduces the key
          String old;
          do {
            old = config;
            String value = newmap.get(key).replace("\"", "\\\"");
            config = config.replace(key, value);
          } while (!old.equals(config));
        }
       p.parse(config);
     }
     if (p instanceof DownloadProcesser) {
       jobs.add(new DownloadJob(jobContext));
     } else if (p instanceof ZooKeeperProcesser) {
       ZooKeeperProcesser zkp = (ZooKeeperProcesser) p;
       if (!zkp.getUseDefault()) {
         jobs.add(new ZooKeeperJob(jobContext, (ZooKeeperProcesser) p, applicationContext));
       }
     } else if (p instanceof MailProcesser) {
       jobs.add(new MailJob(jobContext, (MailProcesser) p, applicationContext));
     } else if (p instanceof WangWangProcesser) {
       jobs.add(new WangWangJob(jobContext));
     } else if (p instanceof OutputCheckProcesser) {
       jobs.add(new OutputCheckJob(jobContext, (OutputCheckProcesser) p, applicationContext));
     } else if (p instanceof OutputCleanProcesser) {
       jobs.add(new OutputCleanJob(jobContext, (OutputCleanProcesser) p, applicationContext));
     } else if (p instanceof HiveProcesser) {
       jobs.add(new HiveProcesserJob(jobContext, (HiveProcesser) p, applicationContext));
     } else if (p instanceof JobProcesser) {
       Integer depth = (Integer) jobContext.getData("depth");
       if (depth == null) {
         depth = 0;
       }
       if (depth < 2) { // job 的递归深度控制,防止无限递归
         JobProcesser jobProcesser = (JobProcesser) p;
         GroupManager groupManager = (GroupManager) applicationContext.getBean("groupManager");
         JobBean jb = groupManager.getUpstreamJobBean(jobProcesser.getJobId());
         if (jb != null) {
           for (String key : jobProcesser.getKvConfig().keySet()) {
             if (jobProcesser.getKvConfig().get(key) != null) {
               jb.getJobDescriptor().getProperties().put(key, jobProcesser.getKvConfig().get(key));
             }
           }
            File directory =
                new File(workDir + File.separator + "job-processer-" + jobProcesser.getJobId());
            if (!directory.exists()) {
              directory.mkdirs();
            }
            JobContext sub = new JobContext(jobContext.getRunType());
            sub.putData("depth", ++depth);
            Job job = createJob(sub, jb, history, directory.getAbsolutePath(), applicationContext);
           jobs.add(job);
         }
       } else {
          jobContext.getJobHistory().getLog().appendZeus("Recursive JobProcesser depth limit reached; stopping recursion");
       }
     }
   }
   return jobs;
 }
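The ${j_est} placeholder above carries the statistics end time as epoch seconds. DateUtil.string2Timestamp is project code; this JDK-only sketch re-derives the value, assuming a "yyyy-MM-dd HH:mm:ss" input format and an IANA timezone id (both assumptions, not confirmed by the source).

 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.TimeZone;

 static long toEpochSeconds(String time, String timezone) throws ParseException {
   SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   fmt.setTimeZone(TimeZone.getTimeZone(timezone)); // e.g. "Asia/Shanghai"
   return fmt.parse(time).getTime() / 1000L; // milliseconds to epoch seconds
 }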