示例#1
0
  @Override
  public void init() throws AnalysisException {
    // Propagate the shared config to every collaborator before initializing them.
    jobBuilder.setConfig(config);
    jobExporter.setConfig(config);
    jobResultMerger.setConfig(config);

    jobBuilder.init();
    jobExporter.init();
    jobResultMerger.init();

    jobs = jobBuilder.build();

    // Fail fast BEFORE touching the map: the original code iterated
    // jobs.values() first, which would NPE when the builder returned null.
    if (jobs == null || jobs.isEmpty()) {
      throw new AnalysisException("jobs should not be empty!");
    }

    for (Job job : jobs.values()) {
      job.reset(null);
    }

    // Shared pools: task state is accessed concurrently, per-job queues are
    // only populated here and read afterwards.
    jobTaskPool = new ConcurrentHashMap<String, JobTask>();
    undoTaskQueue = new LinkedBlockingDeque<JobTask>();
    statusPool = new ConcurrentHashMap<String, JobTaskStatus>();
    jobTaskResultsQueuePool = new HashMap<String, BlockingQueue<JobTaskResult>>();
    branchResultQueuePool = new HashMap<String, BlockingQueue<JobMergedResult>>();

    for (String jobName : jobs.keySet()) {
      jobTaskResultsQueuePool.put(jobName, new LinkedBlockingQueue<JobTaskResult>());
      branchResultQueuePool.put(jobName, new LinkedBlockingQueue<JobMergedResult>());
    }

    // Fixed-size worker pool (core == max) with an unbounded queue for job events.
    eventProcessThreadPool =
        new ThreadPoolExecutor(
            this.config.getMaxJobEventWorker(),
            this.config.getMaxJobEventWorker(),
            0,
            TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>(),
            new NamedThreadFactory("jobManagerEventProcess_worker"));

    // Background worker that recovers master-side temporary data on startup.
    masterDataRecoverWorker =
        new MasterDataRecoverWorker(
            config.getMasterName(), config.getTempStoreDataDir(), jobs, this.config);
    masterDataRecoverWorker.start();

    addJobsToPool();

    if (logger.isInfoEnabled()) {
      logger.info("jobManager init end, MaxJobEventWorker size : " + config.getMaxJobEventWorker());
    }
  }
示例#2
0
 /**
  * Recovers a job by reloading its temporary data from a specific backup.
  *
  * <p>Delegates to the exporter when the job exists; otherwise only logs an error
  * (no exception is thrown for unknown job names).
  *
  * @param jobName name of the job to recover; must be a key in {@code jobs}
  * @param bckPrefix prefix identifying which backup to load from
  */
 @Override
 public void loadJobBackupData(String jobName, String bckPrefix) {
   if (jobs.containsKey(jobName)) {
     jobExporter.loadJobBackupData(jobs.get(jobName), bckPrefix);
   } else {
     logger.error("loadJobBackupData do nothing, jobName " + jobName + " not exist!");
   }
 }
示例#3
0
 /**
  * Loads a job's entry data into temporary storage via the exporter.
  *
  * <p>Unknown job names are logged and ignored (no exception thrown), matching
  * the behavior of {@code loadJobBackupData}.
  *
  * @param jobName name of the job whose data should be staged; must be a key in {@code jobs}
  */
 @Override
 public void loadJobDataToTmp(String jobName) {
   if (jobs.containsKey(jobName)) {
     jobExporter.loadEntryDataToTmp(jobs.get(jobName));
   } else {
     // Fixed copy-paste bug: message previously claimed "exportJobData".
     logger.error("loadJobDataToTmp do nothing, jobName " + jobName + " not exist!");
   }
 }
示例#4
0
  /**
   * Stops the manager and releases every resource it owns.
   *
   * <p>First flushes un-exported job reports (waiting for each job's trunk export
   * to finish), then shuts down the event thread pool and the recover worker.
   * Cleanup in {@code finally} always runs, even if exporting fails.
   */
  @Override
  public void releaseResource() {
    stopped = true;

    try {
      // Export all outstanding reports. Intermediate (trunk) data is
      // intentionally not exported here; note this can lose partial data.
      if (jobs != null) {
        for (Job j : jobs.values()) {
          // Wait for the trunk export to complete before writing the report.
          // NOTE(review): busy-wait with 3s polling — assumes shutdown is rare.
          while (!j.getTrunkExported().get()) {
            Thread.sleep(3000);
          }
          if (!j.isExported().get()) {
            jobExporter.exportReport(j, false);
            logger.info("releaseResouce now, export job : " + j.getJobName());
          }
        }
      }

      if (eventProcessThreadPool != null) {
        eventProcessThreadPool.shutdown();
      }
      if (masterDataRecoverWorker != null) {
        masterDataRecoverWorker.stopWorker();
      }
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers can still observe the
      // interruption (previously swallowed by the Throwable catch below).
      Thread.currentThread().interrupt();
      logger.error("error when stop the node", e);
    } catch (Throwable e) {
      logger.error("error when stop the node", e);
    } finally {
      // Unconditional cleanup of all pools and collaborators.
      if (jobs != null) {
        jobs.clear();
      }
      if (jobTaskPool != null) {
        jobTaskPool.clear();
      }
      if (undoTaskQueue != null) {
        undoTaskQueue.clear();
      }
      if (statusPool != null) {
        statusPool.clear();
      }
      if (jobTaskResultsQueuePool != null) {
        jobTaskResultsQueuePool.clear();
      }
      if (branchResultQueuePool != null) {
        branchResultQueuePool.clear();
      }
      if (jobBuilder != null) {
        jobBuilder.releaseResource();
      }
      if (jobExporter != null) {
        jobExporter.releaseResource();
      }
      if (jobResultMerger != null) {
        jobResultMerger.releaseResource();
      }

      logger.info("jobManager releaseResource end");
    }
  }