@Test
  public void testTaskExecutorRejects() throws Exception {

    final List<String> list = new ArrayList<String>();
    jobLauncher.setTaskExecutor(
        new TaskExecutor() {
          @Override
          public void execute(Runnable task) {
            list.add("execute");
            throw new TaskRejectedException("Planned failure");
          }
        });

    JobExecution jobExecution = new JobExecution(null, null);

    expect(jobRepository.getLastJobExecution(job.getName(), jobParameters)).andReturn(null);
    expect(jobRepository.createJobExecution(job.getName(), jobParameters)).andReturn(jobExecution);
    jobRepository.update(jobExecution);
    expectLastCall();
    replay(jobRepository);

    jobLauncher.afterPropertiesSet();
    try {
      jobLauncher.run(job, jobParameters);
    } finally {
      assertEquals(BatchStatus.FAILED, jobExecution.getStatus());
      assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode());
      verify(jobRepository);
    }

    assertEquals(1, list.size());
  }
  @Test
  public void testFindOrCreateJobConcurrentlyWhenJobAlreadyExists() throws Exception {

    job = new JobSupport("test-job");
    job.setRestartable(true);
    job.setName("spam");

    JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters());
    cacheJobIds(execution);
    execution.setEndTime(new Timestamp(System.currentTimeMillis()));
    repository.update(execution);
    execution.setStatus(BatchStatus.FAILED);

    int before = simpleJdbcTemplate.queryForInt("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE");
    assertEquals(1, before);

    long t0 = System.currentTimeMillis();
    try {
      doConcurrentStart();
      fail("Expected JobExecutionAlreadyRunningException");
    } catch (JobExecutionAlreadyRunningException e) {
      // expected
    }
    long t1 = System.currentTimeMillis();

    int after = simpleJdbcTemplate.queryForInt("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE");
    assertNotNull(execution.getId());
    assertEquals(before, after);

    logger.info(
        "Duration: "
            + (t1 - t0)
            + " - the second transaction did not block if this number is less than about 1000.");
  }
 public int stopAll() {
   Collection<JobExecution> result = jobExecutionDao.getRunningJobExecutions();
   for (JobExecution jobExecution : result) {
     jobExecution.stop();
     jobRepository.update(jobExecution);
   }
   return result.size();
 }
  public JobExecution stop(Long jobExecutionId)
      throws NoSuchJobExecutionException, JobExecutionNotRunningException {

    JobExecution jobExecution = getJobExecution(jobExecutionId);
    if (!jobExecution.isRunning()) {
      throw new JobExecutionNotRunningException(
          "JobExecution is not running and therefore cannot be stopped");
    }

    logger.info("Stopping job execution: " + jobExecution);
    jobExecution.stop();
    jobRepository.update(jobExecution);
    return jobExecution;
  }
  public JobExecution abandon(Long jobExecutionId)
      throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException {

    JobExecution jobExecution = getJobExecution(jobExecutionId);
    if (jobExecution.getStatus().isLessThan(BatchStatus.STOPPING)) {
      throw new JobExecutionAlreadyRunningException(
          "JobExecution is running or complete and therefore cannot be aborted");
    }

    logger.info("Aborting job execution: " + jobExecution);
    jobExecution.upgradeStatus(BatchStatus.ABANDONED);
    jobExecution.setEndTime(new Date());
    jobRepository.update(jobExecution);
    return jobExecution;
  }
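  /*
   * Usage sketch (not from the original source): the stopAll(), stop(Long) and abandon(Long)
   * methods above form a small operator-style API over the JobRepository. The JobService
   * interface and BatchShutdownHandler class below are assumptions introduced only for
   * illustration of how a caller might wire them together.
   */
  interface JobService {
    int stopAll();
    JobExecution stop(Long jobExecutionId) throws Exception;
    JobExecution abandon(Long jobExecutionId) throws Exception;
  }

  class BatchShutdownHandler {

    private final JobService jobService;

    BatchShutdownHandler(JobService jobService) {
      this.jobService = jobService;
    }

    // On application shutdown, flag every running execution as stopping and persist the change.
    public void onShutdown() {
      int count = jobService.stopAll();
      System.out.println("Requested stop for " + count + " running execution(s)");
    }

    // For an execution an operator has confirmed is dead: abandon() upgrades the status to
    // ABANDONED and sets the end time, so the execution can no longer be restarted. It throws
    // JobExecutionAlreadyRunningException if the execution has not reached STOPPING.
    public void abandonDeadExecution(Long jobExecutionId) throws Exception {
      jobService.abandon(jobExecutionId);
    }
  }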
  @Test
  public void testExecuteRestart() throws Exception {

    DefaultJobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor();
    jobParametersExtractor.setKeys(new String[] {"foo"});
    ExecutionContext executionContext = stepExecution.getExecutionContext();
    executionContext.put("foo", "bar");
    step.setJobParametersExtractor(jobParametersExtractor);

    step.setJob(
        new JobSupport("child") {
          @Override
          public void execute(JobExecution execution) throws UnexpectedJobExecutionException {
            assertEquals(1, execution.getJobParameters().getParameters().size());
            execution.setStatus(BatchStatus.FAILED);
            execution.setEndTime(new Date());
            jobRepository.update(execution);
            throw new RuntimeException("FOO");
          }

          @Override
          public boolean isRestartable() {
            return true;
          }
        });
    step.afterPropertiesSet();
    step.execute(stepExecution);
    assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage());
    JobExecution jobExecution = stepExecution.getJobExecution();
    jobExecution.setEndTime(new Date());
    jobRepository.update(jobExecution);

    jobExecution = jobRepository.createJobExecution("job", new JobParameters());
    stepExecution = jobExecution.createStepExecution("step");
    // In a restart the surrounding Job would set up the context like this...
    stepExecution.setExecutionContext(executionContext);
    jobRepository.add(stepExecution);
    step.execute(stepExecution);
    assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage());
  }
 private void updateStatus(JobExecution jobExecution, BatchStatus status) {
   jobExecution.setStatus(status);
   jobRepository.update(jobExecution);
 }
  /**
   * Run the specified job, handling all listener and repository calls, and delegating the actual
   * processing to {@link #doExecute(JobExecution)}.
   *
   * @see Job#execute(JobExecution)
   * @throws StartLimitExceededException if the start limit of one of the steps was exceeded
   */
  @Override
  public final void execute(JobExecution execution) {

    logger.debug("Job execution starting: " + execution);

    try {

      jobParametersValidator.validate(execution.getJobInstance().getJobParameters());

      if (execution.getStatus() != BatchStatus.STOPPING) {

        execution.setStartTime(new Date());
        updateStatus(execution, BatchStatus.STARTED);

        listener.beforeJob(execution);

        try {
          doExecute(execution);
          logger.debug("Job execution complete: " + execution);
        } catch (RepeatException e) {
          throw e.getCause();
        }
      } else {

        // The job was already stopped before we even got this far. Deal
        // with it in the same way as any other interruption.
        execution.setStatus(BatchStatus.STOPPED);
        execution.setExitStatus(ExitStatus.COMPLETED);
        logger.debug("Job execution was stopped: " + execution);
      }

    } catch (JobInterruptedException e) {
      logger.info("Encountered interruption executing job: " + e.getMessage());
      if (logger.isDebugEnabled()) {
        logger.debug("Full exception", e);
      }
      execution.setExitStatus(getDefaultExitStatusForFailure(e));
      execution.setStatus(BatchStatus.max(BatchStatus.STOPPED, e.getStatus()));
      execution.addFailureException(e);
    } catch (Throwable t) {
      logger.error("Encountered fatal error executing job", t);
      execution.setExitStatus(getDefaultExitStatusForFailure(t));
      execution.setStatus(BatchStatus.FAILED);
      execution.addFailureException(t);
    } finally {

      if (execution.getStatus().isLessThanOrEqualTo(BatchStatus.STOPPED)
          && execution.getStepExecutions().isEmpty()) {
        ExitStatus exitStatus = execution.getExitStatus();
        execution.setExitStatus(
            exitStatus.and(
                ExitStatus.NOOP.addExitDescription(
                    "All steps already completed or no steps configured for this job.")));
      }

      execution.setEndTime(new Date());

      try {
        listener.afterJob(execution);
      } catch (Exception e) {
        logger.error("Exception encountered in afterStep callback", e);
      }

      jobRepository.update(execution);
    }
  }
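  /*
   * A minimal sketch (not from the original source) of how a subclass plugs into the template
   * method above: execute() handles parameter validation, listeners, status transitions and the
   * final jobRepository.update(), so doExecute() only has to run the steps. The SingleStepJob
   * class and its "step" field are assumptions for illustration.
   */
  public class SingleStepJob extends AbstractJob {

    private Step step;

    public void setStep(Step step) {
      this.step = step;
    }

    @Override
    protected void doExecute(JobExecution execution) throws JobExecutionException {
      // Create and persist a StepExecution, then hand off to the step. Any exception thrown
      // here is caught by execute() above, which marks the JobExecution FAILED and still
      // calls jobRepository.update() in its finally block.
      StepExecution stepExecution = execution.createStepExecution(step.getName());
      getJobRepository().add(stepExecution);
      step.execute(stepExecution);
      execution.upgradeStatus(stepExecution.getStatus());
      execution.setExitStatus(stepExecution.getExitStatus());
    }
  }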
  /*
   * Start a job by obtaining a combined classpath using the job launcher and
   * job paths. If a JobLocator has been set, then use it to obtain an actual
   * job, if not ask the context for it.
   */
  public int start(String moduleNm, String jobIdentifier, String[] parameters, Set<String> opts)
      throws Exception {

    INaviModuleContext context = null;

    try {
      context = NaviModuleContextFactory.getInstance().getNaviModuleContext(moduleNm);
      launcher = (JobLauncher) context.getBean("jobLauncher");
      jobExplorer = (JobExplorer) context.getBean("jobExplorer");
      jobRepository = (JobRepository) context.getBean("jobRepository");

      Assert.state(
          launcher != null,
          "A JobLauncher must be provided.  Please add one to the configuration.");
      if (opts.contains("-restart") || opts.contains("-next")) {
        Assert.state(
            jobExplorer != null,
            "A JobExplorer must be provided for a restart or start next operation.  Please add one to the configuration.");
      }

      String jobName = moduleNm + "_" + jobIdentifier;

      JobParameters jobParameters =
          jobParametersConverter.getJobParameters(
              StringUtils.splitArrayElementsIntoProperties(parameters, "="));
      Assert.isTrue(
          parameters == null || parameters.length == 0 || !jobParameters.isEmpty(),
          "Invalid JobParameters "
              + Arrays.asList(parameters)
              + ". If parameters are provided they should be in the form name=value (no whitespace).");

      if (opts.contains("-stop")) {
        List<JobExecution> jobExecutions = getRunningJobExecutions(jobName);
        if (jobExecutions == null) {
          throw new JobExecutionNotRunningException(
              "No running execution found for job=" + jobName);
        }
        for (JobExecution jobExecution : jobExecutions) {
          jobExecution.setStatus(BatchStatus.STOPPING);
          jobRepository.update(jobExecution);
        }
        return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
      }

      if (opts.contains("-abandon")) {
        List<JobExecution> jobExecutions = getStoppedJobExecutions(jobName);
        if (jobExecutions == null) {
          throw new JobExecutionNotStoppedException(
              "No stopped execution found for job=" + jobName);
        }
        for (JobExecution jobExecution : jobExecutions) {
          jobExecution.setStatus(BatchStatus.ABANDONED);
          jobRepository.update(jobExecution);
        }
        return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
      }

      if (opts.contains("-restart")) {
        JobExecution jobExecution = getLastFailedJobExecution(jobName);
        if (jobExecution == null) {
          throw new JobExecutionNotFailedException(
              "No failed or stopped execution found for job=" + jobName);
        }
        jobParameters = jobExecution.getJobInstance().getJobParameters();
        jobName = jobExecution.getJobInstance().getJobName();
      }

      Job job;
      if (jobLocator != null) {
        job = jobLocator.getJob(jobIdentifier);
      } else {
        job = (Job) context.getBean(jobIdentifier);
        AbstractJob tmptJob = (AbstractJob) job;
        // Override the job name with the module-prefixed name
        tmptJob.setName(jobName);
      }

      if (opts.contains("-next")) {
        JobParameters nextParameters = getNextJobParameters(job);
        Map<String, JobParameter> map =
            new HashMap<String, JobParameter>(nextParameters.getParameters());
        map.putAll(jobParameters.getParameters());
        jobParameters = new JobParameters(map);
      }

      JobExecution jobExecution = launcher.run(job, jobParameters);
      return exitCodeMapper.intValue(jobExecution.getExitStatus().getExitCode());

    } catch (Throwable e) {
      String message = "Job Terminated in error: " + e.getMessage();
      log.error(message, e);
      NaviDaemonJobRunner.message = message;
      return exitCodeMapper.intValue(ExitStatus.FAILED.getExitCode());
    } finally {
      if (context != null) {
        context.close();
      }
    }
  }
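  /*
   * Usage sketch (not from the original source): how the start(...) method above might be driven
   * from a main method. The NaviDaemonJobRunner construction, module/bean names, parameters and
   * the exit-code check below are assumptions based only on the code above.
   */
  public static void main(String[] args) throws Exception {
    NaviDaemonJobRunner runner = new NaviDaemonJobRunner();

    // Plain launch: module "order", job bean "dailyReportJob" (renamed to "order_dailyReportJob"),
    // with a single name=value job parameter and no options.
    int exitCode = runner.start(
        "order", "dailyReportJob", new String[] {"run.date=2015-01-01"}, new HashSet<String>());

    // If the launch did not complete cleanly, restart the last failed or stopped execution of the
    // same job, reusing the parameters recorded for that execution.
    if (exitCode != 0) {
      exitCode = runner.start(
          "order", "dailyReportJob", new String[0], new HashSet<String>(Arrays.asList("-restart")));
    }

    System.exit(exitCode);
  }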