  @Test
  public void testRunRestartableJobInstanceTwice() throws Exception {
    job =
        new JobSupport("foo") {
          @Override
          public boolean isRestartable() {
            return true;
          }

          @Override
          public void execute(JobExecution execution) {
            execution.setExitStatus(ExitStatus.COMPLETED);
          }
        };

    testRun();
    reset(jobRepository);
    expect(jobRepository.getLastJobExecution(job.getName(), jobParameters))
        .andReturn(new JobExecution(new JobInstance(1L, jobParameters, job.getName())));
    expect(jobRepository.createJobExecution(job.getName(), jobParameters))
        .andReturn(new JobExecution(new JobInstance(1L, jobParameters, job.getName())));
    replay(jobRepository);
    jobLauncher.run(job, jobParameters);
    verify(jobRepository);
  }
  @Test
  public void testFindOrCreateJobConcurrentlyWhenJobAlreadyExists() throws Exception {

    job = new JobSupport("test-job");
    job.setRestartable(true);
    job.setName("spam");

    JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters());
    cacheJobIds(execution);
    execution.setEndTime(new Timestamp(System.currentTimeMillis()));
    repository.update(execution);
    execution.setStatus(BatchStatus.FAILED);

    int before = simpleJdbcTemplate.queryForInt("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE");
    assertEquals(1, before);

    long t0 = System.currentTimeMillis();
    try {
      doConcurrentStart();
      fail("Expected JobExecutionAlreadyRunningException");
    } catch (JobExecutionAlreadyRunningException e) {
      // expected
    }
    long t1 = System.currentTimeMillis();

    int after = simpleJdbcTemplate.queryForInt("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE");
    assertNotNull(execution.getId());
    assertEquals(before, after);

    logger.info(
        "Duration: "
            + (t1 - t0)
            + " - the second transaction did not block if this number is less than about 1000.");
  }
  @Test
  public void testTaskExecutorRejects() throws Exception {

    final List<String> list = new ArrayList<String>();
    jobLauncher.setTaskExecutor(
        new TaskExecutor() {
          @Override
          public void execute(Runnable task) {
            list.add("execute");
            throw new TaskRejectedException("Planned failure");
          }
        });

    JobExecution jobExecution = new JobExecution(null, null);

    expect(jobRepository.getLastJobExecution(job.getName(), jobParameters)).andReturn(null);
    expect(jobRepository.createJobExecution(job.getName(), jobParameters)).andReturn(jobExecution);
    jobRepository.update(jobExecution);
    expectLastCall();
    replay(jobRepository);

    jobLauncher.afterPropertiesSet();
    try {
      jobLauncher.run(job, jobParameters);
    } finally {
      assertEquals(BatchStatus.FAILED, jobExecution.getStatus());
      assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode());
      verify(jobRepository);
    }

    assertEquals(1, list.size());
  }
  @Test
  @Ignore // FIXME
  public void testTransactionException() throws Exception {

    final SkipWriterStub<String> writer = new SkipWriterStub<String>();
    FaultTolerantStepFactoryBean<String, String> factory =
        new FaultTolerantStepFactoryBean<String, String>();
    factory.setItemWriter(writer);

    @SuppressWarnings("serial")
    DataSourceTransactionManager transactionManager =
        new DataSourceTransactionManager(dataSource) {
          private boolean failed = false;

          @Override
          protected void doCommit(DefaultTransactionStatus status) throws TransactionException {
            if (writer.getWritten().isEmpty()
                || failed
                || !isExistingTransaction(status.getTransaction())) {
              super.doCommit(status);
              return;
            }
            failed = true;
            status.setRollbackOnly();
            super.doRollback(status);
            throw new UnexpectedRollbackException("Planned");
          }
        };

    factory.setBeanName("stepName");
    factory.setTransactionManager(transactionManager);
    factory.setCommitInterval(2);

    ItemReader<String> reader = new ListItemReader<String>(Arrays.asList("1", "2"));
    factory.setItemReader(reader);

    JobRepositoryFactoryBean repositoryFactory = new JobRepositoryFactoryBean();
    repositoryFactory.setDataSource(dataSource);
    repositoryFactory.setTransactionManager(transactionManager);
    repositoryFactory.afterPropertiesSet();
    JobRepository repository = repositoryFactory.getObject();
    factory.setJobRepository(repository);

    JobExecution jobExecution = repository.createJobExecution("job", new JobParameters());
    StepExecution stepExecution = jobExecution.createStepExecution(factory.getName());
    repository.add(stepExecution);

    Step step = factory.getObject();

    step.execute(stepExecution);
    assertEquals(BatchStatus.FAILED, stepExecution.getStatus());

    assertEquals("[]", writer.getCommitted().toString());
  }
  @Before
  public void setUp() throws Exception {
    step.setName("step");
    MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean();
    jobRepository = factory.getObject();
    step.setJobRepository(jobRepository);
    JobExecution jobExecution = jobRepository.createJobExecution("job", new JobParameters());
    stepExecution = jobExecution.createStepExecution("step");
    jobRepository.add(stepExecution);
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    jobLauncher.afterPropertiesSet();
    step.setJobLauncher(jobLauncher);
  }
  private void run(ExitStatus exitStatus) throws Exception {
    JobExecution jobExecution = new JobExecution(null, null);

    expect(jobRepository.getLastJobExecution(job.getName(), jobParameters)).andReturn(null);
    expect(jobRepository.createJobExecution(job.getName(), jobParameters)).andReturn(jobExecution);
    replay(jobRepository);

    jobLauncher.afterPropertiesSet();
    try {
      jobLauncher.run(job, jobParameters);
    } finally {
      assertEquals(exitStatus, jobExecution.getExitStatus());
      verify(jobRepository);
    }
  }
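  // Hedged sketch, not part of the original source: run(ExitStatus) above stubs the repository
  // with EasyMock and asserts the exit status left behind by the stubbed job. The testRun()
  // invoked earlier most likely reduces to a one-liner like the following; the method name
  // here is illustrative.
  @Test
  public void testRunCompleted() throws Exception {
    run(ExitStatus.COMPLETED);
  }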
  @Test
  public void testRuntimeBatchConfigurer() throws Exception {

    jobRepository.toString(); // needed to trigger lazy bean initialization

    assertSame(getTargetObject(jobRepository, JobRepository.class), testConfig.getCustomJobRepo());
  }
  public JobExecution launch(String jobName, JobParameters jobParameters)
      throws NoSuchJobException, JobExecutionAlreadyRunningException, JobRestartException,
          JobInstanceAlreadyCompleteException, JobParametersInvalidException {

    Job job = jobLocator.getJob(jobName);

    JobExecution lastJobExecution = jobRepository.getLastJobExecution(jobName, jobParameters);
    boolean restart = false;
    if (lastJobExecution != null) {
      BatchStatus status = lastJobExecution.getStatus();
      if (status.isUnsuccessful() && status != BatchStatus.ABANDONED) {
        restart = true;
      }
    }

    if (job.getJobParametersIncrementer() != null && !restart) {
      jobParameters = job.getJobParametersIncrementer().getNext(jobParameters);
    }

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);

    if (jobExecution.isRunning()) {
      activeExecutions.add(jobExecution);
    }
    return jobExecution;
  }
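  // Hedged usage sketch, not part of the original source: launch(...) above resolves the Job by
  // name, restarts the last execution if it failed (and was not abandoned), and only applies the
  // JobParametersIncrementer on a fresh run. The method name, job name, and parameter key below
  // are illustrative; JobParametersBuilder and BatchStatus are standard Spring Batch types.
  public JobExecution launchImportJob(String inputFile) throws Exception {
    JobParameters params =
        new JobParametersBuilder().addString("input.file", inputFile).toJobParameters();
    JobExecution execution = launch("importJob", params);
    if (execution.getStatus() == BatchStatus.FAILED) {
      // Launching again with the same parameters takes the restart branch above, because the
      // last execution is unsuccessful and not ABANDONED, so the incrementer is skipped.
      execution = launch("importJob", params);
    }
    return execution;
  }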
  public int stopAll() {
    Collection<JobExecution> result = jobExecutionDao.getRunningJobExecutions();
    for (JobExecution jobExecution : result) {
      jobExecution.stop();
      jobRepository.update(jobExecution);
    }
    return result.size();
  }
  @Transactional
  @Test
  public void testFindOrCreateJob() throws Exception {
    job.setName("foo");
    int before = 0;
    JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters());
    int after = simpleJdbcTemplate.queryForInt("SELECT COUNT(*) FROM BATCH_JOB_INSTANCE");
    assertEquals(before + 1, after);
    assertNotNull(execution.getId());
  }
  @Test
  public void testExecuteRestart() throws Exception {

    DefaultJobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor();
    jobParametersExtractor.setKeys(new String[] {"foo"});
    ExecutionContext executionContext = stepExecution.getExecutionContext();
    executionContext.put("foo", "bar");
    step.setJobParametersExtractor(jobParametersExtractor);

    step.setJob(
        new JobSupport("child") {
          @Override
          public void execute(JobExecution execution) throws UnexpectedJobExecutionException {
            assertEquals(1, execution.getJobParameters().getParameters().size());
            execution.setStatus(BatchStatus.FAILED);
            execution.setEndTime(new Date());
            jobRepository.update(execution);
            throw new RuntimeException("FOO");
          }

          @Override
          public boolean isRestartable() {
            return true;
          }
        });
    step.afterPropertiesSet();
    step.execute(stepExecution);
    assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage());
    JobExecution jobExecution = stepExecution.getJobExecution();
    jobExecution.setEndTime(new Date());
    jobRepository.update(jobExecution);

    jobExecution = jobRepository.createJobExecution("job", new JobParameters());
    stepExecution = jobExecution.createStepExecution("step");
    // In a restart the surrounding Job would set up the context like this...
    stepExecution.setExecutionContext(executionContext);
    jobRepository.add(stepExecution);
    step.execute(stepExecution);
    assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage());
  }
  public JobExecution stop(Long jobExecutionId)
      throws NoSuchJobExecutionException, JobExecutionNotRunningException {

    JobExecution jobExecution = getJobExecution(jobExecutionId);
    if (!jobExecution.isRunning()) {
      throw new JobExecutionNotRunningException(
          "JobExecution is not running and therefore cannot be stopped");
    }

    logger.info("Stopping job execution: " + jobExecution);
    jobExecution.stop();
    jobRepository.update(jobExecution);
    return jobExecution;
  }
  public JobExecution abandon(Long jobExecutionId)
      throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException {

    JobExecution jobExecution = getJobExecution(jobExecutionId);
    if (jobExecution.getStatus().isLessThan(BatchStatus.STOPPING)) {
      throw new JobExecutionAlreadyRunningException(
          "JobExecution is running or complete and therefore cannot be aborted");
    }

    logger.info("Aborting job execution: " + jobExecution);
    jobExecution.upgradeStatus(BatchStatus.ABANDONED);
    jobExecution.setEndTime(new Date());
    jobRepository.update(jobExecution);
    return jobExecution;
  }
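  // Hedged usage sketch, not part of the original source: stop(...) only requests a stop (it
  // flips the execution to STOPPING), while abandon(...) rejects anything whose status is still
  // below STOPPING. A combined operator flow might poll the repository via the same
  // getJobExecution(Long) helper used above until the job has actually stopped; the method name
  // stopAndAbandon is illustrative.
  public JobExecution stopAndAbandon(Long jobExecutionId) throws Exception {
    stop(jobExecutionId);
    // Wait for the running threads to observe the STOPPING flag and finish; this assumes the
    // stop request eventually takes effect.
    while (getJobExecution(jobExecutionId).isRunning()) {
      Thread.sleep(100);
    }
    return abandon(jobExecutionId);
  }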
  @Test(expected = JobParametersInvalidException.class)
  public void testRunWithValidator() throws Exception {

    job.setJobParametersValidator(
        new DefaultJobParametersValidator(new String[] {"missing-and-required"}, new String[0]));

    expect(jobRepository.getLastJobExecution(job.getName(), jobParameters)).andReturn(null);
    replay(jobRepository);

    jobLauncher.afterPropertiesSet();
    try {
      jobLauncher.run(job, jobParameters);
    } finally {
      verify(jobRepository);
    }
  }
  private JobExecution doConcurrentStart() throws Exception {
    new Thread(
            new Runnable() {
              public void run() {
                try {
                  new TransactionTemplate(transactionManager)
                      .execute(
                          new TransactionCallback() {
                            public Object doInTransaction(
                                org.springframework.transaction.TransactionStatus status) {
                              try {
                                JobExecution execution =
                                    repository.createJobExecution(
                                        job.getName(), new JobParameters());
                                cacheJobIds(execution);
                                list.add(execution);
                                Thread.sleep(1000);
                              } catch (Exception e) {
                                list.add(e);
                              }
                              return null;
                            }
                          });
                } catch (RuntimeException e) {
                  list.add(e);
                }
              }
            })
        .start();

    Thread.sleep(400);
    JobExecution execution = repository.createJobExecution(job.getName(), new JobParameters());
    cacheJobIds(execution);

    int count = 0;
    while (list.size() == 0 && count++ < 100) {
      Thread.sleep(200);
    }

    assertEquals("Timed out waiting for JobExecution to be created", 1, list.size());
    assertTrue("JobExecution not created in thread", list.get(0) instanceof JobExecution);
    return (JobExecution) list.get(0);
  }
  private void updateStatus(JobExecution jobExecution, BatchStatus status) {
    jobExecution.setStatus(status);
    jobRepository.update(jobExecution);
  }
  /*
   * Start a job by obtaining a combined classpath using the job launcher and
   * job paths. If a JobLocator has been set, then use it to obtain an actual
   * job; if not, ask the context for it.
   */
  public int start(String moduleNm, String jobIdentifier, String[] parameters, Set<String> opts)
      throws Exception {

    INaviModuleContext context = null;

    try {
      context = NaviModuleContextFactory.getInstance().getNaviModuleContext(moduleNm);
      launcher = (JobLauncher) context.getBean("jobLauncher");
      jobExplorer = (JobExplorer) context.getBean("jobExplorer");
      jobRepository = (JobRepository) context.getBean("jobRepository");

      Assert.state(
          launcher != null,
          "A JobLauncher must be provided.  Please add one to the configuration.");
      if (opts.contains("-restart") || opts.contains("-next")) {
        Assert.state(
            jobExplorer != null,
            "A JobExplorer must be provided for a restart or start next operation.  Please add one to the configuration.");
      }

      String jobName = moduleNm + "_" + jobIdentifier;

      JobParameters jobParameters =
          jobParametersConverter.getJobParameters(
              StringUtils.splitArrayElementsIntoProperties(parameters, "="));
      Assert.isTrue(
          parameters == null || parameters.length == 0 || !jobParameters.isEmpty(),
          "Invalid JobParameters "
              + Arrays.asList(parameters)
              + ". If parameters are provided they should be in the form name=value (no whitespace).");

      if (opts.contains("-stop")) {
        List<JobExecution> jobExecutions = getRunningJobExecutions(jobName);
        if (jobExecutions == null) {
          throw new JobExecutionNotRunningException(
              "No running execution found for job=" + jobName);
        }
        for (JobExecution jobExecution : jobExecutions) {
          jobExecution.setStatus(BatchStatus.STOPPING);
          jobRepository.update(jobExecution);
        }
        return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
      }

      if (opts.contains("-abandon")) {
        List<JobExecution> jobExecutions = getStoppedJobExecutions(jobName);
        if (jobExecutions == null) {
          throw new JobExecutionNotStoppedException(
              "No stopped execution found for job=" + jobName);
        }
        for (JobExecution jobExecution : jobExecutions) {
          jobExecution.setStatus(BatchStatus.ABANDONED);
          jobRepository.update(jobExecution);
        }
        return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
      }

      if (opts.contains("-restart")) {
        JobExecution jobExecution = getLastFailedJobExecution(jobName);
        if (jobExecution == null) {
          throw new JobExecutionNotFailedException(
              "No failed or stopped execution found for job=" + jobName);
        }
        jobParameters = jobExecution.getJobInstance().getJobParameters();
        jobName = jobExecution.getJobInstance().getJobName();
      }

      Job job;
      if (jobLocator != null) {
        job = jobLocator.getJob(jobIdentifier);
      } else {
        job = (Job) context.getBean(jobIdentifier);
        AbstractJob tmptJob = (AbstractJob) job;
        // Override the job name with the module-qualified name
        tmptJob.setName(jobName);
      }

      if (opts.contains("-next")) {
        JobParameters nextParameters = getNextJobParameters(job);
        Map<String, JobParameter> map =
            new HashMap<String, JobParameter>(nextParameters.getParameters());
        map.putAll(jobParameters.getParameters());
        jobParameters = new JobParameters(map);
      }

      JobExecution jobExecution = launcher.run(job, jobParameters);
      return exitCodeMapper.intValue(jobExecution.getExitStatus().getExitCode());

    } catch (Throwable e) {
      String message = "Job Terminated in error: " + e.getMessage();
      log.error(message, e);
      NaviDaemonJobRunner.message = message;
      return exitCodeMapper.intValue(ExitStatus.FAILED.getExitCode());
    } finally {
      if (context != null) {
        context.close();
      }
    }
  }
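  // Hedged usage sketch, not part of the original source: start(...) above combines the module
  // name and job identifier into the repository-level job name, converts name=value parameters,
  // and maps the -stop/-abandon/-restart/-next options to the corresponding repository
  // operations before delegating to the JobLauncher. The module and job names below are
  // illustrative only.
  public int restartOrderImport() throws Exception {
    Set<String> opts = new HashSet<String>(Arrays.asList("-restart"));
    // Parameters passed here are ignored on -restart: the ones recorded for the last failed
    // execution are reused by the branch above.
    return start("orderModule", "orderImportJob", new String[0], opts);
  }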
  /**
   * Run the specified job, handling all listener and repository calls, and delegating the actual
   * processing to {@link #doExecute(JobExecution)}.
   *
   * @see Job#execute(JobExecution)
   * @throws StartLimitExceededException if start limit of one of the steps was exceeded
   */
  @Override
  public final void execute(JobExecution execution) {

    logger.debug("Job execution starting: " + execution);

    try {

      jobParametersValidator.validate(execution.getJobInstance().getJobParameters());

      if (execution.getStatus() != BatchStatus.STOPPING) {

        execution.setStartTime(new Date());
        updateStatus(execution, BatchStatus.STARTED);

        listener.beforeJob(execution);

        try {
          doExecute(execution);
          logger.debug("Job execution complete: " + execution);
        } catch (RepeatException e) {
          throw e.getCause();
        }
      } else {

        // The job was already stopped before we even got this far. Deal
        // with it in the same way as any other interruption.
        execution.setStatus(BatchStatus.STOPPED);
        execution.setExitStatus(ExitStatus.COMPLETED);
        logger.debug("Job execution was stopped: " + execution);
      }

    } catch (JobInterruptedException e) {
      logger.info("Encountered interruption executing job: " + e.getMessage());
      if (logger.isDebugEnabled()) {
        logger.debug("Full exception", e);
      }
      execution.setExitStatus(getDefaultExitStatusForFailure(e));
      execution.setStatus(BatchStatus.max(BatchStatus.STOPPED, e.getStatus()));
      execution.addFailureException(e);
    } catch (Throwable t) {
      logger.error("Encountered fatal error executing job", t);
      execution.setExitStatus(getDefaultExitStatusForFailure(t));
      execution.setStatus(BatchStatus.FAILED);
      execution.addFailureException(t);
    } finally {

      if (execution.getStatus().isLessThanOrEqualTo(BatchStatus.STOPPED)
          && execution.getStepExecutions().isEmpty()) {
        ExitStatus exitStatus = execution.getExitStatus();
        execution.setExitStatus(
            exitStatus.and(
                ExitStatus.NOOP.addExitDescription(
                    "All steps already completed or no steps configured for this job.")));
      }

      execution.setEndTime(new Date());

      try {
        listener.afterJob(execution);
      } catch (Exception e) {
        logger.error("Exception encountered in afterStep callback", e);
      }

      jobRepository.update(execution);
    }
  }
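  // Hedged sketch, not part of the original source: execute(...) above is a template method, so a
  // concrete job only supplies doExecute(...). The class below is modeled on how SimpleJob drives
  // a step through AbstractJob's protected handleStep(...) helper; the name SingleStepJob is
  // illustrative, and standard org.springframework.batch.core imports are assumed.
  public class SingleStepJob extends AbstractJob {

    private final Step step;

    public SingleStepJob(String name, Step step) {
      super(name);
      this.step = step;
    }

    @Override
    protected void doExecute(JobExecution execution) throws JobExecutionException {
      // handleStep(...) performs the restart checks, start-limit checks, and repository updates
      // for the step; execute(...) above has already validated parameters and notified listeners.
      StepExecution stepExecution = handleStep(step, execution);
      execution.upgradeStatus(stepExecution.getStatus());
      execution.setExitStatus(stepExecution.getExitStatus());
    }
  }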