Example #1
  @Test
  public void testDecisionThrowsException() throws Exception {
    ApplicationContext context =
        new GenericXmlApplicationContext(
            "classpath:/org/springframework/batch/core/jsr/step/DecisionStepTests-decisionThrowsException-context.xml");

    JobLauncher launcher = context.getBean(JobLauncher.class);
    Job job = context.getBean(Job.class);

    JobExecution execution = launcher.run(job, new JobParameters());
    assertEquals(BatchStatus.FAILED, execution.getStatus());
    assertEquals(2, execution.getStepExecutions().size());
    List<Throwable> allFailureExceptions = execution.getAllFailureExceptions();

    boolean found = false;
    for (Throwable throwable : allFailureExceptions) {
      if ("Expected".equals(throwable.getMessage())) {
        found = true;
        break;
      }
    }

    assertTrue("Expected failure exception was not reported", found);
  }
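The referenced XML context wires a JSR-352 decider into the job flow; it is not shown in this example, but a minimal sketch of the kind of decider such a test presumably exercises could look like the fragment below (the class name is hypothetical, and the actual decider lives in the context file).

  // Hypothetical JSR-352 decider (sketch): throwing from decide(...) is what the
  // test above expects to surface as a failure exception with message "Expected".
  public class FailingDecider implements javax.batch.api.Decider {
    @Override
    public String decide(javax.batch.runtime.StepExecution[] executions) throws Exception {
      throw new RuntimeException("Expected");
    }
  }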
Example #2
 static void mapreduce() throws Exception {
   JobLauncher jobLauncher = SpringInitialize.getContext().getBean(JobLauncher.class);
   Job job = SpringInitialize.getContext().getBean(Job.class);
   Map<String, JobParameter> map = new HashMap<String, JobParameter>();
   map.put("word.input", new JobParameter("/user/conan/word/input/"));
   map.put("word.output", new JobParameter("/user/conan/word/output/"));
   jobLauncher.run(job, new JobParameters(map));
 }
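One way the launched job might consume the word.input and word.output parameters is through step-scope late binding; the bean below is an illustrative sketch and is not part of the example above.

  // Sketch (assumed wiring): a step-scoped tasklet reading the "word.input" job
  // parameter via Spring Batch late binding; the bean name and body are illustrative.
  @Bean
  @StepScope
  public Tasklet wordInputTasklet(@Value("#{jobParameters['word.input']}") String inputPath) {
    return (contribution, chunkContext) -> {
      System.out.println("Reading input from " + inputPath);
      return RepeatStatus.FINISHED;
    };
  }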
Example #3
  @Test
  public void testDecisionValidExitStatus() throws Exception {
    ApplicationContext context =
        new GenericXmlApplicationContext(
            "classpath:/org/springframework/batch/core/jsr/step/DecisionStepTests-decisionValidExitStatus-context.xml");

    JobLauncher launcher = context.getBean(JobLauncher.class);
    Job job = context.getBean(Job.class);

    JobExecution execution = launcher.run(job, new JobParameters());
    assertEquals(BatchStatus.COMPLETED, execution.getStatus());
    assertEquals(3, execution.getStepExecutions().size());
  }
Example #4
  @Test
  public void testWithinJob() throws Exception {
    ClassPathXmlApplicationContext context =
        new ClassPathXmlApplicationContext(
            "/org/springframework/data/hadoop/fs/HdfsItemWriterTest-context.xml");
    JobLauncher launcher = context.getBean(JobLauncher.class);
    Job job = context.getBean(Job.class);

    JobParameters jobParameters = new JobParametersBuilder().toJobParameters();

    JobExecution execution = launcher.run(job, jobParameters);
    assertTrue(
        "status was: " + execution.getStatus(), execution.getStatus() == BatchStatus.COMPLETED);
  }
  /**
   * @throws IOException if a temporary file cannot be created.
   * @throws NoSuchJobException if the ImageProcessing job cannot be located
   * @throws JobParametersInvalidException if the job parameters are invalid
   * @throws JobInstanceAlreadyCompleteException if the job has already completed
   * @throws JobRestartException if the job cannot be restarted
   * @throws JobExecutionAlreadyRunningException if the job is already running
   */
  @Test
  public final void testNotModifiedResponse()
      throws IOException, NoSuchJobException, JobExecutionAlreadyRunningException,
          JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    Map<String, JobParameter> parameters = new HashMap<String, JobParameter>();
    parameters.put("query.string", new JobParameter("select i from Image i"));

    JobParameters jobParameters = new JobParameters(parameters);

    Job job = jobLocator.getJob("ImageProcessing");
    assertNotNull("ImageProcessing must not be null", job);
    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals(
        "The job should complete successfully",
        "COMPLETED",
        jobExecution.getExitStatus().getExitCode());

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
      logger.info(
          stepExecution.getStepName()
              + " "
              + stepExecution.getReadCount()
              + " "
              + stepExecution.getFilterCount()
              + " "
              + stepExecution.getWriteCount());
    }
  }
  public JobExecution launch(String jobName, JobParameters jobParameters)
      throws NoSuchJobException, JobExecutionAlreadyRunningException, JobRestartException,
          JobInstanceAlreadyCompleteException, JobParametersInvalidException {

    Job job = jobLocator.getJob(jobName);

    JobExecution lastJobExecution = jobRepository.getLastJobExecution(jobName, jobParameters);
    boolean restart = false;
    if (lastJobExecution != null) {
      BatchStatus status = lastJobExecution.getStatus();
      if (status.isUnsuccessful() && status != BatchStatus.ABANDONED) {
        restart = true;
      }
    }

    if (job.getJobParametersIncrementer() != null && !restart) {
      jobParameters = job.getJobParametersIncrementer().getNext(jobParameters);
    }

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);

    if (jobExecution.isRunning()) {
      activeExecutions.add(jobExecution);
    }
    return jobExecution;
  }
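Because launch(...) only applies the JobParametersIncrementer on non-restart runs, the job it launches has to declare one; a minimal sketch of such a job definition in Java config style follows (bean and step names are assumptions, not taken from the example).

  // Sketch (assumed job definition): a RunIdIncrementer lets getNext(...) above
  // produce fresh identifying parameters for each new, non-restart launch.
  @Bean
  public Job reportJob(JobBuilderFactory jobs, Step reportStep) {
    return jobs.get("reportJob")
        .incrementer(new RunIdIncrementer())
        .start(reportStep)
        .build();
  }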
 @Test
 public void testTimesheetReportJob() throws Exception {
   jobLauncher.run(timesheetJob, new JobParameters());
   // PDF generated to resource file path specified in jobs-context.xml
   System.out.println("PDF REPORT Generated in " + System.getProperty("java.io.tmpdir"));
   System.out.println("See src/test/resources/jobs-context.xml to change PDF output directory");
 }
 @RequestMapping("/job2")
 @ResponseBody
 String requestJob2()
     throws JobExecutionAlreadyRunningException, JobRestartException,
         JobInstanceAlreadyCompleteException, JobParametersInvalidException {
   jobLauncher.run(job2, createInitialJobParameterMap());
   return "Job2!";
 }
  /** @param args the command line arguments */
  public static void main(String[] args) {
    String[] springConfig = {"config/Job1/context.xml", "config/Job1/job1.xml"};

    ApplicationContext context = new ClassPathXmlApplicationContext(springConfig);

    JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
    Job job = (Job) context.getBean("reportJob");

    try {

      JobExecution execution = jobLauncher.run(job, new JobParameters());
      System.out.println("Exit Status : " + execution.getStatus());

    } catch (Exception e) {
      e.printStackTrace();
    }

    System.out.println("Done");
  }
 @Test
 public void testSunnyDayFaultTolerant() throws Exception {
   JobExecution jobExecution =
       jobLauncher.run(
           job, new JobParameters(Collections.singletonMap("item.three", new JobParameter("3"))));
   assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
   StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next();
   assertEquals(9, stepExecution.getReadCount());
   assertEquals(9, stepExecution.getWriteCount());
 }
  @RequestMapping(value = "/sqs", method = RequestMethod.POST)
  @ResponseBody
  public ResponseEntity<Void> sqsMessageHandler(
      @RequestHeader(value = "User-Agent", required = false) String sqsdMessageUserAgent,
      @RequestHeader(value = "X-Aws-Sqsd-Msgid", required = false) String sqsdMessageId,
      @RequestHeader(value = "X-Aws-Sqsd-Queue", required = false) String sqsdMessageQueueName,
      @RequestHeader(value = "X-Aws-Sqsd-First-Received-At", required = false)
          String sqsdMessageReceivedTimestamp,
      @RequestHeader(value = "X-Aws-Sqsd-Receive-Count", required = false) int sqsdMessageCounts,
      @RequestHeader(value = "Content-Type", required = false) String sqsdMessageContentType,
      @RequestHeader(value = "X-Aws-Sqsd-Taskname", required = false)
          String sqsdMessagePeriodicTaskName,
      @RequestHeader(value = "X-Aws-Sqsd-Attr-(message-attribute-name)", required = false)
          String sqsdMessageCustomAttribute1,
      @RequestHeader(value = "X-Aws-Sqsd-Scheduled-At", required = false)
          String sqsdMessageTaskScheduleTime,
      @RequestHeader(value = "X-Aws-Sqsd-Sender-Id", required = false) String sqsdMessageSenderId,
      @RequestBody String sqsdMessageBody) {

    try {

      File localFile = retrieveS3File(sqsdMessageBody);

      if (localFile != null) {
        System.out.println("File downloaded: " + localFile.getAbsolutePath());

        // verify it
        File verifiedFile = verify(localFile);

        if (verifiedFile != null) {
          // extract it
          File extractedFile = extract(verifiedFile);

          if (extractedFile != null) {
            // process it by launching a Spring Batch job; the launch is asynchronous and returns immediately
            JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
            jobParametersBuilder.addString("INPUT_FILE_PATH", extractedFile.getAbsolutePath());
            jobParametersBuilder.addLong("TIMESTAMP", new Date().getTime());

            jobLauncher.run(job3, jobParametersBuilder.toJobParameters());
          }
        }
      }

      return new ResponseEntity<Void>(HttpStatus.OK);
    } catch (Exception ex) {
      String errorMessage = ex + " <== error";
      System.out.println("XXXXXXXXX");
      System.out.println(errorMessage);
      System.out.println("XXXXXXXXX");

      return new ResponseEntity<Void>(HttpStatus.INTERNAL_SERVER_ERROR);
    }
  }
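The comment inside the handler relies on the launcher being asynchronous, which is not the default; a minimal sketch of an async launcher configuration (Spring Batch 4.x style, assumed rather than shown by this example) is below.

  // Sketch (assumption): SimpleJobLauncher is synchronous by default, so a
  // TaskExecutor is needed for jobLauncher.run(...) to return immediately.
  @Bean
  public JobLauncher asyncJobLauncher(JobRepository jobRepository) throws Exception {
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
    jobLauncher.afterPropertiesSet();
    return jobLauncher;
  }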
Example #12
 @Override
 protected void executeInternal(JobExecutionContext context) {
   Map<String, Object> jobDataMap = context.getMergedJobDataMap();
   String jobName = (String) jobDataMap.get(JOB_NAME);
   log.info("Quartz trigger firing with Spring Batch jobName=" + jobName);
   JobParameters jobParameters = getJobParametersFromJobMap(jobDataMap);
   try {
     jobLauncher.run(jobLocator.getJob(jobName), jobParameters);
   } catch (JobExecutionException e) {
     log.error("Could not execute job.", e);
   }
 }
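For the Quartz trigger to carry the Spring Batch job name, the JobDetail has to put it into the JobDataMap under the same key that executeInternal(...) reads as JOB_NAME; the sketch below uses the Quartz 2.x builder API, and the literal key, class and bean names are assumptions.

  // Sketch (assumed scheduling config): the "jobName" entry must match the
  // JOB_NAME key read in executeInternal(...) above.
  void scheduleBatchJob(Scheduler scheduler) throws SchedulerException {
    JobDetail jobDetail = JobBuilder.newJob(BatchJobQuartzBridge.class)
        .withIdentity("batchJobDetail")
        .usingJobData("jobName", "reportJob")
        .build();
    Trigger trigger = TriggerBuilder.newTrigger()
        .withSchedule(CronScheduleBuilder.cronSchedule("0 0 2 * * ?"))
        .build();
    scheduler.scheduleJob(jobDetail, trigger);
  }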
  @Test
  public void changingStateWithAdapter() throws Exception {
    JobExecution exec =
        jobLauncher.run(
            jobWithAdapter,
            new JobParametersBuilder()
                .addString("inputFile", "/partner-products.txt")
                .toJobParameters());

    assertThat(exec.getStatus()).isEqualTo(BatchStatus.COMPLETED);
    assertThat(productRepository.count()).isEqualTo(8);
  }
  @RequestMapping("/job3/{input_file_name}")
  @ResponseBody
  String requestJob3(@PathVariable("input_file_name") String inputFileName)
      throws JobExecutionAlreadyRunningException, JobRestartException,
          JobInstanceAlreadyCompleteException, JobParametersInvalidException {
    JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
    jobParametersBuilder.addString("INPUT_FILE_PATH", inputFileName);
    jobParametersBuilder.addLong("TIMESTAMP", new Date().getTime());

    jobLauncher.run(job3, jobParametersBuilder.toJobParameters());
    return "Job3!";
  }
 @Test
 public void testFailedStepOnError() throws Exception {
   JobExecution jobExecution =
       jobLauncher.run(
           job,
           new JobParameters(Collections.singletonMap("item.three", new JobParameter("error"))));
   assertEquals(BatchStatus.FAILED, jobExecution.getStatus());
   StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next();
   assertEquals(9, stepExecution.getReadCount());
   // In principle the write count could be more than 2 and less than 9...
   assertEquals(7, stepExecution.getWriteCount());
 }
 @Test
 public void atomicProcessingFileNOk() throws Exception {
   JobExecution execution =
       jobLauncher.run(
           atomicProcessingJob,
           new JobParametersBuilder()
               .addString("inputFile", "classpath:/contacts-nok.txt")
               .toJobParameters());
   assertEquals(ExitStatus.FAILED, execution.getExitStatus());
   assertEquals(
       0, jdbcTemplate.queryForObject("select count(1) from contact", Integer.class).intValue());
 }
Example #17
  /** @param args the command line arguments */
  public static void main(String[] args) {

    long start = System.currentTimeMillis();
    log.info("---- Inicio proceso " + Commons.VERSION + " ----");
    try {
      ClassPathXmlApplicationContext ctx =
          new ClassPathXmlApplicationContext(
              "classpath:META-INF/applicationContext.xml",
              "classpath:META-INF/applicationDataSources.xml");
      ctx.start();
      try {

        LogFactory.getFactory().getInstance(Inicio.class).info(Commons.VERSION);

      } catch (Exception e) {
        e.printStackTrace();
      }
      JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");

      Job job = (Job) ctx.getBean("cargaTemplate");

      JobParametersBuilder builder = new JobParametersBuilder();
      JobExecution jobExecution = jobLauncher.run(job, builder.toJobParameters());

    } catch (Exception e) {
      log.error("---- Error proceso " + Commons.VERSION + " ----\n" + e.toString());
      Commons.setLevelError(2);
    }

    log.info("---- Fin proceso " + Commons.VERSION + " ----");
    long end = System.currentTimeMillis();
    log.info("- Ejecutado en " + Commons.getTimeDifference(start, end));

    if (Commons.levelError == 0) {
      log.info("---- ULTIMALINEAPROCESO CargadorTemplate BIEN");
    } else {
      log.info("---- ULTIMALINEAPROCESO CargadorTemplate MAL");
    }
    System.exit(Commons.levelError);
  }
Example #18
 @Bean
 CommandLineRunner jobRunner(
     JobLauncher launcher, Job job, @Value("${file:data.csv}") Resource resource) {
   return args -> {
     JobExecution jobExecution =
         launcher.run(
             job,
             new JobParametersBuilder()
                 .addString("file", resource.getFile().getAbsolutePath())
                 .toJobParameters());
     System.out.println("jobExecution: " + jobExecution.toString());
   };
 }
  @Override
  protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    Map<String, Object> jobDataMap = context.getMergedJobDataMap();
    String jobName = (String) jobDataMap.get(JOB_NAME);
    try {
      Job job = jobLocator.getJob(jobName);
      JobParameters allParams = translateParams(job, jobParameters);

      jobLauncher.run(job, allParams);
    } catch (Exception e) {
      logger.error("Could not execute job.", e);
    }
  }
Example #20
 public static void launch() {
   File input = retrieveFileToProcess();
   if (input != null) {
     String[] springConfig = {"spring/batch/jobs/jobs.xml"};
     ApplicationContext context = new ClassPathXmlApplicationContext(springConfig);
     JobLauncher jobLauncher = (JobLauncher) context.getBean("jobLauncher");
     Job job = (Job) context.getBean("integratePain008File");
     try {
       JobExecution execution =
           jobLauncher.run(
               job,
               new JobParametersBuilder()
                   .addString("inputFile", input.getName())
                   .toJobParameters());
       System.out.println("Exit Status : " + execution.getStatus());
     } catch (Exception e) {
       e.printStackTrace();
     }
   } else {
     System.out.println(
         "[" + Calendar.getInstance().getTime().toString() + "] No file to process");
   }
 }
  @Test
  public void shouldTaskletPrintSomething() {

    try {
      Map<String, JobParameter> params = Maps.newHashMap();
      params.put("test", new JobParameter("przodownik"));
      params.put("name", new JobParameter("borowiec"));
      params.put("time", new JobParameter(new Date()));
      JobExecution execution = jobLauncher.run(job, new JobParameters(params));
      log.info("Exit Status :  {}", execution.getStatus());
      assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  public JobExecution restart(Long jobExecutionId)
      throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobRestartException,
          JobInstanceAlreadyCompleteException, NoSuchJobException, JobParametersInvalidException {

    JobExecution target = getJobExecution(jobExecutionId);
    JobInstance lastInstance = target.getJobInstance();

    Job job = jobLocator.getJob(lastInstance.getJobName());

    JobExecution jobExecution = jobLauncher.run(job, lastInstance.getJobParameters());

    if (jobExecution.isRunning()) {
      activeExecutions.add(jobExecution);
    }
    return jobExecution;
  }
 @Test
 public void testSkipsInWriter() throws Exception {
   JobExecution jobExecution =
       jobLauncher.run(
           job,
           new JobParametersBuilder()
               .addString("item.three", "fail")
               .addLong("run.id", 1L)
               .toJobParameters());
   assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
   StepExecution stepExecution = jobExecution.getStepExecutions().iterator().next();
   assertEquals(9, stepExecution.getReadCount());
   assertEquals(7, stepExecution.getWriteCount());
   // The whole chunk gets skipped...
   assertEquals(2, stepExecution.getWriteSkipCount());
 }
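The skip counts asserted above come from a fault-tolerant step configured in the test context; a minimal sketch of that kind of step follows (the exception type, chunk size and bean names are assumptions, not the test's actual configuration).

  // Sketch (assumed step definition): skips in the writer are tolerated up to a
  // limit and surface in StepExecution.getWriteSkipCount().
  @Bean
  public Step skipTolerantStep(StepBuilderFactory steps,
      ItemReader<String> reader, ItemWriter<String> writer) {
    return steps.get("skipTolerantStep")
        .<String, String>chunk(3)
        .reader(reader)
        .writer(writer)
        .faultTolerant()
        .skip(IllegalStateException.class)
        .skipLimit(2)
        .build();
  }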
Example #24
  public void run() {

    try {

      String dateParam = new Date().toString();
      JobParameters param =
          new JobParametersBuilder().addString("date", dateParam).toJobParameters();

      System.out.println(dateParam);

      JobExecution execution = jobLauncher.run(job, param);
      System.out.println("Exit Status : " + execution.getStatus());

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
 /**
  * Runs the allRetrievalsJob {@link Job} every day of every month at 4 AM. Only if there's a new
  * patch will the job actually run.
  */
 @Scheduled(cron = "0 0 4 * * ?")
 public void runAllRetrievalsJob() {
   try {
     jobLauncher.run(
         allRetrievalsJob,
         new JobParametersBuilder()
             .addString(
                 "latestRiotPatch",
                 versionsRetrieval
                     .latestVersion(versionsRetrieval.versionsFromResponse())
                     .getPatch())
             .toJobParameters());
   } catch (JobInstanceAlreadyCompleteException e) {
     log.warn("Job instance was already completed with the latest Riot patch", e);
   } catch (Exception e) {
     log.error("Caught exception while running all retrievals job", e);
   }
 }
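The cron trigger above only fires if scheduling support is enabled somewhere in the application's configuration; a minimal sketch (class name assumed) is shown below.

  // Sketch (assumption): @Scheduled methods are only picked up when a
  // configuration class enables Spring's scheduling support.
  @Configuration
  @EnableScheduling
  public class SchedulingConfiguration {}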
  @Test
  public void shouldProcessAllIso8583Transactions() throws Exception {

    assertNotNull(
        jobLauncher.run(
            job,
            new JobParametersBuilder()
                .addString("run.id", "offline-transaction-processing-integration.test")
                .toJobParameters()));

    long repoSize = iso8583TransactionRepository.size();

    // this test will always start with an empty map, hence the first ID is 0
    for (long id = 0; id < repoSize; id++) {
      Iso8583Transaction tx = iso8583TransactionRepository.findById(id);

      assertEquals(
          "transaction was not completed: [" + tx + "]",
          TransactionStatus.COMPLETED,
          ((AbstractIso8583Transaction) tx).getStatus());
    }
  }
Example #27
  /**
   * Starts a new job if the number of active executions is below the configured maximum.
   *
   * @param params the job parameters to launch the job with; a generated id is added
   * @return the generated execution id, or an error message if the server is busy
   */
  public String submitJob(JobParametersBuilder params) {
    if (maxJobs - getActiveExecutionsCount() > 0) {
      try {
        String id = UUID.randomUUID().toString();

        params.addString("id", id);
        JobExecution ex = jobLauncher.run(job, params.toJobParameters());
        executions.put(id, ex);

        return id;
      } catch (JobExecutionAlreadyRunningException
          | JobRestartException
          | JobInstanceAlreadyCompleteException
          | JobParametersInvalidException e) {
        e.printStackTrace();
      }
    }
    return "Server bussy, please try agan later";
  }
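A hypothetical caller (the jobService reference and parameter values are illustrative) would pass a parameter builder and keep the returned id for later look-up.

  // Hypothetical usage sketch: the returned id keys the stored JobExecution.
  String id = jobService.submitJob(
      new JobParametersBuilder().addString("inputFile", "/data/incoming.csv"));
  System.out.println("Submitted job with id " + id);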
 @Log
 public void launch() throws Exception {
   jobLauncher.run(
       job,
       new JobParametersBuilder().addLong("s_time", System.currentTimeMillis()).toJobParameters());
 }
Example #29
  /*
   * Start a job using the module's application context to obtain the launcher,
   * explorer and repository. If a JobLocator has been set, use it to look up the
   * actual job; otherwise ask the context for it.
   */
  public int start(String moduleNm, String jobIdentifier, String[] parameters, Set<String> opts)
      throws Exception {

    INaviModuleContext context = null;

    try {
      context = NaviModuleContextFactory.getInstance().getNaviModuleContext(moduleNm);
      launcher = (JobLauncher) context.getBean("jobLauncher");
      jobExplorer = (JobExplorer) context.getBean("jobExplorer");
      jobRepository = (JobRepository) context.getBean("jobRepository");

      Assert.state(
          launcher != null,
          "A JobLauncher must be provided.  Please add one to the configuration.");
      if (opts.contains("-restart") || opts.contains("-next")) {
        Assert.state(
            jobExplorer != null,
            "A JobExplorer must be provided for a restart or start next operation.  Please add one to the configuration.");
      }

      String jobName = moduleNm + "_" + jobIdentifier;

      JobParameters jobParameters =
          jobParametersConverter.getJobParameters(
              StringUtils.splitArrayElementsIntoProperties(parameters, "="));
      Assert.isTrue(
          parameters == null || parameters.length == 0 || !jobParameters.isEmpty(),
          "Invalid JobParameters "
              + Arrays.asList(parameters)
              + ". If parameters are provided they should be in the form name=value (no whitespace).");

      if (opts.contains("-stop")) {
        List<JobExecution> jobExecutions = getRunningJobExecutions(jobName);
        if (jobExecutions == null) {
          throw new JobExecutionNotRunningException(
              "No running execution found for job=" + jobName);
        }
        for (JobExecution jobExecution : jobExecutions) {
          jobExecution.setStatus(BatchStatus.STOPPING);
          jobRepository.update(jobExecution);
        }
        return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
      }

      if (opts.contains("-abandon")) {
        List<JobExecution> jobExecutions = getStoppedJobExecutions(jobName);
        if (jobExecutions == null) {
          throw new JobExecutionNotStoppedException(
              "No stopped execution found for job=" + jobName);
        }
        for (JobExecution jobExecution : jobExecutions) {
          jobExecution.setStatus(BatchStatus.ABANDONED);
          jobRepository.update(jobExecution);
        }
        return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
      }

      if (opts.contains("-restart")) {
        JobExecution jobExecution = getLastFailedJobExecution(jobName);
        if (jobExecution == null) {
          throw new JobExecutionNotFailedException(
              "No failed or stopped execution found for job=" + jobName);
        }
        jobParameters = jobExecution.getJobInstance().getJobParameters();
        jobName = jobExecution.getJobInstance().getJobName();
      }

      Job job;
      if (jobLocator != null) {
        job = jobLocator.getJob(jobIdentifier);
      } else {
        job = (Job) context.getBean(jobIdentifier);
        AbstractJob tmptJob = (AbstractJob) job;
        // override jobNm with the module-qualified job name
        tmptJob.setName(jobName);
      }

      if (opts.contains("-next")) {
        JobParameters nextParameters = getNextJobParameters(job);
        Map<String, JobParameter> map =
            new HashMap<String, JobParameter>(nextParameters.getParameters());
        map.putAll(jobParameters.getParameters());
        jobParameters = new JobParameters(map);
      }

      JobExecution jobExecution = launcher.run(job, jobParameters);
      return exitCodeMapper.intValue(jobExecution.getExitStatus().getExitCode());

    } catch (Throwable e) {
      String message = "Job Terminated in error: " + e.getMessage();
      log.error(message, e);
      NaviDaemonJobRunner.message = message;
      return exitCodeMapper.intValue(ExitStatus.FAILED.getExitCode());
    } finally {
      if (context != null) {
        context.close();
      }
    }
  }
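A hypothetical invocation of start(...) (the runner instance, module and job names are illustrative) might look like this:

  // Hypothetical usage sketch: restart the last failed execution of
  // "moduleX_reportJob" without passing additional parameters.
  int exitCode = runner.start("moduleX", "reportJob", new String[0],
      new HashSet<String>(Arrays.asList("-restart")));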
  @Test
  public void testLaunchJobWithJobLauncher() throws Exception {

    JobExecution jobExecution = jobLauncher.run(job, jobParameters);
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
  }
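For comparison, Spring Batch's test support offers the same launch-and-assert pattern through JobLauncherTestUtils; the sketch below assumes the utility is registered as a bean in the test context.

  // Sketch (assumed test setup): JobLauncherTestUtils wires the launcher, job and
  // repository together and launches with unique parameters by default.
  @Autowired private JobLauncherTestUtils jobLauncherTestUtils;

  @Test
  public void testLaunchJobWithTestUtils() throws Exception {
    JobExecution jobExecution = jobLauncherTestUtils.launchJob();
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
  }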