/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");
    try (PrintWriter out = response.getWriter()) {
        out.println("<html>");
        out.println("<head>");
        out.println("<title>Servlet TestServlet</title>");
        out.println("</head>");
        out.println("<body>");
        out.println("<h1>Servlet TestServlet at " + request.getContextPath() + "</h1>");
        out.println("About to start the job<br>");
        JobOperator jo = BatchRuntime.getJobOperator();
        out.println("Got the job operator: " + jo + "<br>");
        try {
            jo.start("myJob", new Properties());
        } catch (JobSecurityException ex) {
            Logger.getLogger(TestServlet.class.getName()).log(Level.SEVERE, null, ex);
        }
        out.println("Job submitted<br>");
        out.println("<br><br>Check server.log for output");
        out.println("</body>");
        out.println("</html>");
    } catch (JobStartException ex) {
        Logger.getLogger(TestServlet.class.getName()).log(Level.SEVERE, null, ex);
    }
}
public static long runJob(String batchJobName) {
    JobOperator jo = BatchRuntime.getJobOperator();
    Properties props = new Properties();
    long id = jo.start(batchJobName, props);
    return id;
}
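For reference, the execution id returned here can be fed back into the same JobOperator to check on the submitted job. A minimal usage sketch, assuming a job XML named "myJob" is packaged under META-INF/batch-jobs (requires javax.batch.runtime.BatchRuntime and javax.batch.runtime.JobExecution):

// Hypothetical job name "myJob"; the job XML must exist under META-INF/batch-jobs.
long executionId = runJob("myJob");
JobExecution execution = BatchRuntime.getJobOperator().getJobExecution(executionId);
System.out.println("Batch status: " + execution.getBatchStatus());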
private static void executeJob(String jobId) {
    long execID = 0;
    // Starting a job involves work (such as reading the job XML) that misbehaves when run
    // concurrently, so synchronize per job ID.
    JobOperator jobOperator = BatchRuntime.getJobOperator();
    synchronized (jobId) {
        Properties props = new Properties();
        props.setProperty(
                "date", LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd")));
        execID = jobOperator.start(jobId, props);
        System.out.println("job started id:" + execID);
    }
    JobExecution jobExec = null;
    // Poll until the job finishes.
    while (true) {
        jobExec = jobOperator.getJobExecution(execID);
        if (jobExec.getEndTime() != null) {
            break;
        }
        try {
            Thread.sleep(1000L);
        } catch (InterruptedException ex) {
            // Ignore the interrupt and keep polling.
        }
    }
    System.out.println("JOB END:Status is " + jobExec.getExitStatus());
}
public void abandonJob(JobExecution jobExecution) {
    JobOperator jo = BatchRuntime.getJobOperator();
    jo.abandon(jobExecution.getInstanceId());
    LOGGER.log(Level.INFO, "Abandoned job: {0}", jobExecution.getInstanceId());
    jobExecution.setStatus(JobStatus.ABANDONED);
    em.merge(jobExecution);
}
protected JobOperator operator() {
    if (baseUrl == null) {
        return BatchRuntime.getJobOperator();
    }

    final ClientConfiguration configuration = new ClientConfiguration();
    configuration.setBaseUrl(baseUrl);
    configuration.setJsonProvider(jsonProvider);

    if (hostnameVerifier != null || keystorePath != null || keyManagerPath != null) {
        final ClientSslConfiguration ssl = new ClientSslConfiguration();
        configuration.setSsl(ssl);
        ssl.setHostnameVerifier(hostnameVerifier);
        ssl.setKeystorePassword(keystorePassword);
        ssl.setKeyManagerPath(keyManagerPath);
        ssl.setKeyManagerType(keyManagerType);
        ssl.setKeystorePath(keystorePath);
        ssl.setKeystoreType(keystoreType);
        ssl.setSslContextType(sslContextType);
        ssl.setTrustManagerAlgorithm(trustManagerAlgorithm);
        ssl.setTrustManagerProvider(trustManagerProvider);
    }

    final ClientSecurity security = new ClientSecurity();
    configuration.setSecurity(security);
    security.setUsername(username);
    security.setPassword(password);
    security.setType(type);

    return BatchEEJAXRSClientFactory.newClient(configuration);
}
public void open(Serializable e) throws Exception {
    Properties jobParameters =
            BatchRuntime.getJobOperator().getParameters(jobContext.getExecutionId());
    ConcurrentSkipListMap<Integer, PayrollInputRecord> records =
            dataBean.getPayrollInputRecords((String) jobParameters.get("monthYear"));
    Integer fromKey = (Integer) jobParameters.get("startEmpID");
    Integer toKey = (Integer) jobParameters.get("endEmpID");
    payrollInputRecords = records.subMap(fromKey, true, toKey, false).values().iterator();
}
public void stopJob(JobExecution jobExecution) {
    try {
        JobOperator jo = BatchRuntime.getJobOperator();
        jo.stop(jobExecution.getInstanceId());
        LOGGER.log(Level.INFO, "Stopped job: {0}", jobExecution.getInstanceId());
        jobExecution.setStatus(JobStatus.STOPPED);
        em.merge(jobExecution);
    } catch (JobExecutionNotRunningException ex) {
        LOGGER.log(Level.WARNING, ex.getMessage(), ex);
    }
}
protected static JobOperator getJobOperatorFromBatchRuntime() {
    try {
        return BatchRuntime.getJobOperator();
    } catch (java.util.ServiceConfigurationError error) {
        throw new IllegalStateException(
                "Could not get JobOperator. Check that the batch DataSource is configured"
                        + " properly and that the database is up and running",
                error);
    } catch (Throwable ex) {
        throw new IllegalStateException("Could not get JobOperator.", ex);
    }
}
public void restartJob(JobExecution jobExecution) {
    try {
        JobOperator jo = BatchRuntime.getJobOperator();
        jobExecution.setInstanceId(
                jo.restart(jobExecution.getInstanceId(), new java.util.Properties()));
        LOGGER.log(Level.INFO, "Restarted job: {0}", jobExecution.getInstanceId());
        jobExecution.setStatus(JobStatus.STARTED);
        em.merge(jobExecution);
    } catch (NoSuchJobExecutionException | JobRestartException ex) {
        LOGGER.log(Level.WARNING, ex.getMessage(), ex);
    }
}
public void startJob(JobExecution jobExecution) {
    try {
        JobOperator jo = BatchRuntime.getJobOperator();
        long instanceId =
                jo.start(jobExecution.getJobScheduler().getName(), new java.util.Properties());
        LOGGER.log(Level.INFO, "Started job: {0}", instanceId);
        jobExecution.setInstanceId(instanceId);
        jobExecution.setStatus(JobStatus.STARTED);
        em.merge(jobExecution);
    } catch (JobStartException ex) {
        LOGGER.log(Level.SEVERE, ex.getMessage(), ex);
    }
}
@Test
public void write() throws Exception {
    final String path = "target/work/StaxItemWriter.xml";
    final Properties jobParams = new Properties();
    jobParams.setProperty("output", path);

    final JobOperator jobOperator = BatchRuntime.getJobOperator();
    Batches.waitForEnd(jobOperator, jobOperator.start("stax-writer", jobParams));

    final String content = IOs.slurp(path);
    assertEquals(
            content.replace("<?xml version=\"1.0\" encoding=\"UTF-8\"?>", ""),
            "<root><foo><value>item 1</value></foo><foo><value>item 2</value></foo></root>");
}
@Schedule(hour = "*", minute = "0", second = "0")
public void myJob() {
    executedBatchs.add(BatchRuntime.getJobOperator().start("myJob", new Properties()));
    afterRun();
}
@Override
public void run() {
    JobOperator jobOperator = BatchRuntime.getJobOperator();
    long id = jobOperator.start(AWSEC2_INSTANCE_JOB, new Properties());
    logger.debug("Start the job: " + AWSEC2_INSTANCE_JOB + " with ID: " + id);
}
/**
 * A test class that reads CSV resource from
 * http://mysafeinfo.com/api/data?list=topmoviesboxoffice2012&format=csv
 */
public class MovieTest {
    private static final String jobName = "org.jberet.support.io.MovieTest";
    static final String moviesCsv = "movies-2012.csv";
    final JobOperator jobOperator = BatchRuntime.getJobOperator();

    static final String header = "rank,tit,grs,opn";
    static final String cellProcessors =
            "ParseInt; NotNull, StrMinMax(1, 100); DMinMax(1000000, 1000000000); ParseDate(YYYY-MM-dd)";

    // in xml output, ' in Marvel's will be escaped so we cannot match it verbatim
    static final String expectFull =
            "The Avengers," + "The Dark Knight Rises," + "Chimpanzee," + "The Five-Year Engagement";
    static final String expect2_4 = "The Dark Knight Rises, " + "The Hunger Games," + "Skyfall";
    static final String forbid2_4 = "The Avengers, " + "The Hobbit: An Unexpected Journey";
    static final String expect1_2 = "The Avengers," + "The Dark Knight Rises";
    static final String forbid1_2 = "Hunger Games";

    private String partialNameMapping;

    // test partial reading (certain columns are not read, by including null in nameMapping for
    // those columns); for bean-type reading only.
    @Test
    public void testBeanTypeNoDate2_4() throws Exception {
        this.partialNameMapping = "rank,tit,grs,null";
        testReadWrite0(
                "testBeanTypeNoDate2_4.out", "2", "4", Movie.class, expect2_4, forbid2_4 + ", 2012");
        this.partialNameMapping = null;
    }

    @Test
    public void testBeanType2_4() throws Exception {
        testReadWrite0("testBeanType2_4.out", "2", "4", Movie.class, expect2_4, forbid2_4);
    }

    @Test
    public void testListTypeFull() throws Exception {
        testReadWrite0("testListTypeFull.out", null, null, List.class, expectFull, null);
    }

    @Test
    public void testBeanTypeFull() throws Exception {
        testReadWrite0("testBeanTypeFull.out", null, null, Movie.class, expectFull, null);
    }

    @Test
    public void testMapTypeFull1_100() throws Exception {
        testReadWrite0("testMapTypeFull1_100.out", "1", "100", Map.class, expectFull, null);
    }

    @Test
    public void testMapType1_2() throws Exception {
        testReadWrite0("testMapType1_2.out", "1", "2", Map.class, expect1_2, forbid1_2);
    }

    private void testReadWrite0(
            final String writeResource,
            final String start,
            final String end,
            final Class<?> beanType,
            final String expect,
            final String forbid)
            throws Exception {
        final Properties params =
                CsvItemReaderWriterTest.createParams(CsvProperties.BEAN_TYPE_KEY, beanType.getName());
        final File writeResourceFile = new File(CsvItemReaderWriterTest.tmpdir, writeResource);
        params.setProperty("writeResource", writeResourceFile.getPath());
        params.setProperty(CsvProperties.CELL_PROCESSORS_KEY, cellProcessors);

        if (start != null) {
            params.setProperty(CsvProperties.START_KEY, start);
        }
        if (end != null) {
            params.setProperty(CsvProperties.END_KEY, end);
        }
        if (this.partialNameMapping != null) {
            params.setProperty(CsvProperties.NAME_MAPPING_KEY, partialNameMapping);
        }
        params.setProperty(CsvProperties.HEADER_KEY, header);
        CsvItemReaderWriterTest.setRandomWriteMode(params);

        final long jobExecutionId = jobOperator.start(jobName, params);
        final JobExecutionImpl jobExecution =
                (JobExecutionImpl) jobOperator.getJobExecution(jobExecutionId);
        jobExecution.awaitTermination(CsvItemReaderWriterTest.waitTimeoutMinutes, TimeUnit.MINUTES);
        Assert.assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());

        CsvItemReaderWriterTest.validate(writeResourceFile, expect, forbid);
    }
}
@Test
public void JobOperatorTest() {
    JobOperator jobOperator = BatchRuntime.getJobOperator();
    assertThat(jobOperator, is(not(nullValue())));
}