public void open(ExecutionContext executionContext) throws ItemStreamException {
    // Restore the current page on restart, otherwise seed the context on the first run.
    if (executionContext.containsKey("step2.tickers.page")) {
        curPage = (Integer) executionContext.get("step2.tickers.page");
    } else {
        executionContext.put("step2.tickers.page", curPage);
    }
}

@Override
public RepeatStatus execute(final StepContribution sc, final ChunkContext context) throws Exception {
    log.info("First simple task ..... execute !!! ");
    log.info("+++ StepContribution : {} ", sc);
    log.info("+++ ChunkContext : {} -> jobName : {} ", context, context.getStepContext().getJobName());
    log.info("+++ StepContext : jobParameters : {} , stepExecution : {} , stepName : {} ",
            context.getStepContext().getJobParameters(),
            context.getStepContext().getStepExecution(),
            context.getStepContext().getStepName());

    ExecutionContext jobExecutionContext =
            context.getStepContext().getStepExecution().getJobExecution().getExecutionContext();
    JobParameters jobParams =
            context.getStepContext().getStepExecution().getJobExecution().getJobParameters();
    log.info("time : {}", jobParams.getDate("time"));
    log.info("test : {}", jobParams.getString("test"));
    log.info("message : {}", message);

    // Written directly into the job execution context.
    jobExecutionContext.put("x", "y");

    // Written into the step execution context; a promotion listener can copy this key
    // to the job execution context once the step completes.
    ExecutionContext stepExecutionContext =
            context.getStepContext().getStepExecution().getExecutionContext();
    stepExecutionContext.put("login", "przodownikR1");

    Thread.sleep(4000);
    return FINISHED;
}

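// --- Added sketch (not from the original snippet): one way to promote the "login" key written
// --- to the step execution context above into the job execution context. Assumes a Java-config
// --- Spring Batch setup with a StepBuilderFactory; the bean and step names are hypothetical.
@Bean
public ExecutionContextPromotionListener promotionListener() {
    ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
    // Keys listed here are copied from the step to the job ExecutionContext after the step ends.
    listener.setKeys(new String[] {"login"});
    return listener;
}

@Bean
public Step promotingTaskletStep(StepBuilderFactory stepBuilderFactory, Tasklet tasklet) {
    return stepBuilderFactory.get("promotingTaskletStep")
            .tasklet(tasklet)
            .listener(promotionListener())
            .build();
}
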
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    int min = jdbcTemplate.queryForObject("SELECT MIN(" + column + ") from " + table, Integer.class);
    int max = jdbcTemplate.queryForObject("SELECT MAX(" + column + ") from " + table, Integer.class);
    // +1 keeps the split to at most gridSize partitions and guarantees targetSize is never 0
    // (a zero targetSize would leave `start` unchanged and loop forever).
    int targetSize = (max - min) / gridSize + 1;

    Map<String, ExecutionContext> result = new HashMap<>();
    int number = 0;
    int start = min;
    int end = start + targetSize - 1;
    while (start <= max) {
        ExecutionContext value = new ExecutionContext();
        result.put("partition" + number, value);
        if (end >= max) {
            end = max;
        }
        value.putInt("minValue", start);
        value.putInt("maxValue", end);
        start += targetSize;
        end += targetSize;
        number++;
    }
    return result;
}

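// --- Added sketch (assumption, not part of the source): how a worker step typically consumes the
// --- minValue/maxValue keys produced by the partitioner above, via a step-scoped reader bound to
// --- its partition's ExecutionContext. The table and bean names here are hypothetical.
@Bean
@StepScope
public JdbcCursorItemReader<Map<String, Object>> workerReader(
        DataSource dataSource,
        @Value("#{stepExecutionContext['minValue']}") Integer minValue,
        @Value("#{stepExecutionContext['maxValue']}") Integer maxValue) {
    JdbcCursorItemReader<Map<String, Object>> reader = new JdbcCursorItemReader<>();
    reader.setDataSource(dataSource);
    reader.setSql("SELECT * FROM some_table WHERE id BETWEEN " + minValue + " AND " + maxValue);
    reader.setRowMapper(new ColumnMapRowMapper());
    return reader;
}
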
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    super.open(executionContext);
    try {
        doOpen();
    } catch (Exception e) {
        throw new ItemStreamException("Failed to initialize the reader", e);
    }
    if (!isSaveState()) {
        return;
    }
    if (executionContext.containsKey(getExecutionContextKey(READ_COUNT_MAX))) {
        maxItemCount = executionContext.getInt(getExecutionContextKey(READ_COUNT_MAX));
    }
    if (executionContext.containsKey(getExecutionContextKey(READ_COUNT))) {
        int itemCount = executionContext.getInt(getExecutionContextKey(READ_COUNT));
        if (itemCount < maxItemCount) {
            try {
                jumpToItem(itemCount);
            } catch (Exception e) {
                throw new ItemStreamException("Could not move to stored position on restart", e);
            }
        }
        currentItemCount = itemCount;
    }
}

public void open(ExecutionContext executionContext) throws ItemStreamException {
    if (executionContext.containsKey(EXPECTED)) {
        // Restart: restore the counts and block until any outstanding results have arrived.
        localState.expected = executionContext.getLong(EXPECTED);
        localState.actual = executionContext.getLong(ACTUAL);
        if (!waitForResults()) {
            throw new ItemStreamException("Timed out waiting for back log on open");
        }
    }
}

@Override
public A23 process(A23 item) throws Exception {
    ExecutionContext stepContext = stepExecution.getExecutionContext();
    long processCount = stepContext.getLong("PROCESS_COUNT", 0);
    processCount++;
    stepContext.putLong("PROCESS_COUNT", processCount);
    // Flag every writeSampleFrequency-th item (starting with the first) as a sample.
    if (processCount % writeSampleFrequency == 1) {
        item.setSample(true);
    }
    return item;
}

@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
    super.update(executionContext);
    if (saveState) {
        Assert.notNull(executionContext, "ExecutionContext must not be null");
        executionContext.putInt(getExecutionContextKey(READ_COUNT), currentItemCount);
        if (maxItemCount < Integer.MAX_VALUE) {
            executionContext.putInt(getExecutionContextKey(READ_COUNT_MAX), maxItemCount);
        }
    }
}

@Test
@SuppressWarnings({"unchecked", "rawtypes"})
public void testWrite() throws Exception {
    File file = new File(tmpDir, "foo.txt");
    file.delete();
    ByteArrayInputStream data = new ByteArrayInputStream("foobarbaz".getBytes());

    Session session = mock(Session.class);
    SessionFactory factory = mock(SessionFactory.class);
    when(factory.getSession()).thenReturn(session);
    when(session.readRaw("foo.txt")).thenReturn(data);
    when(session.finalizeRaw()).thenReturn(true);

    StepExecution stepExecution = new StepExecution("foo", null);
    ExecutionContext stepExecutionContext = new ExecutionContext();
    stepExecutionContext.putString("filePath", "foo.txt");
    stepExecution.setExecutionContext(stepExecutionContext);
    StepContext stepContext = new StepContext(stepExecution);
    ChunkContext chunkContext = new ChunkContext(stepContext);

    RemoteFileTemplate template = new RemoteFileTemplate(factory);
    template.setBeanFactory(mock(BeanFactory.class));
    template.afterPropertiesSet();

    // clean up from old tests
    FileSystem fs = FileSystem.get(configuration);
    Path p = new Path("/qux/foo.txt");
    fs.delete(p, true);
    assertFalse(fs.exists(p));

    RemoteFileToHadoopTasklet tasklet = new RemoteFileToHadoopTasklet(template, configuration, "/qux");
    assertEquals(RepeatStatus.FINISHED, tasklet.execute(null, chunkContext));

    assertTrue(fs.exists(p));
    FSDataInputStream stream = fs.open(p);
    byte[] out = new byte[9];
    stream.readFully(out);
    stream.close();
    assertEquals("foobarbaz", new String(out));
    fs.close();
}

@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> partitions = new HashMap<>(gridSize);
    long count = repository.count();
    long recordCount = count / gridSize;
    // Split the record range into gridSize slices, working from the top down:
    // each partition gets an upper bound (paramB) and a lower bound (paramA).
    for (int i = 0; i < gridSize; i++) {
        ExecutionContext context = new ExecutionContext();
        context.putLong("paramB", count);
        count = count - recordCount;
        if (count != 1) {
            context.putLong("paramA", count + 1);
        } else {
            context.putLong("paramA", count);
        }
        partitions.put(PREFIX + i, context);
    }
    return partitions;
}

@Test
public void testExecuteRestart() throws Exception {
    DefaultJobParametersExtractor jobParametersExtractor = new DefaultJobParametersExtractor();
    jobParametersExtractor.setKeys(new String[] {"foo"});
    ExecutionContext executionContext = stepExecution.getExecutionContext();
    executionContext.put("foo", "bar");
    step.setJobParametersExtractor(jobParametersExtractor);
    step.setJob(new JobSupport("child") {
        @Override
        public void execute(JobExecution execution) throws UnexpectedJobExecutionException {
            assertEquals(1, execution.getJobParameters().getParameters().size());
            execution.setStatus(BatchStatus.FAILED);
            execution.setEndTime(new Date());
            jobRepository.update(execution);
            throw new RuntimeException("FOO");
        }

        @Override
        public boolean isRestartable() {
            return true;
        }
    });
    step.afterPropertiesSet();
    step.execute(stepExecution);
    assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage());

    JobExecution jobExecution = stepExecution.getJobExecution();
    jobExecution.setEndTime(new Date());
    jobRepository.update(jobExecution);

    jobExecution = jobRepository.createJobExecution("job", new JobParameters());
    stepExecution = jobExecution.createStepExecution("step");
    // In a restart the surrounding Job would set up the context like this...
    stepExecution.setExecutionContext(executionContext);
    jobRepository.add(stepExecution);
    step.execute(stepExecution);
    assertEquals("FOO", stepExecution.getFailureExceptions().get(0).getMessage());
}

@Override
protected ExitStatus doRead(StepContribution contribution, ExecutionContext context, ExcelRow item) {
    ExitStatus result = ExitStatus.EXECUTING;
    LocalDate thresholdDate = getMaxMonthsThresholdDate(item);
    if (thresholdDate != null) {
        // Store the threshold date in the context
        context.put("threshold.date",
                Date.from(thresholdDate.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant()));
        // Record that something was stored
        contribution.incrementWriteCount(1);
        result = COMPLETED_WITH_MAX_MONTHS;
    }
    return result;
}

public static <T> T getFromJobExecutionContext(ExecutionContext context, String key, Class<T> klass) {
    Object value = context.get(key);
    if (value != null) {
        if (value instanceof String) {
            // Values are stored as JSON strings; deserialize back to the requested type.
            Gson gson = new Gson();
            return gson.fromJson((String) value, klass);
        } else {
            logger.error("invalid data type saved into execution context: " + value.getClass() + ", " + value);
        }
    }
    return null;
}

@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
    executionContext.putInt(getKey("COUNT"), counter.get());
}

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    counter.set(executionContext.getInt(getKey("COUNT"), 0));
}

@Override
public void update(final ExecutionContext executionContext) throws ItemStreamException {
    executionContext.putString(LAST_ID_RETURNED_EXECUTION_CONTEXT_KEY, lastIdReturned);
}

@Override
public void open(final ExecutionContext executionContext) throws ItemStreamException {
    lastIdBuffer = executionContext.getString(LAST_ID_RETURNED_EXECUTION_CONTEXT_KEY, null);
    lastIdReturned = lastIdBuffer;
}

public void update(ExecutionContext executionContext) throws ItemStreamException {
    executionContext.putLong(EXPECTED, localState.expected);
    executionContext.putLong(ACTUAL, localState.actual);
}

public void update(ExecutionContext executionContext) throws ItemStreamException {
    executionContext.put("step2.tickers.page", curPage);
    curPage++;
}

public static void putIntoJobExecutionContext(ExecutionContext context, String key, Object value) {
    Gson gson = new Gson();
    String valueString = gson.toJson(value);
    context.put(key, valueString);
}

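// --- Added usage sketch (not from the source) pairing the two Gson helpers above: values go into
// --- the job ExecutionContext as JSON strings and are rebuilt on read, so stored entries remain
// --- plain Strings. The key name "tickerCount" is hypothetical.
public static void demoJsonRoundTrip(ExecutionContext jobExecutionContext) {
    putIntoJobExecutionContext(jobExecutionContext, "tickerCount", Integer.valueOf(42));
    Integer restored = getFromJobExecutionContext(jobExecutionContext, "tickerCount", Integer.class);
    // restored is 42 here, deserialized from the "42" JSON string written above
}
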
public void update(ExecutionContext executionContext) throws ItemStreamException {
    executionContext.putLong("POSITION", index);
}

public void open(ExecutionContext executionContext) throws ItemStreamException {
    index = (int) executionContext.getLong("POSITION", min);
}