List of usage examples for the no-argument constructor of org.springframework.batch.item.ExecutionContext
public ExecutionContext()
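The no-argument constructor creates an empty, mutable set of key/value pairs. Before looking at the examples below, here is a minimal sketch of typical use; the key names are made up for illustration:

ExecutionContext context = new ExecutionContext();

// store typed values under string keys
context.putLong("lines.read", 42L);
context.putString("current.resource", "file:/tmp/input.csv");

// read back, supplying a default for a possibly missing key
long linesRead = context.getLong("lines.read", 0L);
boolean hasResource = context.containsKey("current.resource");

// the context tracks modifications so the repository knows when to persist it
boolean needsSave = context.isDirty();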
From source file:org.springframework.batch.core.partition.support.FlatFilePartitioner.java
/**
 * Creates a standard {@link ExecutionContext} with the specified parameters.
 * @param partitionName the name of the partition
 * @param startAt the number of bytes for a partition thread to skip before starting reading
 * @param itemsCount the number of items to read
 * @param previousItemsCount the number of items read by the preceding partitions
 * @return the execution context (output)
 */
protected ExecutionContext createExecutionContext(String partitionName, long startAt, long itemsCount,
        long previousItemsCount) {
    final ExecutionContext executionContext = new ExecutionContext();
    executionContext.putLong(startAtKeyName, startAt);
    executionContext.putLong(itemsCountKeyName, itemsCount);
    executionContext.putLong(previousItemsCountKeyName, previousItemsCount);
    try {
        executionContext.putString(resourceKeyName, "file:" + resource.getFile().getPath());
    }
    catch (IOException e) {
        throw new IllegalArgumentException("File could not be located for: " + resource, e);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Added partition [" + partitionName + "] with [" + executionContext + "]");
    }
    return executionContext;
}
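A worker step typically reads these values back through step-scoped late binding. A minimal sketch, assuming the configurable *KeyName fields above default to "resource" and "itemsCount" (an assumption, not confirmed by this snippet), and leaving out the byte-offset handling for startAt:

@Bean
@StepScope
public FlatFileItemReader<String> workerReader(
        @Value("#{stepExecutionContext['resource']}") String resourceUrl,
        @Value("#{stepExecutionContext['itemsCount']}") long itemsCount) {
    // each partition gets its own ExecutionContext, so every worker
    // sees a different resource/offset/count triple
    FlatFileItemReader<String> reader = new FlatFileItemReader<String>();
    reader.setResource(new DefaultResourceLoader().getResource(resourceUrl));
    reader.setMaxItemCount((int) itemsCount); // read only this partition's slice
    reader.setLineMapper(new PassThroughLineMapper());
    return reader;
}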
From source file:org.springframework.batch.core.repository.support.SimpleJobRepository.java
@Override
public JobExecution createJobExecution(String jobName, JobParameters jobParameters)
        throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException {

    Assert.notNull(jobName, "Job name must not be null.");
    Assert.notNull(jobParameters, "JobParameters must not be null.");

    /*
     * Find all jobs matching the runtime information.
     *
     * If this method is transactional, and the isolation level is
     * REPEATABLE_READ or better, another launcher trying to start the same
     * job in another thread or process will block until this transaction
     * has finished.
     */
    JobInstance jobInstance = jobInstanceDao.getJobInstance(jobName, jobParameters);
    ExecutionContext executionContext;

    // existing job instance found
    if (jobInstance != null) {
        List<JobExecution> executions = jobExecutionDao.findJobExecutions(jobInstance);

        // check for running executions and find the last started
        for (JobExecution execution : executions) {
            if (execution.isRunning() || execution.isStopping()) {
                throw new JobExecutionAlreadyRunningException(
                        "A job execution for this job is already running: " + jobInstance);
            }
            BatchStatus status = execution.getStatus();
            if (status == BatchStatus.UNKNOWN) {
                throw new JobRestartException("Cannot restart job from UNKNOWN status. "
                        + "The last execution ended with a failure that could not be rolled back, "
                        + "so it may be dangerous to proceed. Manual intervention is probably necessary.");
            }
            if (execution.getJobParameters().getParameters().size() > 0
                    && (status == BatchStatus.COMPLETED || status == BatchStatus.ABANDONED)) {
                throw new JobInstanceAlreadyCompleteException(
                        "A job instance already exists and is complete for parameters=" + jobParameters
                                + ". If you want to run this job again, change the parameters.");
            }
        }
        executionContext = ecDao.getExecutionContext(jobExecutionDao.getLastJobExecution(jobInstance));
    }
    else {
        // no job found, create one
        jobInstance = jobInstanceDao.createJobInstance(jobName, jobParameters);
        executionContext = new ExecutionContext();
    }

    JobExecution jobExecution = new JobExecution(jobInstance, jobParameters, null);
    jobExecution.setExecutionContext(executionContext);
    jobExecution.setLastUpdated(new Date(System.currentTimeMillis()));

    // Save the JobExecution so that it picks up an ID (useful for clients
    // monitoring asynchronous executions):
    jobExecutionDao.saveJobExecution(jobExecution);
    ecDao.saveExecutionContext(jobExecution);

    return jobExecution;
}
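Design note: a fresh new ExecutionContext() is created only for a brand-new JobInstance. On a restart the method instead loads the context of the instance's last execution, which is what lets ItemStream components resume from their saved offsets.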
From source file:org.springframework.batch.core.repository.support.SimpleJobRepository.java
@Override
public JobExecution createJobExecution(JobInstance jobInstance, JobParameters jobParameters,
        String jobConfigurationLocation) {

    Assert.notNull(jobInstance, "A JobInstance is required to associate the JobExecution with");
    Assert.notNull(jobParameters, "A JobParameters object is required to create a JobExecution");

    JobExecution jobExecution = new JobExecution(jobInstance, jobParameters, jobConfigurationLocation);
    ExecutionContext executionContext = new ExecutionContext();
    jobExecution.setExecutionContext(executionContext);
    jobExecution.setLastUpdated(new Date(System.currentTimeMillis()));

    // Save the JobExecution so that it picks up an ID (useful for clients
    // monitoring asynchronous executions):
    jobExecutionDao.saveJobExecution(jobExecution);
    ecDao.saveExecutionContext(jobExecution);

    return jobExecution;
}
From source file:org.springframework.batch.core.scope.StepScopePerformanceTests.java
private int doTest(String name, String test) throws Exception {
    @SuppressWarnings("unchecked")
    ItemStreamReader<String> reader = (ItemStreamReader<String>) applicationContext.getBean(name);
    reader.open(new ExecutionContext());
    StopWatch stopWatch = new StopWatch(test);
    stopWatch.start();
    int count = 0;
    while (reader.read() != null) {
        // just count the items; the timed work happens inside read()
        count++;
    }
    stopWatch.stop();
    reader.close();
    logger.info(stopWatch.shortSummary());
    return count;
}
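Opening the stream with an empty new ExecutionContext() signals that there is no saved state to restore, so the reader always starts from the beginning; a context already containing the reader's keys would instead trigger restart processing in open().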
From source file:org.springframework.batch.integration.x.IncrementalColumnRangePartitioner.java
/**
 * Partition a database table assuming that the data in the column specified
 * are uniformly distributed. The execution context values will have keys
 * <code>partClause</code> and <code>partSuffix</code> specifying the WHERE
 * clause and a per-partition suffix for each partition.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    StringBuilder incrementalClause = new StringBuilder();
    Map<String, ExecutionContext> result = new HashMap<>();

    if (!StringUtils.hasText(checkColumn) && !StringUtils.hasText(column)) {
        // no partitioning column configured: emit a single, unrestricted partition
        ExecutionContext value = new ExecutionContext();
        value.put("partClause", "");
        value.put("partSuffix", "");
        result.put("partition0", value);
    }
    else {
        if (StringUtils.hasText(checkColumn)) {
            incrementalClause.append(checkColumn).append(" > ").append(this.incrementalMin);
        }
        long targetSize = (this.partitionMax - this.partitionMin) / partitions + 1;
        int number = 0;
        long start = this.partitionMin;
        long end = start + targetSize - 1;

        while (start >= 0 && start <= this.partitionMax) {
            ExecutionContext value = new ExecutionContext();
            result.put("partition" + number, value);
            if (end >= this.partitionMax) {
                end = this.partitionMax;
            }
            if (StringUtils.hasText(checkColumn)) {
                value.putString("partClause", String.format("WHERE (%s BETWEEN %s AND %s) AND %s", column,
                        start, end, incrementalClause.toString()));
            }
            else {
                value.putString("partClause", String.format("WHERE (%s BETWEEN %s AND %s)", column, start, end));
            }
            value.putString("partSuffix", "-p" + number);
            start += targetSize;
            end += targetSize;
            number++;
            log.debug("Current ExecutionContext = " + value);
        }
    }
    return result;
}
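Usage note: each partition's context carries a ready-made SQL fragment rather than raw bounds, so a step-scoped reader can splice #{stepExecutionContext['partClause']} directly into its query, and #{stepExecutionContext['partSuffix']} can distinguish per-partition output resources.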
From source file:org.springframework.batch.item.database.JdbcPagingRestartIntegrationTests.java
@Test
@Ignore // FIXME
public void testReaderFromStart() throws Exception {
    ItemReader<Foo> reader = getItemReader();
    int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS");
    ExecutionContext executionContext = new ExecutionContext();
    ((ItemStream) reader).open(executionContext);

    for (int i = 0; i < total; i++) {
        Foo item = reader.read();
        logger.debug("Item: " + item);
        assertNotNull(item);
    }

    Foo item = reader.read();
    logger.debug("Item: " + item);
    assertNull(item);
}
From source file:org.springframework.batch.item.database.JdbcPagingRestartIntegrationTests.java
@Test
@Ignore // FIXME
public void testReaderOnRestart() throws Exception {
    ItemReader<Foo> reader = getItemReader();

    int total = JdbcTestUtils.countRowsInTable(jdbcTemplate, "T_FOOS");
    int count = (total / pageSize) * pageSize;
    int pagesToRead = Math.min(3, total / pageSize);
    if (count >= pagesToRead * pageSize) {
        count -= pagesToRead * pageSize;
    }

    ExecutionContext executionContext = new ExecutionContext();
    executionContext.putInt("JdbcPagingItemReader.read.count", count);

    // Assume the primary keys are in order
    List<Map<String, Object>> ids = jdbcTemplate.queryForList("SELECT ID,NAME FROM T_FOOS ORDER BY ID ASC");
    logger.debug("Ids: " + ids);
    int startAfterValue = (int) Long.parseLong(ids.get(count - 1).get("ID").toString());
    logger.debug("Start after: " + startAfterValue);
    Map<String, Object> startAfterValues = new LinkedHashMap<String, Object>();
    startAfterValues.put("ID", startAfterValue);
    executionContext.put("JdbcPagingItemReader.start.after", startAfterValues);
    ((ItemStream) reader).open(executionContext);

    for (int i = count; i < total; i++) {
        Foo item = reader.read();
        logger.debug("Item: " + item);
        assertNotNull(item);
    }

    Foo item = reader.read();
    logger.debug("Item: " + item);
    assertNull(item);
}
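Usage note: "JdbcPagingItemReader.read.count" and "JdbcPagingItemReader.start.after" are the keys the reader itself records in the ExecutionContext via its update() callback (the prefix is the reader's default name). Pre-populating them before open() simulates the state an interrupted run would have left behind, so the reader resumes after the recorded sort key instead of from the first page.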
From source file:org.springframework.batch.item.database.JpaPagingItemReaderAsyncTests.java
private JpaPagingItemReader<Foo> getItemReader() throws Exception {
    String jpqlQuery = "select f from Foo f";

    JpaPagingItemReader<Foo> reader = new JpaPagingItemReader<Foo>();
    reader.setQueryString(jpqlQuery);
    reader.setEntityManagerFactory(entityManagerFactory);
    reader.setPageSize(PAGE_SIZE);
    reader.afterPropertiesSet();
    reader.setSaveState(false);

    reader.open(new ExecutionContext());

    return reader;
}
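Design note: setSaveState(false) stops the reader from recording its position in the ExecutionContext during update(), trading restartability for safe concurrent use, which fits this asynchronous test where several threads pull from one reader. The empty context passed to open() is then essentially a formality required by the ItemStream contract.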
From source file:org.springframework.batch.item.excel.AbstractExcelItemReaderTests.java
@Before
public void setup() throws Exception {
    this.itemReader = createExcelItemReader();
    this.itemReader.setLinesToSkip(1); // first line is column names
    this.itemReader.setResource(new ClassPathResource("org/springframework/batch/item/excel/player.xls"));
    this.itemReader.setRowMapper(new PassThroughRowMapper());
    this.itemReader.setSkippedRowsCallback(new RowCallbackHandler() {
        public void handleRow(RowSet rs) {
            logger.info("Skipping: " + StringUtils.arrayToCommaDelimitedString(rs.getCurrentRow()));
        }
    });
    configureItemReader(this.itemReader);
    this.itemReader.afterPropertiesSet();
    executionContext = new ExecutionContext();
    this.itemReader.open(executionContext);
}
From source file:org.springframework.batch.item.xml.AbstractStaxEventWriterItemWriterTests.java
@Before
public void setUp() throws Exception {
    File directory = new File("target/data");
    directory.mkdirs();
    outputFile = File.createTempFile(ClassUtils.getShortName(this.getClass()), ".xml", directory);
    resource = new FileSystemResource(outputFile);

    writer.setResource(resource);
    writer.setMarshaller(getMarshaller());
    writer.afterPropertiesSet();

    writer.open(new ExecutionContext());
}