List of usage examples for org.springframework.batch.core.JobExecution.getStepExecutions()
public Collection<StepExecution> getStepExecutions()
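Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share: take a JobExecution and iterate the collection returned by getStepExecutions() to inspect each step. The class and method names are illustrative only and do not come from any of the examples below.

import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.StepExecution;

public class StepExecutionInspector {

    // Prints name, status, and write count for every step execution of the given job execution.
    public void printStepSummaries(JobExecution jobExecution) {
        for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
            System.out.printf("step=%s status=%s writeCount=%d%n",
                    stepExecution.getStepName(),
                    stepExecution.getStatus(),
                    stepExecution.getWriteCount());
        }
    }
}

Note that getStepExecutions() contains executions only for steps that have actually started; for a job still in flight the collection grows as the job progresses.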
From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
@Test
public void testRunFlatFileToDbNoSkipJob_Success() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Success.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 5L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
            .withChunkErrorCount(0L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbNoSkipJob.step." + MetricNames.PROCESS_COUNT.getName())
            .getValue(), is(notNullValue()));
    // TODO assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
@Test
public void testRunFlatFileToDbSkipJob_SkipInWrite() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInWrite.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(4L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(0L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(4L)
            .withAfterChunkCount(4L).withChunkErrorCount(2L).withSkipInReadCount(0L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(1L).build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbSkipJob.step." + MetricNames.PROCESS_COUNT.getName()).getValue(),
            is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInProcess.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(8L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(1L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withAfterChunkCount(3L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(1L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbSkipJob.step." + MetricNames.PROCESS_COUNT.getName()).getValue(),
            is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
From source file:org.obiba.onyx.core.service.impl.DefaultAppointmentManagementServiceImpl.java
public List<AppointmentUpdateLog> getLogListForDate(Date date) {
    List<AppointmentUpdateLog> logList = new ArrayList<AppointmentUpdateLog>();
    List<JobInstance> jobsList = jobExplorer.getJobInstances(job.getName(), 0, 10);
    JobExecution jobExecution = null;
    for (JobInstance jobInstance : jobsList) {
        if (jobInstance.getJobParameters().getDate("date").toString().equals(date.toString())) {
            jobExecution = jobExplorer.getJobExecutions(jobInstance).get(0);
            break;
        }
    }
    if (jobExecution == null)
        return null;
    for (StepExecution stepExec : jobExecution.getStepExecutions()) {
        StepExecution stepExecution = jobExplorer.getStepExecution(jobExecution.getId(), stepExec.getId());
        if (stepExecution.getExecutionContext().get("logList") != null) {
            logList.addAll((List<AppointmentUpdateLog>) (stepExecution.getExecutionContext().get("logList")));
        }
    }
    return logList;
}
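Two details in this example are worth noting. First, it re-loads each StepExecution through jobExplorer.getStepExecution(...) rather than using the instances already attached to the JobExecution, presumably because, depending on the Spring Batch version, those attached instances may not have their ExecutionContexts populated. Second, the cast of the "logList" entry is unchecked. A more defensive extraction helper might look like the following sketch; the class name ExecutionContextLists is hypothetical, not part of the example above or of Spring Batch.

import java.util.ArrayList;
import java.util.List;

import org.springframework.batch.item.ExecutionContext;

public final class ExecutionContextLists {

    private ExecutionContextLists() {
    }

    // Null-safe, type-checked extraction of a List entry from an ExecutionContext.
    // Elements of an unexpected type are skipped instead of surfacing later as a
    // ClassCastException, as the raw cast in the example above could.
    public static <T> List<T> getList(ExecutionContext context, String key, Class<T> elementType) {
        List<T> result = new ArrayList<T>();
        Object entry = context.get(key);
        if (entry instanceof List<?>) {
            for (Object element : (List<?>) entry) {
                if (elementType.isInstance(element)) {
                    result.add(elementType.cast(element));
                }
            }
        }
        return result;
    }
}

With this helper, the loop body above would become logList.addAll(ExecutionContextLists.getList(stepExecution.getExecutionContext(), "logList", AppointmentUpdateLog.class)).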
From source file:com.xchanging.support.batch.admin.service.SimpleJobService.java
public Collection<String> getStepNamesForJob(String jobName) throws NoSuchJobException {
    try {
        Job job = jobLocator.getJob(jobName);
        if (job instanceof StepLocator) {
            return ((StepLocator) job).getStepNames();
        }
    } catch (NoSuchJobException e) {
        // ignore
    }
    Collection<String> stepNames = new LinkedHashSet<String>();
    for (JobExecution jobExecution : listJobExecutionsForJob(jobName, 0, 100)) {
        for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
            stepNames.add(stepExecution.getStepName());
        }
    }
    return Collections.unmodifiableList(new ArrayList<String>(stepNames));
}
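The design here is a fallback: if the Job can enumerate its own steps (it implements StepLocator), that authoritative list is returned directly; otherwise the step names are reconstructed from up to 100 recent executions via getStepExecutions(). The LinkedHashSet de-duplicates names while preserving first-seen order, though steps that never produced an execution will be missing from the reconstructed list.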
From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
@Test
public void testRunFlatFileToDbNoSkipJob_Restart() throws InterruptedException, IOException {
    FileCopyUtils.copy(new File("src/test/resources/metrics/flatFileToDbNoSkipJob_Restart_FirstRun.csv"),
            new File("src/test/resources/metrics/flatFileToDbNoSkipJob_Restart.csv"));
    JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Restart.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.FAILED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 3L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(1L).withStreamOpenCount(1L).withStreamUpdateCount(2L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(6L).withReadErrorCount(0L)
            .withBeforeProcessCount(3L).withProcessCount(3L).withAfterProcessCount(3L).withProcessErrorCount(1L)
            .withBeforeWriteCount(3L).withWriteCount(writeCount).withAfterWriteCount(3L).withAfterChunkCount(1L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbNoSkipJob.step." + MetricNames.PROCESS_COUNT.getName())
            .getValue(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
    FileCopyUtils.copy(new File("src/test/resources/metrics/flatFileToDbNoSkipJob_Restart_SecondRun.csv"),
            new File("src/test/resources/metrics/flatFileToDbNoSkipJob_Restart.csv"));
    jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Restart.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    executionContext = jobExecution.getStepExecutions().iterator().next().getExecutionContext();
    writeCount = 8L;
    validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withValidateGauge(false).withBeforeChunkCount(3L).withStreamOpenCount(2L).withStreamUpdateCount(5L)
            .withStreamCloseCount(0L).withBeforeReadCount(12L).withReadCount(12L).withAfterReadCount(11L)
            .withReadErrorCount(0L).withBeforeProcessCount(8L).withProcessCount(8L).withAfterProcessCount(8L)
            .withProcessErrorCount(1L).withBeforeWriteCount(8L).withWriteCount(writeCount)
            .withAfterWriteCount(8L).withAfterChunkCount(3L).withChunkErrorCount(1L).withSkipInReadCount(0L)
            .withSkipInProcessCount(0L).withSkipInWriteCount(0L).build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    // processCount is 5 for the second run; metrics are not cumulated across restarts
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbNoSkipJob.step." + MetricNames.PROCESS_COUNT.getName())
            .getValue(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
    new File("src/test/resources/metrics/flatFileToDbNoSkipJob_Restart.csv").delete();
}
From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess_Failed() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob",
            "metrics/flatFileToDbSkipJob_SkipInProcess_Failed.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.FAILED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(12L).withReadCount(12L).withAfterReadCount(12L).withReadErrorCount(0L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(5L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withAfterChunkCount(3L)
            .withChunkErrorCount(6L).withSkipInReadCount(0L).withSkipInProcessCount(2L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbSkipJob.step." + MetricNames.PROCESS_COUNT.getName()).getValue(),
            is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}
From source file:org.springframework.batch.admin.domain.JobExecutionInfoResource.java
public JobExecutionInfoResource(JobExecution jobExecution, TimeZone timeZone) {
    if (timeZone != null) {
        this.timeZone = timeZone;
    } else {
        this.timeZone = TimeZone.getTimeZone("UTC");
    }
    this.executionId = jobExecution.getId();
    this.jobId = jobExecution.getJobId();
    this.stepExecutionCount = jobExecution.getStepExecutions().size();
    this.jobParameters = jobExecution.getJobParameters();
    this.status = jobExecution.getStatus();
    this.exitStatus = jobExecution.getExitStatus();
    this.jobConfigurationName = jobExecution.getJobConfigurationName();
    this.failureExceptions = jobExecution.getFailureExceptions();
    Map<String, Object> executionContextEntries = new HashMap<String, Object>(
            jobExecution.getExecutionContext().size());
    for (Map.Entry<String, Object> stringObjectEntry : jobExecution.getExecutionContext().entrySet()) {
        executionContextEntries.put(stringObjectEntry.getKey(), stringObjectEntry.getValue());
    }
    this.executionContext = executionContextEntries;
    this.version = jobExecution.getVersion();
    JobInstance jobInstance = jobExecution.getJobInstance();
    if (jobInstance != null) {
        this.jobName = jobInstance.getJobName();
        BatchStatus status = jobExecution.getStatus();
        this.restartable = status.isGreaterThan(BatchStatus.STOPPING)
                && status.isLessThan(BatchStatus.ABANDONED);
        this.abandonable = status.isGreaterThan(BatchStatus.STARTED) && status != BatchStatus.ABANDONED;
        this.stoppable = status.isLessThan(BatchStatus.STOPPING) && status != BatchStatus.COMPLETED;
    } else {
        this.jobName = "?";
    }
    // use this.timeZone so the UTC fallback above applies when the parameter was null
    this.dateFormat = this.dateFormat.withZone(DateTimeZone.forTimeZone(this.timeZone));
    this.createDate = dateFormat.print(jobExecution.getCreateTime().getTime());
    this.lastUpdated = dateFormat.print(jobExecution.getLastUpdated().getTime());
    if (jobExecution.getStartTime() != null) {
        this.startTime = dateFormat.print(jobExecution.getStartTime().getTime());
        this.endTime = dateFormat.print(jobExecution.getEndTime().getTime());
    }
}
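Here getStepExecutions() is used only for its size: stepExecutionCount reflects how many steps have produced an execution so far, which for an in-flight job can be fewer than the number of steps the job defines.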
From source file:admin.service.SimpleJobService.java
@Override
public Collection<String> getStepNamesForJob(String jobName) throws NoSuchJobException {
    try {
        Job job = jobLocator.getJob(jobName);
        if (job instanceof StepLocator) {
            return ((StepLocator) job).getStepNames();
        }
    } catch (NoSuchJobException e) {
        // ignore
    }
    Collection<String> stepNames = new LinkedHashSet<String>();
    for (JobExecution jobExecution : listJobExecutionsForJob(jobName, 0, 100)) {
        for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
            stepNames.add(stepExecution.getStepName());
        }
    }
    return Collections.unmodifiableList(new ArrayList<String>(stepNames));
}
From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
@Test
public void testRunFlatFileToDbSkipJob_SkipInProcess_ReaderTransactional() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipReaderTransactionalJob",
            "metrics/flatFileToDbSkipJob_SkipInProcess.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 5L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(2L).withStreamOpenCount(1L).withStreamUpdateCount(3L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(5L).withProcessCount(5L).withAfterProcessCount(5L).withProcessErrorCount(1L)
            .withBeforeWriteCount(5L).withWriteCount(writeCount).withAfterWriteCount(5L).withAfterChunkCount(2L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one metric is correct, all of them are in the metricReader, so checking one is enough
    assertThat((Double) metricReader.findOne(
            "gauge.batch.flatFileToDbSkipReaderTransactionalJob.step." + MetricNames.PROCESS_COUNT.getName())
            .getValue(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}