Example usage for org.springframework.batch.core JobExecution getStepExecutions

List of usage examples for org.springframework.batch.core JobExecution getStepExecutions

Introduction

On this page you can find example usage of org.springframework.batch.core JobExecution getStepExecutions.

Prototype

public Collection<StepExecution> getStepExecutions() 

Document

Accessor for the step executions.
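
A minimal sketch of the typical call pattern (org.springframework.batch.core imports and a configured Job and JobLauncher are assumed; the method name is illustrative):

public void printStepSummary(Job job, JobLauncher jobLauncher) throws Exception {
    JobExecution execution = jobLauncher.run(job, new JobParameters());
    // each StepExecution carries the name, status and timing of one step of the job
    for (StepExecution stepExecution : execution.getStepExecutions()) {
        System.out.println(stepExecution.getStepName() + ": " + stepExecution.getStatus());
    }
}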

Usage

From source file:uk.ac.ebi.eva.pipeline.jobs.GenotypedVcfJobWorkflowTest.java
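
This test skips both the annotation and the statistics steps, then verifies that only the transform and load steps were executed, in that order.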

@Test
public void optionalStepsShouldBeSkipped() throws Exception {
    initVariantConfigurationJob();

    jobOptions.getPipelineOptions().put(AnnotationJob.SKIP_ANNOT, true);
    jobOptions.getPipelineOptions().put(PopulationStatisticsJob.SKIP_STATS, true);

    JobExecution execution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    assertEquals(2, execution.getStepExecutions().size());

    List<StepExecution> steps = new ArrayList<>(execution.getStepExecutions());
    StepExecution transformStep = steps.get(0);
    StepExecution loadStep = steps.get(1);

    assertEquals(GenotypedVcfJob.NORMALIZE_VARIANTS, transformStep.getStepName());
    assertEquals(GenotypedVcfJob.LOAD_VARIANTS, loadStep.getStepName());

    assertTrue(transformStep.getEndTime().before(loadStep.getStartTime()));
}

From source file:uk.ac.ebi.eva.pipeline.jobs.GenotypedVcfJobWorkflowTest.java
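
This test skips only the annotation steps and verifies that the two statistics steps still ran after the transform and load steps, with calculation finishing before loading.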

@Test
public void annotationStepsShouldBeSkipped() throws Exception {
    initVariantConfigurationJob();
    jobOptions.getPipelineOptions().put(AnnotationJob.SKIP_ANNOT, true);

    JobExecution execution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    assertEquals(4, execution.getStepExecutions().size());

    List<StepExecution> steps = new ArrayList<>(execution.getStepExecutions());
    StepExecution transformStep = steps.get(0);
    StepExecution loadStep = steps.get(1);

    Map<String, StepExecution> parallelStepsNameToStepExecution = new HashMap<>();
    for (int i = 2; i < steps.size(); i++) {
        parallelStepsNameToStepExecution.put(steps.get(i).getStepName(), steps.get(i));
    }

    assertEquals(GenotypedVcfJob.NORMALIZE_VARIANTS, transformStep.getStepName());
    assertEquals(GenotypedVcfJob.LOAD_VARIANTS, loadStep.getStepName());

    Set<String> parallelStepNamesExecuted = parallelStepsNameToStepExecution.keySet();
    Set<String> parallelStepNamesToCheck = new HashSet<>(Arrays
            .asList(PopulationStatisticsJob.CALCULATE_STATISTICS, PopulationStatisticsJob.LOAD_STATISTICS));

    assertEquals(parallelStepNamesToCheck, parallelStepNamesExecuted);

    assertTrue(transformStep.getEndTime().before(loadStep.getStartTime()));
    assertTrue(loadStep.getEndTime().before(
            parallelStepsNameToStepExecution.get(PopulationStatisticsJob.CALCULATE_STATISTICS).getStartTime()));

    assertTrue(parallelStepsNameToStepExecution.get(PopulationStatisticsJob.CALCULATE_STATISTICS).getEndTime()
            .before(parallelStepsNameToStepExecution.get(PopulationStatisticsJob.LOAD_STATISTICS)
                    .getStartTime()));
}

From source file:admin.jmx.BatchMBeanExporter.java
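
This exporter method iterates over the step executions of one recent execution per job and registers a JMX metrics bean for each job/step combination it has not registered before.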

private void registerSteps() {
    if (!registerSteps) {
        return;
    }
    for (String jobName : jobService.listJobs(0, Integer.MAX_VALUE)) {
        Collection<JobExecution> jobExecutions = Collections.emptySet();
        try {
            jobExecutions = jobService.listJobExecutionsForJob(jobName, 0, 1);
        } catch (NoSuchJobException e) {
            // the job disappeared between listing it and looking it up; log it and move on
            logger.error("Job listed but does not exist", e);
        }
        for (JobExecution jobExecution : jobExecutions) {
            for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
                String stepName = stepExecution.getStepName();
                String stepKey = String.format("%s/%s", jobName, stepName);
                String beanKey = getBeanKeyForStepExecution(jobName, stepName);
                if (!stepKeys.contains(stepKey)) {
                    stepKeys.add(stepKey);
                    logger.info("Registering step execution " + stepKey);
                    registerBeanNameOrInstance(
                            stepExecutionMetricsFactory.createMetricsForStep(jobName, stepName), beanKey);
                }
            }
        }
    }
}

From source file:uk.ac.ebi.eva.pipeline.jobs.GenotypedVcfJobWorkflowTest.java
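
This test skips only the statistics steps and verifies that the three VEP annotation steps still ran after the transform and load steps, each finishing before the next started.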

@Test
public void statsStepsShouldBeSkipped() throws Exception {
    initVariantConfigurationJob();
    jobOptions.getPipelineOptions().put(PopulationStatisticsJob.SKIP_STATS, true);

    jobOptions.getPipelineOptions().put("db.name", "diegoTest");

    JobExecution execution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    assertEquals(5, execution.getStepExecutions().size());

    List<StepExecution> steps = new ArrayList<>(execution.getStepExecutions());
    StepExecution transformStep = steps.get(0);
    StepExecution loadStep = steps.get(1);

    Map<String, StepExecution> parallelStepsNameToStepExecution = new HashMap<>();
    for (int i = 2; i < steps.size(); i++) {
        parallelStepsNameToStepExecution.put(steps.get(i).getStepName(), steps.get(i));
    }

    assertEquals(GenotypedVcfJob.NORMALIZE_VARIANTS, transformStep.getStepName());
    assertEquals(GenotypedVcfJob.LOAD_VARIANTS, loadStep.getStepName());

    Set<String> parallelStepNamesExecuted = parallelStepsNameToStepExecution.keySet();
    Set<String> parallelStepNamesToCheck = new HashSet<>(Arrays.asList(
            VepInputGeneratorStep.FIND_VARIANTS_TO_ANNOTATE, VepAnnotationGeneratorStep.GENERATE_VEP_ANNOTATION,
            AnnotationLoaderStep.LOAD_VEP_ANNOTATION));

    assertEquals(parallelStepNamesToCheck, parallelStepNamesExecuted);

    assertTrue(transformStep.getEndTime().before(loadStep.getStartTime()));
    assertTrue(loadStep.getEndTime().before(parallelStepsNameToStepExecution
            .get(VepInputGeneratorStep.FIND_VARIANTS_TO_ANNOTATE).getStartTime()));

    assertTrue(parallelStepsNameToStepExecution.get(VepInputGeneratorStep.FIND_VARIANTS_TO_ANNOTATE)
            .getEndTime().before(parallelStepsNameToStepExecution
                    .get(VepAnnotationGeneratorStep.GENERATE_VEP_ANNOTATION).getStartTime()));
    assertTrue(parallelStepsNameToStepExecution.get(VepAnnotationGeneratorStep.GENERATE_VEP_ANNOTATION)
            .getEndTime().before(parallelStepsNameToStepExecution.get(AnnotationLoaderStep.LOAD_VEP_ANNOTATION)
                    .getStartTime()));
}

From source file:uk.ac.ebi.eva.pipeline.jobs.GenotypedVcfJobWorkflowTest.java
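
This test runs the full workflow and verifies the ordering of all seven steps: transform, then load, then the statistics and annotation branches, each branch in its own internal order.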

@Test
public void allStepsShouldBeExecuted() throws Exception {
    initVariantConfigurationJob();

    JobExecution execution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    assertEquals(7, execution.getStepExecutions().size());

    List<StepExecution> steps = new ArrayList<>(execution.getStepExecutions());
    StepExecution transformStep = steps.get(0);
    StepExecution loadStep = steps.get(1);

    Map<String, StepExecution> parallelStepsNameToStepExecution = new HashMap<>();
    for (int i = 2; i < steps.size(); i++) {
        parallelStepsNameToStepExecution.put(steps.get(i).getStepName(), steps.get(i));
    }

    assertEquals(GenotypedVcfJob.NORMALIZE_VARIANTS, transformStep.getStepName());
    assertEquals(GenotypedVcfJob.LOAD_VARIANTS, loadStep.getStepName());

    Set<String> parallelStepNamesExecuted = parallelStepsNameToStepExecution.keySet();
    Set<String> parallelStepNamesToCheck = new HashSet<>(Arrays.asList(
            PopulationStatisticsJob.CALCULATE_STATISTICS, PopulationStatisticsJob.LOAD_STATISTICS,
            VepInputGeneratorStep.FIND_VARIANTS_TO_ANNOTATE, VepAnnotationGeneratorStep.GENERATE_VEP_ANNOTATION,
            AnnotationLoaderStep.LOAD_VEP_ANNOTATION));

    assertEquals(parallelStepNamesToCheck, parallelStepNamesExecuted);

    assertTrue(transformStep.getEndTime().before(loadStep.getStartTime()));
    assertTrue(loadStep.getEndTime().before(
            parallelStepsNameToStepExecution.get(PopulationStatisticsJob.CALCULATE_STATISTICS).getStartTime()));
    assertTrue(loadStep.getEndTime().before(parallelStepsNameToStepExecution
            .get(VepInputGeneratorStep.FIND_VARIANTS_TO_ANNOTATE).getStartTime()));

    assertTrue(parallelStepsNameToStepExecution.get(PopulationStatisticsJob.CALCULATE_STATISTICS).getEndTime()
            .before(parallelStepsNameToStepExecution.get(PopulationStatisticsJob.LOAD_STATISTICS)
                    .getStartTime()));
    assertTrue(parallelStepsNameToStepExecution.get(VepInputGeneratorStep.FIND_VARIANTS_TO_ANNOTATE)
            .getEndTime().before(parallelStepsNameToStepExecution
                    .get(VepAnnotationGeneratorStep.GENERATE_VEP_ANNOTATION).getStartTime()));
    assertTrue(parallelStepsNameToStepExecution.get(VepAnnotationGeneratorStep.GENERATE_VEP_ANNOTATION)
            .getEndTime().before(parallelStepsNameToStepExecution.get(AnnotationLoaderStep.LOAD_VEP_ANNOTATION)
                    .getStartTime()));
}

From source file:uk.ac.ebi.eva.pipeline.jobs.AggregatedVcfJobTest.java
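
This test verifies the two-step aggregated workflow and then cross-checks the line count of the transformed file against the number of variant documents loaded into the database.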

@Test
public void aggregatedTransformAndLoadShouldBeExecuted() throws Exception {
    Config.setOpenCGAHome(opencgaHome);

    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // check execution flow
    Assert.assertEquals(2, jobExecution.getStepExecutions().size());
    List<StepExecution> steps = new ArrayList<>(jobExecution.getStepExecutions());
    StepExecution transformStep = steps.get(0);
    StepExecution loadStep = steps.get(1);

    Assert.assertEquals(AggregatedVcfJob.NORMALIZE_VARIANTS, transformStep.getStepName());
    Assert.assertEquals(AggregatedVcfJob.LOAD_VARIANTS, loadStep.getStepName());

    assertTrue(transformStep.getEndTime().before(loadStep.getStartTime()));

    // check transformed file
    String outputFilename = getTransformedOutputPath(Paths.get(input).getFileName(), compressExtension,
            outputDir);

    long lines = JobTestUtils.getLines(new GZIPInputStream(new FileInputStream(outputFilename)));
    assertEquals(156, lines);

    // check ((documents in DB) == (lines in transformed file))
    VariantStorageManager variantStorageManager = StorageManagerFactory.getVariantStorageManager();
    VariantDBAdaptor variantDBAdaptor = variantStorageManager.getDBAdaptor(dbName, null);
    VariantDBIterator iterator = variantDBAdaptor.iterator(new QueryOptions());

    Assert.assertEquals(JobTestUtils.count(iterator), lines);

    // check that stats are loaded properly
    assertFalse(variantDBAdaptor.iterator(new QueryOptions()).next().getSourceEntries().values().iterator()
            .next().getCohortStats().isEmpty());
}

From source file:uk.ac.ebi.intact.editor.controller.dbmanager.ImportJobController.java
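
This controller method reads the persisted-count entries from the execution context of the first step and returns them as name/value pairs.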

public List<Map.Entry<String, String>> getImportStatistics(JobExecution jobExecution) {
    Collection<StepExecution> stepExecutions = jobExecution.getStepExecutions();
    Map<String, String> statistics = new HashMap<String, String>();
    if (!stepExecutions.isEmpty()) {
        StepExecution firstStep = stepExecutions.iterator().next();
        ExecutionContext stepContext = firstStep.getExecutionContext();

        for (Map.Entry<String, Object> entry : stepContext.entrySet()) {
            // persisted count
            if (entry.getKey().startsWith(AbstractIntactDbImporter.PERSIST_MAP_COUNT)) {
                statistics.put(entry.getKey().substring(entry.getKey().lastIndexOf(".") + 1),
                        Integer.toString((Integer) entry.getValue()));
            }
        }
    }
    return new ArrayList<Map.Entry<String, String>>(statistics.entrySet());
}

From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
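
This integration test runs a job that fails on a processing error and validates the metrics recorded in the first step's execution context.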

@Test
public void testRunFlatFileToDbNoSkipJob_Failed() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbNoSkipJob", "metrics/flatFileToDbNoSkipJob_Failed.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.FAILED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 3L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(1L).withStreamOpenCount(1L).withStreamUpdateCount(2L).withStreamCloseCount(0L)
            .withBeforeReadCount(6L).withReadCount(6L).withAfterReadCount(5L).withReadErrorCount(0L)
            .withBeforeProcessCount(3L).withProcessCount(3L).withAfterProcessCount(3L).withProcessErrorCount(1L)
            .withBeforeWriteCount(3L).withWriteCount(writeCount).withAfterWriteCount(3L).withAfterChunkCount(1L)
            .withChunkErrorCount(1L).withSkipInReadCount(0L).withSkipInProcessCount(0L).withSkipInWriteCount(0L)
            .build();
    validator.validate();
    // if one metric is present, all of them reached the metricReader, so checking a single one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbNoSkipJob.step." + MetricNames.PROCESS_COUNT.getName())
            .getValue(), is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}

From source file:fr.acxio.tools.agia.admin.StaleRunningJobsService.java
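
This service method force-fails every running job execution and each of its unfinished step executions, updating the job repository so the stale entries no longer appear as running.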

public void forceRunningJobsToFail() {
    if (logger.isInfoEnabled()) {
        logger.info("Reseting jobs...");
    }

    List<String> aJobNames = jobExplorer.getJobNames();
    for (String aJobName : aJobNames) {
        Set<JobExecution> aJobExecutions = jobExplorer.findRunningJobExecutions(aJobName);
        for (JobExecution aJobExecution : aJobExecutions) {
            if (logger.isInfoEnabled()) {
                logger.info("  " + aJobName + " (" + aJobExecution.getId() + ")");
            }
            aJobExecution.setEndTime(new Date());
            aJobExecution.setStatus(BatchStatus.FAILED);
            aJobExecution.setExitStatus(ExitStatus.FAILED);
            jobRepository.update(aJobExecution);
            for (StepExecution aStepExecution : aJobExecution.getStepExecutions()) {
                if (aStepExecution.getStatus().isGreaterThan(BatchStatus.COMPLETED)) {
                    if (logger.isInfoEnabled()) {
                        logger.info("    " + aStepExecution.getStepName());
                    }
                    aStepExecution.setEndTime(new Date());
                    aStepExecution.setStatus(BatchStatus.FAILED);
                    aStepExecution.setExitStatus(ExitStatus.FAILED);
                    jobRepository.update(aStepExecution);
                }
            }
        }
    }
    if (logger.isInfoEnabled()) {
        logger.info("Done.");
    }
}

From source file:de.codecentric.batch.test.metrics.BatchMetricsFlatFileToDbIntegrationTest.java
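
This integration test runs a job that skips one item during reading and validates the skip and read counts recorded in the first step's execution context.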

@Test
public void testRunFlatFileToDbSkipJob_SkipInRead() throws InterruptedException {
    JobExecution jobExecution = runJob("flatFileToDbSkipJob", "metrics/flatFileToDbSkipJob_SkipInRead.csv");
    assertThat(jobExecution.getStatus(), is(BatchStatus.COMPLETED));
    ExecutionContext executionContext = jobExecution.getStepExecutions().iterator().next()
            .getExecutionContext();
    long writeCount = 7L;
    MetricValidator validator = MetricValidatorBuilder.metricValidator().withExecutionContext(executionContext)
            .withBeforeChunkCount(3L).withStreamOpenCount(1L).withStreamUpdateCount(4L).withStreamCloseCount(0L)
            .withBeforeReadCount(9L).withReadCount(9L).withAfterReadCount(7L).withReadErrorCount(1L)
            .withBeforeProcessCount(7L).withProcessCount(7L).withAfterProcessCount(7L).withProcessErrorCount(0L)
            .withBeforeWriteCount(7L).withWriteCount(writeCount).withAfterWriteCount(7L).withWriteErrorCount(0L)
            .withAfterChunkCount(3L).withChunkErrorCount(0L).withSkipInReadCount(1L).withSkipInProcessCount(0L)
            .withSkipInWriteCount(0L).build();
    validator.validate();
    // if one metric is present, all of them reached the metricReader, so checking a single one is enough
    assertThat((Double) metricReader
            .findOne("gauge.batch.flatFileToDbSkipJob.step." + MetricNames.PROCESS_COUNT.getName()).getValue(),
            is(notNullValue()));
    assertThat(jdbcTemplate.queryForObject("SELECT COUNT(*) FROM ITEM", Long.class), is(writeCount));
}