Example usage for org.springframework.batch.core JobExecution getStepExecutions

Introduction

This page shows example usages of org.springframework.batch.core.JobExecution#getStepExecutions().

Prototype

public Collection<StepExecution> getStepExecutions() 

Document

Accessor for the step executions.
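
Before the project-sourced examples in the Usage section below, here is a minimal, self-contained sketch (not taken from any of the listed sources) of how getStepExecutions() is typically consumed: a JobExecutionListener that prints a one-line summary per step once the job has finished. The class name and message format are illustrative only.

import java.util.Collection;

import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.StepExecution;

public class StepSummaryListener implements JobExecutionListener {

    @Override
    public void beforeJob(JobExecution jobExecution) {
        // nothing to do before the job starts
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        // getStepExecutions() returns one StepExecution per step that ran in this JobExecution
        Collection<StepExecution> stepExecutions = jobExecution.getStepExecutions();
        for (StepExecution stepExecution : stepExecutions) {
            System.out.println(stepExecution.getStepName()
                    + ": status=" + stepExecution.getStatus()
                    + ", read=" + stepExecution.getReadCount()
                    + ", written=" + stepExecution.getWriteCount());
        }
    }
}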

Usage

From source file:de.codecentric.batch.listener.ProtocolListener.java

public void afterJob(JobExecution jobExecution) {
    StringBuilder protocol = new StringBuilder();
    protocol.append("\n");
    protocol.append(createFilledLine('*'));
    protocol.append(createFilledLine('-'));
    protocol.append("Protocol for " + jobExecution.getJobInstance().getJobName() + " \n");
    protocol.append("  Started:      " + jobExecution.getStartTime() + "\n");
    protocol.append("  Finished:     " + jobExecution.getEndTime() + "\n");
    protocol.append("  Exit-Code:    " + jobExecution.getExitStatus().getExitCode() + "\n");
    protocol.append("  Exit-Descr:   " + jobExecution.getExitStatus().getExitDescription() + "\n");
    protocol.append("  Status:       " + jobExecution.getStatus() + "\n");
    protocol.append("  Content of Job-ExecutionContext:\n");
    for (Entry<String, Object> entry : jobExecution.getExecutionContext().entrySet()) {
        protocol.append("  " + entry.getKey() + "=" + entry.getValue() + "\n");
    }
    protocol.append("  Job-Parameter: \n");
    JobParameters jp = jobExecution.getJobParameters();
    for (Iterator<Entry<String, JobParameter>> iter = jp.getParameters().entrySet().iterator(); iter
            .hasNext();) {
        Entry<String, JobParameter> entry = iter.next();
        protocol.append("  " + entry.getKey() + "=" + entry.getValue() + "\n");
    }
    protocol.append(createFilledLine('-'));
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        protocol.append("Step " + stepExecution.getStepName() + " \n");
        protocol.append("  ReadCount:    " + stepExecution.getReadCount() + "\n");
        protocol.append("  WriteCount:   " + stepExecution.getWriteCount() + "\n");
        protocol.append("  Commits:      " + stepExecution.getCommitCount() + "\n");
        protocol.append("  SkipCount:    " + stepExecution.getSkipCount() + "\n");
        protocol.append("  Rollbacks:    " + stepExecution.getRollbackCount() + "\n");
        protocol.append("  Filter:       " + stepExecution.getFilterCount() + "\n");
        protocol.append("  Content of Step-ExecutionContext:\n");
        for (Entry<String, Object> entry : stepExecution.getExecutionContext().entrySet()) {
            protocol.append("  " + entry.getKey() + "=" + entry.getValue() + "\n");
        }
        protocol.append(createFilledLine('-'));
    }
    protocol.append(createFilledLine('*'));
    LOGGER.info(protocol.toString());
}
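
A listener like this only runs if it is registered on the job. A minimal sketch of wiring it up, assuming a Spring Batch 4 style Java configuration with JobBuilderFactory (the configuration class, job name, and step bean are hypothetical, not part of the source above):

import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import de.codecentric.batch.listener.ProtocolListener;

@Configuration
public class ProtocolJobConfiguration {

    // Registering the listener makes Spring Batch invoke its afterJob(..) callback when the job ends.
    @Bean
    public Job protocolJob(JobBuilderFactory jobs, Step someStep, ProtocolListener protocolListener) {
        return jobs.get("protocolJob")
                .listener(protocolListener)
                .start(someStep)
                .build();
    }
}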

From source file:com.xchanging.support.batch.admin.service.SimpleJobService.java

public Collection<StepExecution> getStepExecutions(Long jobExecutionId) throws NoSuchJobExecutionException {

    JobExecution jobExecution = jobExecutionDao.getJobExecution(jobExecutionId);
    if (jobExecution == null) {
        throw new NoSuchJobExecutionException("No JobExecution with id=" + jobExecutionId);
    }

    stepExecutionDao.addStepExecutions(jobExecution);

    String jobName = jobExecution.getJobInstance() == null ? null : jobExecution.getJobInstance().getJobName();
    Collection<String> missingStepNames = new LinkedHashSet<String>();

    if (jobName != null) {
        missingStepNames.addAll(stepExecutionDao.findStepNamesForJobExecution(jobName, "*:partition*"));
        logger.debug("Found step executions in repository: " + missingStepNames);
    }

    Job job = null;
    try {
        job = jobLocator.getJob(jobName);
    } catch (NoSuchJobException e) {
        // expected
    }
    if (job instanceof StepLocator) {
        Collection<String> stepNames = ((StepLocator) job).getStepNames();
        missingStepNames.addAll(stepNames);
        logger.debug("Added step executions from job: " + missingStepNames);
    }

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        String stepName = stepExecution.getStepName();
        if (missingStepNames.contains(stepName)) {
            missingStepNames.remove(stepName);
        }
        logger.debug("Removed step executions from job execution: " + missingStepNames);
    }

    for (String stepName : missingStepNames) {
        StepExecution stepExecution = jobExecution.createStepExecution(stepName);
        stepExecution.setStatus(BatchStatus.UNKNOWN);
    }

    return jobExecution.getStepExecutions();

}

From source file:admin.service.SimpleJobService.java

@Override
public Collection<StepExecution> getStepExecutions(Long jobExecutionId) throws NoSuchJobExecutionException {

    JobExecution jobExecution = jobExecutionDao.getJobExecution(jobExecutionId);
    if (jobExecution == null) {
        throw new NoSuchJobExecutionException("No JobExecution with id=" + jobExecutionId);
    }

    stepExecutionDao.addStepExecutions(jobExecution);

    String jobName = jobExecution.getJobInstance() == null
            ? jobInstanceDao.getJobInstance(jobExecution).getJobName()
            : jobExecution.getJobInstance().getJobName();
    Collection<String> missingStepNames = new LinkedHashSet<String>();

    if (jobName != null) {
        missingStepNames.addAll(stepExecutionDao.findStepNamesForJobExecution(jobName, "*:partition*"));
        logger.debug("Found step executions in repository: " + missingStepNames);
    }

    Job job = null;
    try {
        job = jobLocator.getJob(jobName);
    } catch (NoSuchJobException e) {
        // expected
    }
    if (job instanceof StepLocator) {
        Collection<String> stepNames = ((StepLocator) job).getStepNames();
        missingStepNames.addAll(stepNames);
        logger.debug("Added step executions from job: " + missingStepNames);
    }

    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        String stepName = stepExecution.getStepName();
        if (missingStepNames.contains(stepName)) {
            missingStepNames.remove(stepName);
        }
        logger.debug("Removed step executions from job execution: " + missingStepNames);
    }

    for (String stepName : missingStepNames) {
        StepExecution stepExecution = jobExecution.createStepExecution(stepName);
        stepExecution.setStatus(BatchStatus.UNKNOWN);
    }

    return jobExecution.getStepExecutions();

}

From source file:uk.ac.ebi.eva.pipeline.jobs.GenotypedVcfJobTest.java

@Test
public void fullGenotypedVcfJob() throws Exception {
    String inputFile = GenotypedVcfJobTest.class.getResource(input).getFile();
    String mockVep = GenotypedVcfJobTest.class.getResource("/mockvep.pl").getFile();

    jobOptions.getPipelineOptions().put("input.vcf", inputFile);
    jobOptions.getPipelineOptions().put("app.vep.path", mockVep);

    Config.setOpenCGAHome(opencgaHome);

    // transformedVcf file init
    String transformedVcf = outputDir + input + ".variants.json" + compressExtension;
    File transformedVcfFile = new File(transformedVcf);
    transformedVcfFile.delete();
    assertFalse(transformedVcfFile.exists());

    //stats file init
    VariantSource source = (VariantSource) jobOptions.getVariantOptions()
            .get(VariantStorageManager.VARIANT_SOURCE);
    File statsFile = new File(Paths.get(outputDir).resolve(VariantStorageManager.buildFilename(source))
            + ".variants.stats.json.gz");
    statsFile.delete();
    assertFalse(statsFile.exists()); // ensure the stats file doesn't exist from previous executions

    // annotation files init
    File vepInputFile = new File(vepInput);
    vepInputFile.delete();
    assertFalse(vepInputFile.exists());

    File vepOutputFile = new File(vepOutput);
    vepOutputFile.delete();
    assertFalse(vepOutputFile.exists());

    VariantDBIterator iterator;

    // Run the Job
    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // 1 transform step: check transformed file
    long transformedLinesCount = getLines(new GZIPInputStream(new FileInputStream(transformedVcf)));
    assertEquals(300, transformedLinesCount);

    // 2 load step: check ((documents in DB) == (lines in transformed file))
    //variantStorageManager = StorageManagerFactory.getVariantStorageManager();
    //variantDBAdaptor = variantStorageManager.getDBAdaptor(dbName, null);
    iterator = getVariantDBIterator();
    assertEquals(transformedLinesCount, count(iterator));

    // 3 create stats step
    assertTrue(statsFile.exists());

    // 4 load stats step: check ((documents in DB) == (lines in transformed file))
    //variantStorageManager = StorageManagerFactory.getVariantStorageManager();
    //variantDBAdaptor = variantStorageManager.getDBAdaptor(dbName, null);
    iterator = getVariantDBIterator();
    assertEquals(transformedLinesCount, count(iterator));

    // check the DB docs have the field "st"
    iterator = getVariantDBIterator();

    assertEquals(1, iterator.next().getSourceEntries().values().iterator().next().getCohortStats().size());

    // 5 annotation flow
    // annotation input vep generate step
    BufferedReader testReader = new BufferedReader(new InputStreamReader(
            new FileInputStream(GenotypedVcfJobTest.class.getResource("/preannot.sorted").getFile())));
    BufferedReader actualReader = new BufferedReader(
            new InputStreamReader(new FileInputStream(vepInputFile.toString())));

    ArrayList<String> rows = new ArrayList<>();

    String s;
    while ((s = actualReader.readLine()) != null) {
        rows.add(s);
    }
    Collections.sort(rows);

    String testLine = testReader.readLine();
    for (String row : rows) {
        assertEquals(testLine, row);
        testLine = testReader.readLine();
    }
    assertNull(testLine); // if both files have the same length testReader should be after the last line

    // 6 annotation create step
    assertTrue(vepInputFile.exists());
    assertTrue(vepOutputFile.exists());

    // Check output file length
    assertEquals(537, getLines(new GZIPInputStream(new FileInputStream(vepOutput))));

    // 8 Annotation load step: check documents in DB have annotation (only consequence type)
    iterator = getVariantDBIterator();

    int cnt = 0;
    int consequenceTypeCount = 0;
    while (iterator.hasNext()) {
        cnt++;
        Variant next = iterator.next();
        if (next.getAnnotation().getConsequenceTypes() != null) {
            consequenceTypeCount += next.getAnnotation().getConsequenceTypes().size();
        }
    }

    assertEquals(300, cnt);
    assertEquals(536, consequenceTypeCount);

    //check that one line is skipped because malformed
    List<StepExecution> variantAnnotationLoadStepExecution = jobExecution.getStepExecutions().stream().filter(
            stepExecution -> stepExecution.getStepName().equals(AnnotationLoaderStep.LOAD_VEP_ANNOTATION))
            .collect(Collectors.toList());
    assertEquals(1, variantAnnotationLoadStepExecution.get(0).getReadSkipCount());

}

From source file:org.springframework.batch.admin.web.JobExecutionController.java

@RequestMapping(value = "/jobs/executions/{jobExecutionId}", method = RequestMethod.GET)
public String detail(Model model, @PathVariable Long jobExecutionId, @ModelAttribute("date") Date date,
        Errors errors) {

    try {
        JobExecution jobExecution = jobService.getJobExecution(jobExecutionId);
        model.addAttribute(new JobExecutionInfo(jobExecution, timeZone));
        String jobName = jobExecution.getJobInstance().getJobName();
        Collection<String> stepNames = new HashSet<String>(jobService.getStepNamesForJob(jobName));
        Collection<StepExecution> stepExecutions = new ArrayList<StepExecution>(
                jobExecution.getStepExecutions());
        Collection<StepExecutionInfo> stepExecutionInfos = new ArrayList<StepExecutionInfo>();

        for (String name : stepNames) {
            boolean found = false;
            for (Iterator<StepExecution> iterator = stepExecutions.iterator(); iterator.hasNext();) {
                StepExecution stepExecution = iterator.next();
                if (stepExecution.getStepName().equals(name)) {
                    stepExecutionInfos.add(new StepExecutionInfo(stepExecution, timeZone));
                    iterator.remove();
                    found = true;
                }
            }
            if (!found) {
                stepExecutionInfos.add(new StepExecutionInfo(jobName, jobExecutionId, name, timeZone));
            }
        }
        model.addAttribute("stepExecutionInfos", stepExecutionInfos);
    } catch (NoSuchJobExecutionException e) {
        errors.reject("no.such.job.execution", new Object[] { jobExecutionId },
                "There is no such job execution (" + jobExecutionId + ")");
    } catch (NoSuchJobException e) {
        errors.reject("no.such.job", new Object[] { jobExecutionId },
                "There is no such job with exeuction id (" + jobExecutionId + ")");
    }

    return "jobs/execution";

}

From source file:org.springframework.batch.core.job.AbstractJob.java

/**
 * Run the specified job, handling all listener and repository calls, and
 * delegating the actual processing to {@link #doExecute(JobExecution)}.
 *
 * @see Job#execute(JobExecution)
 * @throws StartLimitExceededException
 *             if start limit of one of the steps was exceeded
 */
@Override
public final void execute(JobExecution execution) {

    if (logger.isDebugEnabled()) {
        logger.debug("Job execution starting: " + execution);
    }

    JobSynchronizationManager.register(execution);

    try {

        jobParametersValidator.validate(execution.getJobParameters());

        if (execution.getStatus() != BatchStatus.STOPPING) {

            execution.setStartTime(new Date());
            updateStatus(execution, BatchStatus.STARTED);

            listener.beforeJob(execution);

            try {
                doExecute(execution);
                if (logger.isDebugEnabled()) {
                    logger.debug("Job execution complete: " + execution);
                }
            } catch (RepeatException e) {
                throw e.getCause();
            }
        } else {

            // The job was already stopped before we even got this far. Deal
            // with it in the same way as any other interruption.
            execution.setStatus(BatchStatus.STOPPED);
            execution.setExitStatus(ExitStatus.COMPLETED);
            if (logger.isDebugEnabled()) {
                logger.debug("Job execution was stopped: " + execution);
            }

        }

    } catch (JobInterruptedException e) {
        logger.info("Encountered interruption executing job: " + e.getMessage());
        if (logger.isDebugEnabled()) {
            logger.debug("Full exception", e);
        }
        execution.setExitStatus(getDefaultExitStatusForFailure(e, execution));
        execution.setStatus(BatchStatus.max(BatchStatus.STOPPED, e.getStatus()));
        execution.addFailureException(e);
    } catch (Throwable t) {
        logger.error("Encountered fatal error executing job", t);
        execution.setExitStatus(getDefaultExitStatusForFailure(t, execution));
        execution.setStatus(BatchStatus.FAILED);
        execution.addFailureException(t);
    } finally {
        try {
            if (execution.getStatus().isLessThanOrEqualTo(BatchStatus.STOPPED)
                    && execution.getStepExecutions().isEmpty()) {
                ExitStatus exitStatus = execution.getExitStatus();
                ExitStatus newExitStatus = ExitStatus.NOOP
                        .addExitDescription("All steps already completed or no steps configured for this job.");
                execution.setExitStatus(exitStatus.and(newExitStatus));
            }

            execution.setEndTime(new Date());

            try {
                listener.afterJob(execution);
            } catch (Exception e) {
                logger.error("Exception encountered in afterStep callback", e);
            }

            jobRepository.update(execution);
        } finally {
            JobSynchronizationManager.release();
        }

    }

}

From source file:org.springframework.batch.core.jsr.configuration.xml.RetryListenerTests.java

@Test
@SuppressWarnings("resource")
public void testReadRetryOnce() throws Exception {
    ApplicationContext context = new ClassPathXmlApplicationContext(
            "org/springframework/batch/core/jsr/configuration/xml/RetryReadListenerRetryOnce.xml");

    JobLauncher jobLauncher = context.getBean(JobLauncher.class);
    JobExecution jobExecution = jobLauncher.run(context.getBean(Job.class), new JobParameters());

    Collection<StepExecution> stepExecutions = jobExecution.getStepExecutions();
    assertEquals(1, stepExecutions.size());

    StepExecution stepExecution = stepExecutions.iterator().next();
    assertEquals(1, stepExecution.getCommitCount());
    assertEquals(2, stepExecution.getReadCount());

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
}

From source file:org.springframework.batch.core.jsr.job.DefaultStepHandler.java

/**
 * Given a step and configuration, return true if the step should start,
 * false if it should not, and throw an exception if the job should finish.
 * @param lastStepExecution the last step execution
 * @param jobExecution the current job execution
 * @param step
 *
 * @throws StartLimitExceededException if the start limit has been exceeded
 * for this step
 * @throws JobRestartException if the job is in an inconsistent state from
 * an earlier failure
 */
@Override
protected boolean shouldStart(StepExecution lastStepExecution, JobExecution jobExecution, Step step)
        throws JobRestartException, StartLimitExceededException {
    BatchStatus stepStatus;
    String restartStep = null;
    if (lastStepExecution == null) {
        jobExecution.getExecutionContext().put("batch.startedStep", step.getName());
        stepStatus = BatchStatus.STARTING;
    } else {
        stepStatus = lastStepExecution.getStatus();

        JobExecution lastJobExecution = getLastJobExecution(jobExecution);

        if (lastJobExecution.getExecutionContext().containsKey("batch.restartStep")) {
            restartStep = lastJobExecution.getExecutionContext().getString("batch.restartStep");

            if (CollectionUtils.isEmpty(jobExecution.getStepExecutions())
                    && lastJobExecution.getStatus() == BatchStatus.STOPPED
                    && StringUtils.hasText(restartStep)) {
                if (!restartStep.equals(step.getName())
                        && !jobExecution.getExecutionContext().containsKey("batch.startedStep")) {
                    logger.info("Job was stopped and should restart at step " + restartStep
                            + ".  The current step is " + step.getName());
                    return false;
                } else {
                    // Indicates the starting point for execution evaluation per JSR-352
                    jobExecution.getExecutionContext().put("batch.startedStep", step.getName());
                }
            }
        }
    }

    if (stepStatus == BatchStatus.UNKNOWN) {
        throw new JobRestartException("Cannot restart step from UNKNOWN status. "
                + "The last execution ended with a failure that could not be rolled back, "
                + "so it may be dangerous to proceed. Manual intervention is probably necessary.");
    }

    if ((stepStatus == BatchStatus.COMPLETED && step.isAllowStartIfComplete() == false)
            || stepStatus == BatchStatus.ABANDONED) {
        // step is complete, false should be returned, indicating that the
        // step should not be started
        logger.info("Step already complete or not restartable, so no action to execute: " + lastStepExecution);
        return false;
    }

    if (getJobRepository().getStepExecutionCount(jobExecution.getJobInstance(), step.getName()) < step
            .getStartLimit()) {
        // step start count is less than start max, return true
        return true;
    } else {
        // start max has been exceeded, throw an exception.
        throw new StartLimitExceededException("Maximum start limit exceeded for step: " + step.getName()
                + "StartMax: " + step.getStartLimit());
    }
}

From source file:org.springframework.batch.core.launch.support.SimpleJobLauncher.java

/**
 * Run the provided job with the given {@link JobParameters}. The
 * {@link JobParameters} will be used to determine if this is an execution
 * of an existing job instance, or if a new one should be created.
 *
 * @param job the job to be run.
 * @param jobParameters the {@link JobParameters} for this particular
 * execution.
 * @throws JobExecutionAlreadyRunningException if the JobInstance already
 * exists and has an execution already running.
 * @throws JobRestartException if the execution would be a re-start, but a
 * re-start is either not allowed or not needed.
 * @throws JobInstanceAlreadyCompleteException if this instance has already
 * completed successfully
 * @throws JobParametersInvalidException if the parameters are not valid for this job
 */
@Override
public JobExecution run(final Job job, final JobParameters jobParameters)
        throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException,
        JobParametersInvalidException {

    Assert.notNull(job, "The Job must not be null.");
    Assert.notNull(jobParameters, "The JobParameters must not be null.");

    final JobExecution jobExecution;
    JobExecution lastExecution = jobRepository.getLastJobExecution(job.getName(), jobParameters);
    if (lastExecution != null) {
        if (!job.isRestartable()) {
            throw new JobRestartException("JobInstance already exists and is not restartable");
        }
        /*
         * validate here if it has stepExecutions that are UNKNOWN, STARTING, STARTED and STOPPING
         * retrieve the previous execution and check
         */
        for (StepExecution execution : lastExecution.getStepExecutions()) {
            BatchStatus status = execution.getStatus();
            if (status.isRunning() || status == BatchStatus.STOPPING) {
                throw new JobExecutionAlreadyRunningException(
                        "A job execution for this job is already running: " + lastExecution);
            } else if (status == BatchStatus.UNKNOWN) {
                throw new JobRestartException("Cannot restart step [" + execution.getStepName()
                        + "] from UNKNOWN status. "
                        + "The last execution ended with a failure that could not be rolled back, "
                        + "so it may be dangerous to proceed. Manual intervention is probably necessary.");
            }
        }
    }

    // Check the validity of the parameters before creating anything
    // in the repository...
    job.getJobParametersValidator().validate(jobParameters);

    /*
     * There is a very small probability that a non-restartable job can be
     * restarted, but only if another process or thread manages to launch
     * <i>and</i> fail a job execution for this instance between the last
     * assertion and the next method returning successfully.
     */
    jobExecution = jobRepository.createJobExecution(job.getName(), jobParameters);

    try {
        taskExecutor.execute(new Runnable() {

            @Override
            public void run() {
                try {
                    logger.info("Job: [" + job + "] launched with the following parameters: [" + jobParameters
                            + "]");
                    job.execute(jobExecution);
                    logger.info("Job: [" + job + "] completed with the following parameters: [" + jobParameters
                            + "] and the following status: [" + jobExecution.getStatus() + "]");
                } catch (Throwable t) {
                    logger.info("Job: [" + job
                            + "] failed unexpectedly and fatally with the following parameters: ["
                            + jobParameters + "]", t);
                    rethrow(t);
                }
            }

            private void rethrow(Throwable t) {
                if (t instanceof RuntimeException) {
                    throw (RuntimeException) t;
                } else if (t instanceof Error) {
                    throw (Error) t;
                }
                throw new IllegalStateException(t);
            }
        });
    } catch (TaskRejectedException e) {
        jobExecution.upgradeStatus(BatchStatus.FAILED);
        if (jobExecution.getExitStatus().equals(ExitStatus.UNKNOWN)) {
            jobExecution.setExitStatus(ExitStatus.FAILED.addExitDescription(e));
        }
        jobRepository.update(jobExecution);
    }

    return jobExecution;
}

From source file:org.springframework.batch.core.launch.support.SimpleJobOperator.java

@Override
public Map<Long, String> getStepExecutionSummaries(long executionId) throws NoSuchJobExecutionException {
    JobExecution jobExecution = findExecutionById(executionId);

    Map<Long, String> map = new LinkedHashMap<Long, String>();
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        map.put(stepExecution.getId(), stepExecution.toString());
    }
    return map;
}
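
As a usage note, the map built above is what clients see through the JobOperator interface. A minimal sketch of consuming it follows; the printer class and the way the operator is obtained are assumptions for illustration, not part of the source above.

import java.util.Map;

import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.core.launch.NoSuchJobExecutionException;

public class StepSummaryPrinter {

    private final JobOperator jobOperator;

    public StepSummaryPrinter(JobOperator jobOperator) {
        this.jobOperator = jobOperator;
    }

    public void printSummaries(long executionId) throws NoSuchJobExecutionException {
        // Each entry maps a StepExecution id to its toString() summary.
        Map<Long, String> summaries = jobOperator.getStepExecutionSummaries(executionId);
        for (Map.Entry<Long, String> entry : summaries.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}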