Example usage for org.springframework.batch.core JobParametersBuilder JobParametersBuilder

Introduction

On this page you can find example usages of the org.springframework.batch.core.JobParametersBuilder default constructor.

Prototype

public JobParametersBuilder() 

Document

Default constructor.
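The builder starts empty; parameters are accumulated through the typed add* methods (addString, addLong, addDouble, addDate) and turned into an immutable JobParameters via toJobParameters(). Below is a minimal sketch of the fluent pattern used throughout the examples on this page; the parameter names and values are illustrative only.

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;

public class JobParametersExample {
    public static JobParameters buildParameters() {
        // Accumulate typed key/value pairs, then materialize an immutable JobParameters.
        return new JobParametersBuilder()
                .addString("inputFile", "/tmp/input.csv")      // illustrative key and value
                .addLong("run.id", System.currentTimeMillis()) // distinct value per launch
                .toJobParameters();
    }
}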

Usage

From source file:org.cbioportal.annotation.AnnotationPipeline.java

private static void launchJob(String[] args, String filename, String outputFilename, String isoformOverride,
        String errorReportLocation, boolean replace, boolean verbose) throws Exception {
    SpringApplication app = new SpringApplication(AnnotationPipeline.class);
    ConfigurableApplicationContext ctx = app.run(args);
    JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);

    Job annotationJob = ctx.getBean(BatchConfiguration.ANNOTATION_JOB, Job.class);
    JobParameters jobParameters = new JobParametersBuilder().addString("filename", filename)
            .addString("outputFilename", outputFilename).addString("replace", String.valueOf(replace))
            .addString("isoformOverride", isoformOverride).addString("errorReportLocation", errorReportLocation)
            .addString("verbose", String.valueOf(verbose)).toJobParameters();
    JobExecution jobExecution = jobLauncher.run(annotationJob, jobParameters);
    if (!jobExecution.getExitStatus().equals(ExitStatus.COMPLETED)) {
        System.exit(2);
    }
}
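Because the default JobLauncher runs the job synchronously, the exit-status check after jobLauncher.run is a reliable way to map the batch outcome to a process exit code.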

From source file:org.geoserver.backuprestore.Backup.java

/**
 * Runs a backup job asynchronously, writing flat files to a temporary folder and
 * initializing the target archive.
 *
 * @return the {@link BackupExecutionAdapter} wrapping the launched backup execution
 * @throws IOException if the archive file cannot be prepared or the job cannot be started
 */
public BackupExecutionAdapter runBackupAsync(final Resource archiveFile, final boolean overwrite,
        final Filter filter, final Hints params) throws IOException {
    // Check if archiveFile exists
    if (archiveFile.file().exists()) {
        if (!overwrite && FileUtils.sizeOf(archiveFile.file()) > 0) {
            // Unless the user explicitly wants to overwrite the archiveFile, throw an exception whenever it already exists
            throw new IOException(
                    "The target archive file already exists. Use 'overwrite=TRUE' if you want to overwrite it.");
        } else {
            FileUtils.forceDelete(archiveFile.file());
        }
    } else {
        // Make sure the parent path exists
        if (!archiveFile.file().getParentFile().exists()) {
            try {
                archiveFile.file().getParentFile().mkdirs();
            } finally {
                if (!archiveFile.file().getParentFile().exists()) {
                    throw new IOException("The path to target archive file is unreachable.");
                }
            }
        }
    }

    // Initialize ZIP
    FileUtils.touch(archiveFile.file());

    // Write flat files into a temporary folder
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, BACKUP_JOB_NAME)
            .addString(PARAM_OUTPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    // Send Execution Signal
    BackupExecutionAdapter backupExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(backupJob, jobParameters);
                backupExecution = new BackupExecutionAdapter(jobExecution, totalNumberOfBackupSteps);
                backupExecutions.put(backupExecution.getId(), backupExecution);

                backupExecution.setArchiveFile(archiveFile);
                backupExecution.setOverwrite(overwrite);
                backupExecution.setFilter(filter);

                backupExecution.getOptions().add("OVERWRITE=" + overwrite);
                for (Entry jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        backupExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return backupExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Backup Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        throw new IOException("Could not start a new Backup Job Execution: ", e);
    } finally {
    }
}
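Note the addLong(PARAM_TIME, System.currentTimeMillis()) parameter: Spring Batch identifies a JobInstance by its job name and parameters, so the timestamp guarantees every backup launch creates a fresh JobInstance rather than failing with JobInstanceAlreadyCompleteException. The restore variant below uses the same trick.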

From source file:org.geoserver.backuprestore.Backup.java

/**
 * Runs a restore job asynchronously, extracting the given archive into a temporary folder first.
 *
 * @return the {@link RestoreExecutionAdapter} wrapping the launched restore execution
 * @throws IOException if the archive cannot be extracted or the job cannot be started
 */
public RestoreExecutionAdapter runRestoreAsync(final Resource archiveFile, final Filter filter,
        final Hints params) throws IOException {
    // Extract archive into a temporary folder
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());
    BackupUtils.extractTo(archiveFile, tmpDir);

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, RESTORE_JOB_NAME)
            .addString(PARAM_INPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    RestoreExecutionAdapter restoreExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(restoreJob, jobParameters);
                restoreExecution = new RestoreExecutionAdapter(jobExecution, totalNumberOfRestoreSteps);
                restoreExecutions.put(restoreExecution.getId(), restoreExecution);
                restoreExecution.setArchiveFile(archiveFile);
                restoreExecution.setFilter(filter);

                for (Entry jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        restoreExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return restoreExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Restore Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        throw new IOException("Could not start a new Restore Job Execution: ", e);
    } finally {
    }
}

From source file:org.ohdsi.webapi.service.CohortDefinitionService.java

/**
 * Queues up a generate cohort task for the specified cohort definition id.
 *
 * @param id - the Cohort Definition ID to generate
 * @return information about the Cohort Analysis Job
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/generate/{sourceKey}")
public JobExecutionResource generateCohort(@PathParam("id") final int id,
        @PathParam("sourceKey") final String sourceKey) {

    Source source = getSourceRepository().findBySourceKey(sourceKey);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    CohortDefinition currentDefinition = this.cohortDefinitionRepository.findOne(id);
    CohortGenerationInfo info = findBySourceId(currentDefinition.getGenerationInfoList(), source.getSourceId());
    if (info == null) {
        info = new CohortGenerationInfo(currentDefinition, source.getSourceId());
        currentDefinition.getGenerationInfoList().add(info);
    }
    info.setStatus(GenerationStatus.PENDING).setStartTime(Calendar.getInstance().getTime());

    this.cohortDefinitionRepository.save(currentDefinition);
    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "generating cohort " + currentDefinition.getId() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();

    log.info(String.format("Beginning generate cohort for cohort definition id: \n %s", "" + id));

    GenerateCohortTasklet generateTasklet = new GenerateCohortTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("cohortDefinition.generateCohort").tasklet(generateTasklet)
            .build();

    Job generateCohortJob = jobBuilders.get("generateCohort").start(generateCohortStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(generateCohortJob, jobParameters);
    return jobExec;
}
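This service (and the two below) passes numeric ids such as cohort_definition_id and source_id as strings via "" + id. The builder's addLong method would preserve the numeric type in the JobParameters instead; the string form simply means downstream tasklets parse the values back themselves.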

From source file:org.ohdsi.webapi.service.FeasibilityService.java

@GET
@Path("/{study_id}/generate/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performStudy(@PathParam("study_id") final int study_id,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    FeasibilityStudy study = this.feasibilityStudyRepository.findOne(study_id);

    CohortDefinition indexRule = this.cohortDefinitionRepository.findOne(study.getIndexRule().getId());
    CohortGenerationInfo indexInfo = findCohortGenerationInfoBySourceId(indexRule.getGenerationInfoList(),
            source.getSourceId());
    if (indexInfo == null) {
        indexInfo = new CohortGenerationInfo(indexRule, source.getSourceId());
        indexRule.getGenerationInfoList().add(indexInfo);
    }
    indexInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
    this.cohortDefinitionRepository.save(indexRule);

    if (study.getResultRule() != null) {
        CohortDefinition resultRule = this.cohortDefinitionRepository.findOne(study.getResultRule().getId());
        CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultRule.getGenerationInfoList(),
                source.getSourceId());
        if (resultInfo == null) {
            resultInfo = new CohortGenerationInfo(resultRule, source.getSourceId());
            resultRule.getGenerationInfoList().add(resultInfo);
        }
        resultInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
        this.cohortDefinitionRepository.save(resultRule);
    }

    StudyGenerationInfo studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(),
            source.getSourceId());
    if (studyInfo == null) {
        studyInfo = new StudyGenerationInfo(study, source);
        study.getStudyGenerationInfoList().add(studyInfo);
    }
    studyInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.feasibilityStudyRepository.save(study);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "performing feasibility study on " + indexRule.getName() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + indexRule.getId()));
    builder.addString("study_id", ("" + study_id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();
    final JdbcTemplate sourceJdbcTemplate = getSourceJdbcTemplate(source);

    GenerateCohortTasklet indexRuleTasklet = new GenerateCohortTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("performStudy.generateIndexCohort").tasklet(indexRuleTasklet)
            .exceptionHandler(new TerminateJobStepExceptionHandler()).build();

    PerformFeasibilityTasklet simulateTasklet = new PerformFeasibilityTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), feasibilityStudyRepository, cohortDefinitionRepository);

    Step performStudyStep = stepBuilders.get("performStudy.performStudy").tasklet(simulateTasklet).build();

    Job performStudyJob = jobBuilders.get("performStudy").start(generateCohortStep).next(performStudyStep)
            .build();

    JobExecutionResource jobExec = this.jobTemplate.launch(performStudyJob, jobParameters);
    return jobExec;
}

From source file:org.ohdsi.webapi.service.IRAnalysisService.java

@GET
@Path("/{analysis_id}/execute/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performAnalysis(@PathParam("analysis_id") final int analysisId,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(analysisId);

    ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(),
            source.getSourceId());
    if (analysisInfo != null) {
        if (analysisInfo.getStatus() != GenerationStatus.COMPLETE)
            return null; // Exit execution, another process has started it.
    } else {
        analysisInfo = new ExecutionInfo(analysis, source);
        analysis.getExecutionInfoList().add(analysisInfo);
    }

    analysisInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.irAnalysisRepository.save(analysis);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "IR Analysis: " + analysis.getId() + " : " + source.getSourceName() + " ("
            + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("analysis_id", ("" + analysisId));
    builder.addString("source_id", ("" + source.getSourceId()));

    final JobParameters jobParameters = builder.toJobParameters();

    PerformAnalysisTasklet analysisTasklet = new PerformAnalysisTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), irAnalysisRepository);

    Step irAnalysisStep = stepBuilders.get("irAnalysis.execute").tasklet(analysisTasklet).build();

    Job executeAnalysis = jobBuilders.get("irAnalysis").start(irAnalysisStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(executeAnalysis, jobParameters);
    return jobExec;
}

From source file:org.springframework.batch.core.step.item.FaultTolerantStepFactoryBeanRetryTests.java

@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {

    factory = new FaultTolerantStepFactoryBean<String, String>();
    factory.setBeanName("step");

    factory.setItemReader(new ListItemReader<String>(new ArrayList<String>()));
    factory.setItemWriter(writer);
    factory.setJobRepository(repository);
    factory.setTransactionManager(new ResourcelessTransactionManager());
    factory.setRetryableExceptionClasses(getExceptionMap(Exception.class));
    factory.setCommitInterval(1); // trivial by default

    factory.setSkippableExceptionClasses(getExceptionMap(Exception.class));

    JobParameters jobParameters = new JobParametersBuilder().addString("statefulTest", "make_this_unique")
            .toJobParameters();
    jobExecution = repository.createJobExecution("job", jobParameters);
    jobExecution.setEndTime(new Date());

}
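The single "statefulTest" string parameter is there purely to give the execution a distinct JobParameters identity, since createJobExecution derives the JobInstance from the job name plus its parameters.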

From source file:org.springframework.batch.core.test.football.FootballJobIntegrationTests.java

@Test
public void testLaunchJob() throws Exception {
    JobExecution execution = jobLauncher.run(job,
            new JobParametersBuilder().addLong("commit.interval", 10L).toJobParameters());
    assertEquals(BatchStatus.COMPLETED, execution.getStatus());
    for (StepExecution stepExecution : execution.getStepExecutions()) {
        logger.info("Processed: " + stepExecution);
        if (stepExecution.getStepName().equals("playerload")) {
            // The effect of the retries
            assertEquals((int) Math.ceil(stepExecution.getReadCount() / 10. + 1),
                    stepExecution.getCommitCount());
        }
    }
}
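Here addLong carries commit.interval as a typed Long, which the job configuration can read back without string parsing (for example through a step-scoped binding such as #{jobParameters['commit.interval']}).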

From source file:org.springframework.batch.core.test.football.FootballJobSkipIntegrationTests.java

@Test
public void testLaunchJob() throws Exception {
    try {
        if (databaseType == DatabaseType.POSTGRES || databaseType == DatabaseType.ORACLE) {
            // Extra special test for these platforms (would have failed
            // the job with UNKNOWN status in Batch 2.0):
            jdbcTemplate.update("SET CONSTRAINTS ALL DEFERRED");
        }
    } catch (Exception e) {
        // Ignore (wrong platform)
    }
    JobExecution execution = jobLauncher.run(job,
            new JobParametersBuilder().addLong("skip.limit", 0L).toJobParameters());
    assertEquals(BatchStatus.COMPLETED, execution.getStatus());
    for (StepExecution stepExecution : execution.getStepExecutions()) {
        logger.info("Processed: " + stepExecution);
    }
    // They all skip on the second execution because of a primary key
    // violation
    long retryLimit = 2L;
    execution = jobLauncher.run(job, new JobParametersBuilder().addLong("skip.limit", 100000L)
            .addLong("retry.limit", retryLimit).toJobParameters());
    assertEquals(BatchStatus.COMPLETED, execution.getStatus());
    for (StepExecution stepExecution : execution.getStepExecutions()) {
        logger.info("Processed: " + stepExecution);
        if (stepExecution.getStepName().equals("playerload")) {
            // The effect of the retries is to increase the number of
            // rollbacks
            int commitInterval = stepExecution.getReadCount() / (stepExecution.getCommitCount() - 1);
            // Account for the extra empty commit if the read count is
            // commensurate with the commit interval
            int effectiveCommitCount = stepExecution.getReadCount() % commitInterval == 0
                    ? stepExecution.getCommitCount() - 1
                    : stepExecution.getCommitCount();
            long expectedRollbacks = Math.max(1, retryLimit) * effectiveCommitCount
                    + stepExecution.getReadCount();
            assertEquals(expectedRollbacks, stepExecution.getRollbackCount());
            assertEquals(stepExecution.getReadCount(), stepExecution.getWriteSkipCount());
        }
    }

}

From source file:org.springframework.batch.core.test.football.ParallelJobIntegrationTests.java

@Test
public void testLaunchJob() throws Exception {
    JobExecution execution = jobLauncher.run(job, new JobParametersBuilder().toJobParameters());
    assertEquals(BatchStatus.COMPLETED, execution.getStatus());
    for (StepExecution stepExecution : execution.getStepExecutions()) {
        logger.info("Processed: " + stepExecution);
    }
}
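Calling new JobParametersBuilder().toJobParameters() with no add* calls yields an empty JobParameters set. That works for a one-off run, but relaunching the same job with the same (empty) parameters targets the same JobInstance and is rejected once it has completed, which is why the earlier examples add a timestamp or other unique value.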