Example usage for org.springframework.batch.core JobParametersBuilder toJobParameters

List of usage examples for org.springframework.batch.core JobParametersBuilder toJobParameters

Introduction

On this page you can find example usage for org.springframework.batch.core JobParametersBuilder toJobParameters.

Prototype

public JobParameters toJobParameters() 

Source Link

Document

Conversion method that takes the current state of this builder and returns it as a JobParameters object.

Usage

From source file:org.geoserver.backuprestore.Backup.java

/**
 * @return/*from w ww.ja  v a  2s  .c om*/
 * @return
 * @throws IOException
 * 
 */
public RestoreExecutionAdapter runRestoreAsync(final Resource archiveFile, final Filter filter,
        final Hints params) throws IOException {
    // Extract archive into a temporary folder
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());
    BackupUtils.extractTo(archiveFile, tmpDir);

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, RESTORE_JOB_NAME)
            .addString(PARAM_INPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    RestoreExecutionAdapter restoreExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(restoreJob, jobParameters);
                restoreExecution = new RestoreExecutionAdapter(jobExecution, totalNumberOfRestoreSteps);
                restoreExecutions.put(restoreExecution.getId(), restoreExecution);
                restoreExecution.setArchiveFile(archiveFile);
                restoreExecution.setFilter(filter);

                for (Entry jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        restoreExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return restoreExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Restore Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        throw new IOException("Could not start a new Restore Job Execution: ", e);
    } finally {
    }
}

From source file:org.geoserver.backuprestore.Backup.java

/**
 * Copies recognized backup/restore option hints into the job parameters builder.
 *
 * <p>Only the known boolean option keys ({@code PARAM_CLEANUP_TEMP},
 * {@code PARAM_DRY_RUN_MODE}, {@code PARAM_BEST_EFFORT_MODE}) are honored; each one is
 * set to {@code "true"} unless the builder already carries a value for it.
 *
 * @param params the hints to scan; may be {@code null}
 * @param paramsBuilder the builder receiving the parameters
 */
private void parseParams(final Hints params, JobParametersBuilder paramsBuilder) {
    if (params == null) {
        return;
    }
    for (Entry<Object, Object> hint : params.entrySet()) {
        if (!(hint.getKey() instanceof Hints.OptionKey)) {
            continue;
        }
        for (String option : ((Hints.OptionKey) hint.getKey()).getOptions()) {
            switch (option) {
            case PARAM_CLEANUP_TEMP:
            case PARAM_DRY_RUN_MODE:
            case PARAM_BEST_EFFORT_MODE:
                // Only set the flag when the caller has not already provided a value.
                if (paramsBuilder.toJobParameters().getString(option) == null) {
                    paramsBuilder.addString(option, "true");
                }
            }
        }
    }
}

From source file:org.ohdsi.webapi.service.CohortDefinitionService.java

/**
 * Queues up a generate cohort task for the specified cohort definition id.
 *
 * @param id - the Cohort Definition ID to generate
 * @param sourceKey - key of the source (CDM database) to generate against
 * @return information about the Cohort Analysis Job
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/generate/{sourceKey}")
public JobExecutionResource generateCohort(@PathParam("id") final int id,
        @PathParam("sourceKey") final String sourceKey) {

    final Source source = getSourceRepository().findBySourceKey(sourceKey);
    final String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
    final String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);

    // Flag the generation as PENDING in a dedicated REQUIRES_NEW transaction so the
    // status change is committed and visible before the batch job actually starts.
    final DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    final TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    final CohortDefinition currentDefinition = this.cohortDefinitionRepository.findOne(id);
    CohortGenerationInfo info = findBySourceId(currentDefinition.getGenerationInfoList(), source.getSourceId());
    if (info == null) {
        info = new CohortGenerationInfo(currentDefinition, source.getSourceId());
        currentDefinition.getGenerationInfoList().add(info);
    }
    info.setStatus(GenerationStatus.PENDING).setStartTime(Calendar.getInstance().getTime());

    this.cohortDefinitionRepository.save(currentDefinition);
    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    // Build the parameters the tasklet consumes; jobName doubles as a readable label.
    final JobParameters jobParameters = new JobParametersBuilder()
            .addString("jobName", "generating cohort " + currentDefinition.getId() + " : "
                    + source.getSourceName() + " (" + source.getSourceKey() + ")")
            .addString("cdm_database_schema", cdmTableQualifier)
            .addString("results_database_schema", resultsTableQualifier)
            .addString("target_database_schema", resultsTableQualifier)
            .addString("target_dialect", source.getSourceDialect())
            .addString("target_table", "cohort")
            .addString("cohort_definition_id", ("" + id))
            .addString("source_id", ("" + source.getSourceId()))
            .addString("generate_stats", Boolean.TRUE.toString())
            .toJobParameters();

    log.info(String.format("Beginning generate cohort for cohort definition id: \n %s", "" + id));

    final GenerateCohortTasklet generateTasklet = new GenerateCohortTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), cohortDefinitionRepository);

    final Step generateCohortStep = stepBuilders.get("cohortDefinition.generateCohort").tasklet(generateTasklet)
            .build();

    final Job generateCohortJob = jobBuilders.get("generateCohort").start(generateCohortStep).build();

    return this.jobTemplate.launch(generateCohortJob, jobParameters);
}

From source file:org.ohdsi.webapi.service.FeasibilityService.java

/**
 * Launches a feasibility study batch job for the given study against the given source.
 *
 * <p>Before launching, the index-rule cohort, the optional result-rule cohort and the
 * study itself are all flagged as PENDING inside a single REQUIRES_NEW transaction so
 * that clients polling for status see the run before the job actually starts.
 *
 * @param study_id the feasibility study to perform
 * @param sourceKey key of the source (CDM database) to run against
 * @return information about the launched batch job
 */
@GET
@Path("/{study_id}/generate/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performStudy(@PathParam("study_id") final int study_id,
        @PathParam("sourceKey") final String sourceKey) {
    // Single timestamp reused for every PENDING marker below.
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    // Open a dedicated transaction so the status updates commit immediately,
    // independent of any surrounding transaction.
    DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
    requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requresNewTx);

    FeasibilityStudy study = this.feasibilityStudyRepository.findOne(study_id);

    // Mark the index-rule cohort generation as PENDING for this source.
    CohortDefinition indexRule = this.cohortDefinitionRepository.findOne(study.getIndexRule().getId());
    CohortGenerationInfo indexInfo = findCohortGenerationInfoBySourceId(indexRule.getGenerationInfoList(),
            source.getSourceId());
    if (indexInfo == null) {
        indexInfo = new CohortGenerationInfo(indexRule, source.getSourceId());
        indexRule.getGenerationInfoList().add(indexInfo);
    }
    indexInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
    this.cohortDefinitionRepository.save(indexRule);

    // The result rule is optional; when present, mark its generation PENDING too.
    if (study.getResultRule() != null) {
        CohortDefinition resultRule = this.cohortDefinitionRepository.findOne(study.getResultRule().getId());
        CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultRule.getGenerationInfoList(),
                source.getSourceId());
        if (resultInfo == null) {
            resultInfo = new CohortGenerationInfo(resultRule, source.getSourceId());
            resultRule.getGenerationInfoList().add(resultInfo);
        }
        resultInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
        this.cohortDefinitionRepository.save(resultRule);
    }

    // Mark the study-level generation PENDING as well.
    StudyGenerationInfo studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(),
            source.getSourceId());
    if (studyInfo == null) {
        studyInfo = new StudyGenerationInfo(study, source);
        study.getStudyGenerationInfoList().add(studyInfo);
    }
    studyInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.feasibilityStudyRepository.save(study);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    // Job parameters consumed by the tasklets; jobName is a human-readable label.
    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "performing feasibility study on " + indexRule.getName() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + indexRule.getId()));
    builder.addString("study_id", ("" + study_id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();
    final JdbcTemplate sourceJdbcTemplate = getSourceJdbcTemplate(source);

    // Step 1: generate the index cohort; abort the whole job if it fails.
    GenerateCohortTasklet indexRuleTasklet = new GenerateCohortTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("performStudy.generateIndexCohort").tasklet(indexRuleTasklet)
            .exceptionHandler(new TerminateJobStepExceptionHandler()).build();

    // Step 2: run the feasibility simulation against the generated cohort.
    PerformFeasibilityTasklet simulateTasket = new PerformFeasibilityTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), feasibilityStudyRepository, cohortDefinitionRepository);

    Step performStudyStep = stepBuilders.get("performStudy.performStudy").tasklet(simulateTasket).build();

    Job performStudyJob = jobBuilders.get("performStudy").start(generateCohortStep).next(performStudyStep)
            .build();

    JobExecutionResource jobExec = this.jobTemplate.launch(performStudyJob, jobParameters);
    return jobExec;
}

From source file:org.ohdsi.webapi.service.IRAnalysisService.java

/**
 * Launches an incidence-rate analysis batch job for the given analysis id against the
 * given source.
 *
 * <p>The analysis execution is marked PENDING inside its own REQUIRES_NEW transaction so
 * the status is committed before the job starts. If another process is already generating
 * this analysis on the same source, {@code null} is returned and nothing is launched.
 *
 * @param analysisId the incidence rate analysis to execute
 * @param sourceKey key of the source (CDM database) to execute against
 * @return information about the launched job, or {@code null} if a run is already in progress
 */
@GET
@Path("/{analysis_id}/execute/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performAnalysis(@PathParam("analysis_id") final int analysisId,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requiresNewTx);

    IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(analysisId);

    ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(),
            source.getSourceId());
    if (analysisInfo != null) {
        if (analysisInfo.getStatus() != GenerationStatus.COMPLETE) {
            // Another process has already started this generation. Release the
            // transaction opened above before bailing out; the previous code
            // returned with initStatus still open, leaking the transaction.
            this.getTransactionTemplate().getTransactionManager().rollback(initStatus);
            return null;
        }
    } else {
        analysisInfo = new ExecutionInfo(analysis, source);
        analysis.getExecutionInfoList().add(analysisInfo);
    }

    analysisInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.irAnalysisRepository.save(analysis);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    // Parameters consumed by the analysis tasklet; jobName is a human-readable label.
    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "IR Analysis: " + analysis.getId() + " : " + source.getSourceName() + " ("
            + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("analysis_id", ("" + analysisId));
    builder.addString("source_id", ("" + source.getSourceId()));

    final JobParameters jobParameters = builder.toJobParameters();

    PerformAnalysisTasklet analysisTasklet = new PerformAnalysisTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), irAnalysisRepository);

    Step irAnalysisStep = stepBuilders.get("irAnalysis.execute").tasklet(analysisTasklet).build();

    Job executeAnalysis = jobBuilders.get("irAnalysis").start(irAnalysisStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(executeAnalysis, jobParameters);
    return jobExec;
}

From source file:org.springframework.xd.dirt.plugins.job.ExpandedJobParametersConverter.java

/**
 * Returns a copy of the given {@link JobParameters} with any parameter named
 * {@value #IS_RESTART_JOB_PARAMETER_KEY} (matched case-insensitively) removed.
 *
 * @param jobParameters Must not be null
 * @return A new {@link JobParameters} instance without the restart marker parameter
 */
public JobParameters removeRestartParameterIfExists(JobParameters jobParameters) {

    Assert.notNull(jobParameters, "'jobParameters' must not be null.");

    final JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();

    // Copy every entry except the restart marker into a fresh builder.
    for (Map.Entry<String, JobParameter> entry : jobParameters.getParameters().entrySet()) {
        if (!IS_RESTART_JOB_PARAMETER_KEY.equalsIgnoreCase(entry.getKey())) {
            jobParametersBuilder.addParameter(entry.getKey(), entry.getValue());
        }
    }

    return jobParametersBuilder.toJobParameters();
}

From source file:simple.spring.batch.JobLauncherDetails.java

/**
 * Converts a Quartz job data map into Spring Batch {@link JobParameters}.
 *
 * <p>String values (except the reserved {@code JOB_NAME} key), Float/Double,
 * Integer/Long and {@link Date} values are copied over with the matching parameter
 * type; values of any other type are deliberately ignored.
 *
 * @param jobDataMap the raw key/value pairs supplied by the scheduler
 * @return the converted job parameters
 */
private JobParameters getJobParametersFromJobMap(Map<String, Object> jobDataMap) {

    JobParametersBuilder builder = new JobParametersBuilder();

    for (Entry<String, Object> entry : jobDataMap.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();

        if (value instanceof String && !key.equals(JOB_NAME)) {
            builder.addString(key, (String) value);
        } else if (value instanceof Float || value instanceof Double) {
            builder.addDouble(key, ((Number) value).doubleValue());
        } else if (value instanceof Integer || value instanceof Long) {
            builder.addLong(key, ((Number) value).longValue());
        } else if (value instanceof Date) {
            builder.addDate(key, (Date) value);
        }
        // Any other value type (and the JOB_NAME string itself) is not a job
        // parameter and is intentionally skipped.
    }

    return builder.toJobParameters();
}