Example usage for org.springframework.batch.core JobParametersBuilder addString

List of usage examples for org.springframework.batch.core JobParametersBuilder addString

Introduction

On this page you can find example usage of org.springframework.batch.core JobParametersBuilder addString.

Prototype

public JobParametersBuilder addString(String key, String parameter) 

Document

Add a new identifying String parameter for the given key.
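
A minimal sketch of the typical pattern, before the project examples under Usage below (this snippet is not taken from any of those projects; the job and launcher are assumed to be injected Spring beans, and the parameter names are placeholders):

public JobExecution launchWithStringParameters(JobLauncher jobLauncher, Job job) throws Exception {
    // Each addString call adds an identifying String parameter and returns
    // the builder, so calls can be chained fluently.
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("inputFile", "/tmp/input.csv")
            .addString("runId", String.valueOf(System.currentTimeMillis()))
            .toJobParameters();

    // Launch the job with the assembled parameters; identifying parameters
    // are what distinguish one JobInstance from another.
    return jobLauncher.run(job, jobParameters);
}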

Usage

From source file:com.vmware.bdd.manager.ClusterManager.java

@ClusterManagerPointcut
public Long fixDiskFailures(String clusterName, String groupName) throws Exception {
    opsBlocker.blockUnsupportedOpsByCluster("fixDisk", clusterName);

    ClusterEntity cluster = clusterEntityMgr.findByName(clusterName);
    if (cluster == null) {
        logger.error("cluster " + clusterName + " does not exist");
        throw BddException.NOT_FOUND("Cluster", clusterName);
    }
    SoftwareManager softMgr = softwareManagerCollector.getSoftwareManager(cluster.getAppManager());

    ValidationUtils.validateVersion(clusterEntityMgr, clusterName);

    ClusterStatus oldStatus = cluster.getStatus();

    if (!oldStatus.isActiveServiceStatus()) {
        throw ClusterHealServiceException.NOT_SUPPORTED(clusterName, "The cluster status must be RUNNING");
    }

    List<NodeGroupEntity> nodeGroups;

    if (groupName != null) {
        NodeGroupEntity nodeGroup = clusterEntityMgr.findByName(clusterName, groupName);
        if (nodeGroup == null) {
            logger.error("node group " + groupName + " does not exist");
            throw BddException.NOT_FOUND("group", groupName);
        }

        nodeGroups = new ArrayList<NodeGroupEntity>(1);
        nodeGroups.add(nodeGroup);
    } else {
        nodeGroups = clusterEntityMgr.findAllGroups(clusterName);
    }

    // only fix worker nodes that have datanode or tasktracker roles
    boolean workerNodesFound = false;
    JobParametersBuilder parametersBuilder = new JobParametersBuilder();
    List<JobParameters> jobParameterList = new ArrayList<JobParameters>();

    for (NodeGroupEntity nodeGroup : nodeGroups) {
        List<String> roles = nodeGroup.getRoleNameList();

        workerNodesFound = true;
        for (NodeEntity node : clusterEntityMgr.findAllNodes(clusterName, nodeGroup.getName())) {
            if (node.isObsoleteNode()) {
                logger.info("Ignore node " + node.getVmName() + ", for it violates the VM name convention"
                        + " or exceeds the defined group instance number.");
                continue;
            }
            if (clusterHealService.hasBadDisks(node.getVmName())) {
                logger.warn("node " + node.getVmName() + " has bad disks. Fixing it..");

                boolean vmPowerOn = (node.getStatus().ordinal() != NodeStatus.POWERED_OFF.ordinal());

                JobParameters nodeParameters = parametersBuilder
                        .addString(JobConstants.CLUSTER_NAME_JOB_PARAM, clusterName)
                        .addString(JobConstants.TARGET_NAME_JOB_PARAM, node.getVmName())
                        .addString(JobConstants.GROUP_NAME_JOB_PARAM, nodeGroup.getName())
                        .addString(JobConstants.SUB_JOB_NODE_NAME, node.getVmName())
                        .addString(JobConstants.IS_VM_POWER_ON, String.valueOf(vmPowerOn)).toJobParameters();
                jobParameterList.add(nodeParameters);
            }
        }
    }

    if (!workerNodesFound) {
        throw ClusterHealServiceException.NOT_SUPPORTED(clusterName,
                "only support fixing disk failures for worker/non-management nodes");
    }

    // all target nodes are healthy, simply return
    if (jobParameterList.isEmpty()) {
        logger.info("all target nodes are healthy, simply return");
        throw ClusterHealServiceException.NOT_NEEDED(clusterName);
    }

    try {
        clusterEntityMgr.updateClusterStatus(clusterName, ClusterStatus.MAINTENANCE);
        clusterEntityMgr.cleanupActionError(clusterName);
        return jobManager.runSubJobForNodes(JobConstants.FIX_NODE_DISK_FAILURE_JOB_NAME, jobParameterList,
                clusterName, oldStatus, oldStatus);
    } catch (Exception e) {
        logger.error("failed to fix disk failures, " + e.getMessage());
        throw e;
    }
}

From source file:org.geoserver.backuprestore.Backup.java

/**
 * @return
 * @throws IOException
 * 
 */
public BackupExecutionAdapter runBackupAsync(final Resource archiveFile, final boolean overwrite,
        final Filter filter, final Hints params) throws IOException {
    // Check if archiveFile exists
    if (archiveFile.file().exists()) {
        if (!overwrite && FileUtils.sizeOf(archiveFile.file()) > 0) {
            // Unless the user explicitly wants to overwrite the archiveFile, throw an exception whenever it already exists
            throw new IOException(
                    "The target archive file already exists. Use 'overwrite=TRUE' if you want to overwrite it.");
        } else {
            FileUtils.forceDelete(archiveFile.file());
        }
    } else {
        // Make sure the parent path exists
        if (!archiveFile.file().getParentFile().exists()) {
            try {
                archiveFile.file().getParentFile().mkdirs();
            } finally {
                if (!archiveFile.file().getParentFile().exists()) {
                    throw new IOException("The path to target archive file is unreachable.");
                }
            }
        }
    }

    // Initialize ZIP
    FileUtils.touch(archiveFile.file());

    // Write flat files into a temporary folder
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, BACKUP_JOB_NAME)
            .addString(PARAM_OUTPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    // Send Execution Signal
    BackupExecutionAdapter backupExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(backupJob, jobParameters);
                backupExecution = new BackupExecutionAdapter(jobExecution, totalNumberOfBackupSteps);
                backupExecutions.put(backupExecution.getId(), backupExecution);

                backupExecution.setArchiveFile(archiveFile);
                backupExecution.setOverwrite(overwrite);
                backupExecution.setFilter(filter);

                backupExecution.getOptions().add("OVERWRITE=" + overwrite);
                for (Entry jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        backupExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return backupExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Backup Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        throw new IOException("Could not start a new Backup Job Execution: ", e);
    } finally {
    }
}

From source file:org.geoserver.backuprestore.Backup.java

/**
 * @return
 * @throws IOException
 * 
 */
public RestoreExecutionAdapter runRestoreAsync(final Resource archiveFile, final Filter filter,
        final Hints params) throws IOException {
    // Extract archive into a temporary folder
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());
    BackupUtils.extractTo(archiveFile, tmpDir);

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, RESTORE_JOB_NAME)
            .addString(PARAM_INPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    RestoreExecutionAdapter restoreExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(restoreJob, jobParameters);
                restoreExecution = new RestoreExecutionAdapter(jobExecution, totalNumberOfRestoreSteps);
                restoreExecutions.put(restoreExecution.getId(), restoreExecution);
                restoreExecution.setArchiveFile(archiveFile);
                restoreExecution.setFilter(filter);

                for (Entry jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        restoreExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return restoreExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Restore Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        throw new IOException("Could not start a new Restore Job Execution: ", e);
    } finally {
    }
}

From source file:org.geoserver.backuprestore.Backup.java

/**
 * @param params
 * @param paramsBuilder
 */
private void parseParams(final Hints params, JobParametersBuilder paramsBuilder) {
    if (params != null) {
        for (Entry<Object, Object> param : params.entrySet()) {
            if (param.getKey() instanceof Hints.OptionKey) {
                final Set<String> key = ((Hints.OptionKey) param.getKey()).getOptions();
                for (String k : key) {
                    switch (k) {
                    case PARAM_CLEANUP_TEMP:
                    case PARAM_DRY_RUN_MODE:
                    case PARAM_BEST_EFFORT_MODE:
                        if (paramsBuilder.toJobParameters().getString(k) == null) {
                            paramsBuilder.addString(k, "true");
                        }
                    }
                }
            }
        }
    }
}

From source file:org.ohdsi.webapi.service.CohortDefinitionService.java

/**
 * Queues up a generate cohort task for the specified cohort definition id.
 *
 * @param id - the Cohort Definition ID to generate
 * @return information about the Cohort Analysis Job
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/{id}/generate/{sourceKey}")
public JobExecutionResource generateCohort(@PathParam("id") final int id,
        @PathParam("sourceKey") final String sourceKey) {

    Source source = getSourceRepository().findBySourceKey(sourceKey);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);

    DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
    requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requresNewTx);

    CohortDefinition currentDefinition = this.cohortDefinitionRepository.findOne(id);
    CohortGenerationInfo info = findBySourceId(currentDefinition.getGenerationInfoList(), source.getSourceId());
    if (info == null) {
        info = new CohortGenerationInfo(currentDefinition, source.getSourceId());
        currentDefinition.getGenerationInfoList().add(info);
    }
    info.setStatus(GenerationStatus.PENDING).setStartTime(Calendar.getInstance().getTime());

    this.cohortDefinitionRepository.save(currentDefinition);
    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "generating cohort " + currentDefinition.getId() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();

    log.info(String.format("Beginning generate cohort for cohort definition id: \n %s", "" + id));

    GenerateCohortTasklet generateTasklet = new GenerateCohortTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("cohortDefinition.generateCohort").tasklet(generateTasklet)
            .build();

    Job generateCohortJob = jobBuilders.get("generateCohort").start(generateCohortStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(generateCohortJob, jobParameters);
    return jobExec;
}

From source file:org.ohdsi.webapi.service.FeasibilityService.java

@GET
@Path("/{study_id}/generate/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performStudy(@PathParam("study_id") final int study_id,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
    requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requresNewTx);

    FeasibilityStudy study = this.feasibilityStudyRepository.findOne(study_id);

    CohortDefinition indexRule = this.cohortDefinitionRepository.findOne(study.getIndexRule().getId());
    CohortGenerationInfo indexInfo = findCohortGenerationInfoBySourceId(indexRule.getGenerationInfoList(),
            source.getSourceId());
    if (indexInfo == null) {
        indexInfo = new CohortGenerationInfo(indexRule, source.getSourceId());
        indexRule.getGenerationInfoList().add(indexInfo);
    }
    indexInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
    this.cohortDefinitionRepository.save(indexRule);

    if (study.getResultRule() != null) {
        CohortDefinition resultRule = this.cohortDefinitionRepository.findOne(study.getResultRule().getId());
        CohortGenerationInfo resultInfo = findCohortGenerationInfoBySourceId(resultRule.getGenerationInfoList(),
                source.getSourceId());
        if (resultInfo == null) {
            resultInfo = new CohortGenerationInfo(resultRule, source.getSourceId());
            resultRule.getGenerationInfoList().add(resultInfo);
        }
        resultInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);
        this.cohortDefinitionRepository.save(resultRule);
    }

    StudyGenerationInfo studyInfo = findStudyGenerationInfoBySourceId(study.getStudyGenerationInfoList(),
            source.getSourceId());
    if (studyInfo == null) {
        studyInfo = new StudyGenerationInfo(study, source);
        study.getStudyGenerationInfoList().add(studyInfo);
    }
    studyInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.feasibilityStudyRepository.save(study);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "performing feasibility study on " + indexRule.getName() + " : "
            + source.getSourceName() + " (" + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("target_table", "cohort");
    builder.addString("cohort_definition_id", ("" + indexRule.getId()));
    builder.addString("study_id", ("" + study_id));
    builder.addString("source_id", ("" + source.getSourceId()));
    builder.addString("generate_stats", Boolean.TRUE.toString());

    final JobParameters jobParameters = builder.toJobParameters();
    final JdbcTemplate sourceJdbcTemplate = getSourceJdbcTemplate(source);

    GenerateCohortTasklet indexRuleTasklet = new GenerateCohortTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), cohortDefinitionRepository);

    Step generateCohortStep = stepBuilders.get("performStudy.generateIndexCohort").tasklet(indexRuleTasklet)
            .exceptionHandler(new TerminateJobStepExceptionHandler()).build();

    PerformFeasibilityTasklet simulateTasket = new PerformFeasibilityTasklet(sourceJdbcTemplate,
            getTransactionTemplate(), feasibilityStudyRepository, cohortDefinitionRepository);

    Step performStudyStep = stepBuilders.get("performStudy.performStudy").tasklet(simulateTasket).build();

    Job performStudyJob = jobBuilders.get("performStudy").start(generateCohortStep).next(performStudyStep)
            .build();

    JobExecutionResource jobExec = this.jobTemplate.launch(performStudyJob, jobParameters);
    return jobExec;
}

From source file:org.ohdsi.webapi.service.IRAnalysisService.java

@GET
@Path("/{analysis_id}/execute/{sourceKey}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JobExecutionResource performAnalysis(@PathParam("analysis_id") final int analysisId,
        @PathParam("sourceKey") final String sourceKey) {
    Date startTime = Calendar.getInstance().getTime();

    Source source = this.getSourceRepository().findBySourceKey(sourceKey);
    String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
    String cdmTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.CDM);

    DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition();
    requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager()
            .getTransaction(requresNewTx);

    IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(analysisId);

    ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(),
            source.getSourceId());
    if (analysisInfo != null) {
        if (analysisInfo.getStatus() != GenerationStatus.COMPLETE)
            return null; // Exit execution, another process has started it.
    } else {
        analysisInfo = new ExecutionInfo(analysis, source);
        analysis.getExecutionInfoList().add(analysisInfo);
    }

    analysisInfo.setStatus(GenerationStatus.PENDING).setStartTime(startTime).setExecutionDuration(null);

    this.irAnalysisRepository.save(analysis);

    this.getTransactionTemplate().getTransactionManager().commit(initStatus);

    JobParametersBuilder builder = new JobParametersBuilder();
    builder.addString("jobName", "IR Analysis: " + analysis.getId() + " : " + source.getSourceName() + " ("
            + source.getSourceKey() + ")");
    builder.addString("cdm_database_schema", cdmTableQualifier);
    builder.addString("results_database_schema", resultsTableQualifier);
    builder.addString("target_dialect", source.getSourceDialect());
    builder.addString("analysis_id", ("" + analysisId));
    builder.addString("source_id", ("" + source.getSourceId()));

    final JobParameters jobParameters = builder.toJobParameters();

    PerformAnalysisTasklet analysisTasklet = new PerformAnalysisTasklet(getSourceJdbcTemplate(source),
            getTransactionTemplate(), irAnalysisRepository);

    Step irAnalysisStep = stepBuilders.get("irAnalysis.execute").tasklet(analysisTasklet).build();

    Job executeAnalysis = jobBuilders.get("irAnalysis").start(irAnalysisStep).build();

    JobExecutionResource jobExec = this.jobTemplate.launch(executeAnalysis, jobParameters);
    return jobExec;
}

From source file:simple.spring.batch.JobLauncherDetails.java

private JobParameters getJobParametersFromJobMap(Map<String, Object> jobDataMap) {

    JobParametersBuilder builder = new JobParametersBuilder();

    for (Entry<String, Object> entry : jobDataMap.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();

        if (value instanceof String && !key.equals(JOB_NAME)) {
            builder.addString(key, (String) value);
        } else if (value instanceof Float || value instanceof Double) {
            builder.addDouble(key, ((Number) value).doubleValue());
        } else if (value instanceof Integer || value instanceof Long) {
            builder.addLong(key, ((Number) value).longValue());
        } else if (value instanceof Date) {
            builder.addDate(key, (Date) value);
        } else {
            //System.out.println(">>> key : " + key + ", value : " + value);
            //log.debug("JobDataMap contains values which are not job parameters (ignoring).");
        }
    }

    return builder.toJobParameters();

}