Example usage for org.springframework.batch.core JobParameters toProperties

List of usage examples for org.springframework.batch.core JobParameters toProperties

Introduction

In this page you can find the example usage for org.springframework.batch.core JobParameters toProperties.

Prototype

public Properties toProperties() 

Source Link

Usage

From source file:org.geoserver.backuprestore.Backup.java

/**
 * Starts a new backup job asynchronously, writing the archive to the given file.
 *
 * @param archiveFile the target archive file; must not already contain data unless {@code overwrite} is {@code true}
 * @param overwrite whether an existing, non-empty archive file may be replaced
 * @param filter optional filter restricting the catalog resources to back up; may be {@code null}
 * @param params optional hints translated into additional job parameters; may be {@code null}
 * @return the adapter wrapping the launched backup job execution
 * @throws IOException if the target file cannot be prepared, another job is running, or the job fails to start
 */
public BackupExecutionAdapter runBackupAsync(final Resource archiveFile, final boolean overwrite,
        final Filter filter, final Hints params) throws IOException {
    // Check if archiveFile exists
    if (archiveFile.file().exists()) {
        if (!overwrite && FileUtils.sizeOf(archiveFile.file()) > 0) {
            // Unless the user explicitly wants to overwrite the archiveFile, throw an exception whenever it already exists
            throw new IOException(
                    "The target archive file already exists. Use 'overwrite=TRUE' if you want to overwrite it.");
        } else {
            FileUtils.forceDelete(archiveFile.file());
        }
    } else {
        // Make sure the parent path exists; mkdirs() can race with other writers,
        // so re-check existence before giving up instead of trusting its return value alone
        if (!archiveFile.file().getParentFile().exists()
                && !archiveFile.file().getParentFile().mkdirs()
                && !archiveFile.file().getParentFile().exists()) {
            throw new IOException("The path to target archive file is unreachable.");
        }
    }

    // Initialize the (empty) target ZIP file
    FileUtils.touch(archiveFile.file());

    // Write flat files into a temporary folder; the archive is assembled from there
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, BACKUP_JOB_NAME)
            .addString(PARAM_OUTPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    // Send Execution Signal: only one backup/restore job may run at a time
    BackupExecutionAdapter backupExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(backupJob, jobParameters);
                backupExecution = new BackupExecutionAdapter(jobExecution, totalNumberOfBackupSteps);
                backupExecutions.put(backupExecution.getId(), backupExecution);

                backupExecution.setArchiveFile(archiveFile);
                backupExecution.setOverwrite(overwrite);
                backupExecution.setFilter(filter);

                backupExecution.getOptions().add("OVERWRITE=" + overwrite);
                // Record user-visible options, skipping internal bookkeeping parameters
                for (Entry<Object, Object> jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        backupExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return backupExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Backup Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        // Wrap but preserve the original cause so callers can diagnose the launch failure
        throw new IOException("Could not start a new Backup Job Execution: ", e);
    }
}

From source file:org.geoserver.backuprestore.Backup.java

/**
 * @return/*from   w  ww. ja va  2  s  .  com*/
 * @return
 * @throws IOException
 * 
 */
public RestoreExecutionAdapter runRestoreAsync(final Resource archiveFile, final Filter filter,
        final Hints params) throws IOException {
    // Extract archive into a temporary folder
    Resource tmpDir = BackupUtils.geoServerTmpDir(getGeoServerDataDirectory());
    BackupUtils.extractTo(archiveFile, tmpDir);

    // Fill Job Parameters
    JobParametersBuilder paramsBuilder = new JobParametersBuilder();

    if (filter != null) {
        paramsBuilder.addString("filter", ECQL.toCQL(filter));
    }

    paramsBuilder.addString(PARAM_JOB_NAME, RESTORE_JOB_NAME)
            .addString(PARAM_INPUT_FILE_PATH, BackupUtils.getArchiveURLProtocol(tmpDir) + tmpDir.path())
            .addLong(PARAM_TIME, System.currentTimeMillis());

    parseParams(params, paramsBuilder);

    JobParameters jobParameters = paramsBuilder.toJobParameters();

    RestoreExecutionAdapter restoreExecution;
    try {
        if (getRestoreRunningExecutions().isEmpty() && getBackupRunningExecutions().isEmpty()) {
            synchronized (jobOperator) {
                // Start a new Job
                JobExecution jobExecution = jobLauncher.run(restoreJob, jobParameters);
                restoreExecution = new RestoreExecutionAdapter(jobExecution, totalNumberOfRestoreSteps);
                restoreExecutions.put(restoreExecution.getId(), restoreExecution);
                restoreExecution.setArchiveFile(archiveFile);
                restoreExecution.setFilter(filter);

                for (Entry jobParam : jobParameters.toProperties().entrySet()) {
                    if (!PARAM_OUTPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_INPUT_FILE_PATH.equals(jobParam.getKey())
                            && !PARAM_TIME.equals(jobParam.getKey())) {
                        restoreExecution.getOptions().add(jobParam.getKey() + "=" + jobParam.getValue());
                    }
                }

                return restoreExecution;
            }
        } else {
            throw new IOException(
                    "Could not start a new Restore Job Execution since there are currently Running jobs.");
        }
    } catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException
            | JobParametersInvalidException e) {
        throw new IOException("Could not start a new Restore Job Execution: ", e);
    } finally {
    }
}

From source file:org.springframework.batch.core.jsr.launch.JsrJobOperator.java

/**
 * Merges the job parameters of a previous execution with caller-supplied override properties.
 * Entries in {@code params} take precedence over those inherited from the previous execution.
 *
 * @param params overriding properties; may be {@code null}
 * @param previousJobExecution the execution being restarted; may be {@code null}
 * @return the combined set of restart properties (never {@code null})
 */
protected Properties getJobRestartProperties(Properties params,
        org.springframework.batch.core.JobExecution previousJobExecution) {
    Properties restartProperties = new Properties();

    // Seed with the previous execution's parameters, when available.
    if (previousJobExecution != null) {
        JobParameters previousParameters = previousJobExecution.getJobParameters();
        if (previousParameters != null && !previousParameters.isEmpty()) {
            restartProperties.putAll(previousParameters.toProperties());
        }
    }

    // Caller-supplied properties override the inherited ones.
    if (params != null) {
        for (Enumeration<?> names = params.propertyNames(); names.hasMoreElements(); ) {
            String name = (String) names.nextElement();
            restartProperties.setProperty(name, params.getProperty(name));
        }
    }

    return restartProperties;
}

From source file:org.springframework.cloud.dataflow.server.batch.SimpleJobService.java

/**
 * Restarts the job execution with the given id. Locally registered jobs are relaunched via the
 * {@code jobLauncher} using the original execution's parameters; otherwise the call falls back
 * to the JSR-352 operator (using {@code params} when provided).
 *
 * @param jobExecutionId id of the execution to restart
 * @param params JSR-352 restart properties; ignored for locally registered jobs, may be {@code null}
 * @return the new job execution
 */
@Override
public JobExecution restart(Long jobExecutionId, JobParameters params)
        throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, NoSuchJobException, JobParametersInvalidException {

    JobExecution target = getJobExecution(jobExecutionId);
    JobInstance lastInstance = target.getJobInstance();
    String jobName = lastInstance.getJobName();

    // Locally registered job: relaunch through the standard launcher with the original parameters.
    if (jobLocator.getJobNames().contains(jobName)) {
        Job job = jobLocator.getJob(jobName);
        JobExecution restarted = jobLauncher.run(job, target.getJobParameters());
        if (restarted.isRunning()) {
            activeExecutions.add(restarted);
        }
        return restarted;
    }

    // Not registered locally: delegate to the JSR-352 operator, if one is configured.
    if (jsrJobOperator == null) {
        throw new NoSuchJobException(
                String.format("Can't find job associated with job execution id %s to restart",
                        String.valueOf(jobExecutionId)));
    }
    Properties restartProperties = (params != null) ? params.toProperties() : new Properties();
    return new JobExecution(jsrJobOperator.restart(jobExecutionId, restartProperties));
}

From source file:org.springframework.cloud.dataflow.server.batch.SimpleJobService.java

/**
 * Launches the named job with the given parameters. Locally registered jobs run through the
 * {@code jobLauncher} (incrementing parameters for fresh launches); unregistered jobs are
 * delegated to the JSR-352 operator when one is configured.
 *
 * @param jobName name of the job to launch
 * @param jobParameters parameters for the launch
 * @return the resulting job execution
 */
@Override
public JobExecution launch(String jobName, JobParameters jobParameters)
        throws NoSuchJobException, JobExecutionAlreadyRunningException, JobRestartException,
        JobInstanceAlreadyCompleteException, JobParametersInvalidException {

    if (!jobLocator.getJobNames().contains(jobName)) {
        // Job is not registered locally: fall back to the JSR-352 operator.
        if (jsrJobOperator == null) {
            throw new NoSuchJobException(
                    String.format("Unable to find job %s to launch", String.valueOf(jobName)));
        }
        return new JobExecution(jsrJobOperator.start(jobName, jobParameters.toProperties()));
    }

    Job job = jobLocator.getJob(jobName);

    // This is a restart when the last execution with these parameters failed (but was not abandoned).
    JobExecution lastExecution = jobRepository.getLastJobExecution(jobName, jobParameters);
    boolean restart = false;
    if (lastExecution != null) {
        BatchStatus status = lastExecution.getStatus();
        restart = status.isUnsuccessful() && status != BatchStatus.ABANDONED;
    }

    // Increment the parameters only for brand-new launches, never for restarts.
    if (job.getJobParametersIncrementer() != null && !restart) {
        jobParameters = job.getJobParametersIncrementer().getNext(jobParameters);
    }

    JobExecution execution = jobLauncher.run(job, jobParameters);
    if (execution.isRunning()) {
        activeExecutions.add(execution);
    }
    return execution;
}