Example usage for org.springframework.batch.core JobExecution getJobParameters

List of usage examples for org.springframework.batch.core JobExecution getJobParameters

Introduction

On this page you can find example usage of org.springframework.batch.core JobExecution getJobParameters.

Prototype

public JobParameters getJobParameters() 
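
Before the full examples below, here is a minimal sketch of the typical call site: a job execution listener that reads back the parameters the job was launched with. The parameter names inputFile and run.id are hypothetical and only illustrate the typed accessors on JobParameters.

import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;

public class ParameterLoggingListener extends JobExecutionListenerSupport {

    @Override
    public void afterJob(JobExecution jobExecution) {
        // Read back the parameters the job was launched with
        JobParameters params = jobExecution.getJobParameters();

        // Typed accessors; the parameter names below are hypothetical
        String inputFile = params.getString("inputFile");
        Long runId = params.getLong("run.id");

        System.out.println("inputFile=" + inputFile + ", run.id=" + runId);
    }
}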

Usage

From source file:org.jasig.ssp.util.importer.job.report.ReportGenerator.java

@SuppressWarnings("unchecked")
private String buildReport(JobExecution jobExecution) {
    StringBuffer emailMessage = new StringBuffer();
    String EOL = System.getProperty("line.separator");
    SimpleDateFormat dt = new SimpleDateFormat("MM-dd-yyyy hh:mm:ss");
    long diff = jobExecution.getEndTime().getTime() - jobExecution.getCreateTime().getTime();

    emailMessage.append("Start Time:    " + dt.format(jobExecution.getCreateTime()) + EOL);
    emailMessage.append("End Time:      " + dt.format(jobExecution.getEndTime()) + EOL);
    emailMessage.append("Duration:      " + DurationFormatUtils.formatDurationWords(diff, true, true) + " ("
            + DurationFormatUtils.formatDurationHMS(diff) + ")" + EOL);
    emailMessage.append("Job Id:        " + jobExecution.getJobId() + EOL);
    emailMessage.append("Job Paramters: " + jobExecution.getJobParameters() + EOL);
    emailMessage.append("Job Status:    " + jobExecution.getExitStatus().getExitCode() + EOL);

    emailMessage.append(EOL).append(EOL);

    emailMessage.append("Job Details: " + EOL);
    Map<String, ReportEntry> report = (Map<String, ReportEntry>) jobExecution.getExecutionContext()
            .get("report");
    if (report != null) {
        Set<Entry<String, ReportEntry>> entrySet = report.entrySet();
        for (Entry<String, ReportEntry> entry : entrySet) {
            emailMessage.append(entry.getValue().toString() + EOL);
        }
        if (entrySet.size() > 0)
            emailReport = true;
    } else {
        emailMessage.append("NO FILES PROCESSED." + EOL);
    }

    emailMessage.append(EOL).append(EOL);

    emailMessage.append("Errors: " + EOL);
    List<ErrorEntry> errors = (List<ErrorEntry>) jobExecution.getExecutionContext().get("errors");
    List<Throwable> failureExceptions = jobExecution.getAllFailureExceptions();
    if (errors != null) {
        for (ErrorEntry errorEntry : errors) {
            emailMessage.append(errorEntry.toString() + EOL);
            emailMessage.append(EOL);
        }
    } else if (failureExceptions == null || failureExceptions.size() == 0) {
        emailMessage.append("No Errors Found." + EOL);
    }

    if (failureExceptions != null) {
        for (Throwable failureException : failureExceptions) {
            if (ExceptionUtils.indexOfThrowable(failureException, PartialUploadGuardException.class) >= 0
                    || ExceptionUtils.indexOfThrowable(failureException, BeanCreationException.class) >= 0) {
                emailReport = true;
                logger.info("emailReport:" + emailReport);
            }
            logger.info("failureException:" + failureException.getClass().getName());
            emailMessage.append(failureException.getMessage() + EOL);
        }
    }

    String validations = (String) jobExecution.getExecutionContext().get("databaseValidations");
    if (validations != null) {
        emailMessage.append("Database Validations:" + EOL + validations);
    }

    logger.info(emailMessage.toString());
    return emailMessage.toString();
}

From source file:de.codecentric.batch.listener.ProtocolListener.java

public void afterJob(JobExecution jobExecution) {
    StringBuilder protocol = new StringBuilder();
    protocol.append("\n");
    protocol.append(createFilledLine('*'));
    protocol.append(createFilledLine('-'));
    protocol.append("Protocol for " + jobExecution.getJobInstance().getJobName() + " \n");
    protocol.append("  Started:      " + jobExecution.getStartTime() + "\n");
    protocol.append("  Finished:     " + jobExecution.getEndTime() + "\n");
    protocol.append("  Exit-Code:    " + jobExecution.getExitStatus().getExitCode() + "\n");
    protocol.append("  Exit-Descr:   " + jobExecution.getExitStatus().getExitDescription() + "\n");
    protocol.append("  Status:       " + jobExecution.getStatus() + "\n");
    protocol.append("  Content of Job-ExecutionContext:\n");
    for (Entry<String, Object> entry : jobExecution.getExecutionContext().entrySet()) {
        protocol.append("  " + entry.getKey() + "=" + entry.getValue() + "\n");
    }
    protocol.append("  Job-Parameter: \n");
    JobParameters jp = jobExecution.getJobParameters();
    for (Iterator<Entry<String, JobParameter>> iter = jp.getParameters().entrySet().iterator(); iter
            .hasNext();) {
        Entry<String, JobParameter> entry = iter.next();
        protocol.append("  " + entry.getKey() + "=" + entry.getValue() + "\n");
    }
    protocol.append(createFilledLine('-'));
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        protocol.append("Step " + stepExecution.getStepName() + " \n");
        protocol.append("  ReadCount:    " + stepExecution.getReadCount() + "\n");
        protocol.append("  WriteCount:   " + stepExecution.getWriteCount() + "\n");
        protocol.append("  Commits:      " + stepExecution.getCommitCount() + "\n");
        protocol.append("  SkipCount:    " + stepExecution.getSkipCount() + "\n");
        protocol.append("  Rollbacks:    " + stepExecution.getRollbackCount() + "\n");
        protocol.append("  Filter:       " + stepExecution.getFilterCount() + "\n");
        protocol.append("  Content of Step-ExecutionContext:\n");
        for (Entry<String, Object> entry : stepExecution.getExecutionContext().entrySet()) {
            protocol.append("  " + entry.getKey() + "=" + entry.getValue() + "\n");
        }
        protocol.append(createFilledLine('-'));
    }
    protocol.append(createFilledLine('*'));
    LOGGER.info(protocol.toString());
}

From source file:org.geoserver.backuprestore.Backup.java

@Override
public void beforeJob(JobExecution jobExecution) {
    // Acquire GeoServer Configuration Lock in READ mode
    List<BackupRestoreCallback> callbacks = GeoServerExtensions.extensions(BackupRestoreCallback.class);
    for (BackupRestoreCallback callback : callbacks) {
        callback.onBeginRequest(jobExecution.getJobParameters().getString(PARAM_JOB_NAME));
    }
}

From source file:org.geoserver.backuprestore.tasklet.CatalogBackupRestoreTasklet.java

/**
 * Perform Backup
 * 
 * @param jobExecution
 * @param geoserver
 * @param dd
 * @param resourceStore
 * @throws Exception
 * @throws IOException
 */
private void doBackup(JobExecution jobExecution, final GeoServer geoserver, final GeoServerDataDirectory dd,
        final ResourceStore resourceStore) throws Exception {
    try {
        final String outputFolderURL = jobExecution.getJobParameters().getString(Backup.PARAM_OUTPUT_FILE_PATH);
        Resource targetBackupFolder = Resources.fromURL(outputFolderURL);

        // Store GeoServer Global Info
        doWrite(geoserver.getGlobal(), targetBackupFolder, "global.xml");

        // Store GeoServer Global Settings
        doWrite(geoserver.getSettings(), targetBackupFolder, "settings.xml");

        // Store GeoServer Global Logging Settings
        doWrite(geoserver.getLogging(), targetBackupFolder, "logging.xml");

        // Store GeoServer Global Services
        for (ServiceInfo service : geoserver.getServices()) {
            // Local Services will be saved later on ...
            if (service.getWorkspace() == null) {
                doWrite(service, targetBackupFolder, "services");
            }
        }

        // Save Workspace specific settings
        Resource targetWorkspacesFolder = BackupUtils.dir(targetBackupFolder, "workspaces");

        // Store Default Workspace
        if (filteredWorkspace(getCatalog().getDefaultWorkspace())) {
            doWrite(getCatalog().getDefaultNamespace(), targetWorkspacesFolder, "defaultnamespace.xml");
            doWrite(getCatalog().getDefaultWorkspace(), targetWorkspacesFolder, "default.xml");
        }

        // Store Workspace Specific Settings and Services
        for (WorkspaceInfo ws : getCatalog().getWorkspaces()) {
            if (filteredWorkspace(ws)) {
                if (geoserver.getSettings(ws) != null) {
                    doWrite(geoserver.getSettings(ws), BackupUtils.dir(targetWorkspacesFolder, ws.getName()),
                            "settings.xml");
                }

                if (geoserver.getServices(ws) != null) {
                    for (ServiceInfo service : geoserver.getServices(ws)) {
                        doWrite(service, targetWorkspacesFolder, ws.getName());
                    }
                }

                // Backup other configuration bits, like images, palettes, user projections and so on...
                GeoServerDataDirectory wsDd = new GeoServerDataDirectory(
                        dd.get(Paths.path("workspaces", ws.getName())).dir());
                backupRestoreAdditionalResources(wsDd.getResourceStore(),
                        targetWorkspacesFolder.get(ws.getName()));

                // Backup Style SLDs
                for (StyleInfo sty : getCatalog().getStylesByWorkspace(ws)) {
                    Resource styResource = wsDd.get(Paths.path("styles", sty.getFilename()));
                    if (Resources.exists(styResource)) {
                        Resources.copy(styResource.file(),
                                BackupUtils.dir(targetWorkspacesFolder.get(ws.getName()), "styles"));
                    }
                }
            }
        }

        // Backup GeoServer Plugins
        final GeoServerResourceLoader targetGeoServerResourceLoader = new GeoServerResourceLoader(
                targetBackupFolder.dir());
        for (GeoServerPluginConfigurator pluginConfig : GeoServerExtensions
                .extensions(GeoServerPluginConfigurator.class)) {
            // On restore invoke 'pluginConfig.loadConfiguration(resourceLoader);' after having replaced the config files.
            pluginConfig.saveConfiguration(targetGeoServerResourceLoader);
        }

        for (GeoServerPropertyConfigurer props : GeoServerExtensions
                .extensions(GeoServerPropertyConfigurer.class)) {
            // On restore invoke 'props.reload();' after having replaced the properties files.
            Resource configFile = props.getConfigFile();

            if (configFile != null && Resources.exists(configFile)) {
                Resource targetDir = Files.asResource(targetGeoServerResourceLoader.findOrCreateDirectory(
                        Paths.convert(dd.getResourceLoader().getBaseDirectory(), configFile.parent().dir())));

                Resources.copy(configFile.file(), targetDir);
            }
        }

        // Backup other configuration bits, like images, palettes, user projections and so on...
        backupRestoreAdditionalResources(resourceStore, targetBackupFolder);

        // Backup GWC Configuration bits
        try {
            if (GeoServerExtensions.bean("gwcGeoServervConfigPersister") != null) {
                backupGWCSettings(targetBackupFolder);
            }
        } catch (NoSuchBeanDefinitionException e) {
            LOGGER.log(Level.WARNING, "Skipped GWC GeoServer Config Persister: ", e);
        }
    } catch (Exception e) {
        logValidationExceptions((ValidationResult) null, new UnexpectedJobExecutionException(
                "Exception occurred while storing GeoServer globals and services settings!", e));
    }
}

From source file:org.geoserver.backuprestore.tasklet.CatalogBackupRestoreTasklet.java

/**
 * @param jobExecution
 * @param geoserver
 * @param dd
 * @throws Exception
 * @throws IOException
 * @throws UnexpectedJobExecutionException
 */
@SuppressWarnings("unused")
private void doRestore(JobExecution jobExecution, final GeoServer geoserver, final GeoServerDataDirectory dd)
        throws Exception {
    final String inputFolderURL = jobExecution.getJobParameters().getString(Backup.PARAM_INPUT_FILE_PATH);
    Resource sourceRestoreFolder = Resources.fromURL(inputFolderURL);
    Resource sourceWorkspacesFolder = null;

    // Try first to load all the settings available into the source restore folder
    GeoServerInfo newGeoServerInfo = null;
    SettingsInfo newSettings = null;
    LoggingInfo newLoggingInfo = null;
    try {
        newGeoServerInfo = (GeoServerInfo) doRead(sourceRestoreFolder, "global.xml");
        newSettings = (SettingsInfo) doRead(sourceRestoreFolder, "settings.xml");
        newLoggingInfo = (LoggingInfo) doRead(sourceRestoreFolder, "logging.xml");
    } catch (Exception e) {
        logValidationExceptions((ValidationResult) null, new UnexpectedJobExecutionException(
                "Exception occurred while storing GeoServer globals and services settings!", e));
    }

    // Save Workspace specific settings
    try {
        sourceWorkspacesFolder = BackupUtils.dir(sourceRestoreFolder, "workspaces");

        // Set Default Namespace and Workspace
        if (Resources.exists(sourceWorkspacesFolder.get("default.xml"))) {
            NamespaceInfo newDefaultNamespace = (NamespaceInfo) doRead(sourceWorkspacesFolder,
                    "defaultnamespace.xml");
            WorkspaceInfo newDefaultWorkspace = (WorkspaceInfo) doRead(sourceWorkspacesFolder, "default.xml");
            getCatalog().setDefaultNamespace(newDefaultNamespace);
            getCatalog().setDefaultWorkspace(newDefaultWorkspace);
        }
    } catch (Exception e) {
        logValidationExceptions((ValidationResult) null, new UnexpectedJobExecutionException(
                "Exception occurred while storing GeoServer globals and services settings!", e));
    }

    // RESTORE
    // TODO: Save old settings
    /*
     * GeoServerInfo oldGeoServerInfo = geoserver.getGlobal(); SettingsInfo oldSettings = geoserver.getSettings(); LoggingInfo oldLoggingInfo =
     * geoserver.getLogging(); WorkspaceInfo oldDefaultWorkspace = geoserver.getCatalog().getDefaultWorkspace(); NamespaceInfo oldDefaultNamespace
     * = geoserver.getCatalog().getDefaultNamespace();
     */

    // Do this *ONLY* when DRY-RUN-MODE == OFF
    if (!isDryRun()) {
        try {
            hardRestore(geoserver, dd, sourceRestoreFolder, sourceWorkspacesFolder, newGeoServerInfo,
                    newLoggingInfo);
        } catch (Exception e) {
            logValidationExceptions((ValidationResult) null, new UnexpectedJobExecutionException(
                    "Exception occurred while storing GeoServer globals and services settings!", e));
        } finally {
            /*
             * TODO: - Handle Revert ??
             */
        }
    } else {
        // DRY-RUN-MODE ON: Try to check backup files consistency as much as possible
        try {
            // Temporary GeoServer Data Dir just for testing
            GeoServerDataDirectory td = new GeoServerDataDirectory(BackupUtils.tmpDir().dir());
            softRestore(geoserver, td, sourceRestoreFolder, sourceWorkspacesFolder, newGeoServerInfo,
                    newLoggingInfo);
        } catch (Exception e) {
            logValidationExceptions((ValidationResult) null, new UnexpectedJobExecutionException(
                    "Exception occurred while storing GeoServer globals and services settings!", e));
        } finally {
        }
    }
}

From source file:org.springframework.batch.core.job.AbstractJob.java

/**
 * Run the specified job, handling all listener and repository calls, and
 * delegating the actual processing to {@link #doExecute(JobExecution)}.
 *
 * @see Job#execute(JobExecution)
 * @throws StartLimitExceededException
 *             if start limit of one of the steps was exceeded
 */
@Override
public final void execute(JobExecution execution) {

    if (logger.isDebugEnabled()) {
        logger.debug("Job execution starting: " + execution);
    }

    JobSynchronizationManager.register(execution);

    try {

        jobParametersValidator.validate(execution.getJobParameters());

        if (execution.getStatus() != BatchStatus.STOPPING) {

            execution.setStartTime(new Date());
            updateStatus(execution, BatchStatus.STARTED);

            listener.beforeJob(execution);

            try {
                doExecute(execution);
                if (logger.isDebugEnabled()) {
                    logger.debug("Job execution complete: " + execution);
                }
            } catch (RepeatException e) {
                throw e.getCause();
            }
        } else {

            // The job was already stopped before we even got this far. Deal
            // with it in the same way as any other interruption.
            execution.setStatus(BatchStatus.STOPPED);
            execution.setExitStatus(ExitStatus.COMPLETED);
            if (logger.isDebugEnabled()) {
                logger.debug("Job execution was stopped: " + execution);
            }

        }

    } catch (JobInterruptedException e) {
        logger.info("Encountered interruption executing job: " + e.getMessage());
        if (logger.isDebugEnabled()) {
            logger.debug("Full exception", e);
        }
        execution.setExitStatus(getDefaultExitStatusForFailure(e, execution));
        execution.setStatus(BatchStatus.max(BatchStatus.STOPPED, e.getStatus()));
        execution.addFailureException(e);
    } catch (Throwable t) {
        logger.error("Encountered fatal error executing job", t);
        execution.setExitStatus(getDefaultExitStatusForFailure(t, execution));
        execution.setStatus(BatchStatus.FAILED);
        execution.addFailureException(t);
    } finally {
        try {
            if (execution.getStatus().isLessThanOrEqualTo(BatchStatus.STOPPED)
                    && execution.getStepExecutions().isEmpty()) {
                ExitStatus exitStatus = execution.getExitStatus();
                ExitStatus newExitStatus = ExitStatus.NOOP
                        .addExitDescription("All steps already completed or no steps configured for this job.");
                execution.setExitStatus(exitStatus.and(newExitStatus));
            }

            execution.setEndTime(new Date());

            try {
                listener.afterJob(execution);
            } catch (Exception e) {
                logger.error("Exception encountered in afterStep callback", e);
            }

            jobRepository.update(execution);
        } finally {
            JobSynchronizationManager.release();
        }

    }

}

From source file:org.springframework.batch.core.launch.support.CommandLineJobRunner.java

@SuppressWarnings("resource")
int start(String jobPath, String jobIdentifier, String[] parameters, Set<String> opts) {

    ConfigurableApplicationContext context = null;

    try {
        try {
            context = new AnnotationConfigApplicationContext(Class.forName(jobPath));
        } catch (ClassNotFoundException cnfe) {
            context = new ClassPathXmlApplicationContext(jobPath);
        }

        context.getAutowireCapableBeanFactory().autowireBeanProperties(this,
                AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE, false);

        Assert.state(launcher != null, "A JobLauncher must be provided.  Please add one to the configuration.");
        if (opts.contains("-restart") || opts.contains("-next")) {
            Assert.state(jobExplorer != null,
                    "A JobExplorer must be provided for a restart or start next operation.  Please add one to the configuration.");
        }

        String jobName = jobIdentifier;

        JobParameters jobParameters = jobParametersConverter
                .getJobParameters(StringUtils.splitArrayElementsIntoProperties(parameters, "="));
        Assert.isTrue(parameters == null || parameters.length == 0 || !jobParameters.isEmpty(),
                "Invalid JobParameters " + Arrays.asList(parameters)
                        + ". If parameters are provided they should be in the form name=value (no whitespace).");

        if (opts.contains("-stop")) {
            List<JobExecution> jobExecutions = getRunningJobExecutions(jobIdentifier);
            if (jobExecutions == null) {
                throw new JobExecutionNotRunningException(
                        "No running execution found for job=" + jobIdentifier);
            }
            for (JobExecution jobExecution : jobExecutions) {
                jobExecution.setStatus(BatchStatus.STOPPING);
                jobRepository.update(jobExecution);
            }
            return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
        }

        if (opts.contains("-abandon")) {
            List<JobExecution> jobExecutions = getStoppedJobExecutions(jobIdentifier);
            if (jobExecutions == null) {
                throw new JobExecutionNotStoppedException(
                        "No stopped execution found for job=" + jobIdentifier);
            }
            for (JobExecution jobExecution : jobExecutions) {
                jobExecution.setStatus(BatchStatus.ABANDONED);
                jobRepository.update(jobExecution);
            }
            return exitCodeMapper.intValue(ExitStatus.COMPLETED.getExitCode());
        }

        if (opts.contains("-restart")) {
            JobExecution jobExecution = getLastFailedJobExecution(jobIdentifier);
            if (jobExecution == null) {
                throw new JobExecutionNotFailedException(
                        "No failed or stopped execution found for job=" + jobIdentifier);
            }
            jobParameters = jobExecution.getJobParameters();
            jobName = jobExecution.getJobInstance().getJobName();
        }

        Job job = null;
        if (jobLocator != null) {
            try {
                job = jobLocator.getJob(jobName);
            } catch (NoSuchJobException e) {
            }
        }
        if (job == null) {
            job = (Job) context.getBean(jobName);
        }

        if (opts.contains("-next")) {
            JobParameters nextParameters = getNextJobParameters(job);
            Map<String, JobParameter> map = new HashMap<String, JobParameter>(nextParameters.getParameters());
            map.putAll(jobParameters.getParameters());
            jobParameters = new JobParameters(map);
        }

        JobExecution jobExecution = launcher.run(job, jobParameters);
        return exitCodeMapper.intValue(jobExecution.getExitStatus().getExitCode());

    } catch (Throwable e) {
        String message = "Job Terminated in error: " + e.getMessage();
        logger.error(message, e);
        CommandLineJobRunner.message = message;
        return exitCodeMapper.intValue(ExitStatus.FAILED.getExitCode());
    } finally {
        if (context != null) {
            context.close();
        }
    }
}

From source file:org.springframework.batch.core.launch.support.SimpleJobOperator.java

@Override
public String getParameters(long executionId) throws NoSuchJobExecutionException {
    JobExecution jobExecution = findExecutionById(executionId);

    return PropertiesConverter
            .propertiesToString(jobParametersConverter.getProperties(jobExecution.getJobParameters()));
}

From source file:org.springframework.batch.core.launch.support.SimpleJobOperator.java

@Override
public Long restart(long executionId) throws JobInstanceAlreadyCompleteException, NoSuchJobExecutionException,
        NoSuchJobException, JobRestartException, JobParametersInvalidException {

    logger.info("Checking status of job execution with id=" + executionId);

    JobExecution jobExecution = findExecutionById(executionId);

    String jobName = jobExecution.getJobInstance().getJobName();
    Job job = jobRegistry.getJob(jobName);
    JobParameters parameters = jobExecution.getJobParameters();

    logger.info(String.format("Attempting to resume job with name=%s and parameters=%s", jobName, parameters));
    try {
        return jobLauncher.run(job, parameters).getId();
    } catch (JobExecutionAlreadyRunningException e) {
        throw new UnexpectedJobExecutionException(
                String.format(ILLEGAL_STATE_MSG, "job execution already running", jobName, parameters), e);
    }

}

From source file:org.springframework.batch.core.repository.dao.JdbcJobExecutionDao.java

/**
 *
 * SQL implementation using Sequences via the Spring incrementer
 * abstraction. Once a new id has been obtained, the JobExecution is saved
 * via a SQL INSERT statement.
 *
 * @see JobExecutionDao#saveJobExecution(JobExecution)
 * @throws IllegalArgumentException if jobExecution is null, or if any of its
 * fields to be persisted are null.
 */
@Override
public void saveJobExecution(JobExecution jobExecution) {

    validateJobExecution(jobExecution);

    jobExecution.incrementVersion();

    jobExecution.setId(jobExecutionIncrementer.nextLongValue());
    Object[] parameters = new Object[] { jobExecution.getId(), jobExecution.getJobId(),
            jobExecution.getStartTime(), jobExecution.getEndTime(), jobExecution.getStatus().toString(),
            jobExecution.getExitStatus().getExitCode(), jobExecution.getExitStatus().getExitDescription(),
            jobExecution.getVersion(), jobExecution.getCreateTime(), jobExecution.getLastUpdated(),
            jobExecution.getJobConfigurationName() };
    getJdbcTemplate().update(getQuery(SAVE_JOB_EXECUTION), parameters,
            new int[] { Types.BIGINT, Types.BIGINT, Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR,
                    Types.VARCHAR, Types.VARCHAR, Types.INTEGER, Types.TIMESTAMP, Types.TIMESTAMP,
                    Types.VARCHAR });

    insertJobParameters(jobExecution.getId(), jobExecution.getJobParameters());
}