List of usage examples for org.springframework.batch.core.JobExecution.getExitStatus()
public ExitStatus getExitStatus()
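Before the source-file examples, here is a minimal hedged sketch of the common pattern they all follow: run a job and inspect the ExitStatus reported by the resulting JobExecution. The jobLauncher, job, and jobParameters references are assumptions here and would normally be injected from the application context, as in the tests below.

// Minimal sketch (assumed wiring): jobLauncher and job would be injected from the
// application context rather than passed in as parameters.
public void runAndCheck(JobLauncher jobLauncher, Job job, JobParameters jobParameters) throws Exception {
    JobExecution execution = jobLauncher.run(job, jobParameters);
    ExitStatus exitStatus = execution.getExitStatus();
    if (ExitStatus.COMPLETED.getExitCode().equals(exitStatus.getExitCode())) {
        System.out.println("Job completed with exit code " + exitStatus.getExitCode());
    } else {
        // The exit description usually carries the failure message; the JDBC DAO
        // example below truncates it to the configured exit message length.
        System.out.println("Job ended with exit code " + exitStatus.getExitCode()
                + ": " + exitStatus.getExitDescription());
    }
}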
From source file:org.springframework.batch.core.repository.dao.JdbcJobExecutionDao.java
/**
 * Update the given JobExecution using a SQL UPDATE statement. The JobExecution
 * is first checked to ensure all fields are not null and that it has an ID.
 * The database is then queried to ensure that the ID exists, which ensures
 * that it is valid.
 *
 * @see JobExecutionDao#updateJobExecution(JobExecution)
 */
@Override
public void updateJobExecution(JobExecution jobExecution) {
    validateJobExecution(jobExecution);
    Assert.notNull(jobExecution.getId(),
            "JobExecution ID cannot be null. JobExecution must be saved before it can be updated");
    Assert.notNull(jobExecution.getVersion(),
            "JobExecution version cannot be null. JobExecution must be saved before it can be updated");
    synchronized (jobExecution) {
        Integer version = jobExecution.getVersion() + 1;
        String exitDescription = jobExecution.getExitStatus().getExitDescription();
        if (exitDescription != null && exitDescription.length() > exitMessageLength) {
            exitDescription = exitDescription.substring(0, exitMessageLength);
            if (logger.isDebugEnabled()) {
                logger.debug("Truncating long message before update of JobExecution: " + jobExecution);
            }
        }
        Object[] parameters = new Object[] { jobExecution.getStartTime(), jobExecution.getEndTime(),
                jobExecution.getStatus().toString(), jobExecution.getExitStatus().getExitCode(),
                exitDescription, version, jobExecution.getCreateTime(), jobExecution.getLastUpdated(),
                jobExecution.getId(), jobExecution.getVersion() };

        // Check if the given JobExecution's ID already exists; if none is found,
        // it is invalid and an exception should be thrown.
        if (getJdbcTemplate().queryForObject(getQuery(CHECK_JOB_EXECUTION_EXISTS), Integer.class,
                new Object[] { jobExecution.getId() }) != 1) {
            throw new NoSuchObjectException("Invalid JobExecution, ID " + jobExecution.getId() + " not found.");
        }

        int count = getJdbcTemplate().update(getQuery(UPDATE_JOB_EXECUTION), parameters,
                new int[] { Types.TIMESTAMP, Types.TIMESTAMP, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR,
                        Types.INTEGER, Types.TIMESTAMP, Types.TIMESTAMP, Types.BIGINT, Types.INTEGER });

        // Avoid concurrent modifications...
        if (count == 0) {
            int currentVersion = getJdbcTemplate().queryForObject(getQuery(CURRENT_VERSION_JOB_EXECUTION),
                    Integer.class, new Object[] { jobExecution.getId() });
            throw new OptimisticLockingFailureException("Attempt to update job execution id="
                    + jobExecution.getId() + " with wrong version (" + jobExecution.getVersion()
                    + "), where current version is " + currentVersion);
        }
        jobExecution.incrementVersion();
    }
}
From source file:org.springframework.batch.integration.partition.JmsIntegrationTests.java
@Test
public void testLaunchJob() throws Exception {
    int before = jobExplorer.getJobInstances(job.getName(), 0, 100).size();
    assertNotNull(jobLauncher.run(job, new JobParameters()));
    List<JobInstance> jobInstances = jobExplorer.getJobInstances(job.getName(), 0, 100);
    int after = jobInstances.size();
    assertEquals(1, after - before);
    JobExecution jobExecution = jobExplorer.getJobExecutions(jobInstances.get(jobInstances.size() - 1)).get(0);
    assertEquals(jobExecution.getExitStatus().getExitDescription(), BatchStatus.COMPLETED,
            jobExecution.getStatus());
    assertEquals(3, jobExecution.getStepExecutions().size());
    for (StepExecution stepExecution : jobExecution.getStepExecutions()) {
        // BATCH-1703: we are using a map dao so the step executions in the job execution
        // are old and we need to pull them back out of the repository...
        stepExecution = jobExplorer.getStepExecution(jobExecution.getId(), stepExecution.getId());
        logger.debug("" + stepExecution);
        assertEquals(BatchStatus.COMPLETED, stepExecution.getStatus());
    }
}
From source file:org.springframework.batch.integration.x.RemoteFileToHadoopTests.java
@Test
public void testSimple() throws Exception {
    FileSystem fs = FileSystem.get(configuration);
    Path p1 = new Path("/qux/foo/bar.txt");
    fs.delete(p1, true);
    Path p2 = new Path("/qux/foo/baz.txt");
    fs.delete(p2, true);
    assertFalse(fs.exists(p1));
    assertFalse(fs.exists(p2));
    Map<String, JobParameter> params = new HashMap<String, JobParameter>();
    params.put("remoteDirectory", new JobParameter("/foo/"));
    params.put("hdfsDirectory", new JobParameter("/qux"));
    JobParameters parameters = new JobParameters(params);
    JobExecution execution = launcher.run(job, parameters);
    assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    assertTrue(fs.exists(p1));
    assertTrue(fs.exists(p2));
    FSDataInputStream stream = fs.open(p1);
    byte[] out = new byte[9];
    stream.readFully(out);
    stream.close();
    assertEquals("foobarbaz", new String(out));
    stream = fs.open(p2);
    stream.readFully(out);
    stream.close();
    assertEquals("foobarbaz", new String(out));
}
From source file:org.springframework.batch.integration.x.RemoteFileToTmpTests.java
@Test
public void testSimple() throws Exception {
    Map<String, JobParameter> params = new HashMap<String, JobParameter>();
    params.put("remoteDirectory", new JobParameter("/foo/"));
    params.put("hdfsDirectory", new JobParameter("/qux"));
    JobParameters parameters = new JobParameters(params);
    JobExecution execution = launcher.run(job, parameters);
    assertEquals(ExitStatus.COMPLETED, execution.getExitStatus());
    File file = new File(tmpDir, "_foo_bar.txt");
    assertTrue(file.exists());
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    FileCopyUtils.copy(new FileInputStream(file), baos);
    assertEquals("foo", new String(baos.toByteArray()));
    file.delete();
    file = new File(tmpDir, "_foo_baz.txt");
    assertTrue(file.exists());
    baos = new ByteArrayOutputStream();
    FileCopyUtils.copy(new FileInputStream(file), baos);
    assertEquals("foo", new String(baos.toByteArray()));
    file.delete();
}
From source file:org.springframework.cloud.task.batch.handler.TaskJobLauncherCommandLineRunner.java
protected void execute(Job job, JobParameters jobParameters)
        throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException,
        JobParametersInvalidException, JobParametersNotFoundException {
    JobParameters nextParameters = new JobParametersBuilder(jobParameters, this.jobExplorer)
            .getNextJobParameters(job).toJobParameters();
    JobExecution execution = this.jobLauncher.run(job, nextParameters);
    if (this.publisher != null) {
        this.publisher.publishEvent(new JobExecutionEvent(execution));
    }
    if (execution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
        String message = String.format(
                "Job %s failed during execution for jobId %s with jobExecutionId of %s",
                execution.getJobInstance().getJobName(), execution.getJobId(), execution.getId());
        logger.error(message);
        throw new TaskException(message);
    }
}
From source file:simple.spring.batch.JobLauncherDetails.java
@SuppressWarnings("unchecked")
protected void executeInternal(JobExecutionContext context) {
    Map<String, Object> jobDataMap = context.getMergedJobDataMap();
    String jobName = (String) jobDataMap.get(JOB_NAME);
    log.info("Quartz trigger firing with Spring Batch jobName=" + jobName);
    //jobDataMap.put("date", new Date());
    JobParameters jobParameters = getJobParametersFromJobMap(jobDataMap);
    try {
        Job job = jobLocator.getJob(jobName);
        JobExecution jobExecution = jobLauncher.run(job, jobParameters);
        System.out.println("<<< " + jobExecution.getExitStatus().getExitCode());
        // getStatus() returns a BatchStatus enum, so compare against the enum constant
        // rather than a String.
        if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
            System.out.println("COMPLETED!!");
        }
        Collection<StepExecution> collection = jobExecution.getStepExecutions();
        for (StepExecution s : collection) {
            System.out.println(s);
            ExecutionContext executionContext = s.getExecutionContext();
        }
    } catch (JobExecutionException e) {
        log.error("Could not execute job.", e);
    }
}