List of usage examples for org.springframework.batch.item ExecutionContext put
public void put(String key, @Nullable Object value)
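Before the per-project examples, a minimal self-contained sketch (key names are illustrative, not from any source below) of how put() stores an arbitrary object alongside the typed convenience setters:

ExecutionContext executionContext = new ExecutionContext();
executionContext.put("report.state", new int[] { 1, 2, 3 }); // any object; should be serializable if the context is persisted
executionContext.putInt("report.count", 42);                 // typed convenience setter backed by the same map
int[] state = (int[]) executionContext.get("report.state");  // the generic get returns Object
int count = executionContext.getInt("report.count");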
From source file:org.beanio.spring.SpringTest.java
@Test
@SuppressWarnings("unchecked")
public void testRestartItemReader() throws Exception {
    ItemStreamReader<Map<String, Object>> reader = (ItemStreamReader<Map<String, Object>>) context
            .getBean("itemReader-restart");
    assertNotNull(reader);
    try {
        ExecutionContext executionContext = new ExecutionContext();
        executionContext.put("BeanIOFlatFileItemReader.read.count", new Integer(2));
        reader.open(executionContext);

        Map<String, Object> map = reader.read();
        assertNotNull(map);
        assertEquals(new Integer(3), map.get("id"));
        assertEquals("Joe", map.get("name"));
    } finally {
        reader.close();
    }
}
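The key used here follows Spring Batch's <readerName>.read.count convention from AbstractItemCountingItemStreamItemReader: on open(), the reader finds the saved count and skips that many items, so with a stored count of 2 the first read() resumes at the third record, which is exactly what the assertions above verify.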
From source file:de.langmi.spring.batch.examples.readers.file.multiresourcepartitioner.CustomMultiResourcePartitioner.java
/**
 * Assign the filename of each of the injected resources to an
 * {@link ExecutionContext}.
 *
 * @see Partitioner#partition(int)
 */
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
    Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>(gridSize);
    int i = 0;
    for (Resource resource : resources) {
        ExecutionContext context = new ExecutionContext();
        Assert.state(resource.exists(), "Resource does not exist: " + resource);
        try {
            context.putString(inputKeyName, resource.getURL().toExternalForm());
            context.put(outputKeyName, createOutputFilename(i, resource));
        } catch (IOException e) {
            throw new IllegalArgumentException("File could not be located for: " + resource, e);
        }
        map.put(PARTITION_KEY + i, context);
        i++;
    }
    return map;
}
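The entries this partitioner puts into each partition's ExecutionContext are typically consumed through step-scoped late binding. A minimal sketch, assuming the partitioner was configured with inputKeyName = "inputFilePath" and that the bean method lives in a batch @Configuration class:

import java.net.MalformedURLException;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.PassThroughLineMapper;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.UrlResource;

// Hypothetical consumer of the partition context above: a step-scoped reader
// that late-binds the URL stored under the assumed key "inputFilePath".
@Bean
@StepScope
public FlatFileItemReader<String> partitionedReader(
        @Value("#{stepExecutionContext['inputFilePath']}") String url) throws MalformedURLException {
    FlatFileItemReader<String> reader = new FlatFileItemReader<>();
    reader.setResource(new UrlResource(url)); // the URL the partitioner put into the context
    reader.setLineMapper(new PassThroughLineMapper());
    return reader;
}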
From source file:trionsoft.test.StepRunner.java
/**
 * Launch just the specified step as its own job.
 *
 * @param step The step to launch
 * @param jobParameters The JobParameters to use during the launch
 * @param jobExecutionContext An ExecutionContext whose values will be
 * loaded into the Job ExecutionContext prior to launching the step.
 * @return JobExecution
 */
public JobExecution launchStep(Step step, JobParameters jobParameters,
        final ExecutionContext jobExecutionContext) {
    // Create a fake job
    SimpleJob job = new SimpleJob();
    job.setName("TestJob");
    job.setJobRepository(this.jobRepository);

    List<Step> stepsToExecute = new ArrayList<Step>();
    stepsToExecute.add(step);
    job.setSteps(stepsToExecute);

    // Copy the given Job ExecutionContext into the job's context using a listener
    if (jobExecutionContext != null && !jobExecutionContext.isEmpty()) {
        job.setJobExecutionListeners(new JobExecutionListener[] { new JobExecutionListenerSupport() {
            public void beforeJob(JobExecution jobExecution) {
                ExecutionContext jobContext = jobExecution.getExecutionContext();
                for (Map.Entry<String, Object> entry : jobExecutionContext.entrySet()) {
                    jobContext.put(entry.getKey(), entry.getValue());
                }
            }
        } });
    }

    // Launch the job
    return this.launchJob(job, jobParameters);
}
From source file:org.trpr.platform.batch.impl.spring.admin.repository.MapExecutionContextDao.java
private ExecutionContext copy(ExecutionContext original) {
    if (original == null)
        return null;

    Map<String, Object> m = new HashMap<String, Object>();
    for (java.util.Map.Entry<String, Object> me : original.entrySet()) {
        m.put(me.getKey(), me.getValue());
    }
    ExecutionContext copy = new ExecutionContext();
    Map<String, Object> map = serializer.deserialize(serializer.serialize(m));
    for (Map.Entry<String, Object> entry : map.entrySet()) {
        copy.put(entry.getKey(), entry.getValue());
    }
    return copy;
}
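Round-tripping the entries through serialize/deserialize is what makes this a deep copy: the returned ExecutionContext holds fresh object graphs, so later mutations of the live context cannot leak into the snapshot stored by the in-memory DAO, and vice versa.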
From source file:org.springframework.cloud.dataflow.server.support.StepExecutionJacksonMixInTests.java
private StepExecution getStepExecution() {
    JobExecution jobExecution = new JobExecution(1L, null, "hi");
    final StepExecution stepExecution = new StepExecution("step1", jobExecution);
    jobExecution.createStepExecution("step1");

    final ExecutionContext executionContext = stepExecution.getExecutionContext();
    executionContext.putInt("counter", 1234);
    executionContext.putDouble("myDouble", 1.123456d);
    executionContext.putLong("Josh", 4444444444L);
    executionContext.putString("awesomeString", "Yep");
    executionContext.put("hello", "world");
    executionContext.put("counter2", 9999);
    return stepExecution;
}
From source file:fr.acxio.tools.agia.alfresco.AlfrescoNodeReader.java
@Override
public void update(ExecutionContext sExecutionContext) throws ItemStreamException {
    sExecutionContext.putString(CONTEXT_KEY_CURRENTPATH, currentDirPath);
    sExecutionContext.put(CONTEXT_KEY_CURRENTINDEXES, currentIndexes.toArray(new Integer[] {}));
}
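For context, a minimal sketch (assumed, not from the source) of the open() counterpart that would restore this state under the ItemStream contract, using the same fields the update() above saves:

// Assumes currentDirPath is a String field and currentIndexes a List<Integer> field.
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    if (executionContext.containsKey(CONTEXT_KEY_CURRENTPATH)) {
        currentDirPath = executionContext.getString(CONTEXT_KEY_CURRENTPATH);
        currentIndexes = new ArrayList<Integer>(
                Arrays.asList((Integer[]) executionContext.get(CONTEXT_KEY_CURRENTINDEXES)));
    }
}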
From source file:org.cbioportal.annotation.pipeline.MutationRecordReader.java
private void processComments(ExecutionContext ec) {
    List<String> comments = new ArrayList<>();
    String sequencedSamples = "";
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(filename));
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.startsWith("#")) {
                comments.add(line);
            } else {
                // no more comments, go on processing
                break;
            }
        }
        reader.close();
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
    // Add comments to the config for the writer to access later
    ec.put("commentLines", comments);
}
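Within the same step, the writer's open() receives the same ExecutionContext, so the stored comments can be read back. A hypothetical writer-side counterpart (writeHeaderLine is an assumed helper, not from the source):

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    @SuppressWarnings("unchecked")
    List<String> commentLines = (List<String>) executionContext.get("commentLines");
    if (commentLines != null) {
        for (String comment : commentLines) {
            writeHeaderLine(comment); // hypothetical helper that echoes the comment into the output
        }
    }
}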
From source file:org.cbioportal.annotation.pipeline.MutationRecordReader.java
@Override
public void open(ExecutionContext ec) throws ItemStreamException {
    processComments(ec);

    FlatFileItemReader<MutationRecord> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource(filename));

    DefaultLineMapper<MutationRecord> mapper = new DefaultLineMapper<>();
    final DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setDelimiter("\t");
    mapper.setLineTokenizer(tokenizer);
    mapper.setFieldSetMapper(new MutationFieldSetMapper());
    reader.setLineMapper(mapper);
    reader.setLinesToSkip(1);
    reader.setSkippedLinesCallback(new LineCallbackHandler() {
        @Override
        public void handleLine(String line) {
            tokenizer.setNames(line.split("\t"));
        }
    });
    reader.open(ec);

    LOG.info("Loading records from: " + filename);
    MutationRecord mutationRecord;
    try {
        while ((mutationRecord = reader.read()) != null) {
            mutationRecords.add(mutationRecord);
        }
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
    reader.close();

    int variantsToAnnotateCount = mutationRecords.size();
    int annotatedVariantsCount = 0;
    LOG.info(String.valueOf(variantsToAnnotateCount) + " records to annotate");
    for (MutationRecord record : mutationRecords) {
        annotatedVariantsCount++;
        if (annotatedVariantsCount % 2000 == 0) {
            LOG.info("\tOn record " + String.valueOf(annotatedVariantsCount) + " out of "
                    + String.valueOf(variantsToAnnotateCount) + ", annotation "
                    + String.valueOf((int) (((annotatedVariantsCount * 1.0) / variantsToAnnotateCount) * 100))
                    + "% complete");
        }
        // save variant details for logging
        String variantDetails = "(sampleId,chr,start,end,ref,alt,url)= (" + record.getTUMOR_SAMPLE_BARCODE() + ","
                + record.getCHROMOSOME() + "," + record.getSTART_POSITION() + "," + record.getEND_POSITION() + ","
                + record.getREFERENCE_ALLELE() + "," + record.getTUMOR_SEQ_ALLELE2() + ","
                + annotator.getUrlForRecord(record, isoformOverride) + ")";
        // init annotated record w/o genome nexus in case server error occurs
        // if no error then annotated record will get overwritten anyway with genome nexus response
        String serverErrorMessage = "";
        AnnotatedRecord annotatedRecord = new AnnotatedRecord(record);
        try {
            annotatedRecord = annotator.annotateRecord(record, replace, isoformOverride, true);
        } catch (HttpServerErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to internal server error";
        } catch (HttpClientErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to client error";
        } catch (HttpMessageNotReadableException ex) {
            serverErrorMessage = "Failed to annotate variant due to message not readable error";
        } catch (GenomeNexusAnnotationFailureException ex) {
            serverErrorMessage = "Failed to annotate variant due to Genome Nexus : " + ex.getMessage();
        }
        annotatedRecords.add(annotatedRecord);
        header.addAll(annotatedRecord.getHeaderWithAdditionalFields());

        // log server failure message if applicable
        if (!serverErrorMessage.isEmpty()) {
            LOG.warn(serverErrorMessage);
            failedAnnotations++;
            failedServerAnnotations++;
            if (errorReportLocation != null)
                updateErrorMessages(record, record.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), serverErrorMessage);
            continue;
        }
        String annotationErrorMessage = "";
        if (MafUtil.variantContainsAmbiguousTumorSeqAllele(record.getREFERENCE_ALLELE(),
                record.getTUMOR_SEQ_ALLELE1(), record.getTUMOR_SEQ_ALLELE2())) {
            snpAndIndelVariants++;
            annotationErrorMessage = "Record contains ambiguous SNP and INDEL allele change - SNP allele will be used";
        }
        if (annotatedRecord.getHGVSC().isEmpty() && annotatedRecord.getHGVSP().isEmpty()) {
            if (annotator.isHgvspNullClassifications(annotatedRecord.getVARIANT_CLASSIFICATION())) {
                failedNullHgvspAnnotations++;
                annotationErrorMessage = "Ignoring record with HGVSp null classification '"
                        + annotatedRecord.getVARIANT_CLASSIFICATION() + "'";
            } else {
                annotationErrorMessage = "Failed to annotate variant";
            }
            failedAnnotations++;
        }
        if (!annotationErrorMessage.isEmpty()) {
            if (verbose)
                LOG.info(annotationErrorMessage + ": " + variantDetails);
            if (errorReportLocation != null)
                updateErrorMessages(record, annotatedRecord.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), annotationErrorMessage);
        }
    }
    // print summary statistics and save error messages to file if applicable
    printSummaryStatistics(failedAnnotations, failedNullHgvspAnnotations, snpAndIndelVariants,
            failedServerAnnotations);
    if (errorReportLocation != null) {
        saveErrorMessagesToFile(errorMessages);
    }
    ec.put("mutation_header", new ArrayList(header));
}
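As with "commentLines" in the previous example, the final put exposes the accumulated header under "mutation_header" through the step's ExecutionContext, so the downstream writer can emit the correct column header for the annotated output.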