List of usage examples for org.springframework.batch.core.JobExecution.getExitStatus()
public ExitStatus getExitStatus()
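The examples below come from real projects. As a quick orientation, here is a minimal, hypothetical sketch of the pattern they all share: launch a job, then inspect both the ExitStatus (fine-grained String exit code) and the BatchStatus (coarse lifecycle enum) of the resulting JobExecution. The jobLauncher and job fields are placeholder names for Spring-injected beans and are not taken from any of the sources listed.

public ExitStatus launchAndCheckExitStatus() throws Exception {
    // "jobLauncher" and "job" are placeholder names for injected Spring Batch beans.
    JobParameters parameters = new JobParametersBuilder().addDate("run.date", new Date()).toJobParameters();
    JobExecution jobExecution = jobLauncher.run(job, parameters);

    // ExitStatus wraps a String exit code; BatchStatus is the coarser lifecycle enum.
    String exitCode = jobExecution.getExitStatus().getExitCode();
    if (ExitStatus.COMPLETED.getExitCode().equals(exitCode) && jobExecution.getStatus() == BatchStatus.COMPLETED) {
        System.out.println("Job finished with exit code " + exitCode);
    }
    return jobExecution.getExitStatus();
}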
From source file:admin.jmx.SimpleJobExecutionMetrics.java
public String getLatestExitCode() {
    JobExecution jobExecution = getLatestJobExecution(jobName);
    return jobExecution == null ? "NONE" : jobExecution.getExitStatus().getExitCode();
}
From source file:uk.ac.ebi.eva.pipeline.jobs.steps.VepAnnotationGeneratorStepTest.java
@Test
public void shouldGenerateVepAnnotations() throws Exception {
    makeGzipFile("20\t60343\t60343\tG/A\t+", jobOptions.getVepInput());

    File vepOutputFile = JobTestUtils.createTempFile();
    jobOptions.setVepOutput(vepOutputFile.getAbsolutePath());
    vepOutputFile.delete();
    TestCase.assertFalse(vepOutputFile.exists()); // ensure the annot file doesn't exist from previous executions

    // When the execute method in variantsAnnotCreate is executed
    JobExecution jobExecution = jobLauncherTestUtils.launchStep(AnnotationJob.GENERATE_VEP_ANNOTATION);

    // Then the variantsAnnotCreate step should complete correctly
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // And the VEP output should exist and annotations should be in the file
    TestCase.assertTrue(vepOutputFile.exists());
    Assert.assertEquals(537, JobTestUtils.getLines(new GZIPInputStream(new FileInputStream(vepOutputFile))));

    vepOutputFile.delete();
}
From source file:uk.ac.ebi.eva.pipeline.jobs.AggregatedVcfJobTest.java
@Test
public void aggregationNoneOptionShouldNotLoadStats() throws Exception {
    VariantSource source = (VariantSource) jobOptions.getVariantOptions()
            .get(VariantStorageManager.VARIANT_SOURCE);
    jobOptions.getVariantOptions().put(VariantStorageManager.VARIANT_SOURCE,
            new VariantSource(input, source.getFileId(), source.getStudyId(), source.getStudyName(),
                    source.getType(), VariantSource.Aggregation.NONE));
    Config.setOpenCGAHome(opencgaHome);

    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // check transformed file
    String outputFilename = getTransformedOutputPath(Paths.get(input).getFileName(), compressExtension,
            outputDir);
    long lines = JobTestUtils.getLines(new GZIPInputStream(new FileInputStream(outputFilename)));
    assertEquals(156, lines);

    // check ((documents in DB) == (lines in transformed file))
    VariantStorageManager variantStorageManager = StorageManagerFactory.getVariantStorageManager();
    VariantDBAdaptor variantDBAdaptor = variantStorageManager.getDBAdaptor(dbName, null);
    VariantDBIterator iterator = variantDBAdaptor.iterator(new QueryOptions());
    Assert.assertEquals(JobTestUtils.count(iterator), lines);

    // check that stats are NOT loaded
    assertTrue(variantDBAdaptor.iterator(new QueryOptions()).next().getSourceEntries().values().iterator()
            .next().getCohortStats().isEmpty());
}
From source file:uk.ac.ebi.eva.pipeline.jobs.steps.VariantNormalizerStepTest.java
/**
 * This test has to fail because the VCF file FILE_WRONG_NO_ALT is malformed:
 * in one variant the reference and the alternate allele are the same.
 */
@Test
public void normalizerStepShouldFailIfVariantsAreMalformed() {
    final String FILE_WRONG_NO_ALT = "/wrong_no_alt.vcf.gz";
    Config.setOpenCGAHome(opencgaHome);

    // Given a malformed VCF input file
    String inputFile = VariantNormalizerStepTest.class.getResource(FILE_WRONG_NO_ALT).getFile();
    jobOptions.getPipelineOptions().put("input.vcf", inputFile);

    String outputFilename = getTransformedOutputPath(Paths.get(FILE_WRONG_NO_ALT).getFileName(), ".gz", "/tmp");
    File file = new File(outputFilename);
    file.delete();
    assertFalse(file.exists());

    // When the execute method in variantsTransform is invoked, then a StorageManagerException is thrown
    JobExecution jobExecution = jobLauncherTestUtils.launchStep(GenotypedVcfJob.NORMALIZE_VARIANTS);

    assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode());
}
From source file:uk.ac.ebi.eva.pipeline.jobs.AggregatedVcfJobTest.java
@Test
public void aggregatedTransformAndLoadShouldBeExecuted() throws Exception {
    Config.setOpenCGAHome(opencgaHome);

    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // check execution flow
    Assert.assertEquals(2, jobExecution.getStepExecutions().size());
    List<StepExecution> steps = new ArrayList<>(jobExecution.getStepExecutions());
    StepExecution transformStep = steps.get(0);
    StepExecution loadStep = steps.get(1);

    Assert.assertEquals(AggregatedVcfJob.NORMALIZE_VARIANTS, transformStep.getStepName());
    Assert.assertEquals(AggregatedVcfJob.LOAD_VARIANTS, loadStep.getStepName());
    assertTrue(transformStep.getEndTime().before(loadStep.getStartTime()));

    // check transformed file
    String outputFilename = getTransformedOutputPath(Paths.get(input).getFileName(), compressExtension,
            outputDir);
    long lines = JobTestUtils.getLines(new GZIPInputStream(new FileInputStream(outputFilename)));
    assertEquals(156, lines);

    // check ((documents in DB) == (lines in transformed file))
    VariantStorageManager variantStorageManager = StorageManagerFactory.getVariantStorageManager();
    VariantDBAdaptor variantDBAdaptor = variantStorageManager.getDBAdaptor(dbName, null);
    VariantDBIterator iterator = variantDBAdaptor.iterator(new QueryOptions());
    Assert.assertEquals(JobTestUtils.count(iterator), lines);

    // check that stats are loaded properly
    assertFalse(variantDBAdaptor.iterator(new QueryOptions()).next().getSourceEntries().values().iterator()
            .next().getCohortStats().isEmpty());
}
From source file:uk.ac.ebi.eva.pipeline.jobs.AnnotationJobTest.java
@Test
public void allAnnotationStepsShouldBeExecuted() throws Exception {
    String dump = PopulationStatisticsJobTest.class.getResource("/dump/").getFile();
    JobTestUtils.restoreMongoDbFromDump(dump);

    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    Assert.assertEquals(3, jobExecution.getStepExecutions().size());
    List<StepExecution> steps = new ArrayList<>(jobExecution.getStepExecutions());
    StepExecution findVariantsToAnnotateStep = steps.get(0);
    StepExecution generateVepAnnotationsStep = steps.get(1);
    StepExecution loadVepAnnotationsStep = steps.get(2);

    Assert.assertEquals(FIND_VARIANTS_TO_ANNOTATE, findVariantsToAnnotateStep.getStepName());
    Assert.assertEquals(GENERATE_VEP_ANNOTATION, generateVepAnnotationsStep.getStepName());
    Assert.assertEquals(LOAD_VEP_ANNOTATION, loadVepAnnotationsStep.getStepName());

    // check the list of variants without annotation (the VEP input file)
    assertTrue(vepInputFile.exists());
    assertEquals("20\t60343\t60343\tG/A\t+", JobTestUtils.readFirstLine(vepInputFile));

    // check that documents have the annotation
    DBCursor cursor = collection(dbName, jobOptions.getDbCollectionsVariantsName()).find();

    int cnt = 0;
    int consequenceTypeCount = 0;
    while (cursor.hasNext()) {
        cnt++;
        DBObject dbObject = (DBObject) cursor.next().get("annot");
        if (dbObject != null) {
            VariantAnnotation annot = converter.convertToDataModelType(dbObject);
            assertNotNull(annot.getConsequenceTypes());
            consequenceTypeCount += annot.getConsequenceTypes().size();
        }
    }

    assertEquals(300, cnt);
    assertEquals(536, consequenceTypeCount);

    // check that one line is skipped because it is malformed
    List<StepExecution> variantAnnotationLoadStepExecution = jobExecution.getStepExecutions().stream()
            .filter(stepExecution -> stepExecution.getStepName().equals(LOAD_VEP_ANNOTATION))
            .collect(Collectors.toList());
    assertEquals(1, variantAnnotationLoadStepExecution.get(0).getReadSkipCount());
}
From source file:uk.ac.ebi.eva.pipeline.jobs.steps.PopulationStatisticsGeneratorStepTest.java
/**
 * This test has to fail because it will try to extract variants from a non-existent DB.
 * Variants were not loaded, so there is nothing to query!
 */
@Test
public void statisticsGeneratorStepShouldFailIfVariantLoadStepIsNotCompleted() throws Exception {
    // Given a valid VCF input file
    String input = SMALL_VCF_FILE;
    pipelineOptions.put("input.vcf", input);
    variantOptions.put(VariantStorageManager.DB_NAME, STATS_DB);

    VariantSource source = new VariantSource(input, "1", "1", "studyName", VariantStudy.StudyType.COLLECTION,
            VariantSource.Aggregation.NONE);
    variantOptions.put(VARIANT_SOURCE, source);

    statsFile = new File(Paths.get(pipelineOptions.getString("output.dir.statistics"))
            .resolve(VariantStorageManager.buildFilename(source)) + ".variants.stats.json.gz");
    statsFile.delete();
    assertFalse(statsFile.exists()); // ensure the stats file doesn't exist from previous executions

    // When the execute method in variantsStatsCreate is executed
    JobExecution jobExecution = jobLauncherTestUtils.launchStep(PopulationStatisticsJob.CALCULATE_STATISTICS);

    assertEquals(ExitStatus.FAILED.getExitCode(), jobExecution.getExitStatus().getExitCode());
}
From source file:uk.ac.ebi.eva.pipeline.jobs.steps.VariantNormalizerStepTest.java
@Test
public void normalizerStepShouldTransformAllVariants() throws Exception {
    Config.setOpenCGAHome(opencgaHome);

    String inputFile = VariantNormalizerStepTest.class.getResource(input).getFile();
    jobOptions.getPipelineOptions().put("input.vcf", inputFile);

    String outputFilename = getTransformedOutputPath(Paths.get(input).getFileName(), ".gz", "/tmp");
    File file = new File(outputFilename);
    if (file.exists()) {
        file.delete();
    }
    assertFalse(file.exists());

    // When the execute method in variantsTransform is executed
    JobExecution jobExecution = jobLauncherTestUtils.launchStep(GenotypedVcfJob.NORMALIZE_VARIANTS);

    // Then variantsTransform should complete correctly
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // And the transformed file should contain the same number of lines as the VCF input file
    Assert.assertEquals(300, getLines(new GZIPInputStream(new FileInputStream(outputFilename))));

    file.delete();
    new File(outputDir, "small20.vcf.gz.file.json.gz").delete();
}
From source file:org.cloudfoundry.workers.stocks.batch.NightlyStockSymbolRecorder.java
@Scheduled(fixedRate = 10 * 1000)
public void runNightlyStockPriceRecorder() throws Throwable {
    JobParameters params = new JobParametersBuilder().addDate("date", new Date()).toJobParameters();
    JobExecution jobExecution = jobLauncher.run(job, params);
    BatchStatus batchStatus = jobExecution.getStatus();
    while (batchStatus.isRunning()) {
        logger.info("Still running...");
        Thread.sleep(1000);
    }
    logger.info(String.format("Exit status: %s", jobExecution.getExitStatus().getExitCode()));
    JobInstance jobInstance = jobExecution.getJobInstance();
    logger.info(String.format("job instance Id: %d", jobInstance.getId()));
}
From source file:org.cloudfoundry.workers.twitter.batch.NightlyTweetRecorder.java
@Scheduled(fixedRate = 10 * 1000)
public void runNightlyTweetRecorder() throws Throwable {
    logger.info("Running nightly tweet recorder");
    JobParameters params = new JobParametersBuilder().addDate("date", new Date()).toJobParameters();
    JobExecution jobExecution = jobLauncher.run(job, params);
    BatchStatus batchStatus = jobExecution.getStatus();
    while (batchStatus.isRunning()) {
        logger.info("Still running...");
        Thread.sleep(1000);
    }
    logger.info(String.format("Exit status: %s", jobExecution.getExitStatus().getExitCode()));
    JobInstance jobInstance = jobExecution.getJobInstance();
    logger.info(String.format("job instance Id: %d", jobInstance.getId()));
}