Example usage for org.springframework.batch.core ExitStatus COMPLETED

List of usage examples for org.springframework.batch.core ExitStatus COMPLETED

Introduction

On this page you can find example usage for org.springframework.batch.core ExitStatus COMPLETED.

Prototype

ExitStatus COMPLETED

To view the source code for org.springframework.batch.core ExitStatus COMPLETED, click the Source Link below.

Click Source Link

Document

Convenient constant value representing finished processing.

Usage

From source file:org.duracloud.snapshot.service.impl.SyncWriterTest.java

@Test
public void testAfterStep() throws Exception {
    // The restore manager must be asked to advance the restore status to
    // TRANSFER_TO_DURACLOUD_COMPLETE once the step finishes.
    EasyMock.expect(restoreManager.transitionRestoreStatus(EasyMock.eq(restorationId),
            EasyMock.eq(RestoreStatus.TRANSFER_TO_DURACLOUD_COMPLETE), EasyMock.isA(String.class)))
            .andReturn(restoration);

    // The step reports a successful exit status.
    EasyMock.expect(stepExecution.getExitStatus()).andReturn(ExitStatus.COMPLETED);
    replayAll();

    writer.afterStep(stepExecution);
}

From source file:uk.ac.ebi.eva.pipeline.jobs.PopulationStatisticsJobTest.java

@Test
public void fullPopulationStatisticsJob() throws Exception {
    // Given a valid VCF input file
    String inputFile = SMALL_VCF_FILE;

    pipelineOptions.put("input.vcf", inputFile);
    variantOptions.put(VariantStorageManager.DB_NAME, STATS_DB);

    VariantSource variantSource = new VariantSource(inputFile, "1", "1", "studyName",
            VariantStudy.StudyType.COLLECTION, VariantSource.Aggregation.NONE);
    variantOptions.put(VARIANT_SOURCE, variantSource);

    // Both statistics files share the same base path; build it once.
    String statsBasePath = Paths.get(pipelineOptions.getString("output.dir.statistics"))
            .resolve(VariantStorageManager.buildFilename(variantSource)).toString();
    statsFile = new File(statsBasePath + ".variants.stats.json.gz");
    statsFile.delete();
    assertFalse(statsFile.exists()); // ensure the stats file doesn't exist from previous executions

    initStatsLoadStepFiles();

    // When the complete statistics job is launched
    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    // Then the job finishes successfully
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // and the file containing statistics should exist
    assertTrue(statsFile.exists());

    // delete created files
    statsFile.delete();
    new File(statsBasePath + ".source.stats.json.gz").delete();

    // The DB docs should have the field "st"
    VariantStorageManager storageManager = StorageManagerFactory.getVariantStorageManager();
    VariantDBAdaptor dbAdaptor = storageManager.getDBAdaptor(STATS_DB, null);
    VariantDBIterator storedVariants = dbAdaptor.iterator(new QueryOptions());
    assertEquals(1, storedVariants.next().getSourceEntries().values().iterator().next().getCohortStats().size());

    statsFileToLoad.delete();
    sourceFileToLoad.delete();
    vcfFileToLoad.delete();
}

From source file:uk.ac.ebi.eva.pipeline.jobs.steps.PopulationStatisticsGeneratorStepTest.java

@Test
public void statisticsGeneratorStepShouldCalculateStats() throws IOException, InterruptedException {
    // A valid variants load step has already completed: restore its output from a dump
    String dump = PopulationStatisticsGeneratorStepTest.class.getResource("/dump/").getFile();
    restoreMongoDbFromDump(dump);

    // Given a valid VCF input file
    String inputFile = SMALL_VCF_FILE;

    pipelineOptions.put("input.vcf", inputFile);
    variantOptions.put(VariantStorageManager.DB_NAME, STATS_DB);

    VariantSource variantSource = new VariantSource(inputFile, "1", "1", "studyName",
            VariantStudy.StudyType.COLLECTION, VariantSource.Aggregation.NONE);
    variantOptions.put(VARIANT_SOURCE, variantSource);

    // Both statistics files share the same base path; build it once.
    String statsBasePath = Paths.get(pipelineOptions.getString("output.dir.statistics"))
            .resolve(VariantStorageManager.buildFilename(variantSource)).toString();
    statsFile = new File(statsBasePath + ".variants.stats.json.gz");
    statsFile.delete();
    assertFalse(statsFile.exists()); // ensure the stats file doesn't exist from previous executions

    // When the statistics calculation step runs on its own
    JobExecution jobExecution = jobLauncherTestUtils.launchStep(PopulationStatisticsJob.CALCULATE_STATISTICS);

    // Then the step should complete correctly
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // and the file containing statistics should exist
    assertTrue(statsFile.exists());

    // delete created files
    statsFile.delete();
    new File(statsBasePath + ".source.stats.json.gz").delete();

}

From source file:org.jasig.ssp.util.importer.job.csv.RawItemCsvWriter.java

/**
 * Step-completion callback: logs a summary of the raw CSV write step and
 * reports normal completion.
 *
 * @param stepExecution the just-finished step execution; its context is expected
 *                      to hold the processed file name under the key "fileName"
 * @return {@link ExitStatus#COMPLETED} unconditionally
 */
@Override
public ExitStatus afterStep(StepExecution stepExecution) {
    // Parameterized logging avoids eager string concatenation (SLF4J idiom;
    // the {} placeholder style is used elsewhere in this codebase).
    logger.info("End Raw Write Step for {} lines read: {} lines skipped: {}",
            stepExecution.getExecutionContext().getString("fileName"), stepExecution.getReadCount(),
            stepExecution.getReadSkipCount());
    logger.info(stepExecution.getSummary());
    return ExitStatus.COMPLETED;
}

From source file:fr.acxio.tools.agia.alfresco.AlfrescoNodeReader.java

/**
 * Step-completion callback; this reader never alters the step outcome.
 *
 * @param sStepExecution the finished step execution (unused)
 * @return always {@link ExitStatus#COMPLETED}
 */
@Override
public ExitStatus afterStep(StepExecution sStepExecution) {
    return ExitStatus.COMPLETED;
}

From source file:uk.ac.ebi.eva.pipeline.jobs.steps.VariantLoaderStepTest.java

@Test
public void loaderStepShouldLoadAllVariants() throws Exception {
    Config.setOpenCGAHome(opencgaHome);

    jobOptions.getVariantOptions().put(VariantStorageManager.DB_NAME, dbName);
    jobOptions.getVariantOptions().put(VARIANT_SOURCE, new VariantSource(input, "1", "1", "studyName",
            VariantStudy.StudyType.COLLECTION, VariantSource.Aggregation.NONE));

    // A variants transform step has already been executed: stage its outputs
    // (variants json + file json) into the output directory.
    File variantsJson = new File(
            VariantLoaderStepTest.class.getResource("/small20.vcf.gz.variants.json.gz").getFile());
    File stagedVariantsJson = new File(outputDir, variantsJson.getName());
    FileUtils.copyFile(variantsJson, stagedVariantsJson);

    File fileJson = new File(
            VariantLoaderStepTest.class.getResource("/small20.vcf.gz.file.json.gz").getFile());
    File stagedFileJson = new File(outputDir, fileJson.getName());
    FileUtils.copyFile(fileJson, stagedFileJson);

    // When the load step is executed
    JobExecution jobExecution = jobLauncherTestUtils.launchStep(GenotypedVcfJob.LOAD_VARIANTS);

    // Then the step should complete correctly
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // And the number of documents in the DB should equal the number of lines
    // of the transformed vcf file
    VariantStorageManager storageManager = StorageManagerFactory.getVariantStorageManager();
    VariantDBAdaptor dbAdaptor = storageManager.getDBAdaptor(dbName, null);
    VariantDBIterator storedVariants = dbAdaptor.iterator(new QueryOptions());
    long expectedLines = getLines(new GZIPInputStream(new FileInputStream(variantsJson)));

    assertEquals(count(storedVariants), expectedLines);

    stagedVariantsJson.delete();
    stagedFileJson.delete();
}

From source file:org.obiba.onyx.core.etl.participant.impl.AppointmentListUpdateListenerTest.java

@Test
public void testAfterUpdateCompleted() {
    // Build a step execution that has finished with COMPLETED status and
    // carries the processed file name in its execution context.
    Map<String, JobParameter> jobParameterMap = new HashMap<>();
    jobParameterMap.put("date", new JobParameter(new Date()));
    // Uppercase long suffix: lowercase 'l' is easily misread as the digit '1'.
    JobInstance job = new JobInstance(1L, new JobParameters(jobParameterMap), "jobTest");
    // NOTE(review): "compltion" looks like a typo for "completion", but the step
    // name is arbitrary test data, so it is left unchanged to preserve behavior.
    StepExecution stepExecution = new StepExecution("compltion", new JobExecution(job));
    stepExecution.setExitStatus(ExitStatus.COMPLETED);
    ExecutionContext context = new ExecutionContext();
    context.put("fileName", "fileName.xls");
    stepExecution.setExecutionContext(context);

    // The listener is expected to persist the appointment update statistics.
    appointmentManagementServiceMock.saveAppointmentUpdateStats((AppointmentUpdateStats) EasyMock.anyObject());

    replay(appointmentManagementServiceMock);
    appointmentListUpdateListener.afterUpdateCompleted(stepExecution);
    verify(appointmentManagementServiceMock);

}

From source file:uk.ac.ebi.eva.pipeline.jobs.AnnotationJobTest.java

@Test
public void noVariantsToAnnotateOnlyFindVariantsToAnnotateStepShouldRun() throws Exception {
    // When the annotation job runs with no variants pending annotation
    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    // Then the job still completes successfully
    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // and only the "find variants to annotate" step was executed
    // (statically imported assertEquals used consistently instead of Assert.assertEquals)
    assertEquals(1, jobExecution.getStepExecutions().size());
    StepExecution findVariantsToAnnotateStep = new ArrayList<>(jobExecution.getStepExecutions()).get(0);

    assertEquals(FIND_VARIANTS_TO_ANNOTATE, findVariantsToAnnotateStep.getStepName());

    // The VEP input file exists but is empty: there was nothing to annotate.
    // vepInputFile.toPath() is used directly; the original
    // Paths.get(file.toPath().toUri()) round-trip resolved to the same path.
    assertTrue(vepInputFile.exists());
    assertEquals(0, Files.size(vepInputFile.toPath()));
}

From source file:uk.ac.ebi.eva.pipeline.jobs.AggregatedVcfJobTest.java

@Test
public void aggregationNoneOptionShouldNotLoadStats() throws Exception {
    // Rebuild the variant source with aggregation explicitly set to NONE,
    // keeping all other attributes of the configured source.
    VariantSource source = (VariantSource) jobOptions.getVariantOptions()
            .get(VariantStorageManager.VARIANT_SOURCE);
    jobOptions.getVariantOptions().put(VariantStorageManager.VARIANT_SOURCE,
            new VariantSource(input, source.getFileId(), source.getStudyId(), source.getStudyName(),
                    source.getType(), VariantSource.Aggregation.NONE));

    Config.setOpenCGAHome(opencgaHome);

    JobExecution jobExecution = jobLauncherTestUtils.launchJob();

    assertEquals(ExitStatus.COMPLETED, jobExecution.getExitStatus());
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // check transformed file
    String outputFilename = getTransformedOutputPath(Paths.get(input).getFileName(), compressExtension,
            outputDir);

    long lines = JobTestUtils.getLines(new GZIPInputStream(new FileInputStream(outputFilename)));
    assertEquals(156, lines);

    // check ((documents in DB) == (lines in transformed file))
    VariantStorageManager variantStorageManager = StorageManagerFactory.getVariantStorageManager();
    VariantDBAdaptor variantDBAdaptor = variantStorageManager.getDBAdaptor(dbName, null);
    VariantDBIterator iterator = variantDBAdaptor.iterator(new QueryOptions());

    // statically imported assertEquals used for consistency with the rest of this test
    assertEquals(JobTestUtils.count(iterator), lines);

    // check that stats are NOT loaded
    assertTrue(variantDBAdaptor.iterator(new QueryOptions()).next().getSourceEntries().values().iterator()
            .next().getCohortStats().isEmpty());
}

From source file:org.duracloud.snapshot.service.impl.SpaceItemWriterTest.java

/**
 * Exercises the space item writer with the given number of writer threads:
 * sets up 100 content items, runs the writer through a full step
 * (beforeStep / writeItems / afterStep), and verifies that the generated MD5
 * and SHA-256 manifests reference every item with the correct checksum.
 *
 * @param threads number of threads used to write the items
 */
private void test(int threads) throws IOException, SnapshotException {
    outputWriter = new CSVFileOutputWriter(workDir);
    BufferedWriter propsWriter = createWriter(contentDir, "properties.json");
    BufferedWriter md5Writer = createWriter(contentDir, MD5_MANIFEST_TXT_FILE_NAME);
    BufferedWriter sha256Writer = createWriter(contentDir, SHA256_MANIFEST_TXT_FILE_NAME);

    String spaceId = "space-id";
    String contentId = "content-id";
    List<ContentItem> items = new ArrayList<>();

    List<File> sourceFiles = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        sourceFiles.add(setupContentItem(items, spaceId, contentId + i));
    }

    // Sort files and items by the same key (file name / content id) so that
    // index i in one list corresponds to index i in the other during verification.
    Collections.sort(sourceFiles, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });

    Collections.sort(items, new Comparator<ContentItem>() {
        @Override
        public int compare(ContentItem o1, ContentItem o2) {
            return o1.getContentId().compareTo(o2.getContentId());
        }
    });

    // The writer queries the step exit status exactly twice during this run.
    expect(stepExecution.getExitStatus()).andReturn(ExitStatus.COMPLETED).times(2);
    replayAll();
    writer = new SpaceItemWriter(snapshot, retrievalSource, contentDir, outputWriter, propsWriter, md5Writer,
            sha256Writer, snapshotManager);

    writer.beforeStep(stepExecution);
    writeItems(items, threads);
    writer.afterStep(stepExecution);

    // The two manifest checks were near-identical copy-paste loops; the sha256
    // copy even mislabeled its checksum as "md5Checksum" in the debug log.
    // Both now share one parameterized helper.
    verifyManifest(MD5_MANIFEST_TXT_FILE_NAME, Algorithm.MD5, sourceFiles, items, false);
    verifyManifest(SHA256_MANIFEST_TXT_FILE_NAME, Algorithm.SHA_256, sourceFiles, items, true);
}

/**
 * Verifies that line i of the named manifest references the content id of item i
 * and the checksum of source file i.
 *
 * @param manifestName manifest file to read
 * @param algorithm    checksum algorithm the manifest was written with
 * @param sourceFiles  local files, sorted to align with {@code items}
 * @param items        content items, sorted to align with {@code sourceFiles}
 * @param idAtLineEnd  when true the content id must terminate the line (the
 *                     SHA-256 manifest check); otherwise it may appear anywhere
 */
private void verifyManifest(String manifestName, Algorithm algorithm, List<File> sourceFiles,
        List<ContentItem> items, boolean idAtLineEnd) throws IOException {
    List<String> manifestLines = getLines(manifestName);

    for (int i = 0; i < sourceFiles.size(); i++) {
        File file = sourceFiles.get(i);
        // A fresh ChecksumUtil per file, matching the original code; reuse of a
        // single instance across files has not been verified as safe here.
        String checksum = new ChecksumUtil(algorithm).generateChecksum(file);
        String line = manifestLines.get(i);
        String itemContentId = items.get(i).getContentId();
        log.debug("{} line: \"{}\", checksum={}, filename={}, contentId={}", algorithm, line, checksum,
                file.getName(), itemContentId);
        if (idAtLineEnd) {
            assertTrue(line.endsWith(itemContentId));
        } else {
            assertTrue(line.contains(itemContentId));
        }
        assertTrue(line.contains(checksum));
    }
}