Example usage for org.springframework.batch.item.file FlatFileItemReader close

List of usage examples for org.springframework.batch.item.file FlatFileItemReader close

Introduction

On this page you can find example usages of org.springframework.batch.item.file.FlatFileItemReader.close().

Prototype

void close() throws ItemStreamException;

Document

If any resources are needed for the stream to operate, they need to be destroyed here.
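
For context, here is a minimal sketch of the lifecycle in which close() is typically called: open the reader with an ExecutionContext, read items until null, then close it in a finally block so the underlying file resource is released even if reading fails. The file name and the PassThroughLineMapper setup are assumptions for illustration only; they are not taken from the examples below.

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.PassThroughLineMapper;
import org.springframework.core.io.FileSystemResource;

public class FlatFileItemReaderCloseSketch {
    public static void main(String[] args) throws Exception {
        FlatFileItemReader<String> reader = new FlatFileItemReader<>();
        reader.setResource(new FileSystemResource("input.txt")); // assumed sample file
        reader.setLineMapper(new PassThroughLineMapper());       // maps each line to a String as-is

        ExecutionContext executionContext = new ExecutionContext();
        reader.open(executionContext);
        try {
            String line;
            while ((line = reader.read()) != null) {
                System.out.println(line);
            }
        } finally {
            // close() destroys the resources held by the stream (here, the open file handle);
            // calling it in finally guarantees cleanup even if read() throws.
            reader.close();
        }
    }
}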

Usage

From source file: de.mediait.batch.FlatFileItemReaderTest.java

@Test
public void testFlatFileReader2() throws Exception {

    final FlatFileItemReader<String[]> reader = createFlatFileReader(',', '\'');

    reader.setResource(new ClassPathResource("csv-fix-samples/fixcomma.txt"));

    final ExecutionContext executionContext = new ExecutionContext();
    reader.open(executionContext);
    final String[] object = (String[]) reader.read();
    reader.close();

    assertArrayEquals(new String[] { "begin", "abc' \"d\" 'ef", "end" }, object);
}

From source file: de.mediait.batch.FlatFileItemReaderTest.java

@Test
public void testFlatFileReader() throws Exception {

    final FlatFileItemReader<String[]> reader = createFlatFileReader(';', '\'');

    reader.setResource(new ClassPathResource("csv-fix-samples/fixsemicolon.txt"));

    final ExecutionContext executionContext = new ExecutionContext();
    reader.open(executionContext);
    final String[] object = (String[]) reader.read();
    reader.close();

    assertArrayEquals(new String[] { "begin", "abc' \"d\" 'ef", "end" }, object);
}
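
The createFlatFileReader(delimiter, quote) helper used in both tests is not shown in the excerpts above. As an assumption about what it likely does, a sketch along these lines would produce a reader whose read() returns the tokenized values of each line as a String[]:

import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;

public class FlatFileReaderFactorySketch {
    // Hypothetical reconstruction of the test helper; the original implementation is not part of this page.
    static FlatFileItemReader<String[]> createFlatFileReader(char delimiter, char quoteCharacter) {
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(String.valueOf(delimiter));
        tokenizer.setQuoteCharacter(quoteCharacter);

        DefaultLineMapper<String[]> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        // Map each tokenized line directly to its String[] values.
        lineMapper.setFieldSetMapper(fieldSet -> fieldSet.getValues());

        FlatFileItemReader<String[]> reader = new FlatFileItemReader<>();
        reader.setLineMapper(lineMapper);
        return reader;
    }
}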

From source file: org.cbioportal.annotation.pipeline.MutationRecordReader.java

@Override
public void open(ExecutionContext ec) throws ItemStreamException {

    processComments(ec);

    FlatFileItemReader<MutationRecord> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource(filename));
    DefaultLineMapper<MutationRecord> mapper = new DefaultLineMapper<>();
    final DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setDelimiter("\t");
    mapper.setLineTokenizer(tokenizer);
    mapper.setFieldSetMapper(new MutationFieldSetMapper());
    reader.setLineMapper(mapper);
    reader.setLinesToSkip(1);
    reader.setSkippedLinesCallback(new LineCallbackHandler() {
        @Override
        public void handleLine(String line) {
            tokenizer.setNames(line.split("\t"));
        }
    });
    reader.open(ec);

    LOG.info("Loading records from: " + filename);
    MutationRecord mutationRecord;
    try {
        while ((mutationRecord = reader.read()) != null) {
            mutationRecords.add(mutationRecord);
        }
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
    reader.close();

    int variantsToAnnotateCount = mutationRecords.size();
    int annotatedVariantsCount = 0;
    LOG.info(String.valueOf(variantsToAnnotateCount) + " records to annotate");
    for (MutationRecord record : mutationRecords) {
        annotatedVariantsCount++;
        if (annotatedVariantsCount % 2000 == 0) {
            LOG.info("\tOn record " + String.valueOf(annotatedVariantsCount) + " out of "
                    + String.valueOf(variantsToAnnotateCount) + ", annotation "
                    + String.valueOf((int) (((annotatedVariantsCount * 1.0) / variantsToAnnotateCount) * 100))
                    + "% complete");
        }
        // save variant details for logging
        String variantDetails = "(sampleId,chr,start,end,ref,alt,url)= (" + record.getTUMOR_SAMPLE_BARCODE()
                + "," + record.getCHROMOSOME() + "," + record.getSTART_POSITION() + ","
                + record.getEND_POSITION() + "," + record.getREFERENCE_ALLELE() + ","
                + record.getTUMOR_SEQ_ALLELE2() + "," + annotator.getUrlForRecord(record, isoformOverride)
                + ")";

        // init annotated record w/o genome nexus in case server error occurs
        // if no error then annotated record will get overwritten anyway with genome nexus response
        String serverErrorMessage = "";
        AnnotatedRecord annotatedRecord = new AnnotatedRecord(record);
        try {
            annotatedRecord = annotator.annotateRecord(record, replace, isoformOverride, true);
        } catch (HttpServerErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to internal server error";
        } catch (HttpClientErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to client error";
        } catch (HttpMessageNotReadableException ex) {
            serverErrorMessage = "Failed to annotate variant due to message not readable error";
        } catch (GenomeNexusAnnotationFailureException ex) {
            serverErrorMessage = "Failed to annotate variant due to Genome Nexus : " + ex.getMessage();
        }
        annotatedRecords.add(annotatedRecord);
        header.addAll(annotatedRecord.getHeaderWithAdditionalFields());

        // log server failure message if applicable
        if (!serverErrorMessage.isEmpty()) {
            LOG.warn(serverErrorMessage);
            failedAnnotations++;
            failedServerAnnotations++;
            if (errorReportLocation != null)
                updateErrorMessages(record, record.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), serverErrorMessage);
            continue;
        }
        String annotationErrorMessage = "";
        if (MafUtil.variantContainsAmbiguousTumorSeqAllele(record.getREFERENCE_ALLELE(),
                record.getTUMOR_SEQ_ALLELE1(), record.getTUMOR_SEQ_ALLELE2())) {
            snpAndIndelVariants++;
            annotationErrorMessage = "Record contains ambiguous SNP and INDEL allele change - SNP allele will be used";
        }
        if (annotatedRecord.getHGVSC().isEmpty() && annotatedRecord.getHGVSP().isEmpty()) {
            if (annotator.isHgvspNullClassifications(annotatedRecord.getVARIANT_CLASSIFICATION())) {
                failedNullHgvspAnnotations++;
                annotationErrorMessage = "Ignoring record with HGVSp null classification '"
                        + annotatedRecord.getVARIANT_CLASSIFICATION() + "'";
            } else {
                annotationErrorMessage = "Failed to annotate variant";
            }
            failedAnnotations++;
        }
        if (!annotationErrorMessage.isEmpty()) {
            if (verbose)
                LOG.info(annotationErrorMessage + ": " + variantDetails);
            if (errorReportLocation != null)
                updateErrorMessages(record, annotatedRecord.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), annotationErrorMessage);
        }
    }
    // print summary statistics and save error messages to file if applicable
    printSummaryStatistics(failedAnnotations, failedNullHgvspAnnotations, snpAndIndelVariants,
            failedServerAnnotations);
    if (errorReportLocation != null) {
        saveErrorMessagesToFile(errorMessages);
    }
    ec.put("mutation_header", new ArrayList(header));
}