Example usage for org.springframework.batch.item.ItemStreamException#ItemStreamException(Throwable)

Introduction

This page collects example usages of the org.springframework.batch.item.ItemStreamException(Throwable) constructor.

Prototype

public ItemStreamException(Throwable nested) 

Document

Constructs a new instance with a nested exception and empty message.
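
For orientation, here is a minimal sketch of the pattern the examples below share: an ItemStream implementation catches checked exceptions in its lifecycle callbacks and rethrows them through this constructor, so Spring Batch receives an unchecked exception that still carries the original cause. The class name and the openResource helper are hypothetical placeholders:

import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;

public class ExampleStream implements ItemStream {

    @Override
    public void open(ExecutionContext executionContext) throws ItemStreamException {
        try {
            openResource(); // acquire files, connections, etc.
        } catch (Exception e) {
            // wrap the checked exception; the original cause is preserved
            // as the nested exception
            throw new ItemStreamException(e);
        }
    }

    @Override
    public void update(ExecutionContext executionContext) throws ItemStreamException {
        // no restart state to save in this sketch
    }

    @Override
    public void close() throws ItemStreamException {
        // nothing to release in this sketch
    }

    // hypothetical helper standing in for real resource initialization
    private void openResource() throws Exception {
    }
}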

Usage

From source file:es.fcs.batch.integration.chunk.MyChunkMessageChannelItemWriter.java

public void open(ExecutionContext executionContext) throws ItemStreamException {
    if (executionContext.containsKey(EXPECTED)) {
        localState.open(executionContext.getInt(EXPECTED), executionContext.getInt(ACTUAL));
        if (!waitForResults()) {
            throw new ItemStreamException("Timed out waiting for back log on open");
        }
    }
}

From source file:org.cbioportal.annotation.pipeline.MutationRecordReader.java

private void processComments(ExecutionContext ec) {
    List<String> comments = new ArrayList<>();
    // try-with-resources closes the reader even when an exception is thrown
    try (BufferedReader reader = new BufferedReader(new FileReader(filename))) {
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.startsWith("#")) {
                comments.add(line);
            } else {
                // no more comments, go on processing
                break;
            }
        }
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }

    // Add comments to the config for the writer to access later
    ec.put("commentLines", comments);
}

From source file:egovframework.rte.bat.core.item.file.EgovPartitionFlatFileItemWriter.java

/**
 * Saves the current write position and line count from the stream state
 * into the ExecutionContext so that the step can be restarted.
 * @see ItemStream#update(ExecutionContext)
 */
public void update(ExecutionContext executionContext) {
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (saveState) {
        try {
            executionContext.putLong(getKey(RESTART_DATA_NAME), state.position());
        } catch (IOException e) {
            throw new ItemStreamException("ItemStream does not return current position properly", e);
        }

        executionContext.putLong(getKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
    }
}

From source file:me.andpay.ti.spring.batch.FlatFileItemWriter.java

/**
 * @see ItemStream#update(ExecutionContext)
 */
public void update(ExecutionContext executionContext) {
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (saveState) {
        try {
            executionContext.putLong(getKey(RESTART_DATA_NAME), state.position());
        } catch (IOException e) {
            throw new ItemStreamException("ItemStream does not return current position properly", e);
        }

        executionContext.putLong(getKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
    }
}

From source file:org.cbioportal.annotation.pipeline.MutationRecordReader.java

@Override
public void open(ExecutionContext ec) throws ItemStreamException {

    processComments(ec);

    FlatFileItemReader<MutationRecord> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource(filename));
    DefaultLineMapper<MutationRecord> mapper = new DefaultLineMapper<>();
    final DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setDelimiter("\t");
    mapper.setLineTokenizer(tokenizer);
    mapper.setFieldSetMapper(new MutationFieldSetMapper());
    reader.setLineMapper(mapper);
    reader.setLinesToSkip(1);
    reader.setSkippedLinesCallback(new LineCallbackHandler() {
        @Override
        public void handleLine(String line) {
            tokenizer.setNames(line.split("\t"));
        }
    });
    reader.open(ec);

    LOG.info("Loading records from: " + filename);
    MutationRecord mutationRecord;
    try {
        while ((mutationRecord = reader.read()) != null) {
            mutationRecords.add(mutationRecord);
        }
    } catch (Exception e) {
        throw new ItemStreamException(e);
    } finally {
        // close the delegate reader even if reading fails
        reader.close();
    }

    int variantsToAnnotateCount = mutationRecords.size();
    int annotatedVariantsCount = 0;
    LOG.info(variantsToAnnotateCount + " records to annotate");
    for (MutationRecord record : mutationRecords) {
        annotatedVariantsCount++;
        if (annotatedVariantsCount % 2000 == 0) {
            LOG.info("\tOn record " + String.valueOf(annotatedVariantsCount) + " out of "
                    + String.valueOf(variantsToAnnotateCount) + ", annotation "
                    + String.valueOf((int) (((annotatedVariantsCount * 1.0) / variantsToAnnotateCount) * 100))
                    + "% complete");
        }
        // save variant details for logging
        String variantDetails = "(sampleId,chr,start,end,ref,alt,url)= (" + record.getTUMOR_SAMPLE_BARCODE()
                + "," + record.getCHROMOSOME() + "," + record.getSTART_POSITION() + ","
                + record.getEND_POSITION() + "," + record.getREFERENCE_ALLELE() + ","
                + record.getTUMOR_SEQ_ALLELE2() + "," + annotator.getUrlForRecord(record, isoformOverride)
                + ")";

        // init annotated record w/o genome nexus in case server error occurs
        // if no error then annotated record will get overwritten anyway with genome nexus response
        String serverErrorMessage = "";
        AnnotatedRecord annotatedRecord = new AnnotatedRecord(record);
        try {
            annotatedRecord = annotator.annotateRecord(record, replace, isoformOverride, true);
        } catch (HttpServerErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to internal server error";
        } catch (HttpClientErrorException ex) {
            serverErrorMessage = "Failed to annotate variant due to client error";
        } catch (HttpMessageNotReadableException ex) {
            serverErrorMessage = "Failed to annotate variant due to message not readable error";
        } catch (GenomeNexusAnnotationFailureException ex) {
            serverErrorMessage = "Failed to annotate variant due to Genome Nexus : " + ex.getMessage();
        }
        annotatedRecords.add(annotatedRecord);
        header.addAll(annotatedRecord.getHeaderWithAdditionalFields());

        // log server failure message if applicable
        if (!serverErrorMessage.isEmpty()) {
            LOG.warn(serverErrorMessage);
            failedAnnotations++;
            failedServerAnnotations++;
            if (errorReportLocation != null)
                updateErrorMessages(record, record.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), serverErrorMessage);
            continue;
        }
        String annotationErrorMessage = "";
        if (MafUtil.variantContainsAmbiguousTumorSeqAllele(record.getREFERENCE_ALLELE(),
                record.getTUMOR_SEQ_ALLELE1(), record.getTUMOR_SEQ_ALLELE2())) {
            snpAndIndelVariants++;
            annotationErrorMessage = "Record contains ambiguous SNP and INDEL allele change - SNP allele will be used";
        }
        if (annotatedRecord.getHGVSC().isEmpty() && annotatedRecord.getHGVSP().isEmpty()) {
            if (annotator.isHgvspNullClassifications(annotatedRecord.getVARIANT_CLASSIFICATION())) {
                failedNullHgvspAnnotations++;
                annotationErrorMessage = "Ignoring record with HGVSp null classification '"
                        + annotatedRecord.getVARIANT_CLASSIFICATION() + "'";
            } else {
                annotationErrorMessage = "Failed to annotate variant";
            }
            failedAnnotations++;
        }
        if (!annotationErrorMessage.isEmpty()) {
            if (verbose)
                LOG.info(annotationErrorMessage + ": " + variantDetails);
            if (errorReportLocation != null)
                updateErrorMessages(record, annotatedRecord.getVARIANT_CLASSIFICATION(),
                        annotator.getUrlForRecord(record, isoformOverride), annotationErrorMessage);
        }
    }
    // print summary statistics and save error messages to file if applicable
    printSummaryStatistics(failedAnnotations, failedNullHgvspAnnotations, snpAndIndelVariants,
            failedServerAnnotations);
    if (errorReportLocation != null) {
        saveErrorMessagesToFile(errorMessages);
    }
    ec.put("mutation_header", new ArrayList(header));
}

From source file:org.geoserver.backuprestore.writer.CatalogFileWriter.java

/**
 * @see ItemStream#update(ExecutionContext)
 */
@Override
public void update(ExecutionContext executionContext) {
    super.update(executionContext);
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (saveState) {
        try {
            executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), state.position());
        } catch (IOException e) {
            logValidationExceptions((T) null,
                    new ItemStreamException("ItemStream does not return current position properly", e));
        }

        executionContext.putLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
    }
}

From source file:org.springframework.batch.item.file.FlatFileItemWriter.java

/**
 * @see ItemStream#update(ExecutionContext)
 */
@Override
public void update(ExecutionContext executionContext) {
    super.update(executionContext);
    if (state == null) {
        throw new ItemStreamException("ItemStream not open or already closed.");
    }

    Assert.notNull(executionContext, "ExecutionContext must not be null");

    if (saveState) {
        try {
            executionContext.putLong(getExecutionContextKey(RESTART_DATA_NAME), state.position());
        } catch (IOException e) {
            throw new ItemStreamException("ItemStream does not return current position properly", e);
        }

        executionContext.putLong(getExecutionContextKey(WRITTEN_STATISTICS_NAME), state.linesWritten);
    }
}

From source file:org.springframework.batch.jsr.item.CheckpointSupport.java

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    try {
        String executionContextKey = getExecutionContextKey(checkpointKey);
        Serializable checkpoint = (Serializable) executionContext.get(executionContextKey);
        doOpen(checkpoint);
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
}

From source file:org.springframework.batch.jsr.item.CheckpointSupport.java

@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
    try {
        executionContext.put(getExecutionContextKey(checkpointKey), deepCopy(doCheckpoint()));
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
}

From source file:org.springframework.batch.jsr.item.CheckpointSupport.java

@Override
public void close() throws ItemStreamException {
    try {
        doClose();
    } catch (Exception e) {
        throw new ItemStreamException(e);
    }
}