Example usage for org.springframework.batch.item.file FlatFileHeaderCallback FlatFileHeaderCallback

List of usage examples for org.springframework.batch.item.file FlatFileHeaderCallback FlatFileHeaderCallback

Introduction

On this page you can find example usage for org.springframework.batch.item.file FlatFileHeaderCallback FlatFileHeaderCallback.

Prototype

FlatFileHeaderCallback

Source Link

Usage

From source file:org.cbio.portal.pipelines.foundation.ClinicalDataWriter.java

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    // Clinical data is staged alongside the other pipeline outputs.
    String stagingFile = outputDirectory + "data_clinical.txt";

    // Items are already formatted lines, so pass them through unchanged;
    // the header row is supplied by getHeader() via the header callback.
    flatFileItemWriter.setLineAggregator(new PassThroughLineAggregator());
    flatFileItemWriter.setHeaderCallback(writer -> writer.write(getHeader()));
    flatFileItemWriter.setResource(new FileSystemResource(stagingFile));
    flatFileItemWriter.open(executionContext);
}

From source file:org.cbio.portal.pipelines.foundation.CnaDataWriter.java

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    // The case map was stashed in the execution context by an earlier step;
    // its key set provides the sample columns for the CNA header.
    final Map<String, CaseType> fmiCaseTypeMap = (Map<String, CaseType>) executionContext.get("fmiCaseTypeMap");

    String stagingFile = outputDirectory + "data_CNA.txt";
    // Lines are pre-formatted, so the aggregator just passes them through.
    flatFileItemWriter.setLineAggregator(new PassThroughLineAggregator());
    flatFileItemWriter.setHeaderCallback(writer -> writer.write(getHeader(fmiCaseTypeMap.keySet())));
    flatFileItemWriter.setResource(new FileSystemResource(stagingFile));
    flatFileItemWriter.open(executionContext);
}

From source file:org.cbio.portal.pipelines.foundation.FusionDataWriter.java

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    // Fusion records are written to their own staging file.
    String stagingFile = outputDirectory + "data_fusions.txt";

    // Pass each pre-formatted line straight through; prepend the header
    // produced by getHeader() when the file is opened.
    flatFileItemWriter.setLineAggregator(new PassThroughLineAggregator());
    flatFileItemWriter.setHeaderCallback(writer -> writer.write(getHeader()));
    flatFileItemWriter.setResource(new FileSystemResource(stagingFile));
    flatFileItemWriter.open(executionContext);
}

From source file:org.cbioportal.annotation.pipeline.MutationRecordWriter.java

/**
 * Opens the delegate flat-file writer against the configured output file.
 * The header callback first emits any accumulated comment lines, then the
 * tab-joined column header.
 */
@Override
public void open(ExecutionContext ec) throws ItemStreamException {
    stagingFile = Paths.get(outputFilename);

    // Records are already formatted; write them through unchanged.
    PassThroughLineAggregator aggr = new PassThroughLineAggregator();
    flatFileItemWriter.setLineAggregator(aggr);
    flatFileItemWriter.setResource(new FileSystemResource(stagingFile.toString()));
    flatFileItemWriter.setHeaderCallback(new FlatFileHeaderCallback() {
        @Override
        public void writeHeader(Writer writer) throws IOException {
            // first write out the comment lines, then write the actual header
            for (String comment : commentLines) {
                writer.write(comment + "\n");
            }
            writer.write(StringUtils.join(header, "\t"));
        }
    });
    flatFileItemWriter.open(ec);
}

From source file:org.cbio.portal.pipelines.foundation.MutationDataWriter.java

@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
    // The foundation case list was stored in the execution context by a
    // previous step; it drives the mutation file's header columns.
    final List<CaseType> fmiCaseList = (List<CaseType>) executionContext.get("fmiCaseList");

    String stagingFile = outputDirectory + "data_mutations_extended.txt";
    // Items are pre-formatted lines — pass them through as-is.
    flatFileItemWriter.setLineAggregator(new PassThroughLineAggregator());
    flatFileItemWriter.setHeaderCallback(writer -> writer.write(getHeader(fmiCaseList)));
    flatFileItemWriter.setResource(new FileSystemResource(stagingFile));
    flatFileItemWriter.open(executionContext);
}

From source file:org.springframework.batch.admin.sample.LeadRandomizer.java

/**
 * Reads the sample lead data, then writes 100 randomized batches of leads
 * to target/output/output.txt, preserving the original header line.
 *
 * Fix: the FlatFileItemReader was opened but never closed, leaking the
 * underlying file handle; it is now closed once reading is finished.
 */
@Test
public void testLeads() throws Exception {

    FlatFileItemReader<FieldSet> reader = new FlatFileItemReader<FieldSet>();
    reader.setResource(new ClassPathResource("/data/test.txt"));
    DefaultLineMapper<FieldSet> lineMapper = new DefaultLineMapper<FieldSet>();
    lineMapper.setLineTokenizer(new DelimitedLineTokenizer());
    lineMapper.setFieldSetMapper(new PassThroughFieldSetMapper());
    // Skip the header row but capture it so the writer can re-emit it.
    reader.setLinesToSkip(1);
    final List<String> headers = new ArrayList<String>();
    reader.setSkippedLinesCallback(new LineCallbackHandler() {
        public void handleLine(String line) {
            headers.add(line);
        }
    });
    reader.setLineMapper(lineMapper);
    reader.open(new ExecutionContext());

    List<FieldSet> list = new ArrayList<FieldSet>();
    FieldSet item = reader.read();
    while (item != null) {
        list.add(item);
        item = reader.read();
    }
    // Release the input file handle now that all rows are in memory.
    reader.close();
    assertEquals(7, list.size());

    FlatFileItemWriter<FieldSet> writer = new FlatFileItemWriter<FieldSet>();
    FileSystemResource resource = new FileSystemResource("target/output/output.txt");
    FileUtils.deleteQuietly(resource.getFile());
    writer.setResource(resource);
    writer.setHeaderCallback(new FlatFileHeaderCallback() {
        public void writeHeader(Writer writer) throws IOException {
            // Only one line is skipped above, so this emits a single header.
            for (String header : headers) {
                writer.write(header);
            }
        }
    });
    writer.setLineAggregator(new DelimitedLineAggregator<FieldSet>());
    writer.open(new ExecutionContext());

    // Columns: 0=id, 1=name, 2=country, 3=product, 4=amount.
    String[] names = getFields(list, 1);
    String[] country = getFields(list, 2);
    String[] products = getFields(list, 3);
    double[] amounts = getMinMax(list, 4);

    NumberFormat formatter = new DecimalFormat("#.##");
    int count = 20;
    for (int i = 0; i < 100; i++) {
        List<FieldSet> items = new ArrayList<FieldSet>();
        for (FieldSet fieldSet : list) {
            String[] values = fieldSet.getValues();
            values[0] = "" + (count++); // fresh sequential id
            values[1] = choose(names);
            values[2] = choose(country);
            values[3] = choose(products);
            values[4] = formatter.format(random(amounts));
            items.add(new DefaultFieldSet(values));
        }
        writer.write(items);
    }
    writer.close();

}