Example usage for org.apache.commons.csv CSVFormat DEFAULT

List of usage examples for org.apache.commons.csv CSVFormat DEFAULT

Introduction

In this page you can find the example usage for org.apache.commons.csv CSVFormat DEFAULT.

Prototype

CSVFormat DEFAULT

To view the source code for org.apache.commons.csv CSVFormat DEFAULT, use the Source Link below.

Click Source Link

Document

Standard comma separated format, as for #RFC4180 but allowing empty lines.

Usage

From source file:org.apache.phoenix.pherf.util.GoogleChartGenerator.java

/**
 * Reads aggregate file and convert it to DataNode 
 * @param label/* w  w  w. j av  a2 s .c o  m*/
 * @throws Exception
 */
private void read(String label) throws Exception {
    String resultFileName = resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX + label
            + ResultFileDetails.CSV_AGGREGATE_PERFORMANCE.getExtension();

    FileReader in = new FileReader(resultFileName);
    final CSVParser parser = new CSVParser(in, CSVFormat.DEFAULT.withHeader());

    for (CSVRecord record : parser) {
        String group = record.get("QUERY_GROUP");
        String query = record.get("QUERY");
        String explain = record.get("EXPLAIN_PLAN");
        String tenantId = record.get("TENANT_ID");
        long avgTime = Long.parseLong(record.get("AVG_TIME_MS"));
        long minTime = Long.parseLong(record.get("AVG_MIN_TIME_MS"));
        long numRuns = Long.parseLong(record.get("RUN_COUNT"));
        long rowCount = Long.parseLong(record.get("RESULT_ROW_COUNT"));
        Node node = new Node(minTime, avgTime, numRuns, explain, query, tenantId, label, rowCount);

        if (datanodes.containsKey(group)) {
            datanodes.get(group).getDataSet().put(label, node);
        } else {
            datanodes.put(group, new DataNode(label, node));
        }
    }
    parser.close();
}

From source file:org.apache.phoenix.util.csv.CsvUpsertExecutorTest.java

/**
 * Builds a single {@link CSVRecord} by joining the given column values with
 * commas and parsing the resulting line with the default CSV format.
 *
 * @param columnValues column values for the record
 * @return the first parsed record, or null if the input produced none
 * @throws IOException if parsing fails
 */
private CSVRecord createCsvRecord(String... columnValues) throws IOException {
    String line = String.join(",", columnValues);
    CSVParser parsed = CSVParser.parse(line, CSVFormat.DEFAULT);
    return Iterables.getFirst(parsed, null);
}

From source file:org.apache.phoenix.util.CSVCommonsLoader.java

/**
 * default settings/* www.  j a v a  2  s . co  m*/
 * delimiter = ','
 * quoteChar = '"',
 * escape = null
 * recordSeparator = CRLF, CR, or LF
 * ignore empty lines allows the last data line to have a recordSeparator
 *
 * @return CSVFormat based on constructor settings.
 */
private CSVFormat buildFormat() {
    CSVFormat format = CSVFormat.DEFAULT.withIgnoreEmptyLines(true)
            .withDelimiter(asControlCharacter(fieldDelimiter)).withQuote(asControlCharacter(quoteCharacter));

    if (escapeCharacter != null) {
        format = format.withEscape(asControlCharacter(escapeCharacter));
    }

    switch (headerSource) {
    case FROM_TABLE:
        // obtain headers from table, so format should not expect a header.
        break;
    case IN_LINE:
        // an empty string array triggers csv loader to grab the first line as the header
        format = format.withHeader(new String[0]);
        break;
    case SUPPLIED_BY_USER:
        // a populated string array supplied by the user
        format = format.withHeader(columns.toArray(new String[columns.size()]));
        break;
    default:
        throw new RuntimeException("Header source was unable to be inferred.");

    }
    return format;
}

From source file:org.commonvox.hbase_column_manager.ChangeEventMonitor.java

/**
 * Exports the given ChangeEvents to {@code targetFile} as CSV with '#' comment
 * markers, a "\n" record separator, and a header taken from
 * {@code ReportHeader}. Does nothing when {@code changeEvents} is null.
 *
 * @param changeEvents events to export; may be null
 * @param targetFile   destination file (overwritten if it exists)
 * @param headerDetail free-text detail included in the header comment
 * @throws IOException if the file cannot be written
 */
static void exportChangeEventListToCsvFile(Collection<ChangeEvent> changeEvents, File targetFile,
        String headerDetail) throws IOException {
    if (changeEvents == null) {
        // Guard before opening the writer: the original opened (and thereby
        // created/truncated) the target file and wrote a header even for a
        // null input, then returned.
        return;
    }
    CSVFormat csvFormat = CSVFormat.DEFAULT.withRecordSeparator("\n").withCommentMarker('#')
            .withHeader(ReportHeader.class);
    // try-with-resources closes (and flushes) the printer and its FileWriter.
    try (CSVPrinter csvPrinter = csvFormat.withHeaderComments("List of ChangeEvents in "
            + Repository.PRODUCT_NAME + " repository " + headerDetail + "-- Exported to CSV by "
            + Repository.PRODUCT_NAME + ":" + ChangeEventMonitor.class.getSimpleName(), new Date())
            .print(new FileWriter(targetFile))) {
        for (ChangeEvent ce : changeEvents) {
            csvPrinter.print(ce.getTimestampAsString());
            csvPrinter.print(ce.getUserNameAsString());
            csvPrinter.print(ce.getEntityType().toString());
            csvPrinter.print(ce.getNamespaceAsString());
            csvPrinter.print(ce.getTableNameAsString());
            csvPrinter.print(ce.getColumnFamilyAsString());
            csvPrinter.print(ce.getColumnQualifierAsString());
            csvPrinter.print(ce.getAttributeNameAsString());
            csvPrinter.print(ce.getAttributeValueAsString());
            csvPrinter.println();
        }
    }
}

From source file:org.cricketmsf.in.http.CsvFormatter.java

/**
 * Renders a list of rows as CSV text. Each element of {@code list} must itself
 * be a {@code List} holding one record's column values.
 *
 * @param list rows to print; an empty list yields an empty string
 * @return the CSV text, or (best effort) whatever was printed so far plus the
 *         error message if printing fails
 */
public String format(List list) {
    StringBuilder sb = new StringBuilder();
    // try-with-resources closes (and flushes) the printer; the original never
    // closed it.
    try (CSVPrinter printer = new CSVPrinter(sb, CSVFormat.DEFAULT)) {
        // Single loop from index 0 replaces the original's redundant
        // "print element 0, then loop from 1" split — both printed every row
        // the same way.
        for (int i = 0; i < list.size(); i++) {
            printer.printRecord((List) list.get(i));
        }
    } catch (IOException e) {
        // Preserve best-effort behavior: report the error in the output
        // instead of propagating it.
        sb.append(e.getMessage());
    }
    return sb.toString();
}

From source file:org.cricketmsf.in.http.CsvFormatter.java

/**
 * Renders a map as two CSV rows: the first holds the keys, the second the
 * corresponding values (both in the map's iteration order).
 *
 * @param data map to print
 * @return the CSV text, or (best effort) whatever was printed so far plus the
 *         error message if printing fails
 */
public String format(Map data) {
    StringBuilder sb = new StringBuilder();
    // try-with-resources closes (and flushes) the printer; the original never
    // closed it.
    try (CSVPrinter printer = new CSVPrinter(sb, CSVFormat.DEFAULT)) {
        printer.printRecord(data.keySet());
        printer.printRecord(data.values());
    } catch (IOException e) {
        // Preserve best-effort behavior: report the error in the output
        // instead of propagating it.
        sb.append(e.getMessage());
    }
    return sb.toString();
}

From source file:org.dishevelled.bio.tools.EnsemblVariantTableToVcf.java

/**
 * Converts an Ensembl variant table (CSV with a header row containing
 * "Variant ID", "Location", and "Alleles" columns) to VCF v4.2. Rows whose
 * "Alleles" column does not split into exactly a ref/alt pair (e.g.
 * COSMIC_MUTATION, HGMD_MUTATION, or multi-allelic entries like G/A/C/T)
 * are skipped.
 *
 * @return 0 on success
 * @throws Exception if reading, parsing, or writing fails
 */
@Override
public Integer call() throws Exception {
    BufferedReader reader = null;
    PrintWriter writer = null;
    try {
        reader = reader(inputEnsemblVariantTableFile);
        writer = writer(outputVcfFile);

        VcfHeader header = VcfHeader.builder().withFileFormat("VCFv4.2").withMeta("##fileformat=VCFv4.2")
                .build();
        VcfWriter.writeHeader(header, writer);

        // No per-sample genotype columns in the output.
        List<VcfSample> samples = Collections.emptyList();
        VcfWriter.writeColumnHeader(samples, writer);

        VcfRecord.Builder builder = VcfRecord.builder().withFilter("PASS").withFormat("");

        for (CSVRecord record : CSVFormat.DEFAULT.withHeader().parse(reader)) {
            String variantId = record.get("Variant ID");
            String[] location = record.get("Location").split(":");
            // "-" denotes a missing allele in Ensembl; VCF uses "." instead.
            String[] alleles = record.get("Alleles").replace("-", ".").split("/");

            if (alleles.length != 2) {
                // log warning, found e.g. COSMIC_MUTATION, HGMD_MUTATION, or CCC/-/CC, G/A/C/T
                continue;
            }
            VcfRecord vcfRecord = builder.withChrom(location[0]).withPos(Long.parseLong(location[1]))
                    .withRef(alleles[0]).withAlt(alleles[1]).withId(variantId).build();

            VcfWriter.writeRecord(samples, vcfRecord, writer);
        }
        return 0;
    } finally {
        // Null-check before closing: if reader(...) throws, both refs are
        // still null and the original code silently swallowed the resulting
        // NullPointerException here.
        if (reader != null) {
            try {
                reader.close();
            } catch (Exception ignored) {
                // best-effort close; primary result already determined
            }
        }
        if (writer != null) {
            try {
                writer.close();
            } catch (Exception ignored) {
                // best-effort close
            }
        }
    }
}

From source file:org.easybatch.extensions.apache.common.csv.ApacheCommonCsvBatchReaderTest.java

/**
 * Opens the bundled /tweets.csv fixture with the default CSV format and
 * wraps the parser in the batch reader under test.
 */
@Before
public void setUp() throws Exception {
    String fixturePath = this.getClass().getResource("/tweets.csv").getFile();
    FileReader fixtureReader = new FileReader(fixturePath);
    CSVParser parser = new CSVParser(fixtureReader, CSVFormat.DEFAULT);
    apacheCommonCsvBatchReader = new ApacheCommonCsvBatchReader(parser, BATCH_SIZE);
}

From source file:org.easybatch.extensions.apache.common.csv.ApacheCommonCsvRecordMapperTest.java

/**
 * Maps a comma-separated record with an explicit header onto a Foo bean and
 * verifies every mapped field.
 */
@Test
public void testApacheCommonCsvMapping() throws Exception {
    CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("firstName", "lastName", "age", "married");
    StringReader input = new StringReader("foo,bar,15,true");
    ApacheCommonCsvRecord record = getApacheCommonCsvRecord(input, csvFormat);

    GenericRecord<Foo> mapped = mapper.processRecord(record);
    Foo foo = mapped.getPayload();

    assertThat(foo).isNotNull();
    assertThat(foo.getFirstName()).isEqualTo("foo");
    assertThat(foo.getLastName()).isEqualTo("bar");
    assertThat(foo.getAge()).isEqualTo(15);
    assertThat(foo.isMarried()).isTrue();
}

From source file:org.easybatch.extensions.apache.common.csv.ApacheCommonCsvRecordMapperTest.java

/**
 * Maps a semicolon-delimited record (custom delimiter plus explicit header)
 * onto a Foo bean and verifies every mapped field.
 */
@Test
public void testApacheCommonCsvDelimiter() throws Exception {
    CSVFormat csvFormat = CSVFormat.DEFAULT.withDelimiter(';').withHeader("firstName", "lastName", "age",
            "married");
    StringReader input = new StringReader("foo;bar;15;true");
    ApacheCommonCsvRecord record = getApacheCommonCsvRecord(input, csvFormat);

    GenericRecord<Foo> mapped = mapper.processRecord(record);
    Foo foo = mapped.getPayload();

    assertThat(foo).isNotNull();
    assertThat(foo.getFirstName()).isEqualTo("foo");
    assertThat(foo.getLastName()).isEqualTo("bar");
    assertThat(foo.getAge()).isEqualTo(15);
    assertThat(foo.isMarried()).isTrue();
}