Example usage for org.apache.commons.csv CSVFormat DEFAULT

List of usage examples for org.apache.commons.csv CSVFormat DEFAULT

Introduction

On this page you can find example usage of org.apache.commons.csv CSVFormat DEFAULT.

Prototype

CSVFormat DEFAULT

To view the source code for org.apache.commons.csv CSVFormat DEFAULT, click the Source Link below.

Click Source Link

Document

Standard comma separated format, as for #RFC4180 but allowing empty lines.

Usage

From source file:com.garethahealy.quotalimitsgenerator.cli.parsers.DefaultCLIParser.java

/**
 * Parses the instance-type CSV into a map keyed by the value in column 1.
 *
 * <p>When {@code instanceTypeCsv} is the literal string {@code "classpath"}
 * (case-insensitive), the bundled {@code instancetypes.csv} resource is read;
 * otherwise the argument is treated as a file URI.
 *
 * @param instanceTypeCsv either {@code "classpath"} or a file URI pointing at the CSV
 * @return map of column 1 to a pair of (column 2, column 3) parsed as integers
 * @throws IOException if the CSV cannot be read
 * @throws URISyntaxException if {@code instanceTypeCsv} is not a valid URI
 * @throws ParseException if the CSV contains no records
 */
private Map<String, Pair<Integer, Integer>> parseLines(String instanceTypeCsv)
        throws IOException, URISyntaxException, ParseException {
    InputStreamReader inputStreamReader;
    if (instanceTypeCsv.equalsIgnoreCase("classpath")) {
        inputStreamReader = new InputStreamReader(
                getClass().getClassLoader().getResourceAsStream("instancetypes.csv"), Charset.forName("UTF-8"));
    } else {
        URI uri = new URI(instanceTypeCsv);
        inputStreamReader = new InputStreamReader(new FileInputStream(new File(uri)), Charset.forName("UTF-8"));
    }

    List<CSVRecord> lines;
    // try-with-resources replaces the manual finally-close; closing the parser
    // also closes the underlying reader, so no separate close is needed.
    try (CSVParser parser = CSVFormat.DEFAULT.parse(new BufferedReader(inputStreamReader))) {
        // getRecords() never returns null, so no null check is required below.
        lines = parser.getRecords();
    }

    if (lines.isEmpty()) {
        throw new ParseException("instance-type-csv data is empty");
    }

    Map<String, Pair<Integer, Integer>> linesMap = new HashMap<String, Pair<Integer, Integer>>();
    for (CSVRecord current : lines) {
        linesMap.put(current.get(1), new ImmutablePair<Integer, Integer>(Integer.parseInt(current.get(2)),
                Integer.parseInt(current.get(3))));
    }

    return linesMap;
}

From source file:io.mindmaps.migration.csv.CSVDataMigratorTest.java

/**
 * Opens the named classpath resource as a header-aware CSV parser.
 * Returns {@code null} (after printing the stack trace) if the file
 * cannot be opened or parsed.
 */
private CSVParser parser(String fileName) {
    // Resolve the test resource via the class loader of the schema-migrator test.
    String resourcePath = CSVSchemaMigratorTest.class.getClassLoader().getResource(fileName).getPath();
    File csvFile = new File(resourcePath);

    try {
        // The first record of the file is consumed as the header row.
        return CSVParser.parse(csvFile.toURI().toURL(), StandardCharsets.UTF_8,
                CSVFormat.DEFAULT.withHeader());
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
}

From source file:ca.nrc.cadc.tap.db.AsciiTableData.java

/**
 * Constructor.
 *
 * <p>Wraps the stream in a CSV or TSV parser (chosen by content type) whose
 * first record supplies the column names.
 *
 * @param in The data stream
 * @param contentType The content type of the data (CSV or TSV)
 * @throws IOException If a data handling error occurs
 * @throws UnsupportedOperationException if the content type is neither CSV nor TSV
 * @throws IllegalArgumentException if the header row yields no columns
 */
public AsciiTableData(InputStream in, String contentType) throws IOException {
    // NOTE(review): the original computed a local `delimiter` ('\t' for TSV) that
    // was never used — the TDF/DEFAULT formats below already carry the delimiter —
    // so that dead code has been removed.
    // NOTE(review): no charset is specified, so the platform default is used —
    // confirm whether UTF-8 should be set explicitly.
    InputStreamReader ir = new InputStreamReader(in);

    if (TableContentHandler.CONTENT_TYPE_TSV.equals(contentType)) {
        this.reader = new CSVParser(ir, CSVFormat.TDF.withFirstRecordAsHeader());
    } else if (TableContentHandler.CONTENT_TYPE_CSV.equals(contentType)) {
        this.reader = new CSVParser(ir, CSVFormat.DEFAULT.withFirstRecordAsHeader());
    } else {
        throw new UnsupportedOperationException("contentType: " + contentType);
    }

    this.rowIterator = reader.iterator();
    Map<String, Integer> header = reader.getHeaderMap();
    columnNames = new ArrayList<String>(header.size());
    for (String s : header.keySet()) {
        // Header cells are trimmed so stray whitespace never leaks into column names.
        columnNames.add(s.trim());
        log.debug("found column: " + s);
    }
    if (columnNames.isEmpty()) {
        throw new IllegalArgumentException("No data columns.");
    }
}

From source file:io.github.seiferma.jameica.hibiscus.dkb.creditcard.synchronize.csvparser.DKBCsvParser.java

/**
 * Builds the CSV format for DKB exports: the default format with a
 * semicolon delimiter and the caller-supplied record separator.
 */
private static CSVFormat createCsvFormat(String lineSeparator) {
    CSVFormat semicolonDelimited = CSVFormat.DEFAULT.withDelimiter(';');
    return semicolonDelimited.withRecordSeparator(lineSeparator);
}

From source file:com.advdb.footballclub.FootBallClub.java

/**
 * Loads opponent rows from the DimOpponent CSV and persists them in one
 * transaction. Columns 2 and 3 of each record supply the long and short names.
 * On a Hibernate failure the transaction is rolled back; all failures are logged.
 */
private void createOpponent(Session session) {

    Transaction transaction = null;
    try {
        System.out.println("start createOpponent.");
        transaction = session.beginTransaction();
        // try-with-resources: the original leaked the reader whenever parsing
        // or session.save threw before in.close() was reached.
        try (Reader in = new FileReader("/Users/apichart/Documents/DW_opponent/DimOpponent-Table 1.csv")) {
            Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(in);
            for (CSVRecord record : records) {
                String longName = record.get(2);
                String shortName = record.get(3);
                DimOpponent d = new DimOpponent(longName, shortName);
                session.save(d);
            }
        }
        session.flush();
        session.clear();
        transaction.commit();

        System.out.println("finish createOpponent.");
    } catch (HibernateException e) {
        if (transaction != null) {
            transaction.rollback();
        }
        // The original swallowed this silently; record the failure like the I/O path does.
        Logger.getLogger(FootBallClub.class.getName()).log(Level.SEVERE, null, e);
    } catch (IOException ex) {
        // FileNotFoundException is an IOException, so one catch covers both original clauses.
        Logger.getLogger(FootBallClub.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:com.streamsets.pipeline.lib.csv.TestCsvParser.java

@Test
public void testSkipLines() throws Exception {
    // Input bytes: "foo\n" (4) + "bar\r\n" (5) + three data lines.
    // skipStartLines = 2 discards "foo" and "bar" before the first read.
    CsvParser parser = new CsvParser(
            new CountingReader(new StringReader("foo\nbar\r\na,b,c\naa,bb,cc\ne,f,g\n")),
            CSVFormat.DEFAULT.withHeader((String[]) null).withSkipHeaderRecord(false), -1, 0, 2);
    try {
        // 4 + 5 = 9 bytes consumed by the two skipped lines.
        Assert.assertEquals(9, parser.getReaderPosition());

        String[] record = parser.read();
        // 9 + "a,b,c\n" (6 bytes) = 15.
        Assert.assertEquals(15, parser.getReaderPosition());
        Assert.assertNotNull(record);
        Assert.assertArrayEquals(new String[] { "a", "b", "c" }, record);
        record = parser.read();
        Assert.assertNotNull(record);
        Assert.assertArrayEquals(new String[] { "aa", "bb", "cc" }, record);
        // 15 + "aa,bb,cc\n" (9 bytes) = 24.
        Assert.assertEquals(24, parser.getReaderPosition());
        record = parser.read();
        Assert.assertNotNull(record);
        Assert.assertArrayEquals(new String[] { "e", "f", "g" }, record);
        // 24 + "e,f,g\n" (6 bytes) = 30 — end of input.
        Assert.assertEquals(30, parser.getReaderPosition());
        record = parser.read();
        // Past end of input: read() yields null and the position stays put.
        Assert.assertNull(record);
        Assert.assertEquals(30, parser.getReaderPosition());
    } finally {
        parser.close();
    }
}

From source file:com.team3637.service.ScheduleServiceMySQLImpl.java

/**
 * Exports every schedule row to {@code outputFile} as CSV, one record per
 * {@link Schedule}, with columns taken from the declared fields via reflection.
 * I/O and reflection failures are printed to stderr rather than propagated,
 * matching the interface contract (no checked exceptions).
 */
@Override
public void exportCSV(String outputFile) {
    List<Schedule> data = getSchedule();
    // try-with-resources guarantees flush-and-close in the correct order:
    // the printer is closed first (flushing buffered records), then the writer.
    // The original closed the writer before the printer and did so by hand.
    try (FileWriter fileWriter = new FileWriter(outputFile);
            CSVPrinter csvFilePrinter = new CSVPrinter(fileWriter,
                    CSVFormat.DEFAULT.withRecordSeparator("\n"))) {
        // The field list is identical for every row, so reflect once
        // instead of once per record as the original did.
        Field[] fields = Schedule.class.getDeclaredFields();
        for (Field field : fields) {
            field.setAccessible(true);
        }
        for (Schedule row : data) {
            List<Object> line = new ArrayList<>(fields.length);
            for (Field field : fields) {
                line.add(field.get(row));
            }
            csvFilePrinter.printRecord(line);
        }
    } catch (IOException | IllegalAccessException e) {
        e.printStackTrace();
    }
}

From source file:co.cask.hydrator.plugin.CSVParser.java

/**
 * Resolves the configured CSV format name to a {@link CSVFormat} constant and
 * parses the configured output schema.
 *
 * @param context the transform context supplied by the pipeline
 * @throws IllegalArgumentException if the format name is unrecognized or the
 *         schema JSON is invalid
 */
@Override
public void initialize(TransformContext context) throws Exception {
    super.initialize(context);

    String csvFormatString = config.format.toLowerCase();
    switch (csvFormatString) {
    case "default":
        csvFormat = CSVFormat.DEFAULT;
        break;

    case "excel":
        csvFormat = CSVFormat.EXCEL;
        break;

    case "mysql":
        csvFormat = CSVFormat.MYSQL;
        break;

    case "rfc4180":
        csvFormat = CSVFormat.RFC4180;
        break;

    case "tdf":
        csvFormat = CSVFormat.TDF;
        break;

    case "pdl":
        csvFormat = PDL;
        break;

    default:
        // The original message contained a literal SLF4J-style "{}" placeholder
        // (never substituted in a plain string) and ran "are" into "DEFAULT".
        throw new IllegalArgumentException(
                "Format '" + csvFormatString + "' specified is not one of the allowed formats. "
                        + "Allowed formats are DEFAULT, EXCEL, MYSQL, RFC4180, PDL and TDF");
    }

    try {
        outSchema = Schema.parseJson(config.schema);
        fields = outSchema.getFields();
    } catch (IOException e) {
        // Preserve the cause so the underlying JSON error is not lost.
        throw new IllegalArgumentException(
                "Format of schema specified is invalid. Please check the format.", e);
    }
}

From source file:com.streamsets.pipeline.lib.parser.delimited.TestDelimitedCharDataParser.java

@Test
public void testParseIgnoreHeaderWithListMap() throws Exception {
    OverrunReader reader = new OverrunReader(new StringReader("A,B\na,b"), 1000, true, false);
    DataParser parser = new DelimitedCharDataParser(getContext(), "id", reader, 0, 0, CSVFormat.DEFAULT,
            CsvHeader.IGNORE_HEADER, -1, CsvRecordType.LIST_MAP);
    Assert.assertEquals("4", parser.getOffset());
    Record record = parser.parse();// www  .  ja  va 2 s.com
    Assert.assertNotNull(record);
    Assert.assertEquals("id::4", record.getHeader().getSourceId());
    Assert.assertEquals("a", record.get().getValueAsListMap().get("0").getValueAsString());
    Assert.assertEquals("b", record.get().getValueAsListMap().get("1").getValueAsString());
    Assert.assertEquals("7", parser.getOffset());
    record = parser.parse();
    Assert.assertNull(record);
    Assert.assertEquals("-1", parser.getOffset());
    parser.close();
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.sequence.annotator.OnlyFilesMatchingPredictionsReader.java

/**
 * Reads token-level predictions from a CSV file (comment lines start with '#')
 * and groups consecutive rows sharing a sequence id (column 3) into
 * {@link Sequence} objects. Column 1 holds the predicted tag, column 2 the token.
 *
 * @param tokenLevelPredictionsCsvFile1 the predictions CSV to load
 * @return the sequences in file order; the final (possibly empty) sequence is included
 * @throws IOException if the file cannot be read or parsed
 */
public static List<Sequence> extractSequences(File tokenLevelPredictionsCsvFile1) throws IOException {
    List<Sequence> result = new ArrayList<>();

    String prevSeqId = null;
    Sequence currentSequence = new Sequence();
    int tokenCounter = 0;

    // try-with-resources: the original never closed the parser, leaking the
    // underlying FileReader.
    // NOTE(review): FileReader uses the platform default charset — confirm
    // whether the predictions file is guaranteed to match it.
    try (CSVParser csvParser = new CSVParser(new FileReader(tokenLevelPredictionsCsvFile1),
            CSVFormat.DEFAULT.withCommentMarker('#'))) {
        for (CSVRecord csvRecord : csvParser) {
            // row for particular instance (token)
            String predictedTag = csvRecord.get(1);
            String token = csvRecord.get(2);
            String seqId = csvRecord.get(3);

            TokenEntry tokenEntry = new TokenEntry(token, predictedTag);

            // if the token belongs to the previous seqId, add it to the sequence
            if (prevSeqId == null || seqId.equals(prevSeqId)) {
                currentSequence.getTokens().add(tokenEntry);
            } else {
                // otherwise start a new sequence
                result.add(currentSequence);

                currentSequence = new Sequence();
                currentSequence.getTokens().add(tokenEntry);
            }

            prevSeqId = seqId;
            tokenCounter++;
        }
    }

    // don't forget to add the last sequence
    result.add(currentSequence);

    System.out.println("Loaded " + result.size() + " sequences with total " + tokenCounter + " tokens.");

    return result;
}