Example usage for com.fasterxml.jackson.dataformat.csv CsvMapper CsvMapper

Introduction

On this page you can find example usage for the com.fasterxml.jackson.dataformat.csv CsvMapper constructor, CsvMapper().

Prototype

public CsvMapper() 
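
The no-argument constructor creates a mapper preconfigured for CSV; a schema is then attached per reader or writer. A minimal, self-contained sketch (the column names and values here are illustrative, not taken from the examples below):

import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

import java.util.LinkedHashMap;
import java.util.Map;

public class CsvMapperDemo {
    public static void main(String[] args) throws Exception {
        CsvMapper mapper = new CsvMapper();
        // Two-column schema with a header row.
        CsvSchema schema = CsvSchema.builder()
                .addColumn("name")
                .addColumn("age")
                .setUseHeader(true)
                .build();
        Map<String, String> row = new LinkedHashMap<>();
        row.put("name", "Alice");
        row.put("age", "30");
        // Prints the header line followed by the data row.
        System.out.print(mapper.writer(schema).writeValueAsString(row));
    }
}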

Usage

From source file:hrytsenko.gscripts.io.CsvFiles.java

/**
 * Saves records into a file.
 * 
 * <p>
 * If the file already exists, it will be overwritten.
 * 
 * @param records
 *            the list of records to save.
 * @param args
 *            the named arguments.
 * 
 * @throws AppException
 *             if the file could not be saved.
 */
public static void saveCsv(List<Map<String, ?>> records, Map<String, ?> args) {
    if (records.isEmpty()) {
        LOGGER.info("No records to save.");
        return;
    }

    Path path = NamedArgs.findPath(args);
    LOGGER.info("Save {}.", path.getFileName());

    CsvSchema.Builder csvSchema = schemaFrom(args).setUseHeader(true);
    Records.columns(records).forEach(csvSchema::addColumn);

    try (Writer writer = Files.newBufferedWriter(path, charsetFrom(args))) {
        CsvMapper csvMapper = new CsvMapper();
        csvMapper.configure(CsvGenerator.Feature.ALWAYS_QUOTE_STRINGS, true);

        csvMapper.writer().with(csvSchema.build()).writeValue(writer, Records.normalize(records));
    } catch (IOException exception) {
        throw new AppException(String.format("Could not save file %s.", path.getFileName()), exception);
    }
}

From source file:ro.fortsoft.dataset.csv.CsvDataSet.java

protected CsvMapper createCsvMapper() {
    CsvMapper mapper = new CsvMapper();
    //        mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);

    return mapper;
}
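
The commented-out line refers to CsvParser.Feature.WRAP_AS_ARRAY, which makes the parser present each row as a JSON array so rows can be bound without declaring a schema. A sketch of what enabling it permits (the file name is hypothetical):

CsvMapper mapper = new CsvMapper();
mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
// With the feature enabled, each row binds to String[] with no schema.
MappingIterator<String[]> rows = mapper.readerFor(String[].class)
        .readValues(new File("input.csv"));
while (rows.hasNext()) {
    String[] columns = rows.next();
}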

From source file:ed.cracken.code.SimpleTestExp1.java

public static List<Map<?, ?>> readObjectsFromCsv(String file) throws IOException {
    CsvSchema bootstrap = CsvSchema.emptySchema().withHeader();
    CsvMapper csvMapper = new CsvMapper();
    // readValues(String) would parse the string itself as CSV content,
    // so wrap the path in a File to read from disk.
    MappingIterator<Map<?, ?>> mappingIterator = csvMapper.readerFor(Map.class).with(bootstrap)
            .readValues(new File(file));

    return mappingIterator.readAll();
}

From source file:com.fortify.processrunner.file.ProcessorFileSubmitIssueForVulnerabilities.java

@Override
protected boolean processMaps(Context context, String groupName, List<Object> currentGroup,
        List<LinkedHashMap<String, Object>> listOfMaps) {
    CsvSchema.Builder schemaBuilder = CsvSchema.builder();
    for (String col : getFields().keySet()) {
        schemaBuilder.addColumn(col);
    }
    CsvSchema schema = schemaBuilder.build().withHeader();
    try {
        new CsvMapper().writer(schema).writeValue(new File(groupName), listOfMaps);
    } catch (Exception e) {
        throw new RuntimeException("Error writing data to file " + groupName, e);
    }
    LOG.info(String.format("[File] Submitted %d vulnerabilities to %s", currentGroup.size(), groupName));
    return true;
}

From source file:edu.cmu.cs.lti.discoursedb.io.mturk.converter.MturkConverter.java

private Iterable<Map<String, String>> csvIteratorNoHeaders(String filename, String fieldlist)
        throws JsonProcessingException, IOException {
    InputStream in = new FileInputStream(filename);
    MappingIterator<Map<String, String>> iterator = new CsvMapper().readerFor(Map.class)
            .with(mkCsvSchema(fieldlist).build()).readValues(in);
    return () -> iterator;
}
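
The mkCsvSchema helper is not shown in this source file; a plausible implementation, assuming fieldlist is a comma-separated list of column names for files without header rows:

private static CsvSchema.Builder mkCsvSchema(String fieldlist) {
    CsvSchema.Builder builder = CsvSchema.builder();
    // One column per field name; no header row is configured,
    // matching the "NoHeaders" reader above.
    for (String field : fieldlist.split(",")) {
        builder.addColumn(field.trim());
    }
    return builder;
}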

From source file:eu.scape_project.cdx_creator.CDXCreationTask.java

public void createIndex() {
    FileInputStream fileInputStream = null;
    ArchiveReader reader = null;
    FileOutputStream outputStream = null;
    try {
        fileInputStream = new FileInputStream(archiveFile);
        reader = ArchiveReaderFactory.getReader(fileInputStream, this.archiveFileName);
        reader.setComputePayloadDigest(config.isCreatePayloadDigest());
        List<CdxArchiveRecord> cdxArchRecords = new ArrayList<CdxArchiveRecord>();
        while (reader.hasNext()) {
            ArchiveRecord archRec = (ArchiveRecord) reader.next();
            CdxArchiveRecord cdxArchRec = CdxArchiveRecord.fromArchiveRecord(archRec);
            cdxArchRec.setContainerFileName(archiveFileName);
            cdxArchRec.setContainerLengthStr(Long.toString(archiveFile.length()));
            cdxArchRecords.add(cdxArchRec);
        }

        CsvMapper mapper = new CsvMapper();
        mapper.setDateFormat(GMTGTechDateFormat);

        String cdxfileCsColumns = config.getCdxfileCsColumns();
        List<String> cdxfileCsColumnsList = Arrays.asList(cdxfileCsColumns.split("\\s*,\\s*"));
        String[] cdxfileCsColumnsArray = cdxfileCsColumnsList.toArray(new String[cdxfileCsColumnsList.size()]);

        CsvSchema.Builder builder = CsvSchema.builder();
        for (String cdxField : cdxfileCsColumnsList) {
            builder.addColumn(cdxField);
        }
        builder.setColumnSeparator(' ');
        CsvSchema schema = builder.build();
        schema = schema.withoutQuoteChar();

        SimpleFilterProvider filterProvider = new SimpleFilterProvider().addFilter("cdxfields",
                FilterExceptFilter.filterOutAllExcept(cdxfileCsColumnsArray));

        ObjectWriter cdxArchRecordsWriter = mapper.writer(filterProvider).withSchema(schema);

        PrintStream pout = null;
        String outputPathStr = config.getOutputStr();
        if (outputPathStr != null) {
            FileOutputStream fos;
            try {
                fos = new FileOutputStream(outputPathStr, true);
                pout = new PrintStream(fos);
                System.setOut(pout);
            } catch (FileNotFoundException ex) {
                LOG.error("File not found error", ex);
            }
        }
        System.out.println(" " + config.getCdxfileCsHeader());

        cdxArchRecordsWriter.writeValue(System.out, cdxArchRecords);

        if (pout != null) {
            pout.close();
        }

    } catch (FileNotFoundException ex) {
        LOG.error("File not found error", ex);
    } catch (IOException ex) {
        LOG.error("I/O Error", ex);
    } finally {
        try {
            if (fileInputStream != null) {
                fileInputStream.close();
            }

            if (outputStream != null) {
                outputStream.close();
            }

        } catch (IOException ex) {
            LOG.error("I/O Error", ex);
        }
    }
}

From source file:datadidit.helpful.hints.processors.csv.converter.ConvertCSVToJSON.java

@Override
protected void init(final ProcessorInitializationContext context) {
    final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
    descriptors.add(HEADER);
    descriptors.add(FIELD_NAMES);
    this.descriptors = Collections.unmodifiableList(descriptors);

    final Set<Relationship> relationships = new HashSet<Relationship>();
    relationships.add(REL_SUCCESS);
    relationships.add(REL_FAILURE);
    this.relationships = Collections.unmodifiableSet(relationships);

    csvMapper = new CsvMapper();
}

From source file:hrytsenko.csv.IO.java

/**
 * Saves records into a CSV file.
 * 
 * <p>
 * If the file already exists, it will be overwritten.
 * 
 * @param args
 *            the named arguments {@link IO}.
 * 
 * @throws IOException
 *             if the file could not be written.
 */
public static void save(Map<String, ?> args) throws IOException {
    Path path = getPath(args);
    LOGGER.info("Save: {}.", path.getFileName());

    @SuppressWarnings("unchecked")
    Collection<Record> records = (Collection<Record>) args.get("records");
    if (records.isEmpty()) {
        LOGGER.info("No records to save.");
        return;
    }

    try (Writer dataWriter = newBufferedWriter(path, getCharset(args), StandardOpenOption.CREATE,
            StandardOpenOption.TRUNCATE_EXISTING)) {
        Set<String> columns = new LinkedHashSet<>();
        List<Map<String, String>> rows = new ArrayList<>();
        for (Record record : records) {
            Map<String, String> values = record.values();
            columns.addAll(values.keySet());
            rows.add(values);
        }

        CsvSchema.Builder csvSchema = getSchema(args).setUseHeader(true);
        for (String column : columns) {
            csvSchema.addColumn(column);
        }
        CsvMapper csvMapper = new CsvMapper();
        ObjectWriter csvWriter = csvMapper.writer().withSchema(csvSchema.build());
        csvWriter.writeValue(dataWriter, rows);
    }
}

From source file:edu.cmu.cs.lti.discoursedb.io.mturk.converter.MturkConverter.java

private Iterable<Map<String, String>> csvIteratorExistingHeaders(String filename)
        throws JsonProcessingException, IOException {
    // FileInputStream has no charset parameter, so wrap it in an InputStreamReader:
    InputStreamReader in = new InputStreamReader(new FileInputStream(filename), "ISO-8859-1");
    MappingIterator<Map<String, String>> iterator = new CsvMapper().readerFor(Map.class)
            .with(CsvSchema.emptySchema().withColumnSeparator(',').withHeader()).readValues(in);
    return () -> iterator;
}
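
Note that the lambda hands back the same MappingIterator on every call, so the returned Iterable can be traversed only once. A hedged usage sketch (the file name and column name are hypothetical):

for (Map<String, String> row : csvIteratorExistingHeaders("responses.csv")) {
    System.out.println(row.get("workerId")); // column name is illustrative
}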

From source file:edu.cmu.cs.lti.discoursedb.io.bazaar.converter.BazaarConverter.java

private void convert(String messageFileDir, String roomFileDir, String agentname)
        throws ParseException, IOException {

    Map<String, String> roomIdNameMap = new HashMap<>();
    List<String> messages = new ArrayList<>();

    //Read input file and preprocess
    String lineFragment = null;
    for (String line : FileUtils.readLines(new File(messageFileDir))) {
        //line fragments occur in case we have line feeds in a column
        if (lineFragment != null) {
            line = lineFragment + line;
            lineFragment = null;
        }
        if (line.endsWith("\\") || line.endsWith("\\\r\f")) {
            line = line.replaceAll("\\\r\f", "");
            lineFragment = line;
        } else {
            if (line.contains("\\\"We're Ready\\\"")) {
                line = line.replaceAll("\"We're Ready\\\\\"", "We're Ready\\\\");
            }
            if (line.contains("\\\"ready\\\"")) {
                line = line.replaceAll("\\\\\"ready\\\\\"", "\\\\ready\\\\");
            }
            if (line.contains("\\\"" + agentname + "\\\"")) {
                line = line.replaceAll("\\\\\"" + agentname + "\\\\\"", "\\\\" + agentname + "\\\\");
            }
            messages.add(line);
        }
    }

    // Phase 1: read through input room file once and map all entities
    try (InputStream in = new FileInputStream(roomFileDir)) {
        CsvMapper mapper = new CsvMapper();
        CsvSchema schema = mapper.schemaFor(Room.class).withColumnSeparator(',');
        MappingIterator<Room> rIter = mapper.readerFor(Room.class).with(schema).readValues(in);
        while (rIter.hasNextValue()) {
            Room r = rIter.next();
            if (!roomIdNameMap.containsKey(r.getId()))
                roomIdNameMap.put(r.getId(), r.getName());
            converterService.mapRoom(r, dataSetName, discourseName);
        }
    } catch (IOException e) {
        log.error("Error reading room file", e);
    }

    // Phase 2: read through input message file and map relationships between room and message
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(Message.class).withColumnSeparator(',');
    for (String message : messages) {
        Message m = mapper.readerFor(Message.class).with(schema).readValue(message);
        if (m.getType().equals("text") || m.getType().equals("image") || m.getType().equals("private")) {
            converterService.mapMessage(m, dataSetName, discourseName, roomIdNameMap);
        } else {
            converterService.mapInteraction(m, dataSetName, discourseName, roomIdNameMap);
        }
    }
}
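
schemaFor(Room.class) derives the column names and their order from the bean class, which Jackson reads from its @JsonPropertyOrder annotation. The Room and Message classes are not shown in this snippet; a minimal sketch of what Room might look like, assuming a two-column file:

// Hypothetical sketch: the declared order must match the CSV layout.
@JsonPropertyOrder({ "id", "name" })
public class Room {
    private String id;
    private String name;

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
}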