Example usage for com.fasterxml.jackson.dataformat.csv CsvMapper writer

List of usage examples for com.fasterxml.jackson.dataformat.csv CsvMapper writer

Introduction

On this page you can find example usage for com.fasterxml.jackson.dataformat.csv CsvMapper writer.

Prototype

public ObjectWriter writer(SerializationFeature feature) 

Document

Factory method for constructing an ObjectWriter with the specified feature enabled on top of the settings of this mapper instance.
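
Note that the examples below call the schema-based and filter-based overloads (writer(FormatSchema) and writer(FilterProvider)) rather than the SerializationFeature overload shown in the prototype. The following is a minimal, hypothetical sketch of the schema-based pattern the examples share; the class name CsvWriterSketch and the sample column names and values are illustrative assumptions, not taken from the sources below.

import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

public class CsvWriterSketch {
    public static void main(String[] args) throws Exception {
        final CsvMapper mapper = new CsvMapper();

        // Columns are written in the order they are added; the header row is
        // emitted first because useHeader is enabled.
        final CsvSchema schema = CsvSchema.builder()
                .addColumn("name", CsvSchema.ColumnType.STRING)
                .addColumn("age", CsvSchema.ColumnType.NUMBER)
                .setUseHeader(true)
                .build();

        // writer(schema) returns a new ObjectWriter bound to this schema;
        // the mapper's own configuration is left unchanged.
        final ObjectWriter writer = mapper.writer(schema);

        // An Object[] maps positionally onto the schema columns and is
        // written as a single CSV row after the header line.
        final Object[] row = new Object[] { "alice", 42 };
        System.out.print(writer.writeValueAsString(row));
    }
}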

Usage

From source file:net.arp7.HdfsPerfTest.WriteFile.java
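
This example appends a single benchmark result row to a semicolon-separated results file, emitting the header row only when the file does not exist yet.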

private static void writeCsvResult(final FileIoStats stats) {
    if (params.getResultCsvFile() == null) {
        return;
    }

    final Object[] results = new Object[] { new Date().toGMTString(), params.getNumFiles(),
            params.getNumThreads(), params.getReplication(), params.getBlockSize(), params.getIoSize(),
            stats.getFilesWritten(), stats.getBytesWritten(), stats.getMeanCreateTimeMs(),
            stats.getMeanWriteTimeMs(), stats.getMeanCloseTimeMs(), stats.getElapsedTimeMs(),
            (params.getFileSize() * 1000) / stats.getElapsedTimeMs(),
            (params.getNumFiles() * params.getFileSize() * 1000) / stats.getElapsedTimeMs(), params.getNote() };

    final CsvSchema schema = CsvSchema.builder().setColumnSeparator(';').setQuoteChar('"')
            .setUseHeader(!params.getResultCsvFile().exists())
            .addColumn("timestamp", CsvSchema.ColumnType.STRING)
            .addColumn("number of files", CsvSchema.ColumnType.NUMBER)
            .addColumn("number of threads", CsvSchema.ColumnType.NUMBER)
            .addColumn("replication factor", CsvSchema.ColumnType.NUMBER)
            .addColumn("block size", CsvSchema.ColumnType.NUMBER)
            .addColumn("io size", CsvSchema.ColumnType.NUMBER)
            .addColumn("total files written", CsvSchema.ColumnType.NUMBER)
            .addColumn("total bytes written", CsvSchema.ColumnType.NUMBER)
            .addColumn("mean time to create file in ms", CsvSchema.ColumnType.NUMBER)
            .addColumn("mean time to write file in ms", CsvSchema.ColumnType.NUMBER)
            .addColumn("mean time to close file in ms", CsvSchema.ColumnType.NUMBER)
            .addColumn("total ms", CsvSchema.ColumnType.NUMBER)
            .addColumn("mean throughput bytes per s", CsvSchema.ColumnType.NUMBER)
            .addColumn("aggregate throughput bytes per s", CsvSchema.ColumnType.NUMBER)
            .addColumn("note", CsvSchema.ColumnType.STRING).build();

    try (FileWriter fileWriter = new FileWriter(params.getResultCsvFile(), true)) {
        final CsvMapper mapper = new CsvMapper();
        final ObjectWriter writer = mapper.writer(schema);
        writer.writeValue(fileWriter, results);
    } catch (IOException e) {
        LOG.error("Could not write results to CSV file '{}': '{}'", params.getResultCsvFile().getPath(),
                e.getMessage());
    }
}

From source file:fi.helsinki.opintoni.config.http.converter.CsvHttpMessageConverter.java
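
This example serializes a single entry as one CSV row using the supplied schema, rethrowing any JsonProcessingException as an unchecked exception.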

private String toCsvRow(CsvMapper csvMapper, CsvSchema csvSchema, Object entry) {
    try {
        return csvMapper.writer(csvSchema).writeValueAsString(entry);
    } catch (JsonProcessingException e) {
        throw new RuntimeException(e);
    }
}

From source file:io.github.binout.jaxrs.csv.CsvMessageBodyProvider.java
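
This JAX-RS message body writer resolves the class to serialize, builds a CsvSchema for it, and writes the resulting CSV to the output stream as UTF-8 (or an empty body if no class is resolved).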

@Override
public void writeTo(Object o, Class<?> aClass, Type type, Annotation[] annotations, MediaType mediaType,
        MultivaluedMap<String, Object> multivaluedMap, OutputStream outputStream)
        throws IOException, WebApplicationException {
    CsvMapper mapper = new CsvMapper();
    String body = objectClass(o, aClass).map(csvClass -> {
        CsvSchema schema = CsvSchemaFactory.buildSchema(mapper, csvClass);
        try {
            return mapper.writer(schema).writeValueAsString(o);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }).orElse("");
    outputStream.write(body.getBytes(StandardCharsets.UTF_8));
}

From source file:org.restlet.ext.jackson.JacksonRepresentation.java
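
This Restlet representation returns a schema-bound CSV writer for CSV media types and a regular ObjectWriter for all other media types.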

/**
 * Creates a Jackson object writer based on a mapper. Has a special handling
 * for CSV media types.
 * 
 * @return The Jackson object writer.
 */
protected ObjectWriter createObjectWriter() {
    ObjectWriter result = null;

    if (MediaType.TEXT_CSV.isCompatible(getMediaType())) {
        CsvMapper csvMapper = (CsvMapper) getObjectMapper();
        CsvSchema csvSchema = createCsvSchema(csvMapper);
        result = csvMapper.writer(csvSchema);
    } else {
        result = getObjectMapper().writerFor(getObjectClass());
    }

    return result;
}

From source file:eu.scape_project.cdx_creator.CDXCreationTask.java
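
This example converts the records of a web archive file into CDX records and writes them as a space-separated, unquoted listing, using a property filter to restrict the output to the configured columns.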

public void createIndex() {
    FileInputStream fileInputStream = null;
    ArchiveReader reader = null;
    FileOutputStream outputStream = null;
    try {
        fileInputStream = new FileInputStream(archiveFile);
        reader = ArchiveReaderFactory.getReader(fileInputStream, this.archiveFileName);
        reader.setComputePayloadDigest(config.isCreatePayloadDigest());
        List<CdxArchiveRecord> cdxArchRecords = new ArrayList<CdxArchiveRecord>();
        while (reader.hasNext()) {
            ArchiveRecord archRec = (ArchiveRecord) reader.next();
            CdxArchiveRecord cdxArchRec = CdxArchiveRecord.fromArchiveRecord(archRec);
            cdxArchRec.setContainerFileName(archiveFileName);
            cdxArchRec.setContainerLengthStr(Long.toString(archiveFile.length()));
            cdxArchRecords.add(cdxArchRec);
        }

        CsvMapper mapper = new CsvMapper();
        mapper.setDateFormat(GMTGTechDateFormat);

        String cdxfileCsColumns = config.getCdxfileCsColumns();
        List<String> cdxfileCsColumnsList = Arrays.asList(cdxfileCsColumns.split("\\s*,\\s*"));
        String[] cdxfileCsColumnsArray = cdxfileCsColumnsList.toArray(new String[cdxfileCsColumnsList.size()]);

        CsvSchema.Builder builder = CsvSchema.builder();
        for (String cdxField : cdxfileCsColumnsList) {
            builder.addColumn(cdxField);
        }
        builder.setColumnSeparator(' ');
        CsvSchema schema = builder.build();
        schema = schema.withoutQuoteChar();

        SimpleFilterProvider filterProvider = new SimpleFilterProvider().addFilter("cdxfields",
                FilterExceptFilter.filterOutAllExcept(cdxfileCsColumnsArray));

        ObjectWriter cdxArchRecordsWriter = mapper.writer(filterProvider).withSchema(schema);

        PrintStream pout = null;
        String outputPathStr = config.getOutputStr();
        if (outputPathStr != null) {
            FileOutputStream fos;
            try {
                fos = new FileOutputStream(outputPathStr, true);
                pout = new PrintStream(fos);
                System.setOut(pout);
            } catch (FileNotFoundException ex) {
                LOG.error("File not found error", ex);
            }
        }
        System.out.println(" " + config.getCdxfileCsHeader());

        cdxArchRecordsWriter.writeValue(System.out, cdxArchRecords);

        if (pout != null) {
            pout.close();
        }

    } catch (FileNotFoundException ex) {
        LOG.error("File not found error", ex);
    } catch (IOException ex) {
        LOG.error("I/O Error", ex);
    } finally {
        try {
            if (fileInputStream != null) {
                fileInputStream.close();
            }

            if (outputStream != null) {
                outputStream.close();
            }

        } catch (IOException ex) {
            LOG.error("I/O Error", ex);
        }
    }
}