Example usage for org.apache.commons.csv CSVPrinter print

List of usage examples for org.apache.commons.csv CSVPrinter print

Introduction

In this page you can find the example usage for org.apache.commons.csv CSVPrinter print.

Prototype

public void print(final Object value) throws IOException 

Source Link

Document

Prints the given value, converted to a string, as the next field of the current record (line).

Usage

From source file:com.chargebee.MethodBank.MethodBank.java

/**
 * Writes each element of {@code s} as a field on the current CSV record,
 * substituting an empty string for null entries, then ends the record.
 *
 * @param printer target CSV printer
 * @param s       values to print; null elements become ""
 * @throws Exception propagated from the underlying printer
 */
public static void print(CSVPrinter printer, String[] s) throws Exception {
    for (int i = 0; i < s.length; i++) {
        String value = s[i];
        if (value == null) {
            value = "";
        }
        printer.print(value);
    }
    printer.println();
}

From source file:edu.caltech.ipac.firefly.server.util.DsvToDataGroup.java

/**
 * Serializes a DataGroup as delimiter-separated values to the given writer.
 * Emits one header record of column key names followed by one record per data
 * row; nothing is written when {@code data} is null or empty.
 *
 * @param writer destination; always closed (via the wrapping BufferedWriter) on exit
 * @param data   tabular data to serialize; may be null
 * @param format CSV dialect to use
 * @throws IOException declared, but in practice failures are swallowed by the
 *                     broad catch below and only printed to stderr
 */
public static void write(Writer writer, DataGroup data, CSVFormat format) throws IOException {

    BufferedWriter outf = new BufferedWriter(writer, IpacTableUtil.FILE_IO_BUFFER_SIZE);
    try { // NOTE(review): catch (Exception) + printStackTrace hides failures from callers
        CSVPrinter printer = new CSVPrinter(outf, format);

        if (data != null && data.size() > 0) {
            // header record: one field per column definition
            for (DataType t : data.getDataDefinitions()) {
                printer.print(t.getKeyName());
            }
            printer.println();

            // one CSV record per data row
            for (DataObject row : data.values()) {
                for (String s : row.getFormatedData()) {
                    // NOTE(review): throws NPE if a formatted cell is null — confirm upstream guarantee
                    printer.print(s.trim());
                }
                printer.println();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (outf != null) {
            outf.close();
        }
    }
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.sequence.significance.SignificanceMain.java

/**
 * Prints table to output string as CSV/* w  ww. j  a v a 2  s.c  o  m*/
 *
 * @param out   output
 * @param <T>   value type
 * @param table table
 * @throws IOException
 */
public static <T> String tableToCsv(Table<String, String, Boolean> table) throws IOException {
    StringWriter sw = new StringWriter();
    CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);

    List<String> firstRow = new ArrayList<>();
    firstRow.add(" ");
    firstRow.addAll(table.columnKeySet());
    printer.printRecord(firstRow);

    for (String rowKey : table.rowKeySet()) {
        printer.print(rowKey);
        for (String columnKey : table.columnKeySet()) {
            printer.print(table.get(rowKey, columnKey));
        }
        printer.println();
    }

    printer.close();

    return sw.toString();
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.sequence.evaluation.helpers.FinalTableExtractor.java

/**
 * Serializes a table to CSV. The first record holds a blank cell followed by
 * the column keys; each subsequent record holds the row key followed by that
 * row's cell values in column-key order.
 *
 * @param <T>   cell value type; cells are rendered by the printer's string conversion
 * @param table table to serialize
 * @return the CSV text
 * @throws IOException if the printer fails
 */
public static <T> String tableToCsv(Table<String, String, T> table) throws IOException {
    StringWriter sw = new StringWriter();
    // try-with-resources: the original leaked the printer when printRecord/print threw
    try (CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT)) {
        List<String> firstRow = new ArrayList<>();
        firstRow.add(" ");
        firstRow.addAll(table.columnKeySet());
        printer.printRecord(firstRow);

        for (String rowKey : table.rowKeySet()) {
            printer.print(rowKey);
            for (String columnKey : table.columnKeySet()) {
                printer.print(table.get(rowKey, columnKey));
            }
            printer.println();
        }
    }
    return sw.toString();
}

From source file:biz.webgate.dominoext.poi.component.kernel.simpleviewexport.CSVExportProcessor.java

/**
 * Renders the export model as CSV and streams it to the HTTP response with
 * no-cache headers and an inline content-disposition. Any failure is routed
 * to the shared error-page builder.
 *
 * @param expModel rows/columns to export
 * @param uis      export component (header flag, download file name)
 * @param hsr      response to write the CSV to
 * @param dth      date/time formatting helper passed to convertValue
 */
public void process2HTTP(ExportModel expModel, UISimpleViewExport uis, HttpServletResponse hsr,
        DateTimeHelper dth) {
    try {
        ByteArrayOutputStream csvBAOS = new ByteArrayOutputStream();
        // NOTE(review): platform-default charset preserved from the original — confirm if UTF-8 was intended
        OutputStreamWriter csvWriter = new OutputStreamWriter(csvBAOS);
        // try-with-resources: the original never closed the printer; close() also flushes,
        // so the byte array is complete before the headers are written below
        try (CSVPrinter csvPrinter = new CSVPrinter(csvWriter, CSVFormat.DEFAULT)) {
            // header record (optional)
            if (uis.isIncludeHeader()) {
                for (ExportColumn expColumn : expModel.getColumns()) {
                    csvPrinter.print(expColumn.getColumnName());
                }
                csvPrinter.println();
            }
            // one record per data row
            for (ExportDataRow expRow : expModel.getRows()) {
                for (ExportColumn expColumn : expModel.getColumns()) {
                    csvPrinter.print(convertValue(expRow.getValue(expColumn.getPosition()), expColumn, dth));
                }
                csvPrinter.println();
            }
        }

        hsr.setContentType("text/csv");
        hsr.setHeader("Cache-Control", "no-cache");
        hsr.setDateHeader("Expires", -1);
        hsr.setContentLength(csvBAOS.size());
        hsr.addHeader("Content-disposition", "inline; filename=\"" + uis.getDownloadFileName() + "\"");
        OutputStream os = hsr.getOutputStream();
        csvBAOS.writeTo(os);
        os.close();
    } catch (Exception e) {
        ErrorPageBuilder.getInstance().processError(hsr, "Error during SVE-Generation (CSV Export)", e);
    }
}

From source file:edu.unc.lib.dl.admin.controller.ExportController.java

/**
 * Writes one CSV record describing the given object: vitals (type, pid,
 * title, path, label, depth), deletion status, added/updated dates,
 * DATA_FILE datastream info (mime type, checksum, size), and container
 * child count. Missing values are printed as empty fields so the column
 * layout stays fixed.
 *
 * @param printer destination CSV printer
 * @param object  metadata record to export
 * @throws IOException if printing fails
 */
private void printObject(CSVPrinter printer, BriefObjectMetadata object) throws IOException {

    // Vitals: object type, pid, title, path, label, depth

    printer.print(object.getResourceType());
    printer.print(object.getPid());
    printer.print(object.getTitle());
    printer.print(object.getAncestorNames());

    String label = object.getLabel();

    if (label != null) {
        printer.print(label);
    } else {
        printer.print("");
    }

    printer.print(object.getAncestorPathFacet().getHighestTier());

    // Status: deleted — Boolean.valueOf replaces the deprecated new Boolean(...) constructor

    printer.print(Boolean.valueOf(
            object.getStatus().contains("Deleted") || object.getStatus().contains("Parent Deleted")));

    // Dates: added, updated

    Date added = object.getDateAdded();

    if (added != null) {
        printer.print(dateFormat.format(added));
    } else {
        printer.print("");
    }

    Date updated = object.getDateUpdated();

    if (updated != null) {
        printer.print(dateFormat.format(updated));
    } else {
        printer.print("");
    }

    // DATA_FILE info: mime type, checksum, file size

    Datastream dataFileDatastream = null;

    if (ResourceType.File.equals(object.getResourceType())) {
        dataFileDatastream = object.getDatastreamObject(ContentModelHelper.Datastream.DATA_FILE.toString());
    }

    if (dataFileDatastream != null) {
        printer.print(dataFileDatastream.getMimetype());
        printer.print(dataFileDatastream.getChecksum());

        Long filesize = dataFileDatastream.getFilesize();

        // If we don't have a filesize for whatever reason, print a blank
        if (filesize != null && filesize >= 0) {
            printer.print(filesize);
        } else {
            printer.print("");
        }
    } else {
        printer.print("");
        printer.print("");
        printer.print("");
    }

    // Container info: child count

    if (object.getContentModel().contains(ContentModelHelper.Model.CONTAINER.toString())) {
        Long childCount = object.getCountMap().get("child");

        // If we don't have a childCount we will assume that the container contains zero
        // items, because the Solr query asked for facet.mincount=1
        if (childCount != null && childCount > 0) {
            printer.print(childCount);
        } else {
            // autoboxed 0L replaces the deprecated new Long(0) constructor
            printer.print(0L);
        }
    } else {
        printer.print("");
    }

    printer.println();

}

From source file:net.sourceforge.ganttproject.io.GanttCSVExport.java

/**
 * Writes the resource-table header record: one localized column name per
 * enabled export option, followed by one column per custom resource
 * property, then a blank record separating the header from the data rows.
 *
 * @param writer destination CSV printer
 * @throws IOException if printing fails
 */
private void writeResourceHeaders(CSVPrinter writer) throws IOException {
    if (csvOptions.bExportResourceID) {
        writer.print(i18n("tableColID"));
    }
    if (csvOptions.bExportResourceName) {
        writer.print(i18n("tableColResourceName"));
    }
    if (csvOptions.bExportResourceMail) {
        writer.print(i18n("tableColResourceEMail"));
    }
    if (csvOptions.bExportResourcePhone) {
        writer.print(i18n("tableColResourcePhone"));
    }
    if (csvOptions.bExportResourceRole) {
        writer.print(i18n("tableColResourceRole"));
    }
    // enhanced for replaces the index-based loop (the index was only used for get(i))
    for (CustomPropertyDefinition def : myProject.getResourceCustomPropertyManager().getDefinitions()) {
        writer.print(def.getName());
    }
    // two println() calls: end the header record, then emit a blank separator record
    writer.println();
    writer.println();
}

From source file:net.sourceforge.ganttproject.io.GanttCSVExport.java

/**
 * Writes the task-table header record: one column name per enabled export
 * option, then the predecessors column (always), then one column per custom
 * task property, then a blank record separating the header from the data rows.
 *
 * @param writer destination CSV printer
 * @throws IOException if printing fails
 */
private void writeTaskHeaders(CSVPrinter writer) throws IOException {
    if (csvOptions.bExportTaskID) {
        writer.print(TaskDefaultColumn.ID.getName());
    }
    if (csvOptions.bExportTaskName) {
        writer.print(TaskDefaultColumn.NAME.getName());
    }
    if (csvOptions.bExportTaskStartDate) {
        writer.print(TaskDefaultColumn.BEGIN_DATE.getName());
    }
    if (csvOptions.bExportTaskEndDate) {
        writer.print(TaskDefaultColumn.END_DATE.getName());
    }
    if (csvOptions.bExportTaskDuration) {
        writer.print(TaskDefaultColumn.DURATION.getName());
    }
    if (csvOptions.bExportTaskPercent) {
        writer.print(TaskDefaultColumn.COMPLETION.getName());
    }
    if (csvOptions.bExportTaskWebLink) {
        writer.print(i18n("webLink"));
    }
    if (csvOptions.bExportTaskResources) {
        writer.print(i18n("resources"));
    }
    if (csvOptions.bExportTaskNotes) {
        writer.print(i18n("notes"));
    }
    // predecessors column is unconditional, unlike the options above
    writer.print(TaskDefaultColumn.PREDECESSORS.getName());
    for (CustomPropertyDefinition def : myProject.getTaskCustomColumnManager().getDefinitions()) {
        writer.print(def.getName());
    }
    // two println() calls: end the header record, then a blank separator record
    // (mirrors writeResourceHeaders — presumably an intentional layout choice)
    writer.println();
    writer.println();
}

From source file:com.itemanalysis.psychometrics.irt.estimation.ItemResponseSimulator.java

/**
 * Generates a comma separated file (CSV file) of item responses.
 *
 * @param outputFile complete path and file name of output file
 * @param includeID include an examinee ID number in the first column if true. Omits the ID if false.
 * @param includeHeader if true will include variable names in first row of CSV file.
 * @throws IOException/*  ww w  .  j  a v a2  s  . com*/
 */
public void generateData(String outputFile, boolean includeID, boolean includeHeader) throws IOException {
    byte[][] x = generateData();
    int baseID = nPeople * 10 + 1;

    Writer writer = null;
    CSVPrinter printer = null;
    File file = new File(outputFile);

    try {
        writer = new OutputStreamWriter(new FileOutputStream(file));
        printer = new CSVPrinter(writer, CSVFormat.DEFAULT.withCommentMarker('#'));

        if (includeHeader) {
            if (includeID)
                printer.print("ID");
            for (int j = 0; j < nItems; j++) {
                printer.print("V" + (j + 1));
            }
            printer.println();
        }

        for (int i = 0; i < nPeople; i++) {
            if (includeID)
                printer.print(baseID);
            for (int j = 0; j < nItems; j++) {
                printer.print(x[i][j]);
            }
            printer.println();
            baseID++;
        }
    } catch (IOException ex) {
        throw (ex);
    } finally {
        if (writer != null)
            writer.close();
        if (printer != null)
            printer.close();
    }

}

From source file:at.alladin.rmbt.statisticServer.export.ExportResource.java

/**
 * Restlet GET handler exporting open test results as CSV (optionally zipped).
 * Three modes are selected via request attributes: last N hours ("hours",
 * limited to one week), a specific month ("year"/"month"), or the default
 * last-31-days window. Generated files are cached in java.io.tmpdir and
 * re-served while younger than a mode-specific threshold; generation writes
 * to a "_tmp" file that is atomically renamed when complete.
 *
 * @param entity request entity (unused)
 * @return a representation streaming the CSV/ZIP, or null on SQL failure
 */
@Get
public Representation request(final String entity) {
    //Before doing anything => check if a cached file already exists and is new enough
    String property = System.getProperty("java.io.tmpdir");

    final String filename_zip;
    final String filename_csv;

    //allow filtering by month/year
    int year = -1;
    int month = -1;
    int hours = -1;
    boolean hoursExport = false;
    boolean dateExport = false;

    if (getRequest().getAttributes().containsKey("hours")) { // export by hours
        try {
            hours = Integer.parseInt(getRequest().getAttributes().get("hours").toString());
        } catch (NumberFormatException ex) {
            //Nothing -> just fall back
        }
        if (hours <= 7 * 24 && hours >= 1) { //limit to 1 week (avoid DoS)
            hoursExport = true;
        }
    } else if (!hoursExport && getRequest().getAttributes().containsKey("year")) { // export by month/year 
        try {
            year = Integer.parseInt(getRequest().getAttributes().get("year").toString());
            month = Integer.parseInt(getRequest().getAttributes().get("month").toString());
        } catch (NumberFormatException ex) {
            //Nothing -> just fall back
        }
        if (year < 2099 && month > 0 && month <= 12 && year > 2000) {
            dateExport = true;
        }
    }

    // pick file names and cache lifetime per export mode
    if (hoursExport) {
        filename_zip = FILENAME_ZIP_HOURS.replace("%HOURS%", String.format("%03d", hours));
        filename_csv = FILENAME_CSV_HOURS.replace("%HOURS%", String.format("%03d", hours));
        cacheThresholdMs = 5 * 60 * 1000; //5 minutes
    } else if (dateExport) {
        filename_zip = FILENAME_ZIP.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        filename_csv = FILENAME_CSV.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        cacheThresholdMs = 23 * 60 * 60 * 1000; //23 hours
    } else {
        filename_zip = FILENAME_ZIP_CURRENT;
        filename_csv = FILENAME_CSV_CURRENT;
        cacheThresholdMs = 3 * 60 * 60 * 1000; //3 hours
    }

    final File cachedFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
    final File generatingFile = new File(
            property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
    if (cachedFile.exists()) {

        //check if file has been recently created OR a file is currently being created
        if (((cachedFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())
                || (generatingFile.exists()
                        && (generatingFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())) {

            //if so, return the cached file instead of a cost-intensive new one
            final OutputRepresentation result = new OutputRepresentation(
                    zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {

                @Override
                public void write(OutputStream out) throws IOException {
                    InputStream is = new FileInputStream(cachedFile);
                    IOUtils.copy(is, out);
                    out.close();
                }

            };
            if (zip) {
                final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
                disposition.setFilename(filename_zip);
                result.setDisposition(disposition);
            }
            return result;

        }
    }

    // time filter is concatenated into the SQL, but month/year/hours were
    // parsed via Integer.parseInt above, so no untrusted text reaches the query
    final String timeClause;

    if (dateExport)
        timeClause = " AND (EXTRACT (month FROM t.time AT TIME ZONE 'UTC') = " + month
                + ") AND (EXTRACT (year FROM t.time AT TIME ZONE 'UTC') = " + year + ") ";
    else if (hoursExport)
        timeClause = " AND time > now() - interval '" + hours + " hours' ";
    else
        timeClause = " AND time > current_date - interval '31 days' ";

    // the six '?' placeholders all receive the same accuracy limit (bound below)
    final String sql = "SELECT" + " ('P' || t.open_uuid) open_uuid,"
            + " ('O' || t.open_test_uuid) open_test_uuid,"
            + " to_char(t.time AT TIME ZONE 'UTC', 'YYYY-MM-DD HH24:MI:SS') time_utc,"
            + " nt.group_name cat_technology," + " nt.name network_type,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_lat" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_lat*1111)/1111" + " ELSE null"
            + " END) lat,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_long" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_long*741)/741 " + " ELSE null"
            + " END) long," + " (CASE WHEN ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN"
            + " 'rastered'" + //make raster transparent
            " ELSE t.geo_provider" + " END) loc_src,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') "
            + " THEN round(t.geo_accuracy::float * 10)/10 "
            + " WHEN (t.geo_accuracy < 100) AND ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN 100"
            + // limit accuracy to 100m
            " WHEN (t.geo_accuracy < ?) THEN round(t.geo_accuracy::float * 10)/10"
            + " ELSE null END) loc_accuracy, "
            + " (CASE WHEN (t.zip_code < 1000 OR t.zip_code > 9999) THEN null ELSE t.zip_code END) zip_code,"
            + " t.gkz gkz," + " t.country_location country_location," + " t.speed_download download_kbit,"
            + " t.speed_upload upload_kbit," + " round(t.ping_median::float / 100000)/10 ping_ms,"
            + " t.lte_rsrp," + " t.lte_rsrq," + " ts.name server_name," + " duration test_duration,"
            + " num_threads," + " t.plattform platform," + " COALESCE(adm.fullname, t.model) model,"
            + " client_software_version client_version," + " network_operator network_mcc_mnc,"
            + " network_operator_name network_name," + " network_sim_operator sim_mcc_mnc," + " nat_type,"
            + " public_ip_asn asn," + " client_public_ip_anonymized ip_anonym,"
            + " (ndt.s2cspd*1000)::int ndt_download_kbit," + " (ndt.c2sspd*1000)::int ndt_upload_kbit,"
            + " COALESCE(t.implausible, false) implausible," + " t.signal_strength" + " FROM test t"
            + " LEFT JOIN network_type nt ON nt.uid=t.network_type"
            + " LEFT JOIN device_map adm ON adm.codename=t.model"
            + " LEFT JOIN test_server ts ON ts.uid=t.server_id" + " LEFT JOIN test_ndt ndt ON t.uid=ndt.test_id"
            + " WHERE " + " t.deleted = false" + timeClause + " AND status = 'FINISHED'" + " ORDER BY t.uid";

    // fetch the whole result set into memory before streaming begins
    final String[] columns;
    final List<String[]> data = new ArrayList<>();
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(sql);

        //insert filter for accuracy
        double accuracy = Double.parseDouble(settings.getString("RMBT_GEO_ACCURACY_DETAIL_LIMIT"));
        ps.setDouble(1, accuracy);
        ps.setDouble(2, accuracy);
        ps.setDouble(3, accuracy);
        ps.setDouble(4, accuracy);
        ps.setDouble(5, accuracy);
        ps.setDouble(6, accuracy);

        if (!ps.execute())
            return null;
        rs = ps.getResultSet();

        final ResultSetMetaData meta = rs.getMetaData();
        final int colCnt = meta.getColumnCount();
        columns = new String[colCnt];
        for (int i = 0; i < colCnt; i++)
            columns[i] = meta.getColumnName(i + 1);

        while (rs.next()) {
            final String[] line = new String[colCnt];

            for (int i = 0; i < colCnt; i++) {
                final Object obj = rs.getObject(i + 1);
                line[i] = obj == null ? null : obj.toString();
            }

            data.add(line);
        }
    } catch (final SQLException e) {
        // NOTE(review): error only goes to stderr and the caller receives null
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (ps != null)
                ps.close();
        } catch (final SQLException e) {
            e.printStackTrace();
        }
    }

    final OutputRepresentation result = new OutputRepresentation(
            zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {
        @Override
        public void write(OutputStream out) throws IOException {
            //cache in file => create temporary temporary file (to 
            // handle errors while fulfilling a request)
            String property = System.getProperty("java.io.tmpdir");
            final File cachedFile = new File(
                    property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
            OutputStream outf = new FileOutputStream(cachedFile);

            if (zip) {
                // zip archive carries the license file plus the CSV entry
                final ZipOutputStream zos = new ZipOutputStream(outf);
                final ZipEntry zeLicense = new ZipEntry("LIZENZ.txt");
                zos.putNextEntry(zeLicense);
                final InputStream licenseIS = getClass().getResourceAsStream("DATA_LICENSE.txt");
                IOUtils.copy(licenseIS, zos);
                licenseIS.close();

                final ZipEntry zeCsv = new ZipEntry(filename_csv);
                zos.putNextEntry(zeCsv);
                outf = zos;
            }

            final OutputStreamWriter osw = new OutputStreamWriter(outf);
            final CSVPrinter csvPrinter = new CSVPrinter(osw, csvFormat);

            // header record, then one record per pre-fetched result row
            for (final String c : columns)
                csvPrinter.print(c);
            csvPrinter.println();

            for (final String[] line : data) {
                for (final String f : line)
                    csvPrinter.print(f);
                csvPrinter.println();
            }
            csvPrinter.flush();

            if (zip)
                outf.close();

            //if we reach this code, the data is now cached in a temporary tmp-file
            //so, rename the file for "production use2
            //concurrency issues should be solved by the operating system
            File newCacheFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
            Files.move(cachedFile.toPath(), newCacheFile.toPath(), StandardCopyOption.ATOMIC_MOVE,
                    StandardCopyOption.REPLACE_EXISTING);

            FileInputStream fis = new FileInputStream(newCacheFile);
            IOUtils.copy(fis, out);
            fis.close();
            out.close();
        }
    };
    if (zip) {
        final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
        disposition.setFilename(filename_zip);
        result.setDisposition(disposition);
    }

    return result;
}