Example usage for org.apache.commons.csv CSVPrinter printRecords

Introduction

On this page you can find example usages of org.apache.commons.csv.CSVPrinter.printRecords collected from open-source projects.

Prototype

public void printRecords(final ResultSet resultSet) throws SQLException, IOException 

Document

Prints all the objects in the given JDBC result set.
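
printRecords also has overloads that accept an Iterable or a varargs Object array, which is what most of the examples below use. The following is a minimal, self-contained sketch of the ResultSet overload documented above; the in-memory H2 URL, the users table, and the output file name are hypothetical placeholders and assume the table already exists in your data source.

import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class ResultSetToCsv {
    public static void main(String[] args) throws Exception {
        // Hypothetical JDBC URL, query, and output path; substitute your own data source.
        try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement statement = connection.createStatement();
                ResultSet resultSet = statement.executeQuery("SELECT id, name FROM users");
                Writer writer = Files.newBufferedWriter(Paths.get("users.csv"));
                // withHeader(resultSet) copies the result set's column labels into the CSV header row.
                CSVPrinter printer = new CSVPrinter(writer, CSVFormat.DEFAULT.withHeader(resultSet))) {
            // Writes one CSV record per row of the result set.
            printer.printRecords(resultSet);
        }
    }
}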

Usage

From source file:com.siemens.sw360.exporter.CSVExport.java

@NotNull
private static ByteArrayOutputStream getCSVOutputStream(Iterable<String> csvHeaderIterable,
        Iterable<Iterable<String>> inputIterable) throws IOException {
    final ByteArrayOutputStream outB = new ByteArrayOutputStream();
    try (Writer out = new BufferedWriter(new OutputStreamWriter(outB));) {
        CSVPrinter csvPrinter = new CSVPrinter(out, CommonUtils.sw360CsvFormat);
        csvPrinter.printRecord(csvHeaderIterable);
        csvPrinter.printRecords(inputIterable);
        csvPrinter.flush();
        csvPrinter.close();
    } catch (Exception e) {
        outB.close();
        throw e;
    }

    return outB;

}

From source file:de.tudarmstadt.ukp.dkpro.tc.svmhmm.util.SVMHMMUtils.java

/**
 * Given a confusion matrix, writes it in CSV and LaTeX form to the task's output directory,
 * and also prints evaluation results (F-measure, precision, recall).
 *
 * @param context         task context
 * @param confusionMatrix confusion matrix
 * @param filePrefix      prefix of output files
 * @throws java.io.IOException
 */
public static void writeOutputResults(TaskContext context, ConfusionMatrix confusionMatrix, String filePrefix)
        throws IOException {
    // storing the results as latex confusion matrix
    String confMatrixFileTex = (filePrefix != null ? filePrefix : "") + "confusionMatrix.tex";
    File evaluationFileLaTeX = new File(
            context.getStorageLocation(Constants.TEST_TASK_OUTPUT_KEY, StorageService.AccessMode.READWRITE),
            confMatrixFileTex);
    FileUtils.writeStringToFile(evaluationFileLaTeX, confusionMatrix.toStringLatex());

    // as CSV confusion matrix
    String confMatrixFileCsv = (filePrefix != null ? filePrefix : "") + "confusionMatrix.csv";
    File evaluationFileCSV = new File(
            context.getStorageLocation(Constants.TEST_TASK_OUTPUT_KEY, StorageService.AccessMode.READWRITE),
            confMatrixFileCsv);

    CSVPrinter csvPrinter = new CSVPrinter(new FileWriter(evaluationFileCSV), CSVFormat.DEFAULT);
    csvPrinter.printRecords(confusionMatrix.toStringMatrix());
    IOUtils.closeQuietly(csvPrinter);

    // and results
    File evaluationFile = new File(
            context.getStorageLocation(Constants.TEST_TASK_OUTPUT_KEY, StorageService.AccessMode.READWRITE),
            new SVMHMMAdapter()
                    .getFrameworkFilename(TCMachineLearningAdapter.AdapterNameEntries.evaluationFile));

    PrintWriter pw = new PrintWriter(evaluationFile);
    pw.println(confusionMatrix.printNiceResults());
    pw.println(confusionMatrix.printLabelPrecRecFm());
    pw.println(confusionMatrix.printClassDistributionGold());
    IOUtils.closeQuietly(pw);
}

From source file:de.tudarmstadt.ukp.dkpro.tc.svmhmm.report.SVMHMMBatchCrossValidationReport.java

protected void aggregateResults(String testTaskCSVFile, String outputPrefix) throws Exception {
    StorageService storageService = getContext().getStorageService();

    // aggregate rows from all CSVs from all folds
    List<List<String>> allOutcomes = new ArrayList<>();

    List<TaskContextMetadata> testTasks = collectTestTasks();

    // we need test tasks!
    if (testTasks.isEmpty()) {
        throw new IllegalStateException("No test tasks found. Make sure you properly "
                + "define the test task in getTestTaskClass() (currently: " + getTestTaskClass().getName());
    }/*from  www.  j  a va2s.c  o  m*/

    // iterate over all sub tasks
    for (TaskContextMetadata subContext : testTasks) {
        // locate CSV file with outcomes (gold, predicted, token, etc.)
        File csvFile = storageService.getStorageFolder(subContext.getId(),
                Constants.TEST_TASK_OUTPUT_KEY + File.separator + testTaskCSVFile);

        // load the CSV
        CSVParser csvParser = new CSVParser(new FileReader(csvFile), CSVFormat.DEFAULT.withCommentMarker('#'));

        // and add all the rows
        for (CSVRecord csvRecord : csvParser) {
            // row for particular instance
            List<String> row = new ArrayList<>();
            for (String value : csvRecord) {
                row.add(value);
            }
            allOutcomes.add(row);
        }

        IOUtils.closeQuietly(csvParser);
    }

    // store aggregated outcomes again to CSV
    File evaluationFile = new File(getContext().getStorageLocation(Constants.TEST_TASK_OUTPUT_KEY,
            StorageService.AccessMode.READWRITE), testTaskCSVFile);
    log.debug("Evaluation file: " + evaluationFile.getAbsolutePath());

    CSVPrinter csvPrinter = new CSVPrinter(new FileWriter(evaluationFile), SVMHMMUtils.CSV_FORMAT);
    csvPrinter.printComment(SVMHMMUtils.CSV_COMMENT);
    csvPrinter.printRecords(allOutcomes);
    IOUtils.closeQuietly(csvPrinter);

    // compute confusion matrix
    ConfusionMatrix cm = new ConfusionMatrix();

    for (List<String> singleInstanceOutcomeRow : allOutcomes) {
        // first item is the gold label
        String gold = singleInstanceOutcomeRow.get(0);
        // second item is the predicted label
        String predicted = singleInstanceOutcomeRow.get(1);

        cm.increaseValue(gold, predicted);
    }

    // and write all reports
    SVMHMMUtils.writeOutputResults(getContext(), cm, outputPrefix);

    // and print detailed results
    log.info(outputPrefix + "; " + cm.printNiceResults());
    log.info(outputPrefix + "; " + cm.printLabelPrecRecFm());
}

From source file:com.goeuro.goeurotest.service.Services.java

/**
 * Writes a CSV file using a list of records and a predefined static header.
 *
 * @param recordsList list of records to write
 * @throws Exception
 */
public void writeCSV(List recordsList) throws Exception {
    FileWriter fileWriter = null;
    CSVPrinter csvFilePrinter = null;
    try {
        CSVFormat csvFileFormat = CSVFormat.DEFAULT.withRecordSeparator(Defines.NEW_LINE_SEPARATOR);
        fileWriter = new FileWriter(Defines.FILE_NAME);
        csvFilePrinter = new CSVPrinter(fileWriter, csvFileFormat);
        csvFilePrinter.printRecord(Defines.FILE_HEADER);
        for (Object recordList : recordsList) {
            csvFilePrinter.printRecords(recordList);
        }
        fileWriter.flush();
        fileWriter.close();
        csvFilePrinter.close();
    } catch (IOException ex) {
        throw new Exception("IOException occured while writing CSV file " + ex.getMessage());
    }
}

From source file:com.siemens.sw360.portal.portlets.admin.ComponentUploadPortlet.java

@NotNull
private ByteArrayOutputStream writeCsvStream(List<List<String>> listList) throws TException, IOException {
    final ByteArrayOutputStream riskCategoryCsvStream = new ByteArrayOutputStream();
    Writer out = new BufferedWriter(new OutputStreamWriter(riskCategoryCsvStream));
    CSVPrinter csvPrinter = new CSVPrinter(out, CommonUtils.sw360CsvFormat);
    csvPrinter.printRecords(listList);
    csvPrinter.flush();
    csvPrinter.close();
    return riskCategoryCsvStream;
}

From source file:com.amazonaws.services.dynamodbv2.online.index.integration.tests.ViolationCorrectionTest.java

/**
 * Iterates through the detection output file: it first leaves updates blank, based on the number of
 * missing updates per key. Once the missing-update count is reached, it removes the expected GSI hash
 * values as specified by 'missingGsiExpectedHashValues' and also starts adding updates.
 * It then iterates over the rows again and adds Yes/No/Invalid values in the delete column.
 * It returns all error records, if present; otherwise it returns all records.
 */
private static List<List<String>> createCorrectionFile(final String detectionFile, final String correctionFile,
        final String gsiHashKeyName, final String gsiHashKeyType, final String gsiRangeKeyName,
        final String gsiRangeKeyType, final Map<String, String> tableHashToNewGsiHashValueMap,
        final Map<String, String> tableHashToNewGsiRangeValueMap, final int missingUpdatesPerKey,
        final int missingGsiExpectedHashValues, final int invalidValuesForDelete, final int numOfYesForDelete,
        final int numOfNoForDelete) throws IOException {

    List<List<String>> errorRecords = null;
    List<List<String>> allRecords = null;

    BufferedReader br = null;
    BufferedWriter bw = null;
    CSVParser parser = null;
    CSVPrinter csvPrinter = null;
    try {
        br = new BufferedReader(new FileReader(new File(detectionFile)));
        bw = new BufferedWriter(new FileWriter(new File(correctionFile)));
        parser = new CSVParser(br, TestUtils.csvFormat);
        csvPrinter = new CSVPrinter(bw, TestUtils.csvFormat);
        List<CSVRecord> detectorRecords = parser.getRecords();

        int hashMissingUpdates = 0;
        int rangeMissingUpdates = 0;
        int missingGsiExpectedHashValuesCurrent = 0;

        // Print Header
        Map<String, Integer> header = parser.getHeaderMap();
        csvPrinter.printRecord(header.keySet());

        allRecords = new ArrayList<List<String>>();
        for (CSVRecord csvRecord : detectorRecords) {
            List<String> newRecord = new ArrayList<String>();
            String tableHashKeyRecorded = csvRecord.get(ViolationRecord.TABLE_HASH_KEY);

            String hashKeyViolationType = null;
            if (gsiHashKeyName != null) {
                hashKeyViolationType = csvRecord.get(ViolationRecord.GSI_HASH_KEY_VIOLATION_TYPE);
            }
            String rangeKeyViolationType = null;
            if (gsiRangeKeyName != null) {
                rangeKeyViolationType = csvRecord.get(ViolationRecord.GSI_RANGE_KEY_VIOLATION_TYPE);
            }

            for (int i = 0; i < csvRecord.size(); i++) {
                newRecord.add(i, csvRecord.get(i));
            }

            String newGsiVal = null;
            if (hashKeyViolationType != null && (hashKeyViolationType.equals("Size Violation")
                    || hashKeyViolationType.equals("Type Violation"))) {
                if (hashMissingUpdates < missingUpdatesPerKey) {
                    allRecords.add(newRecord);
                    hashMissingUpdates++;
                    continue;
                }
                //Remove expected hash Values
                if (missingGsiExpectedHashValuesCurrent < missingGsiExpectedHashValues) {
                    newRecord.remove((int) header.get(ViolationRecord.GSI_HASH_KEY));
                    newRecord.add(header.get(ViolationRecord.GSI_HASH_KEY), "");
                    missingGsiExpectedHashValuesCurrent++;
                }

                newRecord.remove((int) header.get(ViolationRecord.GSI_HASH_KEY_UPDATE_VALUE));
                newGsiVal = getNewValue(gsiHashKeyType, 4 /*length*/);
                newRecord.add(header.get(ViolationRecord.GSI_HASH_KEY_UPDATE_VALUE), newGsiVal);
                tableHashToNewGsiHashValueMap.put(tableHashKeyRecorded, newGsiVal);
            }

            if (rangeKeyViolationType != null && (rangeKeyViolationType.equals("Size Violation")
                    || rangeKeyViolationType.equals("Type Violation"))) {
                if (rangeMissingUpdates < missingUpdatesPerKey) {
                    allRecords.add(newRecord);
                    rangeMissingUpdates++;
                    continue;
                }

                newRecord.remove(header.get(ViolationRecord.GSI_RANGE_KEY_UPDATE_VALUE));
                newGsiVal = getNewValue(gsiRangeKeyType, 4 /*length*/);
                newRecord.add(header.get(ViolationRecord.GSI_RANGE_KEY_UPDATE_VALUE), newGsiVal);
                tableHashToNewGsiRangeValueMap.put(tableHashKeyRecorded, newGsiVal);
            }
            allRecords.add(newRecord);
        }

        // Add 'Y' or 'N' for delete column
        if (numOfNoForDelete > 0 || numOfYesForDelete > 0 || invalidValuesForDelete > 0) {
            errorRecords = new ArrayList<List<String>>();
            int numOfYesAdded = 0;
            int numOfNoAdded = 0;
            int numOfInvalids = 0;
            for (List<String> record : allRecords) {
                if (numOfInvalids < invalidValuesForDelete) {
                    record.remove(header.get(ViolationRecord.GSI_CORRECTION_DELETE_BLANK));
                    record.add(header.get(ViolationRecord.GSI_CORRECTION_DELETE_BLANK), "xx");
                    numOfInvalids++;
                    errorRecords.add(record);
                    continue;
                }

                if (numOfYesAdded < numOfYesForDelete) {
                    record.remove(header.get(ViolationRecord.GSI_CORRECTION_DELETE_BLANK));
                    record.add(header.get(ViolationRecord.GSI_CORRECTION_DELETE_BLANK), "Y");
                    numOfYesAdded++;
                    continue;
                }

                if (numOfNoAdded < numOfNoForDelete) {
                    record.remove(header.get(ViolationRecord.GSI_CORRECTION_DELETE_BLANK));
                    record.add(header.get(ViolationRecord.GSI_CORRECTION_DELETE_BLANK), "N");
                    numOfNoAdded++;
                    continue;
                }
            }
        }

        // Add all records to file
        csvPrinter.printRecords(allRecords);
    } finally {
        br.close();
        bw.close();
        parser.close();
        csvPrinter.close();
    }

    if (errorRecords != null)
        return errorRecords;
    else
        return allRecords;
}

From source file:org.qamatic.mintleaf.tools.CsvExportFlavour.java

@Override
public void export(ResultSet resultSet) throws MintleafException {
    CSVPrinter printer = null;
    try {
        printer = new CSVPrinter(writer, CSVFormat.EXCEL.withHeader(resultSet));
        printer.printRecords(resultSet);
        printer.close();
    } catch (SQLException e) {
        throw new MintleafException(e);
    } catch (IOException e) {
        throw new MintleafException(e);

    }
}

From source file:org.roda.core.plugins.plugins.base.InventoryReportPlugin.java

private void processAIP(ModelService model, StorageService storage, SimpleJobPluginInfo jobPluginInfo,
        CSVPrinter csvFilePrinter, AIP aip) {
    if (csvFilePrinter == null) {
        LOGGER.warn("CSVPrinter is NULL! Skipping...");
        return;
    }

    try {
        if (outputDataInformation && aip.getRepresentations() != null) {
            List<List<String>> dataInformation = InventoryReportPluginUtils.getDataInformation(fields, aip,
                    model, storage);
            csvFilePrinter.printRecords(dataInformation);
        }
        if (outputDescriptiveMetadataInformation && aip.getDescriptiveMetadata() != null) {
            List<List<String>> dataInformation = InventoryReportPluginUtils
                    .getDescriptiveMetadataInformation(fields, aip, model, storage);
            csvFilePrinter.printRecords(dataInformation);
        }
        if (otherMetadataTypes != null && !otherMetadataTypes.isEmpty()) {
            for (String otherMetadataType : otherMetadataTypes) {
                List<List<String>> otherMetadataInformation = InventoryReportPluginUtils
                        .getOtherMetadataInformation(fields, otherMetadataType, aip, model, storage);
                csvFilePrinter.printRecords(otherMetadataInformation);
            }
        }
        jobPluginInfo.incrementObjectsProcessedWithSuccess();
    } catch (IOException e) {
        jobPluginInfo.incrementObjectsProcessedWithFailure();
    }
}

From source file:poe.trade.assist.Main.java

private void saveSearchList(SearchPane searchPane) {
    List<Search> list = new ArrayList<>(searchPane.searchTable.getMasterItems());
    if (list != null) {
        //         Gson gson = new Gson();
        List<String[]> persistList = list.stream().map(e -> e.toSearchPersist()).map(e -> e.toCSVArray())
                .collect(toList());
        if (!persistList.isEmpty()) {
            File file = getSearchFile();
            try (BufferedWriter br = new BufferedWriter(new FileWriter(file))) {
                CSVPrinter json = CSVFormat.RFC4180.withHeader("Name", "Tags", "URL", "Auto", "Sort").print(br);
                json.printRecords(persistList);
                json.flush();
                //         String json = gson.toJson(persistList);
            } catch (IOException e1) {
                // TODO Auto-generated catch block
                Dialogs.showError(e1);
                e1.printStackTrace();
            }
        }

        //         saveSearchesToFile(json);
    }
}