Usage examples for `org.apache.commons.lang3.StringEscapeUtils.escapeCsv`, whose signature is:
public static final String escapeCsv(final String input)
From source file:edu.utsa.sifter.IndexResource.java
/**
 * Appends one document record to the CSV export as a single line:
 * ID, score, name, path, extension, size, modified, accessed, created,
 * cell, cell distance, bookmark-created, bookmark-comment, feature score.
 *
 * @param doc    the indexed document to export
 * @param mark   the bookmark for this document, or null if none (emits "0"
 *               for the created timestamp and an empty comment)
 * @param writer destination for the CSV line; not flushed or closed here
 * @param feat   feature score appended as the final column
 * @throws IOException if writing to the stream fails
 */
void writeDocRecordFeatures(final Result doc, final Bookmark mark, final OutputStreamWriter writer,
        final double feat) throws IOException {
    // NOTE(review): escapeCsv() itself wraps a field in quotes when it
    // contains a comma, quote or newline. The previous code wrapped the
    // escaped value in an extra pair of literal quotes, producing malformed
    // CSV such as ""a,b"" for those values; the manual quotes are removed
    // and escapeCsv() alone is relied upon. doc.Cell is now escaped too,
    // for consistency with the other string columns.
    writer.write(nullCheck(doc.ID));
    writer.write(",");
    writer.write(Double.toString(doc.Score));
    writer.write(",");
    writer.write(StringEscapeUtils.escapeCsv(nullCheck(doc.Name)));
    writer.write(",");
    writer.write(StringEscapeUtils.escapeCsv(nullCheck(doc.Path)));
    writer.write(",");
    writer.write(StringEscapeUtils.escapeCsv(nullCheck(doc.Extension)));
    writer.write(",");
    writer.write(Long.toString(doc.Size));
    writer.write(",");
    writer.write(Long.toString(doc.Modified));
    writer.write(",");
    writer.write(Long.toString(doc.Accessed));
    writer.write(",");
    writer.write(Long.toString(doc.Created));
    writer.write(",");
    writer.write(StringEscapeUtils.escapeCsv(nullCheck(doc.Cell)));
    writer.write(",");
    writer.write(Double.toString(doc.CellDistance));
    writer.write(",");
    writer.write(mark == null ? "0" : Long.toString(mark.Created));
    writer.write(",");
    writer.write(mark == null ? "" : StringEscapeUtils.escapeCsv(nullCheck(mark.Comment)));
    writer.write(",");
    writer.write(Double.toString(feat));
    writer.write("\n");
}
From source file:edu.utsa.sifter.IndexResource.java
void writeHitRecordFeatures(final SearchHit hit, final Bookmark mark, final OutputStreamWriter writer) throws IOException, InterruptedException { writer.write(nullCheck(hit.ID()));//from ww w . java 2s . c o m writer.write(","); writer.write(Double.toString(hit.Score)); writer.write(",\""); writer.write(StringEscapeUtils.escapeCsv(nullCheck(hit.Name()))); writer.write("\",\""); writer.write(StringEscapeUtils.escapeCsv(nullCheck(hit.Path()))); writer.write("\",\""); writer.write(nullCheck(StringEscapeUtils.escapeCsv(hit.Extension()))); writer.write("\","); writer.write(Long.toString(hit.Size())); writer.write(","); writer.write(Long.toString(hit.Modified())); writer.write(","); writer.write(Long.toString(hit.Accessed())); writer.write(","); writer.write(Long.toString(hit.Created())); writer.write(","); writer.write(nullCheck(hit.Cell())); writer.write(","); writer.write(Double.toString(hit.CellDistance())); writer.write(","); writer.write(Long.toString(hit.Start)); writer.write(","); writer.write(Long.toString(hit.End)); writer.write(","); writer.write(nullCheck(StringEscapeUtils.escapeCsv(hit.Passage.replace('\n', ' ').replace('\r', ' ')))); writer.write(","); writer.write(mark == null ? "0" : Long.toString(mark.Created)); writer.write(","); writer.write(mark == null ? "" : StringEscapeUtils.escapeCsv(nullCheck(mark.Comment))); writer.write("\n"); }
From source file:io.cloudex.cloud.impl.google.GoogleCloudServiceImpl.java
/**
 * Polls a BigQuery job and, once it completes, saves the query results to a
 * local file as CSV, one line per result row, paging through the result set.
 *
 * @param jobId    id of the job to poll; ignored when {@code queryJob} is given
 * @param queryJob an already-submitted job, or null to poll by {@code jobId}
 * @param filename path of the local file the CSV rows are written to
 * @return query statistics (total rows and total bytes processed) with the
 *         output filename recorded
 * @throws IOException if polling the job or writing the file fails
 * FIXME rename to saveBigDataResultsToFile
 */
public QueryStats saveBigQueryResultsToFile(String jobId, Job queryJob, String filename) throws IOException {
    // Resolve the completed job id: either poll with retry support, or take
    // the id from the job object we already hold.
    String completedJob;
    if (queryJob == null) {
        completedJob = checkBigQueryJobResults(jobId, true, false);
    } else {
        completedJob = queryJob.getJobReference().getJobId();
    }
    Joiner joiner = Joiner.on(',');
    String pageToken = null;
    BigInteger totalRows = null;
    Long totalBytes = null;
    Integer numFields = null;
    Stopwatch stopwatch = Stopwatch.createStarted();
    try (PrintWriter writer = new PrintWriter(new FileOutputStream(filename))) {
        do {
            GetQueryResultsResponse queryResult = this.getQueryResults(completedJob, pageToken);
            // A null page token means this is the last page.
            pageToken = queryResult.getPageToken();
            log.debug("Page token: " + pageToken);
            if (totalRows == null) {
                // Totals and schema are captured once, from the first page.
                totalRows = queryResult.getTotalRows();
                numFields = queryResult.getSchema().getFields().size();
                totalBytes = queryResult.getTotalBytesProcessed();
                log.debug("Total rows for query: " + totalRows);
            }
            List<TableRow> rows = queryResult.getRows();
            if (rows != null) {
                log.debug("Saving " + rows.size() + ", records to file: " + filename);
                // Save as CSV, escaping each value so downstream parsing
                // does not break on embedded commas/quotes.
                if (numFields == 1) {
                    // Single-column result: no joining needed.
                    // NOTE(review): the (String) cast assumes every cell value
                    // is a string — a non-string column would throw
                    // ClassCastException here; confirm against the queries used.
                    for (TableRow row : rows) {
                        writer.println(StringEscapeUtils.escapeCsv((String) row.getF().get(0).getV()));
                    }
                } else {
                    // Multiple columns: escape each cell, emit "" for SQL
                    // nulls, then join with commas.
                    for (TableRow row : rows) {
                        List<Object> fields = new ArrayList<>();
                        for (TableCell field : row.getF()) {
                            if (Data.isNull(field.getV())) {
                                fields.add("");
                            } else {
                                fields.add(StringEscapeUtils.escapeCsv((String) field.getV()));
                            }
                        }
                        writer.println(joiner.join(fields));
                    }
                }
            }
            // Keep paging until there is no next page; the totalRows guard
            // stops immediately for queries that returned zero rows.
        } while ((pageToken != null) && !BigInteger.ZERO.equals(totalRows));
    }
    log.debug("BigQuery query data saved successfully, timer: " + stopwatch);
    QueryStats stats = new QueryStats(totalRows, totalBytes);
    stats.getOutputFiles().add(filename);
    return stats;
}
From source file:net.sourceforge.pmd.cpd.CSVRenderer.java
/**
 * Renders duplication matches as CSV. Each row holds the match's line count
 * (unless reported per file), token count, occurrence count, and then one
 * (beginLine[, lineCount], filename) group per occurrence.
 *
 * @param matches the duplication matches to render
 * @param writer  destination for the CSV output; flushed but not closed
 * @throws IOException if writing fails
 */
@Override
public void render(Iterator<Match> matches, Writer writer) throws IOException {
    // Header row: the "lines" column is omitted when line counts are
    // reported per file instead of per match.
    if (!lineCountPerFile) {
        writer.append("lines").append(separator);
    }
    writer.append("tokens").append(separator).append("occurrences").append(PMD.EOL);
    while (matches.hasNext()) {
        Match duplication = matches.next();
        if (!lineCountPerFile) {
            writer.append(String.valueOf(duplication.getLineCount())).append(separator);
        }
        writer.append(String.valueOf(duplication.getTokenCount())).append(separator);
        writer.append(String.valueOf(duplication.getMarkCount())).append(separator);
        Iterator<Mark> occurrences = duplication.iterator();
        while (occurrences.hasNext()) {
            Mark occurrence = occurrences.next();
            writer.append(String.valueOf(occurrence.getBeginLine())).append(separator);
            if (lineCountPerFile) {
                writer.append(String.valueOf(occurrence.getLineCount())).append(separator);
            }
            // Filenames may contain the separator, so they are CSV-escaped.
            writer.append(StringEscapeUtils.escapeCsv(occurrence.getFilename()));
            // No trailing separator after the last occurrence.
            if (occurrences.hasNext()) {
                writer.append(separator);
            }
        }
        writer.append(PMD.EOL);
    }
    writer.flush();
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.cnw.CNWConverter.java
private void convertPersons(Map<String, Map<String, String>> listMaps) throws Exception { Progress progress = new Progress(); PrintWriter out = createPrintWriter(CNWPerson.class); // Set<RelationDTO> knownRelations = Sets.newHashSet(); Map<String, String> shortDescriptionMap = Maps.newHashMap(); try {/*from ww w . java 2s .co m*/ Collection<File> files = FileUtils.listFiles(inputDir, TEI_EXTENSIONS, false); for (File file : Sets.newTreeSet(files)) { progress.step(); String fileName = file.getName(); log(".. %s%n", fileName); String xml = Files.readTextFromFile(file); // LOG.info("xml={}", xml); String pid = "cnw:pers:" + fileName.replace(".xml", ""); PersonContext personContext = new PersonContext(pid); Visitor visitor = new PersonVisitor(personContext, listMaps); Document.createFromXml(xml).accept(visitor); CNWPerson person = personContext.person; koppelnaam2pid.put(person.getKoppelnaam(), person.getId()); jsonConverter.appendTo(out, person); shortDescriptionMap.put(person.getKoppelnaam(), person.getShortDescription()); } out.close(); File personDescription = new File("import/CNW/person-short_description.csv"); FileUtils.write(personDescription, "koppelnaam;korte_omschrijving\n", false); shortDescriptionMap.forEach((koppelnaam, description) -> { CharSequence data = StringEscapeUtils.escapeCsv(koppelnaam) + ";" + StringEscapeUtils.escapeCsv(description) + "\n"; try { FileUtils.write(personDescription, data, true); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } }); } finally { out.close(); progress.done(); } }
From source file:org.apache.nifi.util.hive.HiveJdbcCommon.java
/**
 * Streams a JDBC ResultSet to the given OutputStream as CSV (UTF-8), one
 * line per row, optionally preceded by a header row.
 *
 * Character-typed columns honour the quote/escape options; ARRAY, STRUCT and
 * JAVA_OBJECT columns are always CSV-escaped; all other types use
 * {@code toString()} unmodified. SQL NULLs become empty fields.
 *
 * @param rs            the result set to convert
 * @param outStream     destination stream; not closed by this method
 * @param recordName    unused by this method (kept for interface parity)
 * @param callback      optional per-row hook invoked before conversion, or null
 * @param outputOptions header/delimiter/quote/escape settings
 * @return the number of data rows written (header excluded)
 * @throws SQLException if reading the result set fails
 * @throws IOException  if writing to the stream fails
 */
public static long convertToCsvStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, CsvOutputOptions outputOptions) throws SQLException, IOException {
    final ResultSetMetaData meta = rs.getMetaData();
    final int nrOfColumns = meta.getColumnCount();
    List<String> columnNames = new ArrayList<>(nrOfColumns);
    if (outputOptions.isHeader()) {
        if (outputOptions.getAltHeader() == null) {
            for (int i = 1; i <= nrOfColumns; i++) {
                String columnNameFromMeta = meta.getColumnName(i);
                // Hive returns table.column for column name. Grab the column
                // name as the string after the last period.
                int columnNameDelimiter = columnNameFromMeta.lastIndexOf(".");
                columnNames.add(columnNameFromMeta.substring(columnNameDelimiter + 1));
            }
        } else {
            // Caller supplied a comma-separated replacement header.
            String[] altHeaderNames = outputOptions.getAltHeader().split(",");
            columnNames = Arrays.asList(altHeaderNames);
        }
    }
    // Write column names as header row. When headers are disabled the list
    // is empty, so this writes zero bytes and no newline follows.
    outStream.write(
            StringUtils.join(columnNames, outputOptions.getDelimiter()).getBytes(StandardCharsets.UTF_8));
    if (outputOptions.isHeader()) {
        outStream.write("\n".getBytes(StandardCharsets.UTF_8));
    }
    // Iterate over the rows.
    long nrOfRows = 0;
    while (rs.next()) {
        if (callback != null) {
            callback.processRow(rs);
        }
        List<String> rowValues = new ArrayList<>(nrOfColumns);
        for (int i = 1; i <= nrOfColumns; i++) {
            final int javaSqlType = meta.getColumnType(i);
            final Object value = rs.getObject(i);
            switch (javaSqlType) {
            case CHAR:
            case LONGNVARCHAR:
            case LONGVARCHAR:
            case NCHAR:
            case NVARCHAR:
            case VARCHAR:
                String valueString = rs.getString(i);
                if (valueString != null) {
                    // Quote and escape are independent options. NOTE(review):
                    // with both enabled, escapeCsv() may itself quote a value
                    // containing delimiters, yielding doubled quotes inside
                    // the manual quotes — confirm this matches the consumer.
                    StringBuilder sb = new StringBuilder();
                    if (outputOptions.isQuote()) {
                        sb.append("\"");
                        if (outputOptions.isEscape()) {
                            sb.append(StringEscapeUtils.escapeCsv(valueString));
                        } else {
                            sb.append(valueString);
                        }
                        sb.append("\"");
                        rowValues.add(sb.toString());
                    } else {
                        if (outputOptions.isEscape()) {
                            rowValues.add(StringEscapeUtils.escapeCsv(valueString));
                        } else {
                            rowValues.add(valueString);
                        }
                    }
                } else {
                    // SQL NULL -> empty field.
                    rowValues.add("");
                }
                break;
            case ARRAY:
            case STRUCT:
            case JAVA_OBJECT:
                // Complex types are rendered via their string form and always
                // escaped, since they commonly contain delimiters.
                String complexValueString = rs.getString(i);
                if (complexValueString != null) {
                    rowValues.add(StringEscapeUtils.escapeCsv(complexValueString));
                } else {
                    rowValues.add("");
                }
                break;
            default:
                // Numeric/temporal/etc.: written verbatim, never quoted.
                if (value != null) {
                    rowValues.add(value.toString());
                } else {
                    rowValues.add("");
                }
            }
        }
        // Write row values.
        outStream.write(
                StringUtils.join(rowValues, outputOptions.getDelimiter()).getBytes(StandardCharsets.UTF_8));
        outStream.write("\n".getBytes(StandardCharsets.UTF_8));
        nrOfRows++;
    }
    return nrOfRows;
}
From source file:org.apache.struts2.components.Property.java
/**
 * Applies the enabled escaping modes to the value before it is rendered.
 * Modes are cumulative and applied in a fixed order:
 * HTML -> ECMAScript -> XML -> CSV.
 *
 * NOTE(review): StringEscapeUtils.escapeXml is deprecated in recent
 * commons-lang3 in favour of escapeXml10/escapeXml11 — worth confirming
 * which XML version the tag should target before switching.
 *
 * @param value the raw value to escape; may be null (escapers pass null through)
 * @return the value with all enabled escapings applied
 */
private String prepare(String value) {
    String escaped = value;
    if (escapeHtml) {
        escaped = StringEscapeUtils.escapeHtml4(escaped);
    }
    if (escapeJavaScript) {
        escaped = StringEscapeUtils.escapeEcmaScript(escaped);
    }
    if (escapeXml) {
        escaped = StringEscapeUtils.escapeXml(escaped);
    }
    if (escapeCsv) {
        escaped = StringEscapeUtils.escapeCsv(escaped);
    }
    return escaped;
}
From source file:org.cgiar.ccafs.ap.summaries.projects.csv.BaseCSV.java
/**
 * Writes one CSV cell. Null or empty values are either replaced by the
 * localized placeholder (e.g. &lt;Not defined&gt;) or skipped entirely,
 * depending on {@code useDefault}; non-empty values are CSV-escaped.
 *
 * @param text             the value to write; may be null
 * @param useDefault       if true, write the placeholder for null/empty values;
 *                         if false, write nothing for them
 * @param endWithSeparator if true, append the field separator afterwards
 * @throws IOException if an I/O error occurs
 */
public void writeString(Object text, boolean useDefault, boolean endWithSeparator) throws IOException {
    // Normalize: null maps to "" and takes the same branch as an empty string.
    String cell = (text == null) ? "" : String.valueOf(text);
    if (cell.isEmpty()) {
        if (useDefault) {
            writer.write(this.getText("summaries.project.empty"));
        }
    } else {
        writer.write(StringEscapeUtils.escapeCsv(cell));
    }
    if (endWithSeparator) {
        writer.write(SEPARATOR);
    }
}
From source file:org.fao.faostat.api.core.jdbc.JDBCIterable.java
/**
 * Renders the current result-set row as one CSV line and advances the
 * cursor. Numeric columns are re-parsed and written bare; everything else is
 * CSV-escaped. When the last row has been consumed, the result set,
 * statement and connection are closed.
 *
 * NOTE(review): a NullPointerException (thrown by .trim() on a NULL column,
 * but potentially also by other calls in the try body) is used as control
 * flow to emit an empty field — a null check on getString(i) would be safer.
 * SQLExceptions are silently swallowed, yielding a truncated/empty line.
 *
 * @return the CSV line for the row just read (with trailing newline), or a
 *         partial/empty string on error
 */
public String nextCSV() {
    StringBuilder sb = new StringBuilder();
    String columnType;
    String value;
    if (this.isHasNext()) {
        try {
            for (int i = 1; i <= this.getResultSet().getMetaData().getColumnCount(); i++) {
                try {
                    columnType = this.getResultSet().getMetaData().getColumnClassName(i);
                    value = this.getResultSet().getString(i).trim();
                    // Numeric types are normalized through a parse/append
                    // round-trip; NULL values NPE out of this block instead.
                    if (columnType.endsWith("Double")) {
                        sb.append(Double.parseDouble(value));
                    } else if (columnType.endsWith("Integer")) {
                        sb.append(Integer.parseInt(value));
                    } else if (columnType.endsWith("Long")) {
                        sb.append(Long.parseLong(value));
                    } else if (columnType.endsWith("Date")) {
                        // Dates are written as escaped strings, not reformatted.
                        sb.append(StringEscapeUtils.escapeCsv(value));
                    } else {
                        // Any other type: escapeCsv handles embedded commas/quotes.
                        sb.append(StringEscapeUtils.escapeCsv(value));
                    }
                    // Comma between fields, newline after the last one.
                    if (i <= this.getResultSet().getMetaData().getColumnCount() - 1) {
                        sb.append(",");
                    } else {
                        sb.append("\n");
                    }
                } catch (NullPointerException ignored) {
                    // NULL column value: emit an empty field but keep the
                    // separator/newline bookkeeping identical to the happy path.
                    // (The i > 0 guard is always true since i starts at 1.)
                    if (i > 0) {
                        sb.append("");
                    }
                    if (i <= this.getResultSet().getMetaData().getColumnCount() - 1) {
                        sb.append(",");
                    } else {
                        sb.append("\n");
                    }
                }
            }
            this.setHasNext(this.getResultSet().next());
        } catch (SQLException ignored) {
            // Swallowed by design here: a failed read produces a short line.
        }
    }
    if (!this.isHasNext()) {
        // Last row consumed: release all JDBC resources.
        try {
            this.getResultSet().close();
            this.getStatement().close();
            this.getConnection().close();
        } catch (SQLException ignored) {
        }
    } else {
        // More rows remain; nothing extra to append.
    }
    return sb.toString();
}
From source file:org.fao.faostat.api.legacy.V10Data.java
private void writeCSV(JDBCIterable it, Writer writer) throws IOException { /* Get Headers from Metadata */ List<String> headers = it.getColumnNames(); /* write headers */ for (int i = 0; i < headers.size(); i += 1) { writer.write(StringEscapeUtils.escapeCsv(headers.get(i))); if (i < headers.size() - 1) writer.write(","); else//from w w w.j a va 2s . c om writer.write("\n"); } /* Add CSV rows. */ while (it.hasNext()) { writer.write(it.nextCSV()); } }