List of usage examples for org.apache.commons.csv CSVFormat DEFAULT
CSVFormat DEFAULT
To view the source code for org.apache.commons.csv CSVFormat.DEFAULT, click the Source Link.
From source file:ro.pippo.csv.CsvEngine.java
public CSVFormat getCSVFormat() { return CSVFormat.DEFAULT.withDelimiter(delimiter).withRecordSeparator(recordSeparator) .withNullString(nullString).withEscape(escapeCharacter).withQuote(quoteCharacter) .withQuoteMode(quoteMode);//from ww w.ja v a2s.c om }
From source file:strawn.evariant.rainsorter.data.msapop.MSAPopulationLoader.java
/**
 * Opens the MSA population CSV from disk and returns its records, treating
 * the first line as the header row.
 *
 * @return lazily parsed CSV records; the underlying reader remains open
 *         while the iterable is consumed
 * @throws FileNotFoundException if the file at MSA_POPULATION_DISK_LOCATION is missing
 * @throws IOException if the file cannot be read
 */
public static Iterable<CSVRecord> getCSVRecords() throws FileNotFoundException, IOException {
    final Reader in = new FileReader(MSA_POPULATION_DISK_LOCATION);
    final CSVFormat format = CSVFormat.DEFAULT.withHeader();
    return format.parse(in);
}
From source file:strawn.evariant.rainsorter.data.precipitation.PrecipitationLoader.java
/**
 * Opens the QCLCD precipitation CSV from disk and returns its records,
 * treating the first line as the header row.
 *
 * @return lazily parsed CSV records; the underlying reader remains open
 *         while the iterable is consumed
 * @throws FileNotFoundException if the file at QCLCD_DISK_LOCATION is missing
 * @throws IOException if the file cannot be read
 */
public static Iterable<CSVRecord> getCSVRecords() throws FileNotFoundException, IOException {
    final Reader in = new FileReader(QCLCD_DISK_LOCATION);
    final CSVFormat format = CSVFormat.DEFAULT.withHeader();
    return format.parse(in);
}
From source file:strawn.evariant.rainsorter.unused.MSACountyLoader.java
/**
 * Opens the MSA/county mapping CSV and returns its records, treating the
 * first line as the header row.
 *
 * @return lazily parsed CSV records; the underlying reader remains open
 *         while the iterable is consumed
 * @throws FileNotFoundException if the file at MSACountyFileInfo.LOCATION is missing
 * @throws IOException if the file cannot be read
 */
public static Iterable<CSVRecord> getCSVRecords() throws FileNotFoundException, IOException {
    final Reader in = new FileReader(MSACountyFileInfo.LOCATION);
    final CSVFormat format = CSVFormat.DEFAULT.withHeader();
    return format.parse(in);
}
From source file:streaming.core.GenericTweetsProcessor.java
public static void main(String[] args) throws Exception { Reader in = new FileReader("data/us-election-aprial07.csv"); Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(in); for (CSVRecord record : records) { System.out.println(record.get(0) + " " + record.get(1)); }//from ww w . j a v a 2s . c o m }
From source file:tr.edu.firat.ceng.aml.assignments.decisiontree.util.CSV2DatasetUtil.java
public Dataset convert(String resourceName) throws UnsupportedEncodingException, IOException { Reader reader = null;//from www . j ava 2s . c o m try { List<Property> properties = new ArrayList<Property>(); properties.add(new NumericPropertyImpl("sepal_length")); properties.add(new NumericPropertyImpl("sepal_width")); properties.add(new NumericPropertyImpl("petal_length")); properties.add(new NumericPropertyImpl("petal_width")); ClassProperty classProperty = new ClassPropertyImpl("class"); final URL url = getClass().getResource(resourceName); reader = new InputStreamReader(url.openStream(), "UTF-8"); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT); for (CSVRecord record : parser) { for (int i = 0; i < properties.size(); i++) { Property get = properties.get(i); if (get instanceof NumericProperty) { NumericProperty numericProperty = (NumericProperty) get; numericProperty.getValues().add(new Double(record.get(i))); } } classProperty.getValues().add(record.get(properties.size())); } Dataset dataset = new DatasetImpl(classProperty); dataset.addProperties(properties); return dataset; } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { System.out.println(ex); } } }
From source file:uk.bl.dpt.qa.ProcessIsolatedTika.java
/**
 * Parses an input stream by PUTting it to the local Tika server's /meta
 * endpoint and copying the returned CSV key/value pairs into the supplied
 * Metadata object.
 *
 * @param pInputStream stream to analyse (shielded so the HTTP client cannot close it)
 * @param pMetadata metadata object to populate; also consulted for a content-type hint
 * @return true if processed ok, false if the server was down, the request
 *         failed or timed out, or the CSV response could not be read
 */
public boolean parse(final InputStream pInputStream, final Metadata pMetadata) {
    boolean ret = true;

    // Bail out early if the external Tika server process is not up.
    if (!gRunner.isRunning()) {
        gLogger.error("Tika-Server is not running");
        return false;
    }

    final String TIKA_PATH = "/meta";
    final String END_POINT = "http://" + TIKA_LOCAL_HOST + ":" + TIKA_SERVER_PORT;

    gLogger.trace("Server: " + END_POINT + TIKA_PATH);

    final String detectedType = pMetadata.get(Metadata.CONTENT_TYPE);

    // Run the HTTP PUT on its own thread so it can be abandoned on timeout
    // (the result value is unused; the response lands in the gResponse field).
    FutureTask<Integer> task = new FutureTask<Integer>(new Callable<Integer>() {
        @Override
        public Integer call() throws Exception {
            gResponse = WebClient.create(END_POINT + TIKA_PATH).accept("text/csv")
                    // give the parsers a hint
                    .type(detectedType)
                    // protect the stream from being closed
                    .put(new CloseShieldInputStream(pInputStream));
            return null;
        }
    });

    Thread thread = new Thread(task);
    thread.start();

    try {
        // Wait up to TIMEOUT_SECS for the server to respond; on any failure,
        // restart the server process so subsequent calls get a clean instance.
        task.get(TIMEOUT_SECS * 1000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        gLogger.info("InterruptedException: " + e);
        ret = false;
        restart();
    } catch (ExecutionException e) {
        gLogger.info("ExecutionException: " + e);
        ret = false;
        restart();
    } catch (TimeoutException e) {
        gLogger.info("TimeoutException: " + e);
        ret = false;
        restart();
    }

    if (gResponse != null) {
        if (gResponse.getStatus() == Status.UNSUPPORTED_MEDIA_TYPE.getStatusCode()) {
            // the server may return HTTP 415 (unsupported) if it won't accept the mimetype
            // handle this issue here: record the failure in the metadata output
            // FIXME: maybe change mimetype for a more visible error?
            pMetadata.add("parseFailure415", "true");
            gLogger.error("Parse Failure: HTTP 415 (format unsupported for parsing)");
        } else {
            if (gResponse.getEntity() instanceof InputStream) {
                InputStream is = (InputStream) gResponse.getEntity();
                BufferedReader reader = new BufferedReader(new InputStreamReader(is));
                try {
                    // The server replies with two-column CSV: metadata key, value.
                    Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(reader);
                    for (CSVRecord record : records) {
                        pMetadata.add(record.get(0), record.get(1));
                    }
                } catch (IOException e1) {
                    e1.printStackTrace();
                    ret = false;
                } finally {
                    if (reader != null) {
                        try {
                            reader.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        }
    }

    gLogger.info("Metadata entries: " + pMetadata.names().length);

    return ret;
}
From source file:uk.co.inetria.gce.GceUsageParser.java
/**
 * Parses every configured GCE usage CSV file, accumulating per-measurement
 * raw and hour-adjusted usage, collecting unique measurement ids and VM
 * names, then prints a summary report to stdout.
 *
 * @throws FileNotFoundException if any of the input files is missing
 * @throws IOException if an input file cannot be read
 */
public void parse() throws FileNotFoundException, IOException {
    for (String file : files) {
        try (BufferedReader reader = new BufferedReader(new FileReader(file), BUF_SIZE);) {
            // First line is a header; skip it rather than emit it as a record.
            Iterable<CSVRecord> records = CSVFormat.DEFAULT.withHeader().withSkipHeaderRecord().parse(reader);
            for (CSVRecord record : records) {
                // Column 1 is the measurement id (prefix stripped for readability).
                String measurement = StringUtils.remove(record.get(1), MEASURE_PREFIX);
                this.measurementIds.add(measurement);
                if (measurement.contains(VM) || measurement.contains(CONTAINER_ENGINE_VM)) {
                    this.numberOfVms++;
                    // Column 4 carries the VM resource name.
                    this.vms.add(record.get(4));
                }
                Usage usage = this.usages.get(measurement);
                if (usage == null) {
                    usage = new Usage();
                    this.usages.put(measurement, usage);
                }
                // Column 2 is the usage quantity (seconds for VM measurements).
                long value = Long.parseLong(record.get(2));
                usage.raw += value;
                if (measurement.contains(VM) || measurement.contains(CONTAINER_ENGINE_VM)) {
                    // hourly based billing: round usage up to whole hours,
                    // with a minimum charge of one hour
                    long adjusted = value;
                    if (adjusted < HOUR) {
                        adjusted = HOUR;
                    } else if (adjusted % HOUR > 0) {
                        adjusted = (long) (HOUR * Math.ceil(adjusted / (double) HOUR));
                    }
                    usage.adjusted += adjusted;
                }
            }
        }
    }
    // Report: unique measurement ids, VM counts/names, then aggregated usage as CSV.
    System.out.println("Unique measurements");
    for (String measureId : this.measurementIds) {
        System.out.println(measureId);
    }
    System.out.println("Total number of started VMs: " + this.numberOfVms);
    System.out.println("Total number of unique VMs: " + this.vms.size());
    for (String vmId : this.vms) {
        System.out.println(vmId);
    }
    System.out.println("Aggregated usage");
    System.out.println("MeasurementId,Quantity,Per-hour Quantity");
    for (Entry<String, Usage> entry : this.usages.entrySet()) {
        Usage usage = entry.getValue();
        System.out.println(entry.getKey() + ',' + usage.raw + ',' + usage.adjusted);
    }
}
From source file:uk.trainwatch.osgb.codepoint.util.CodePointImport.java
/**
 * Imports one code-point CSV file into the database as a single transaction.
 * Every record is mapped to a PostCode and inserted via CP_SQL; any failure
 * rolls back the whole file before rethrowing.
 *
 * @param con open database connection; auto-commit is disabled here
 * @param path path of the CSV file to import
 * @throws SQLException if the insert fails (unwrapped from UncheckedSQLException)
 */
private void importer(Connection con, Path path) throws SQLException {
    LOG.log(Level.INFO, () -> "Importing " + path);
    try {
        try (CSVParser parser = new CSVParser(new FileReader(path.toFile()), CSVFormat.DEFAULT)) {
            List<CSVRecord> records = parser.getRecords();

            // Do the import in one massive transaction
            con.setAutoCommit(false);

            try (PreparedStatement ps = con.prepareStatement(CP_SQL)) {
                records.stream()
                        // columns: postcode, pqi, eastings, northings, then area/NHS codes
                        .map(r -> new PostCode(r.get(0), Integer.parseInt(r.get(1)), Integer.parseInt(r.get(2)),
                                Integer.parseInt(r.get(3)), r.get(4), r.get(5), r.get(6), r.get(7), r.get(8),
                                r.get(9)))
                        // guard() adapts the SQL-throwing consumer into a stream consumer
                        .forEach(SQLConsumer.guard(pc -> {
                            // textual codes are resolved to numeric ids; unknown codes map to 0
                            SQL.executeUpdate(ps, pc.getPostCode(), pc.getPqi(), pc.getEastings(),
                                    pc.getNorthings(), codeLookup.getOrDefault(pc.getCountry(), 0),
                                    codeLookup.getOrDefault(pc.getCounty(), 0),
                                    codeLookup.getOrDefault(pc.getDistrict(), 0),
                                    codeLookup.getOrDefault(pc.getWard(), 0),
                                    nhsLookup.getOrDefault(pc.getNhsRegion(), 0),
                                    nhsLookup.getOrDefault(pc.getNhs(), 0));
                        }));
            }

            con.commit();

            int parseCount = records.size();
            lineCount += parseCount;
            LOG.log(Level.INFO, () -> "Parsed " + parseCount);
        }
    } catch (IOException ex) {
        con.rollback();
        LOG.log(Level.SEVERE, null, ex);
        throw new UncheckedIOException(ex);
    } catch (UncheckedSQLException ex) {
        // unwrap to honour this method's declared SQLException
        con.rollback();
        LOG.log(Level.SEVERE, null, ex);
        throw ex.getCause();
    } catch (Exception ex) {
        con.rollback();
        LOG.log(Level.SEVERE, null, ex);
        throw new RuntimeException(ex);
    }
}
From source file:Utils.CVEUtils.java
/** * Find the CVE in the CSV file and return the relevant parts * * @param cveid//from w w w . j a v a 2 s . com * @return String[] with format { cveid, cvss_risk, summary } - If no cve * exits then this returns null */ public String[] getCVE(String cveid) { String[] cve = new String[3]; // get the id from the cveid CSVFormat format = CSVFormat.DEFAULT.withDelimiter(','); try { CSVParser parser = new CSVParser(new FileReader(csvfile), format); for (CSVRecord record : parser) { String thiscve = record.get(0); if (thiscve.equalsIgnoreCase(cveid)) { // we have found our cve, get all the details and return cve[0] = record.get(0); cve[1] = record.get(1); cve[2] = record.get(2); return cve; } } } catch (IOException ex) { ex.printStackTrace(); } // If we get here then there was no vuln with that ID, return null. return null; }