Usage examples for org.apache.commons.csv.CSVFormat.DEFAULT.
Each example below is excerpted from the indicated source file; follow the
source link on the original page to view the complete file.
From source file:org.sonar.server.db.migrations.v51.FeedFileSourcesBinaryData.java
private byte[] toBinary(Long fileSourceId, @Nullable String data) { FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder(); CSVParser parser = null;//from w w w . j ava 2s .c o m try { if (data != null) { parser = CSVParser.parse(data, CSVFormat.DEFAULT); Iterator<CSVRecord> rows = parser.iterator(); int line = 1; while (rows.hasNext()) { CSVRecord row = rows.next(); if (row.size() == 16) { FileSourceDb.Line.Builder lineBuilder = dataBuilder.addLinesBuilder(); lineBuilder.setLine(line); String s = row.get(0); if (StringUtils.isNotEmpty(s)) { lineBuilder.setScmRevision(s); } s = row.get(1); if (StringUtils.isNotEmpty(s)) { lineBuilder.setScmAuthor(s); } s = row.get(2); if (StringUtils.isNotEmpty(s)) { lineBuilder.setScmDate(DateUtils.parseDateTimeQuietly(s).getTime()); } s = row.get(3); if (StringUtils.isNotEmpty(s)) { lineBuilder.setUtLineHits(Integer.parseInt(s)); } s = row.get(4); if (StringUtils.isNotEmpty(s)) { lineBuilder.setUtConditions(Integer.parseInt(s)); } s = row.get(5); if (StringUtils.isNotEmpty(s)) { lineBuilder.setUtCoveredConditions(Integer.parseInt(s)); } s = row.get(6); if (StringUtils.isNotEmpty(s)) { lineBuilder.setItLineHits(Integer.parseInt(s)); } s = row.get(7); if (StringUtils.isNotEmpty(s)) { lineBuilder.setItConditions(Integer.parseInt(s)); } s = row.get(8); if (StringUtils.isNotEmpty(s)) { lineBuilder.setItCoveredConditions(Integer.parseInt(s)); } s = row.get(9); if (StringUtils.isNotEmpty(s)) { lineBuilder.setOverallLineHits(Integer.parseInt(s)); } s = row.get(10); if (StringUtils.isNotEmpty(s)) { lineBuilder.setOverallConditions(Integer.parseInt(s)); } s = row.get(11); if (StringUtils.isNotEmpty(s)) { lineBuilder.setOverallCoveredConditions(Integer.parseInt(s)); } s = row.get(12); if (StringUtils.isNotEmpty(s)) { lineBuilder.setHighlighting(s); } s = row.get(13); if (StringUtils.isNotEmpty(s)) { lineBuilder.setSymbols(s); } s = row.get(14); if (StringUtils.isNotEmpty(s)) { lineBuilder.addAllDuplication(splitIntegers(s)); 
} s = row.get(15); if (s != null) { lineBuilder.setSource(s); } } line++; } } return FileSourceDto.encodeSourceData(dataBuilder.build()); } catch (Exception e) { throw new IllegalStateException( "Invalid FILE_SOURCES.DATA on row with ID " + fileSourceId + ": " + data, e); } finally { IOUtils.closeQuietly(parser); } }
From source file:org.sonar.server.source.index.SourceLineResultSetIterator.java
@Override protected SourceFile read(ResultSet rs) throws SQLException { String projectUuid = rs.getString(1); String fileUuid = rs.getString(2); Long updatedAt = SqlUtil.getLong(rs, 3); if (updatedAt == null) { updatedAt = System.currentTimeMillis(); }//from w w w .j ava 2 s .c o m Date updatedDate = new Date(updatedAt); SourceFile result = new SourceFile(fileUuid, updatedAt); Reader csv = rs.getCharacterStream(4); if (csv == null) { return result; } int line = 1; CSVParser csvParser = null; try { csvParser = new CSVParser(csv, CSVFormat.DEFAULT); for (CSVRecord csvRecord : csvParser) { SourceLineDoc doc = new SourceLineDoc(Maps.<String, Object>newHashMap()); doc.setProjectUuid(projectUuid); doc.setFileUuid(fileUuid); doc.setLine(line); doc.setUpdateDate(updatedDate); doc.setScmRevision(csvRecord.get(0)); doc.setScmAuthor(csvRecord.get(1)); doc.setScmDate(DateUtils.parseDateTimeQuietly(csvRecord.get(2))); // UT doc.setUtLineHits(parseIntegerFromRecord(csvRecord.get(3))); doc.setUtConditions(parseIntegerFromRecord(csvRecord.get(4))); doc.setUtCoveredConditions(parseIntegerFromRecord(csvRecord.get(5))); // IT doc.setItLineHits(parseIntegerFromRecord(csvRecord.get(6))); doc.setItConditions(parseIntegerFromRecord(csvRecord.get(7))); doc.setItCoveredConditions(parseIntegerFromRecord(csvRecord.get(8))); // OVERALL doc.setOverallLineHits(parseIntegerFromRecord(csvRecord.get(9))); doc.setOverallConditions(parseIntegerFromRecord(csvRecord.get(10))); doc.setOverallCoveredConditions(parseIntegerFromRecord(csvRecord.get(11))); doc.setHighlighting(csvRecord.get(12)); doc.setSymbols(csvRecord.get(13)); doc.setDuplications(parseDuplications(csvRecord.get(14))); doc.setSource(csvRecord.get(csvRecord.size() - 1)); result.addLine(doc); line++; } } catch (IOException ioError) { throw new IllegalStateException( "Impossible to open stream for file_sources.data with file_uuid " + fileUuid, ioError); } catch (ArrayIndexOutOfBoundsException lineError) { throw new IllegalStateException( 
String.format("Impossible to parse source line data, stuck at line %d", line), lineError); } finally { IOUtils.closeQuietly(csv); IOUtils.closeQuietly(csvParser); } return result; }
From source file:org.totschnig.myexpenses.task.CsvParseTask.java
@Override protected ArrayList<CSVRecord> doInBackground(Void... params) { InputStream inputStream;//from w w w . ja v a2s. c o m try { inputStream = MyApplication.getInstance().getContentResolver().openInputStream(fileUri); } catch (FileNotFoundException e) { publishProgress(MyApplication.getInstance().getString(R.string.parse_error_file_not_found, fileUri)); return null; } catch (Exception e) { publishProgress( MyApplication.getInstance().getString(R.string.parse_error_other_exception, e.getMessage())); return null; } try { return (ArrayList<CSVRecord>) CSVFormat.DEFAULT.withDelimiter(delimiter) .parse(new InputStreamReader(inputStream, encoding)).getRecords(); } catch (IOException e) { publishProgress( MyApplication.getInstance().getString(R.string.parse_error_other_exception, e.getMessage())); return null; } finally { if (inputStream != null) { try { inputStream.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } }
From source file:org.transitime.custom.sfmta.delayTimes.Intersection.java
public static List<Intersection> readIntersections(String fileName) { List<Intersection> intersections = new ArrayList<Intersection>(); try {/*from w ww . jav a 2 s. c o m*/ Reader in = new BufferedReader(new InputStreamReader(new FileInputStream(fileName), "UTF-8")); CSVFormat formatter = CSVFormat.DEFAULT.withHeader().withCommentMarker('-'); // Parse the file Iterable<CSVRecord> records = formatter.parse(in); Iterator<CSVRecord> iterator = records.iterator(); while (iterator.hasNext()) { // Determine the record to process CSVRecord record = iterator.next(); Intersection i = getIntersection(record); intersections.add(i); } } catch (Exception e) { e.printStackTrace(); } return intersections; }
From source file:org.transitime.custom.sfmta.delayTimes.Loc.java
public static List<Loc> readLocs(String fileName) { List<Loc> locs = new ArrayList<Loc>(); try {//from w w w . j a v a 2 s . c o m Reader in = new BufferedReader(new InputStreamReader(new FileInputStream(fileName), "UTF-8")); CSVFormat formatter = CSVFormat.DEFAULT.withHeader().withCommentMarker('-'); // Parse the file Iterable<CSVRecord> records = formatter.parse(in); Iterator<CSVRecord> iterator = records.iterator(); while (iterator.hasNext()) { // Determine the record to process CSVRecord record = iterator.next(); Loc loc = getLoc(record); if (loc.accuracy < MAX_ALLOWED_ACCURACY) locs.add(loc); } } catch (Exception e) { e.printStackTrace(); } return locs; }
From source file:org.transitime.utils.csv.CsvBaseReader.java
/** * Parse the CSV file. Reads in the header info and then each line. Calls * the abstract handleRecord() method for each record. Adds each resulting * CSV object to the gtfsObjecgts array. *//*from www . j a v a2 s. co m*/ private void parse() { CSVRecord record = null; try { IntervalTimer timer = new IntervalTimer(); logger.debug("Parsing CSV file {} ...", fileName); // Open the file for reading. Use UTF-8 format since that will work // for both regular ASCII format and UTF-8 extended format files // since UTF-8 was designed to be backwards compatible with ASCII. // This way will work for Chinese and other character sets. Use // InputStreamReader so can specify that using UTF-8 format. Use // BufferedReader so that can determine if first character is an // optional BOM (Byte Order Mark) character used to indicate that // file is in UTF-8 format. BufferedReader allows us to read in // first character and then discard if it is a BOM character or // reset the reader to back to the beginning if it is not. This // way the CSV parser will process the file starting with the first // true character. Reader in = new BufferedReader(new InputStreamReader(new FileInputStream(fileName), "UTF-8")); // Deal with the possible BOM character at the beginning of the file in.mark(1); int firstRead = in.read(); final int BOM_CHARACTER = 0xFEFF; if (firstRead != BOM_CHARACTER) in.reset(); // Get ready to parse the CSV file. // Allow lines to be comments if they start with "-" so that can // easily comment out problems and also test what happens when // certain data is missing. Using the '-' character so can // comment out line that starts with "--", which is what is // used for SQL. CSVFormat formatter = CSVFormat.DEFAULT.withHeader().withCommentMarker('-'); // Parse the file Iterable<CSVRecord> records = formatter.parse(in); logger.debug("Finished CSV parsing of file {}. 
Took {} msec.", fileName, timer.elapsedMsec()); int lineNumberWhenLogged = 0; timer = new IntervalTimer(); IntervalTimer loggingTimer = new IntervalTimer(); Iterator<CSVRecord> iterator = records.iterator(); while (iterator.hasNext()) { // Determine the record to process record = iterator.next(); // If blank line then skip it. This way avoid error messages since // expected data column won't exist if (record.size() == 0) continue; // Process the record using appropriate handler // and create the corresponding CSV object T gtfsObject; try { gtfsObject = handleRecord(record, supplemental); } catch (ParseException e) { logger.error("ParseException occurred for record {} " + "(comment lines not included when determing record #) for " + "filename {} . {}", record.getRecordNumber(), fileName, e.getMessage()); // Continue even though there was an error so that all errors // logged at once. continue; } catch (NumberFormatException e) { logger.error("NumberFormatException occurred for record {} " + "(comment lines not included when determing record #) " + "for filename {} . {}", record.getRecordNumber(), fileName, e.getMessage()); // Continue even though there was an error so that all errors // logged at once. continue; } // Add the newly created CSV object to the object list if (gtfsObject != null) gtfsObjects.add(gtfsObject); // Log info if it has been a while. Check only every 20,000 // lines to see if the 10 seconds has gone by. If so, then log // number of lines. By only looking at timer every 20,000 lines // not slowing things down by for every line doing system call // for to get current time. 
final int LINES_TO_PROCESS_BEFORE_CHECKING_IF_SHOULD_LOG = 20000; final long SECONDS_ELSAPSED_UNTIL_SHOULD_LOG = 5; if (record.getRecordNumber() >= lineNumberWhenLogged + LINES_TO_PROCESS_BEFORE_CHECKING_IF_SHOULD_LOG) { lineNumberWhenLogged = (int) record.getRecordNumber(); if (loggingTimer.elapsedMsec() > SECONDS_ELSAPSED_UNTIL_SHOULD_LOG * Time.MS_PER_SEC) { logger.info(" Processed {} lines. Took {} msec...", lineNumberWhenLogged, timer.elapsedMsec()); loggingTimer = new IntervalTimer(); } } } // End of while iterating over records // Close up the file reader in.close(); // Determine number of records for logging message long numberRecords = 0; if (record != null) numberRecords = record.getRecordNumber(); logger.info("Finished parsing {} records from file {} . Took {} msec.", numberRecords, fileName, timer.elapsedMsec()); } catch (FileNotFoundException e) { if (required) logger.error("Required CSV file {} not found.", fileName); else logger.info("CSV file {} not found but OK because this file " + "not required.", fileName); } catch (IOException e) { logger.error("IOException occurred when reading in filename {}.", fileName, e); } }
From source file:org.wildfly.swarm.proc.CSVCollector.java
public CSVCollector(File file) { if (!file.getName().endsWith(".csv")) { throw new IllegalArgumentException("Illegal file name " + file.getAbsolutePath()); }/*from w w w. j a va2 s .c om*/ List<String> headerList = new ArrayList<>(); headerList.add(FULL_FILE_PATH_COLUMN); headerList.add(SHORT_FILE_NAME_COLUMN); for (Measure measure : Measure.values()) { headerList.add(measure.columnSamples()); headerList.add(measure.columnMin()); headerList.add(measure.columnMax()); headerList.add(measure.columnMean()); headerList.add(measure.columnStandardDeviation()); headerList.add(measure.columnMedian()); headerList.add(measure.column75Percentile()); } String[] header = headerList.toArray(new String[0]); try { Appendable output = Files.newBufferedWriter(file.toPath()); this.csvOutput = CSVFormat.DEFAULT.withHeader(header).print(output); this.csvOutput.flush(); } catch (IOException e) { throw new RuntimeException("Error accessing CSV file", e); } }
From source file:org.wildfly.swarm.proc.Monitor.java
private CSVParser loadCSV(File file) throws Exception { Reader input = Files.newBufferedReader(file.toPath()); return CSVFormat.DEFAULT.withHeader().parse(input); }
From source file:org.wso2.carbon.event.simulator.core.internal.generator.csv.util.CSVReader.java
/** * parseFile() method is used to parse the CSV file using the delimiter specified in CSV simulation Configuration * * @param delimiter delimiter to be used when parsing CSV file * @throws IOException if an error occurs when creating a CSVReader *//* w w w.j a v a 2s . c o m*/ private CSVParser parseFile(String delimiter) throws IOException { switch (delimiter) { case ",": return new CSVParser(fileReader, CSVFormat.DEFAULT); case ";": return new CSVParser(fileReader, CSVFormat.EXCEL); case "\\t": return new CSVParser(fileReader, CSVFormat.TDF); default: return new CSVParser(fileReader, CSVFormat.newFormat(delimiter.charAt(0))); } }
From source file:org.wso2.carbon.event.simulator.csvFeedSimulation.core.CSVFeedEventSimulator.java
/** * This method must be called within a synchronized block to avoid multiple file simulators from running simultaneously. * Read the values from uploaded CSV file and convert those values into event and send those events to * input handler/*from ww w. j ava2s. co m*/ * <p> * <p> * To read the CSV file It uses CSV parser Library. * {@link <a href="https://commons.apache.org/proper/commons-csv/apidocs/org/apache/commons/csv/CSVParser.html">CSVParser</a>} * </p> * <p> * <p> * CSV file can be separated by one of these fallowing character , , ; , \t by default * It has capability to have user defined delimiter * Any field may be quoted (with double quotes) * Fields with embedded commas or delimiter characters must be double quoted. * </p> * <p> * Initialize CSVParser * * @param executionPlanDto ExecutionPlanDto * @param csvFileConfig CSVFileSimulationDto */ private void sendEvent(ExecutionPlanDto executionPlanDto, CSVFileSimulationDto csvFileConfig) { /* return no of events read from CSV file during ever iteration */ long noOfEvents = 0; int delay = csvFileConfig.getDelay(); /* Reader for reading character streams from file */ Reader in = null; /* CSVParser to read CSV Values */ CSVParser csvParser = null; if (delay <= 0) { log.warn("Events will be sent continuously since the delay between events are set to " + delay + "milliseconds"); delay = 0; } try { /* Initialize Reader */ in = new FileReader(String.valueOf(Paths.get(System.getProperty("java.io.tmpdir"), csvFileConfig.getFileDto().getFileInfo().getFileName()))); /* Initialize CSVParser with appropriate CSVFormat according to delimiter */ switch (csvFileConfig.getDelimiter()) { case ",": csvParser = CSVParser.parse(in, CSVFormat.DEFAULT); break; case ";": csvParser = CSVParser.parse(in, CSVFormat.EXCEL); break; case "\\t": csvParser = CSVParser.parse(in, CSVFormat.TDF); break; default: csvParser = CSVParser.parse(in, CSVFormat.newFormat(csvFileConfig.getDelimiter().charAt(0))); } int attributeSize = 
executionPlanDto.getInputStreamDtoMap().get(csvFileConfig.getStreamName()) .getStreamAttributeDtos().size(); /* Iterate through the CSV file line by line */ for (CSVRecord record : csvParser) { try { synchronized (this) { if (isStopped) { isStopped = false; break; } if (isPaused) { this.wait(); } } if (record.size() != attributeSize) { log.warn("No of attribute is not equal to attribute size: " + attributeSize + " is needed" + "in Row no:" + noOfEvents + 1); } String[] attributes = new String[attributeSize]; noOfEvents = csvParser.getCurrentLineNumber(); for (int i = 0; i < record.size(); i++) { attributes[i] = record.get(i); } //convert Attribute values into event Event event = EventConverter.eventConverter(csvFileConfig.getStreamName(), attributes, executionPlanDto); // TODO: 13/12/16 delete sout System.out.println("Input Event " + Arrays.deepToString(event.getEventData())); // //send the event to input handler send(csvFileConfig.getStreamName(), event); //delay between two events if (delay > 0) { Thread.sleep(delay); } } catch (EventSimulationException e) { log.error("Event dropped due to Error occurred during generating an event" + e.getMessage()); } catch (InterruptedException e) { log.error("Error occurred during send event" + e.getMessage()); } } } catch (IllegalArgumentException e) { // TODO: 02/12/16 proper error message throw new EventSimulationException("File Parameters are null" + e.getMessage()); } catch (FileNotFoundException e) { throw new EventSimulationException( "File not found :" + csvFileConfig.getFileDto().getFileInfo().getFileName()); } catch (IOException e) { throw new EventSimulationException("Error occurred while reading the file"); } finally { try { if (in != null && csvParser != null) in.close(); csvParser.close(); } catch (IOException e) { throw new EventSimulationException("Error occurred during closing the file"); } } }