List of usage examples for com.fasterxml.jackson.dataformat.csv CsvMapper reader
public ObjectReader reader(DeserializationFeature feature)
From source file: ed.cracken.code.SimpleTestExp1.java
/**
 * Reads all rows of a CSV file (first row used as the header) into a list of maps.
 *
 * @param file path of the CSV file to read
 * @return one map per data row, keyed by the header column names
 * @throws IOException if the file cannot be opened or parsed
 */
public static List<Map<?, ?>> readObjectsFromCsv(String file) throws IOException {
    // Empty schema + withHeader(): column names are taken from the first row.
    CsvSchema bootstrap = CsvSchema.emptySchema().withHeader();
    CsvMapper csvMapper = new CsvMapper();
    // BUG FIX: readValues(String) parses the *string itself* as CSV content;
    // wrap the path in a File so the named file is actually opened.
    MappingIterator<Map<?, ?>> mappingIterator =
            csvMapper.reader(Map.class).with(bootstrap).readValues(new File(file));
    return mappingIterator.readAll();
}
From source file: db.migration.util.DbUtil.java
/**
 * Streams the rows of a CSV resource, yielding one String array per line.
 *
 * @param csvUrl location of the CSV data
 * @return an iterator over the parsed rows
 * @throws Exception if the resource cannot be opened or parsed
 */
public static Iterator<Object[]> readCsv(URL csvUrl) throws Exception {
    CsvMapper csvMapper = new CsvMapper();
    // Surface each physical line as an array rather than a mapped object.
    csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
    return csvMapper.reader(String[].class).readValues(csvUrl);
}
From source file: hrytsenko.csv.IO.java
/**
 * Gets records from the CSV file named in {@code args}.
 *
 * <p>The first row supplies the column names; a byte-order mark, if present,
 * is stripped before decoding. If a closure is given, it is applied to each
 * record as it is loaded.
 *
 * @param args    the named arguments {@link IO} (path, charset, schema options).
 * @param closure the closure to be applied to each record, or {@code null}.
 * @return the loaded records.
 * @throws IOException if the file could not be read.
 */
public static List<Record> load(Map<String, ?> args, Closure<?> closure) throws IOException {
    Path path = getPath(args);
    LOGGER.info("Load: {}.", path.getFileName());
    // try-with-resources: the reader chain is closed innermost-first on exit.
    try (InputStream dataStream = newInputStream(path, StandardOpenOption.READ);
            // BOMInputStream (commons-io) removes a leading BOM so it is not
            // read as part of the first header name.
            InputStream bomStream = new BOMInputStream(dataStream);
            Reader dataReader = new InputStreamReader(bomStream, getCharset(args))) {
        // The first row of the file is consumed as the header.
        CsvSchema csvSchema = getSchema(args).setUseHeader(true).build();
        CsvMapper csvMapper = new CsvMapper();
        ObjectReader csvReader = csvMapper.reader(Map.class).with(csvSchema);
        Iterator<Map<String, String>> rows = csvReader.readValues(dataReader);
        List<Record> records = new ArrayList<>();
        while (rows.hasNext()) {
            Map<String, String> row = rows.next();
            Record record = new Record();
            record.putAll(row);
            records.add(record);
            // Optional per-record callback supplied by the caller.
            if (closure != null) {
                closure.call(record);
            }
        }
        return records;
    }
}
From source file: org.apache.nifi.processors.ParseCSV.ParseCSV.java
/**
 * Parses a CSV stream whose first row is a header into a list of maps.
 *
 * @param is the stream containing CSV data
 * @return one map per data row, keyed by the header column names
 * @throws IOException if the stream cannot be read or parsed
 */
public static List<Map<?, ?>> readObjectsFromCsv(InputStream is) throws IOException {
    CsvMapper mapper = new CsvMapper();
    // Header-driven schema: column names come from the first row.
    CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();
    MappingIterator<Map<?, ?>> rows = mapper.reader(Map.class).with(headerSchema).readValues(is);
    return rows.readAll();
}
From source file: ro.fortsoft.dataset.csv.CsvDataSet.java
/**
 * Builds a mapping iterator that yields one map per CSV row from the stream.
 *
 * @param inputStream the CSV data to iterate over
 * @return an iterator of row maps
 * @throws DataSetException if the stream cannot be parsed
 */
protected MappingIterator<Map<String, Object>> createMappingIterator(InputStream inputStream) {
    ObjectReader rowReader = createCsvMapper().reader(Map.class).with(createCsvSchema());
    try {
        return rowReader.readValues(inputStream);
    } catch (IOException e) {
        // Translate the checked I/O failure into the data-set domain exception.
        throw new DataSetException(e);
    }
}
From source file: io.github.binout.jaxrs.csv.CsvMessageBodyProvider.java
/**
 * Deserializes a CSV request body into a list of the collection's element type.
 *
 * <p>The declared generic type is expected to be {@code List<X>}; {@code X} is
 * extracted and used as the per-row target class.
 *
 * @return the parsed rows as a {@code List} of the CSV-mapped element class
 * @throws IOException if the body cannot be read or parsed
 */
@Override
public Object readFrom(Class<Object> aClass, Type type, Annotation[] annotations, MediaType mediaType,
        MultivaluedMap<String, String> multivaluedMap, InputStream inputStream)
        throws IOException, WebApplicationException {
    CsvMapper mapper = new CsvMapper();
    // FIX: use Class<?> instead of a raw Class to avoid the raw-type warning.
    Class<?> csvClass = (Class<?>) ((ParameterizedType) type).getActualTypeArguments()[0];
    CsvSchema schema = CsvSchemaFactory.buildSchema(mapper, csvClass);
    return mapper.reader(csvClass).with(schema).readValues(inputStream).readAll();
}
From source file: com.marklogic.client.test.JacksonDatabindTest.java
/** Demonstrate using Jackson's CSV mapper directly to simplify reading in data, populating a * third-party pojo (one we cannot annotate) then writing it out * via JacksonDatabindHandle with configuration provided by mix-in annotations. **//*from ww w. j a v a2s . c o m*/ @Test public void testDatabindingThirdPartyPojoWithMixinAnnotations() throws JsonProcessingException, IOException { CsvSchema schema = CsvSchema.builder().setColumnSeparator('\t').addColumn("geoNameId").addColumn("name") .addColumn("asciiName").addColumn("alternateNames") .addColumn("latitude", CsvSchema.ColumnType.NUMBER) .addColumn("longitude", CsvSchema.ColumnType.NUMBER).addColumn("featureClass") .addColumn("featureCode").addColumn("countryCode").addColumn("countryCode2").addColumn("adminCode1") .addColumn("adminCode2").addColumn("adminCode3").addColumn("adminCode4").addColumn("population") .addColumn("elevation", CsvSchema.ColumnType.NUMBER).addColumn("dem", CsvSchema.ColumnType.NUMBER) .addColumn("timezoneCode").addColumn("lastModified").build(); CsvMapper mapper = new CsvMapper(); mapper.addMixInAnnotations(Toponym.class, ToponymMixIn1.class); ObjectReader reader = mapper.reader(Toponym.class).with(schema); BufferedReader cityReader = new BufferedReader(Common.testFileToReader(CITIES_FILE)); GenericDocumentManager docMgr = Common.client.newDocumentManager(); DocumentWriteSet set = docMgr.newWriteSet(); String line = null; for (int numWritten = 0; numWritten < MAX_TO_WRITE && (line = cityReader.readLine()) != null; numWritten++) { Toponym city = reader.readValue(line); JacksonDatabindHandle handle = new JacksonDatabindHandle(city); handle.getMapper().addMixInAnnotations(Toponym.class, ToponymMixIn2.class); set.add(DIRECTORY + "/thirdPartyJsonCities/" + city.getGeoNameId() + ".json", handle); } docMgr.write(set); cityReader.close(); // we can add assertions later, for now this test just serves as example code and // ensures no exceptions are thrown }
From source file: ro.fortsoft.dada.csv.CsvGenericDao.java
public long readFromCsv() { File file = new File(csvFile); if (!file.exists() || !file.isFile()) { return 0; }//from w ww . jav a 2s. co m Class<T> persistentClass = getPersistentClass(); // create mapper and schema CsvMapper mapper = new CsvMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); CsvSchema schema = mapper.schemaFor(persistentClass).withHeader(); this.entities = new ArrayList<>(); // read entities long count = 0; try { MappingIterator<T> it = mapper.reader(persistentClass).with(schema).readValues(file); while (it.hasNextValue()) { entities.add(it.nextValue()); count++; } } catch (IOException e) { e.printStackTrace(); } return count; }
From source file: nl.esciencecenter.ptk.csv.CSVData.java
public void parseText(String csvText) throws IOException { // Extended CSV ! // Pass I: remove comments including the ending newline! Pattern pat = Pattern.compile("^#.*\n", Pattern.MULTILINE); csvText = pat.matcher(csvText).replaceAll(""); // todo: check how jackson can parse alternative field separators; if (fieldSeparators != null) { // csvText=csvText.replaceAll(",","_"); for (String sep : fieldSeparators) { // lazy replace csvText = csvText.replaceAll(sep, ","); }/*from ww w . j a va2 s .c o m*/ } // Not needed: Pass II: remove empty lines as a result of the // pat=Pattern.compile("\n\n",Pattern.MULTILINE); // newTxt=pat.matcher(newTxt).replaceAll(""); // ObjectMapper mapper=new ObjectMapper(); CsvMapper mapper = new CsvMapper(); mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); MappingIterator<Object[]> it = mapper.reader(Object[].class).readValues(csvText); if (it.hasNext() == false) { throw new IOException("Empty text or csv text contains no headers!"); } // read header: Object headers[] = it.next(); StringList list = new StringList(); for (int i = 0; i < headers.length; i++) { list.add(headers[i].toString()); } logger.debugPrintf("Headers=%s\n", list.toString("<>")); headerList = list; data = new ArrayList<String[]>(); // check header values. while (it.hasNext()) { Object line[] = it.next(); String row[] = new String[line.length]; for (int j = 0; j < line.length; j++) { Object value = line[j]; if (value != null) { row[j] = value.toString(); } } data.add(row); } logger.debugPrintf("Read %d number of rows\n", data.size()); }
From source file: com.couchbase.devex.CSVConfig.java
/**
 * Starts the CSV import: reads the configured file row by row and emits one
 * Document per row via {@code createNode}.
 *
 * <p>NOTE(review): the FileInputStream is consumed lazily by the Observable and
 * is never closed, even on completion or error after this method returns —
 * consider {@code Observable.using()} so the stream is released; TODO confirm.
 */
@Override
public Observable<Document> startImport() {
    FileInputStream csvFile;
    try {
        csvFile = new FileInputStream(getCsvFilePath());
        CsvMapper mapper = new CsvMapper();
        // Surface each CSV line as a String[] rather than a mapped object.
        mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        CsvSchema csvSchema = CsvSchema.emptySchema().withColumnSeparator(getColumnSeparator())
                .withQuoteChar(getQuoteChar());
        ObjectReader reader = mapper.reader(String[].class);
        MappingIterator<String[]> it = reader.with(csvSchema).readValues(csvFile);
        // When the flag is unset, the first line is consumed as column names.
        // (The negation reads inverted for a flag named "skipFirstLineForNames"
        // — assumed intentional in this project; verify against callers.)
        if (!getSkipFirstLineForNames()) {
            String[] firstline = it.next();
            updateColumnNames(firstline);
        }
        // Wrap the remaining rows as an Observable and map each to a node.
        return Observable.from(new Iterable<String[]>() {
            @Override
            public Iterator<String[]> iterator() {
                return it;
            }
        }).flatMap(line -> createNode(line));
    } catch (FileNotFoundException e) {
        return Observable.error(e);
    } catch (IOException e) {
        return Observable.error(e);
    }
}