List of usage examples for com.fasterxml.jackson.dataformat.csv CsvMapper schemaFor
public final CsvSchema schemaFor(TypeReference<?> pojoTypeRef)
From source file: io.github.binout.jaxrs.csv.CsvSchemaFactory.java
static CsvSchema buildSchema(CsvMapper mapper, Class csvClass) { CsvAnnotationIntrospector introspector = new CsvAnnotationIntrospector(csvClass); char separatorChar = introspector.separator(); Optional<String[]> columns = introspector.columns(); CsvSchema csvSchema = mapper.schemaFor(csvClass).withColumnSeparator(separatorChar) .withSkipFirstDataRow(introspector.skipFirstDataRow()); if (columns.isPresent()) { // Rebuild columns to take account of order CsvSchema.Builder builder = csvSchema.rebuild().clearColumns(); for (String column : columns.get()) { CsvSchema.Column oldColumn = csvSchema.column(column); builder.addColumn(column, oldColumn.getType()); }// w w w. j a v a 2 s . c o m csvSchema = builder.build(); } return csvSchema; }
From source file: fi.helsinki.opintoni.config.http.converter.CsvHttpMessageConverter.java
/**
 * Serializes every entry of the response into one CSV string by concatenating
 * the per-entry rows produced by {@code toCsvRow}.
 *
 * @param response the response holding the entry type and the entries to render
 * @return the concatenated CSV rows (no extra separator is inserted between rows)
 */
private String getCsv(CsvResponse<T> response) {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(response.getType());
    StringBuilder csv = new StringBuilder();
    for (T entry : response.entries) {
        csv.append(toCsvRow(mapper, schema, entry));
    }
    return csv.toString();
}
From source file: ro.fortsoft.dada.csv.CsvGenericDao.java
public long writeToCsv() { Class<T> persistentClass = getPersistentClass(); // create mapper and schema CsvMapper mapper = new CsvMapper(); CsvSchema schema = mapper.schemaFor(persistentClass).withHeader(); // write entities long count = 0; try {/*from w ww.ja v a 2s . c o m*/ mapper.writer().with(schema).writeValue(new File(csvFile), entities); count = entities.size(); } catch (IOException e) { e.printStackTrace(); } return count; }
From source file: edu.cmu.cs.lti.discoursedb.io.bazaar.converter.BazaarConverter.java
/**
 * Converts a Bazaar chat-room dump into DiscourseDB entities in two phases:
 * first all rooms from {@code roomFileDir} are mapped, then each message from
 * {@code messageFileDir} is mapped either as a message ("text"/"image"/"private")
 * or as an interaction, using the room-id-to-name map built in phase 1.
 *
 * @param messageFileDir path to the message CSV file
 * @param roomFileDir    path to the room CSV file
 * @param agentname      name of the chat agent whose quoted mentions are unescaped
 * @throws ParseException on date parsing failures (presumably inside the mapping
 *                        services — not visible here; confirm)
 * @throws IOException    on message-file read/parse failures (room-file errors are
 *                        caught and logged instead)
 */
private void convert(String messageFileDir, String roomFileDir, String agentname)
        throws ParseException, IOException {
    Map<String, String> roomIdNameMap = new HashMap<>();
    List<String> messages = new ArrayList<>();
    // Read the message file and preprocess each line before CSV parsing.
    String lineFragment = null;
    for (String line : FileUtils.readLines(new File(messageFileDir))) {
        // Line fragments occur when a column value itself contains line feeds:
        // glue the pending fragment onto the current line.
        if (lineFragment != null) {
            line = lineFragment + line;
            lineFragment = null;
        }
        if (line.endsWith("\\") || line.endsWith("\\\r\f")) {
            // Trailing backslash marks a continued record; strip the CR/FF pair
            // and hold the partial line until the next iteration.
            line = line.replaceAll("\\\r\f", "");
            lineFragment = line;
        } else {
            // Normalize escaped-quote sequences around known phrases and the
            // agent's name so the CSV parser does not misinterpret them.
            // NOTE(review): the replaceAll patterns mix literal and regex
            // backslash escaping — behavior depends on the exact dump format;
            // left byte-identical on purpose.
            if (line.contains("\\\"We're Ready\\\"")) {
                line = line.replaceAll("\"We're Ready\\\\\"", "We're Ready\\\\");
            }
            if (line.contains("\\\"ready\\\"")) {
                line = line.replaceAll("\\\\\"ready\\\\\"", "\\\\ready\\\\");
            }
            if (line.contains("\\\"" + agentname + "\\\"")) {
                line = line.replaceAll("\\\\\"" + agentname + "\\\\\"", "\\\\" + agentname + "\\\\");
            }
            messages.add(line);
        }
    }

    // Phase 1: read through the room file once and map all room entities,
    // remembering the first name seen for each room id.
    try (InputStream in = new FileInputStream(roomFileDir)) {
        CsvMapper mapper = new CsvMapper();
        CsvSchema schema = mapper.schemaFor(Room.class).withColumnSeparator(',');
        MappingIterator<Room> rIter = mapper.readerFor(Room.class).with(schema).readValues(in);
        while (rIter.hasNextValue()) {
            Room r = rIter.next();
            if (!roomIdNameMap.containsKey(r.getId()))
                roomIdNameMap.put(r.getId(), r.getName());
            converterService.mapRoom(r, dataSetName, discourseName);
        }
    } catch (IOException e) {
        // Room-file failures are logged but do not abort message processing.
        log.error("Error reading room file", e);
    }

    // Phase 2: parse each preprocessed message line and map the relationship
    // between room and message.
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(Message.class).withColumnSeparator(',');
    for (String message : messages) {
        Message m = mapper.readerFor(Message.class).with(schema).readValue(message);
        if (m.getType().equals("text") || m.getType().equals("image") ||
                m.getType().equals("private")) {
            converterService.mapMessage(m, dataSetName, discourseName, roomIdNameMap);
        } else {
            // Any other type (e.g. system events) is treated as an interaction.
            converterService.mapInteraction(m, dataSetName, discourseName, roomIdNameMap);
        }
    }
}
From source file: ro.fortsoft.dada.csv.CsvGenericDao.java
public long readFromCsv() { File file = new File(csvFile); if (!file.exists() || !file.isFile()) { return 0; }/*from w w w . j a v a2s.c om*/ Class<T> persistentClass = getPersistentClass(); // create mapper and schema CsvMapper mapper = new CsvMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); CsvSchema schema = mapper.schemaFor(persistentClass).withHeader(); this.entities = new ArrayList<>(); // read entities long count = 0; try { MappingIterator<T> it = mapper.reader(persistentClass).with(schema).readValues(file); while (it.hasNextValue()) { entities.add(it.nextValue()); count++; } } catch (IOException e) { e.printStackTrace(); } return count; }
From source file: nl.esciencecenter.medim.dicom.types.DicomTags.java
/**
 * Parses a textual DICOM tag table into {@code this.dicomTags}.
 * The text is first stripped of {@code #}-comment lines, then parsed as CSV
 * (one {@link CsvTagLine} per row, first row treated as header). Each row is
 * converted into a {@link TagDirective} whose processing option is derived
 * from the "keep" and "options" columns.
 *
 * @param txt the raw tag-table text
 * @throws IOException when a row's option value cannot be parsed
 */
protected void readFromText(String txt) throws IOException {
    // Pass I: remove '#' comment lines including their terminating newline,
    // so no empty lines are left behind (a separate empty-line pass is not needed).
    Pattern pat = Pattern.compile("^#.*\n", Pattern.MULTILINE);
    String newTxt = pat.matcher(txt).replaceAll("");

    // Columns are taken from the CsvTagLine class annotations
    // (group, element, VR, name, ...).
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(CsvTagLine.class);

    MappingIterator<CsvTagLine> mi = mapper.reader(CsvTagLine.class).with(schema).readValues(newTxt);
    List<TagDirective> tags = new ArrayList<TagDirective>();

    // Skip the first row: it is the header.
    // NOTE(review): header values are not validated — TODO confirm intended.
    CsvTagLine header = mi.nextValue();
    while (mi.hasNextValue()) {
        CsvTagLine line = mi.nextValue();
        TagDirective tag = new TagDirective();
        // Combine group and element (both hex) into a single 32-bit tag number:
        // group in the high 16 bits, element in the low 16 bits.
        tag.tagNr = StringUtil.parseHexidecimal(line.group) * 0x10000
                + StringUtil.parseHexidecimal(line.element);
        tag.name = line.name;
        line.keep = StringUtil.stripWhiteSpace(line.keep);
        line.options = StringUtil.stripWhiteSpace(line.options);
        // Support the pseudo-VR "OX": treat it as OB (bytes).
        if (StringUtil.equalsIgnoreCase(line.VR, "OX"))
            line.VR = "OB"; // treat as bytes;
        VRType vrType = VRType.valueOf(line.VR);
        tag.vr = vrType.vr();
        // "keep" column: blank or <= 0 means the tag is deleted.
        boolean keep = false;
        if (StringUtil.isWhiteSpace(line.keep) == false)
            keep = (Integer.parseInt(line.keep) > 0);
        if (keep == false) {
            tag.option = TagProcessingOption.DELETE;
        } else {
            // Kept tags: a non-blank options column selects the processing
            // option; blank defaults to KEEP.
            if (StringUtil.isWhiteSpace(line.options) == false) {
                tag.option = TagProcessingOption.valueOfOrNull(line.options, true);
                // valueOfOrNull returns null on parse failure.
                if (tag.option == null) {
                    throw new IOException("Parse Error: could not parse Tag Option:" + line.options);
                }
            } else {
                tag.option = TagProcessingOption.KEEP; // no option -> keep.
            }
        }
        tags.add(tag);
    }

    // POST: register all parsed directives, keyed by tag number.
    for (int i = 0; i < tags.size(); i++) {
        TagDirective tag = tags.get(i);
        this.dicomTags.put(tag.tagNr, tag); // register
    }
}
From source file: org.restlet.ext.jackson.JacksonRepresentation.java
/**
 * Creates a Jackson CSV schema based on a mapper and the current object class.
 *
 * @param csvMapper
 *            The source CSV mapper.
 * @return A Jackson CSV schema for {@code getObjectClass()}.
 */
protected CsvSchema createCsvSchema(CsvMapper csvMapper) {
    return csvMapper.schemaFor(getObjectClass());
}