List of usage examples for com.fasterxml.jackson.dataformat.csv CsvSchema builder
public static Builder builder()
From source file:io.github.binout.jaxrs.csv.CsvSchemaFactory.java
static CsvSchema buildSchema(CsvMapper mapper, Class csvClass) { CsvAnnotationIntrospector introspector = new CsvAnnotationIntrospector(csvClass); char separatorChar = introspector.separator(); Optional<String[]> columns = introspector.columns(); CsvSchema csvSchema = mapper.schemaFor(csvClass).withColumnSeparator(separatorChar) .withSkipFirstDataRow(introspector.skipFirstDataRow()); if (columns.isPresent()) { // Rebuild columns to take account of order CsvSchema.Builder builder = csvSchema.rebuild().clearColumns(); for (String column : columns.get()) { CsvSchema.Column oldColumn = csvSchema.column(column); builder.addColumn(column, oldColumn.getType()); }//from www . java 2 s. c om csvSchema = builder.build(); } return csvSchema; }
From source file:hrytsenko.gscripts.io.CsvFiles.java
/**
 * Writes the given records to a CSV file, one header row followed by one row
 * per record.
 *
 * <p>
 * An already existing file is overwritten.
 *
 * @param records
 *            the records to write; nothing is written when the list is empty.
 * @param args
 *            the named arguments (path, charset, separator, qualifier).
 *
 * @throws AppException
 *             if the file could not be written.
 */
public static void saveCsv(List<Map<String, ?>> records, Map<String, ?> args) {
    if (records.isEmpty()) {
        LOGGER.info("No records to save.");
        return;
    }

    Path path = NamedArgs.findPath(args);
    LOGGER.info("Save {}.", path.getFileName());

    // Schema: user-supplied separator/qualifier, header row, one column per field.
    CsvSchema.Builder schema = schemaFrom(args).setUseHeader(true);
    Records.columns(records).forEach(schema::addColumn);

    try (Writer out = Files.newBufferedWriter(path, charsetFrom(args))) {
        CsvMapper mapper = new CsvMapper();
        mapper.configure(CsvGenerator.Feature.ALWAYS_QUOTE_STRINGS, true);
        mapper.writer().with(schema.build()).writeValue(out, Records.normalize(records));
    } catch (IOException cause) {
        throw new AppException(String.format("Could not save file %s.", path.getFileName()), cause);
    }
}
From source file:edu.cmu.cs.lti.discoursedb.io.mturk.converter.MturkConverter.java
License: not specified in the source listing
private CsvSchema.Builder mkCsvSchema(String fieldlist) { CsvSchema.Builder builder = new CsvSchema.Builder(); for (String colname : fieldlist.split(",")) { builder.addColumn(colname);//from w w w . j a va 2 s. c om } builder.setColumnSeparator(','); builder.setUseHeader(false); return builder; }
From source file:com.fortify.processrunner.file.ProcessorFileSubmitIssueForVulnerabilities.java
/**
 * Writes the collected vulnerability maps to a CSV file named after the
 * group, with one column per configured field and a header row.
 *
 * @throws RuntimeException if the file could not be written
 */
@Override
protected boolean processMaps(Context context, String groupName, List<Object> currentGroup,
        List<LinkedHashMap<String, Object>> listOfMaps) {
    // One CSV column per configured field, in field order.
    CsvSchema.Builder schemaBuilder = CsvSchema.builder();
    getFields().keySet().forEach(schemaBuilder::addColumn);
    CsvSchema schema = schemaBuilder.build().withHeader();

    try {
        new CsvMapper().writer(schema).writeValue(new File(groupName), listOfMaps);
    } catch (Exception e) {
        throw new RuntimeException("Error writing data to file " + groupName, e);
    }

    LOG.info(String.format("[File] Submitted %d vulnerabilities to %s", currentGroup.size(), groupName));
    return true;
}
From source file:hrytsenko.csv.IO.java
/** * Saves records into CSV file./*from w w w . j ava 2 s . c o m*/ * * <p> * If file already exists, then it will be overridden. * * @param args * the named arguments {@link IO}. * * @throws IOException * if file could not be written. */ public static void save(Map<String, ?> args) throws IOException { Path path = getPath(args); LOGGER.info("Save: {}.", path.getFileName()); @SuppressWarnings("unchecked") Collection<Record> records = (Collection<Record>) args.get("records"); if (records.isEmpty()) { LOGGER.info("No records to save."); return; } try (Writer dataWriter = newBufferedWriter(path, getCharset(args), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) { Set<String> columns = new LinkedHashSet<>(); List<Map<String, String>> rows = new ArrayList<>(); for (Record record : records) { Map<String, String> values = record.values(); columns.addAll(values.keySet()); rows.add(values); } CsvSchema.Builder csvSchema = getSchema(args).setUseHeader(true); for (String column : columns) { csvSchema.addColumn(column); } CsvMapper csvMapper = new CsvMapper(); ObjectWriter csvWriter = csvMapper.writer().withSchema(csvSchema.build()); csvWriter.writeValue(dataWriter, rows); } }
From source file:hrytsenko.gscripts.io.CsvFiles.java
/**
 * Creates a schema builder preconfigured with the column separator and text
 * qualifier taken from the named arguments, falling back to the defaults.
 *
 * @param args the named arguments.
 * @return the preconfigured schema builder.
 * @throws IllegalArgumentException if separator or qualifier is not exactly one character.
 */
private static CsvSchema.Builder schemaFrom(Map<String, ?> args) {
    String separator = NamedArgs.tryFind(args, SEPARATOR).orElse(SEPARATOR_DEFAULT);
    Preconditions.checkArgument(separator.length() == 1, "Invalid separator.");

    String qualifier = NamedArgs.tryFind(args, QUALIFIER).orElse(QUALIFIER_DEFAULT);
    Preconditions.checkArgument(qualifier.length() == 1, "Invalid qualifier.");

    CsvSchema.Builder builder = CsvSchema.builder();
    builder.setColumnSeparator(separator.charAt(0));
    builder.setQuoteChar(qualifier.charAt(0));
    return builder;
}
From source file:eu.scape_project.cdx_creator.CDXCreationTask.java
/**
 * Reads every record of the archive file and writes a CDX index for it,
 * either to the configured output path (appending) or to standard out.
 *
 * <p>The index is space-separated, unquoted, one line per archive record,
 * preceded by a header line; only the configured CDX columns are emitted
 * (enforced via the "cdxfields" Jackson filter).
 */
public void createIndex() {
    FileInputStream fileInputStream = null;
    ArchiveReader reader = null;
    // NOTE(review): outputStream is never assigned anywhere in this method —
    // the close in the finally block is dead code.
    FileOutputStream outputStream = null;
    try {
        fileInputStream = new FileInputStream(archiveFile);
        // NOTE(review): reader is never closed; if ArchiveReader is Closeable,
        // it should be closed in the finally block as well — confirm.
        reader = ArchiveReaderFactory.getReader(fileInputStream, this.archiveFileName);
        reader.setComputePayloadDigest(config.isCreatePayloadDigest());
        // Convert each archive record into a CDX record, tagging it with the
        // container file name and total container length.
        List<CdxArchiveRecord> cdxArchRecords = new ArrayList<CdxArchiveRecord>();
        while (reader.hasNext()) {
            ArchiveRecord archRec = (ArchiveRecord) reader.next();
            CdxArchiveRecord cdxArchRec = CdxArchiveRecord.fromArchiveRecord(archRec);
            cdxArchRec.setContainerFileName(archiveFileName);
            cdxArchRec.setContainerLengthStr(Long.toString(archiveFile.length()));
            cdxArchRecords.add(cdxArchRec);
        }
        CsvMapper mapper = new CsvMapper();
        mapper.setDateFormat(GMTGTechDateFormat);
        // Configured column list, e.g. "a, b,c" — split tolerates whitespace.
        String cdxfileCsColumns = config.getCdxfileCsColumns();
        List<String> cdxfileCsColumnsList = Arrays.asList(cdxfileCsColumns.split("\\s*,\\s*"));
        String[] cdxfileCsColumnsArray = cdxfileCsColumnsList.toArray(new String[cdxfileCsColumnsList.size()]);
        CsvSchema.Builder builder = CsvSchema.builder();
        for (String cdxField : cdxfileCsColumnsList) {
            builder.addColumn(cdxField);
        }
        // CDX format: space-separated, no quoting.
        builder.setColumnSeparator(' ');
        CsvSchema schema = builder.build();
        schema = schema.withoutQuoteChar();
        // Serialize only the configured CDX fields of each record.
        SimpleFilterProvider filterProvider = new SimpleFilterProvider().addFilter("cdxfields",
                FilterExceptFilter.filterOutAllExcept(cdxfileCsColumnsArray));
        ObjectWriter cdxArchRecordsWriter = mapper.writer(filterProvider).withSchema(schema);
        PrintStream pout = null;
        String outputPathStr = config.getOutputStr();
        if (outputPathStr != null) {
            // Redirect System.out to the output file (append mode) so the
            // header println and writeValue below go to the file.
            // NOTE(review): System.setOut is never restored afterwards, and fos
            // leaks if the PrintStream construction fails — consider writing to
            // the stream directly instead of redirecting System.out.
            FileOutputStream fos;
            try {
                fos = new FileOutputStream(outputPathStr, true);
                pout = new PrintStream(fos);
                System.setOut(pout);
            } catch (FileNotFoundException ex) {
                LOG.error("File not found error", ex);
            }
        }
        // Header line, prefixed with a single space.
        System.out.println(" " + config.getCdxfileCsHeader());
        cdxArchRecordsWriter.writeValue(System.out, cdxArchRecords);
        if (pout != null) {
            pout.close();
        }
    } catch (FileNotFoundException ex) {
        LOG.error("File not found error", ex);
    } catch (IOException ex) {
        LOG.error("I/O Error", ex);
    } finally {
        // Best-effort cleanup of the input side; errors are only logged.
        try {
            if (fileInputStream != null) {
                fileInputStream.close();
            }
            if (outputStream != null) {
                outputStream.close();
            }
        } catch (IOException ex) {
            LOG.error("I/O Error", ex);
        }
    }
}
From source file:datadidit.helpful.hints.processors.csv.converter.ConvertCSVToJSON.java
@OnScheduled public void onScheduled(final ProcessContext context) throws ConfigurationException { //Retrieve properties from context Boolean header = context.getProperty(HEADER).asBoolean(); String fieldNames = context.getProperty(FIELD_NAMES).getValue(); /*//ww w .j a v a 2s .c om * Create Schema based on properties from user. */ if (!header && fieldNames != null) { Builder build = CsvSchema.builder(); for (String field : fieldNames.split(",")) { build.addColumn(field, CsvSchema.ColumnType.NUMBER_OR_STRING); } schema = build.build(); } else if (header && fieldNames != null && !fieldNames.equals("")) { schema = this.buildCsvSchema(fieldNames, header); } else if (!header && fieldNames == null) { throw new ConfigurationException("File must either contain headers or you must provide them.."); } else { schema = CsvSchema.emptySchema().withHeader(); } }
From source file:hrytsenko.csv.IO.java
static CsvSchema.Builder getSchema(Map<String, ?> args) { CharSequence separator = (CharSequence) args.get("separator"); if (separator == null) { separator = ","; }//w w w.ja v a2 s .c o m if (separator.length() != 1) { throw new IllegalArgumentException("Use single character as separator."); } CharSequence qualifier = (CharSequence) args.get("qualifier"); if (qualifier == null) { qualifier = "\""; } if (qualifier.length() != 1) { throw new IllegalArgumentException("Use single character as qualifier."); } CsvSchema.Builder schema = CsvSchema.builder(); schema.setColumnSeparator(separator.charAt(0)); schema.setQuoteChar(qualifier.charAt(0)); return schema; }
From source file:org.jberet.support.io.JacksonCsvItemReaderWriterBase.java
protected CsvSchema buildCsvSchema(CsvSchema schema) throws Exception { if (schema == null) { columns = columns.trim();// w w w.j a va 2 s.c o m if (columns.indexOf(',') < 0 && columns.indexOf(' ') < 0) { //no comma and no space, assume it's java class name for schema schema = csvMapper.schemaFor(getClass().getClassLoader().loadClass(columns)); } else { //manually build CsvSchema final String[] cols = columns.split(","); final CsvSchema.Builder builder = new CsvSchema.Builder(); for (String e : cols) { e = e.trim(); final int lastSpace = e.lastIndexOf(' '); if (lastSpace > 0) { final String e1 = e.substring(0, lastSpace).trim(); final String e2 = e.substring(lastSpace + 1); builder.addColumn(e1, CsvSchema.ColumnType.valueOf(e2)); } else { builder.addColumn(e); } } schema = builder.build(); } } schema = useHeader ? schema.withHeader() : schema.withoutHeader(); if (columnSeparator != null) { schema = schema.withColumnSeparator(columnSeparator.charAt(0)); } if (quoteChar != null) { schema = schema.withQuoteChar(quoteChar.charAt(0)); } if (nullValue != null) { schema = schema.withNullValue(nullValue); } //to allow comments like "# this is comments". //comments can be enabled or disabled with com.fasterxml.jackson.core.JsonParser.Feature.ALLOW_YAML_COMMENTS //which corresponds to batch property jsonParserFeatures return schema.withComments(); }