Usage examples for com.fasterxml.jackson.databind.ObjectMapper#registerModule
public ObjectMapper registerModule(Module module)
From source file: com.rcv.ResultsWriter.java
private void generateSummaryJson(Map<Integer, Map<String, BigDecimal>> roundTallies, String precinct, String outputPath) throws IOException { // mapper converts java objects to json ObjectMapper mapper = new ObjectMapper(); // set mapper to order keys alphabetically for more legible output mapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true); // create a module to contain a serializer for BigDecimal serialization SimpleModule module = new SimpleModule(); module.addSerializer(BigDecimal.class, new ToStringSerializer()); // attach serializer to mapper mapper.registerModule(module); // jsonWriter writes those object to disk ObjectWriter jsonWriter = mapper.writer(new DefaultPrettyPrinter()); // jsonPath for output json summary String jsonPath = outputPath + ".json"; // log output location Logger.log(Level.INFO, "Generating summary JSON file: %s...", jsonPath); // outFile is the target file File outFile = new File(jsonPath); // root outputJson dict will have two entries: // results - vote totals, transfers, and candidates elected / eliminated // config - global config into HashMap<String, Object> outputJson = new HashMap<>(); // config will contain contest configuration info HashMap<String, Object> configData = new HashMap<>(); // add config header info configData.put("contest", config.getContestName()); configData.put("jurisdiction", config.getContestJurisdiction()); configData.put("office", config.getContestOffice()); configData.put("date", config.getContestDate()); configData.put("threshold", winningThreshold); if (precinct != null && !precinct.isEmpty()) { configData.put("precinct", precinct); }//from w w w. ja v a 2s. 
c o m // results will be a list of round data objects ArrayList<Object> results = new ArrayList<>(); // for each round create objects for json serialization for (int round = 1; round <= numRounds; round++) { // container for all json data this round: HashMap<String, Object> roundData = new HashMap<>(); // add round number (this is implied by the ordering but for debugging we are explicit) roundData.put("round", round); // add actions if this is not a precinct summary if (precinct == null || precinct.isEmpty()) { // actions is a list of one or more action objects ArrayList<Object> actions = new ArrayList<>(); addActionObjects("elected", roundToWinningCandidates.get(round), round, actions); // add any elimination actions addActionObjects("eliminated", roundToEliminatedCandidates.get(round), round, actions); // add action objects roundData.put("tallyResults", actions); } // add tally object roundData.put("tally", updateCandidateNamesInTally(roundTallies.get(round))); // add roundData to results list results.add(roundData); } // add config data to root object outputJson.put("config", configData); // add results to root object outputJson.put("results", results); // write results to disk try { jsonWriter.writeValue(outFile, outputJson); } catch (IOException exception) { Logger.log(Level.SEVERE, "Error writing to JSON file: %s\n%s", jsonPath, exception.toString()); throw exception; } }
From source file: org.versly.rest.wsdoc.AnnotationProcessor.java
String jsonSchemaFromTypeMirror(TypeMirror type) { String serializedSchema = null; if (type.getKind().isPrimitive() || type.getKind() == TypeKind.VOID) { return null; }//ww w . j a v a2s. c om // we need the dto class to generate schema using jackson json-schema module // note: Types.erasure() provides canonical names whereas Class.forName() wants a "regular" name, // so forName will fail for nested and inner classes as "regular" names use $ between parent and child. Class dtoClass = null; StringBuffer erasure = new StringBuffer(_typeUtils.erasure(type).toString()); for (boolean done = false; !done;) { try { dtoClass = Class.forName(erasure.toString()); done = true; } catch (ClassNotFoundException e) { if (erasure.lastIndexOf(".") != -1) { erasure.setCharAt(erasure.lastIndexOf("."), '$'); } else { done = true; } } } // if we were able to figure out the dto class, use jackson json-schema module to serialize it Exception e = null; if (dtoClass != null) { try { ObjectMapper m = new ObjectMapper(); m.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING); m.registerModule(new JodaModule()); SchemaFactoryWrapper visitor = new SchemaFactoryWrapper(); m.acceptJsonFormatVisitor(m.constructType(dtoClass), visitor); serializedSchema = m.writeValueAsString(visitor.finalSchema()); } catch (Exception ex) { e = ex; } } // report warning if we were not able to generate schema for non-primitive type if (serializedSchema == null) { this.processingEnv.getMessager().printMessage(Diagnostic.Kind.WARNING, "cannot generate json-schema for class " + type.toString() + " (erasure " + erasure + "), " + ((e != null) ? ("exception: " + e.getMessage()) : "class not found")); } return serializedSchema; }
From source file: org.springframework.http.converter.json.Jackson2ObjectMapperBuilder.java
@SuppressWarnings("unchecked") private void registerWellKnownModulesIfAvailable(ObjectMapper objectMapper) { try {// w ww . ja va 2 s .c o m Class<? extends Module> jdk8Module = (Class<? extends Module>) ClassUtils .forName("com.fasterxml.jackson.datatype.jdk8.Jdk8Module", this.moduleClassLoader); objectMapper.registerModule(BeanUtils.instantiateClass(jdk8Module)); } catch (ClassNotFoundException ex) { // jackson-datatype-jdk8 not available } try { Class<? extends Module> javaTimeModule = (Class<? extends Module>) ClassUtils .forName("com.fasterxml.jackson.datatype.jsr310.JavaTimeModule", this.moduleClassLoader); objectMapper.registerModule(BeanUtils.instantiateClass(javaTimeModule)); } catch (ClassNotFoundException ex) { // jackson-datatype-jsr310 not available } // Joda-Time present? if (ClassUtils.isPresent("org.joda.time.LocalDate", this.moduleClassLoader)) { try { Class<? extends Module> jodaModule = (Class<? extends Module>) ClassUtils .forName("com.fasterxml.jackson.datatype.joda.JodaModule", this.moduleClassLoader); objectMapper.registerModule(BeanUtils.instantiateClass(jodaModule)); } catch (ClassNotFoundException ex) { // jackson-datatype-joda not available } } // Kotlin present? if (KotlinDetector.isKotlinPresent()) { try { Class<? extends Module> kotlinModule = (Class<? extends Module>) ClassUtils .forName("com.fasterxml.jackson.module.kotlin.KotlinModule", this.moduleClassLoader); objectMapper.registerModule(BeanUtils.instantiateClass(kotlinModule)); } catch (ClassNotFoundException ex) { logger.warn("For Jackson Kotlin classes support please add " + "\"com.fasterxml.jackson.module:jackson-module-kotlin\" to the classpath"); } } }
From source file: com.nesscomputing.jackson.NessObjectMapperProvider.java
@Override public ObjectMapper get() { final ObjectMapper mapper = new ObjectMapper(jsonFactory); // Set the features for (Map.Entry<Enum<?>, Boolean> entry : featureMap.entrySet()) { final Enum<?> key = entry.getKey(); if (key instanceof JsonGenerator.Feature) { mapper.configure(((JsonGenerator.Feature) key), entry.getValue()); } else if (key instanceof JsonParser.Feature) { mapper.configure(((JsonParser.Feature) key), entry.getValue()); } else if (key instanceof SerializationFeature) { mapper.configure(((SerializationFeature) key), entry.getValue()); } else if (key instanceof DeserializationFeature) { mapper.configure(((DeserializationFeature) key), entry.getValue()); } else if (key instanceof MapperFeature) { mapper.configure(((MapperFeature) key), entry.getValue()); } else {//w w w . jav a2s. c o m throw new IllegalArgumentException("Can not configure ObjectMapper with " + key.name()); } } for (Module module : modules) { mapper.registerModule(module); } // by default, don't serialize null values. mapper.setSerializationInclusion(Include.NON_NULL); return mapper; }
From source file: com.thinkbiganalytics.metadata.rest.client.MetadataClient.java
private ObjectMapper createObjectMapper() { ObjectMapper mapper = new ObjectMapper(); mapper.registerModule(new JodaModule()); // TODO Module dependency is causing a conflict somehow. // mapper.registerModule(new JavaTimeModule()); mapper.setSerializationInclusion(Include.NON_NULL); return mapper; }
From source file: org.springframework.data.rest.webmvc.config.RepositoryRestMvcConfiguration.java
protected ObjectMapper basicObjectMapper() { ObjectMapper objectMapper = new ObjectMapper(); objectMapper.configure(SerializationFeature.INDENT_OUTPUT, true); objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); objectMapper.registerModule(geoModule); if (config().isEnableEnumTranslation()) { objectMapper.registerModule(new JacksonSerializers(enumTranslator())); }/*from w w w.ja v a 2s . c o m*/ Jackson2DatatypeHelper.configureObjectMapper(objectMapper); // Configure custom Modules configurerDelegate.configureJacksonObjectMapper(objectMapper); configureJacksonObjectMapper(objectMapper); return objectMapper; }
From source file: com.netflix.bdp.inviso.history.TraceService.java
/** * Returns a json object representing the job history. * * @param jobId// w ww . j a va 2 s . c om * @param path Use the given path as opposed to the history locator * @param summary Return just the top level details of the job * @param counters Include counters * @return Json string * @throws Exception */ @Path("load/{jobId}") @GET @Produces("application/json") public String trace(@PathParam("jobId") final String jobId, @QueryParam("path") final String path, @QueryParam("summary") boolean summary, @QueryParam("counters") @DefaultValue("true") boolean counters) throws Exception { Pair<org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path> historyPath; if (path != null) { historyPath = new ImmutablePair<>(null, new org.apache.hadoop.fs.Path(path)); } else { historyPath = historyLocator.locate(jobId); } if (historyPath == null) { throw new WebApplicationException(404); } TraceJobHistoryLoader loader = new TraceJobHistoryLoader(properties); FileSystem fs = FileSystem.get(historyPath.getRight().toUri(), config); CompressionCodec codec = new CompressionCodecFactory(config).getCodec(historyPath.getRight()); FSDataInputStream fin = fs.open(historyPath.getRight()); if (codec != null) { fin = new FSDataInputStream(new WrappedCompressionInputStream(codec.createInputStream(fin))); } JobHistoryParser parser = new JobHistoryParser(fin); parser.parse(loader); String[] ignore = { "counters" }; ObjectMapper mapper = new ObjectMapper(); SimpleModule module = new SimpleModule("MyModule", new Version(1, 0, 0, null)); //Job JavaType jobMapType = MapLikeType.construct(Job.class, SimpleType.construct(String.class), SimpleType.construct(Object.class)); module.addSerializer(Job.class, MapSerializer.construct(ignore, jobMapType, false, null, null, null, null)); //Task JavaType taskMapType = MapLikeType.construct(Task.class, SimpleType.construct(String.class), SimpleType.construct(Object.class)); module.addSerializer(Task.class, MapSerializer.construct(ignore, taskMapType, false, null, 
null, null, null)); //Attempt JavaType attemptMapType = MapLikeType.construct(TaskAttempt.class, SimpleType.construct(String.class), SimpleType.construct(Object.class)); module.addSerializer(TaskAttempt.class, MapSerializer.construct(ignore, attemptMapType, false, null, null, null, null)); if (!counters) { mapper.registerModule(module); } if (summary) { loader.getJob().clearTasks(); } return mapper.writeValueAsString(loader.getJob()); }
From source file: com.evolveum.midpoint.prism.lex.json.AbstractJsonLexicalProcessor.java
private JsonParser configureParser(JsonParser parser) { ObjectMapper mapper = new ObjectMapper(); SimpleModule sm = new SimpleModule(); sm.addDeserializer(QName.class, new QNameDeserializer()); sm.addDeserializer(ItemPath.class, new ItemPathDeserializer()); sm.addDeserializer(PolyString.class, new PolyStringDeserializer()); sm.addDeserializer(ItemPathType.class, new ItemPathTypeDeserializer()); mapper.registerModule(sm); parser.setCodec(mapper);//w w w . ja v a2 s .c o m return parser; }
From source file: com.logsniffer.app.CoreAppConfig.java
/**
 * Application-wide JSON ObjectMapper: lenient on empty beans, unknown properties,
 * unquoted field names and single quotes; default views excluded; FieldsMap
 * values serialized with a dedicated serializer.
 */
@Bean
public ObjectMapper jsonObjectMapper() {
    final ObjectMapper jsonMapper = new ObjectMapper();
    jsonMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
    jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    jsonMapper.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
    jsonMapper.configure(Feature.ALLOW_SINGLE_QUOTES, true);
    jsonMapper.configure(MapperFeature.DEFAULT_VIEW_INCLUSION, false);

    final SimpleModule module = new SimpleModule("FieldsMapping", Version.unknownVersion());
    module.setSerializerModifier(new BeanSerializerModifier() {
        @Override
        public JsonSerializer<?> modifyMapSerializer(final SerializationConfig config,
                final MapType valueType, final BeanDescription beanDesc,
                final JsonSerializer<?> serializer) {
            // FieldsMap (and subtypes) get the mixin-like serializer; everything else is untouched.
            if (FieldsMap.class.isAssignableFrom(valueType.getRawClass())) {
                return new FieldsMapMixInLikeSerializer();
            }
            return super.modifyMapSerializer(config, valueType, beanDesc, serializer);
        }
    });
    jsonMapper.registerModule(module);
    return jsonMapper;
}
From source file: org.apache.drill.exec.store.parquet.metadata.Metadata.java
/** * Read the parquet metadata from a file * * @param path to metadata file//from ww w .j a v a2 s . c om * @param dirsOnly true for {@link Metadata#METADATA_DIRECTORIES_FILENAME} * or false for {@link Metadata#METADATA_FILENAME} files reading * @param metaContext current metadata context */ private void readBlockMeta(Path path, boolean dirsOnly, MetadataContext metaContext, FileSystem fs) { Stopwatch timer = logger.isDebugEnabled() ? Stopwatch.createStarted() : null; Path metadataParentDir = Path.getPathWithoutSchemeAndAuthority(path.getParent()); String metadataParentDirPath = metadataParentDir.toUri().getPath(); ObjectMapper mapper = new ObjectMapper(); final SimpleModule serialModule = new SimpleModule(); serialModule.addDeserializer(SchemaPath.class, new SchemaPath.De()); serialModule.addKeyDeserializer(Metadata_V2.ColumnTypeMetadata_v2.Key.class, new Metadata_V2.ColumnTypeMetadata_v2.Key.DeSerializer()); serialModule.addKeyDeserializer(ColumnTypeMetadata_v3.Key.class, new ColumnTypeMetadata_v3.Key.DeSerializer()); AfterburnerModule module = new AfterburnerModule(); module.setUseOptimizedBeanDeserializer(true); mapper.registerModule(serialModule); mapper.registerModule(module); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try (FSDataInputStream is = fs.open(path)) { boolean alreadyCheckedModification; boolean newMetadata = false; alreadyCheckedModification = metaContext.getStatus(metadataParentDirPath); if (dirsOnly) { parquetTableMetadataDirs = mapper.readValue(is, ParquetTableMetadataDirs.class); if (timer != null) { logger.debug("Took {} ms to read directories from directory cache file", timer.elapsed(TimeUnit.MILLISECONDS)); timer.stop(); } parquetTableMetadataDirs.updateRelativePaths(metadataParentDirPath); if (!alreadyCheckedModification && tableModified(parquetTableMetadataDirs.getDirectories(), path, metadataParentDir, metaContext, fs)) { parquetTableMetadataDirs = (createMetaFilesRecursively( 
Path.getPathWithoutSchemeAndAuthority(path.getParent()).toString(), fs)).getRight(); newMetadata = true; } } else { parquetTableMetadata = mapper.readValue(is, ParquetTableMetadataBase.class); if (timer != null) { logger.debug("Took {} ms to read metadata from cache file", timer.elapsed(TimeUnit.MILLISECONDS)); timer.stop(); } if (new MetadataVersion(parquetTableMetadata.getMetadataVersion()) .compareTo(new MetadataVersion(3, 0)) >= 0) { ((ParquetTableMetadata_v3) parquetTableMetadata).updateRelativePaths(metadataParentDirPath); } if (!alreadyCheckedModification && tableModified(parquetTableMetadata.getDirectories(), path, metadataParentDir, metaContext, fs)) { parquetTableMetadata = (createMetaFilesRecursively( Path.getPathWithoutSchemeAndAuthority(path.getParent()).toString(), fs)).getLeft(); newMetadata = true; } // DRILL-5009: Remove the RowGroup if it is empty List<? extends ParquetFileMetadata> files = parquetTableMetadata.getFiles(); for (ParquetFileMetadata file : files) { List<? extends RowGroupMetadata> rowGroups = file.getRowGroups(); for (Iterator<? extends RowGroupMetadata> iter = rowGroups.iterator(); iter.hasNext();) { RowGroupMetadata r = iter.next(); if (r.getRowCount() == 0) { iter.remove(); } } } } if (newMetadata) { // if new metadata files were created, invalidate the existing metadata context metaContext.clear(); } } catch (IOException e) { logger.error("Failed to read '{}' metadata file", path, e); metaContext.setMetadataCacheCorrupted(true); } }