List of usage examples for com.fasterxml.jackson.databind.module SimpleModule addKeyDeserializer
public SimpleModule addKeyDeserializer(Class<?> type, KeyDeserializer deser)
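JSON object keys are plain strings, so when a Map has a non-String key type a KeyDeserializer is needed to turn each field name back into a key object, and addKeyDeserializer registers that binding on the module. Before the real-world examples below, here is a minimal self-contained sketch (not taken from any of the sources below; the MyId type, its numeric string form, and the sample JSON are illustrative assumptions):

import java.io.IOException;
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.KeyDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class KeyDeserializerSketch {

    // Illustrative key type: serialized map keys are strings, so a KeyDeserializer
    // is required to rebuild this object from the JSON field name.
    static class MyId {
        final int value;
        MyId(int value) { this.value = value; }
        @Override public String toString() { return Integer.toString(value); } // default key form
    }

    static class MyIdKeyDeserializer extends KeyDeserializer {
        @Override
        public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException {
            return new MyId(Integer.parseInt(key));
        }
    }

    public static void main(String[] args) throws IOException {
        // Bind the key type to its deserializer and register the module.
        SimpleModule module = new SimpleModule();
        module.addKeyDeserializer(MyId.class, new MyIdKeyDeserializer());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);

        Map<MyId, String> decoded = mapper.readValue("{\"42\":\"answer\"}",
                new TypeReference<Map<MyId, String>>() {});
        System.out.println(decoded); // {42=answer}
    }
}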
From source file: org.gytheio.messaging.jackson.ObjectMapperFactory.java

public static ObjectMapper createInstance() {
    QpidJsonBodyCleanerObjectMapper mapper = new QpidJsonBodyCleanerObjectMapper();
    mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
    SimpleModule module = new SimpleModule("GytheioJackson",
            new Version(0, 1, 0, "SNAPSHOT", "org.gytheio", "gytheio-messaging-commons"));
    module.addKeyDeserializer(Class.class, new JsonClassKeyDeserializer());
    mapper.registerModule(module);
    return mapper;
}
From source file: org.nohope.jongo.JacksonProcessor.java

@Nonnull
private static ObjectMapper createPreConfiguredMapper() {
    final ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new JodaModule());
    mapper.registerModule(new ColorModule());
    mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
    mapper.configure(AUTO_DETECT_GETTERS, false);
    mapper.configure(AUTO_DETECT_SETTERS, false);
    mapper.setSerializationInclusion(NON_NULL);
    mapper.setVisibilityChecker(VisibilityChecker.Std.defaultInstance().withFieldVisibility(ANY));
    mapper.enableDefaultTypingAsProperty(ObjectMapper.DefaultTyping.NON_FINAL,
            JsonTypeInfo.Id.CLASS.getDefaultPropertyName());
    final SimpleModule module = new SimpleModule("jongo", Version.unknownVersion());
    module.addKeySerializer(Object.class, ComplexKeySerializer.S_OBJECT);
    module.addKeyDeserializer(String.class, ComplexKeyDeserializer.S_OBJECT);
    module.addKeyDeserializer(Object.class, ComplexKeyDeserializer.S_OBJECT);
    //addBSONTypeSerializers(module);
    mapper.registerModule(module);
    return mapper;
}
From source file: it.polimi.diceH2020.SPACE4Cloud.shared.Test2.java

@Test
public void test1() {
    InstanceData_old data = InstanceDataGenerator_old.build();
    System.out.println(data.toString());
    try {
        ObjectMapper mapper = new ObjectMapper().registerModule(new Jdk8Module());
        SimpleModule module = new SimpleModule();
        module.addKeyDeserializer(TypeVMJobClassKey.class, TypeVMJobClassKey.getDeserializer());
        mapper.registerModule(module);
        String serialized = mapper.writeValueAsString(data);
        System.out.println(serialized);
        InstanceData_old data2 = mapper.readValue(serialized, InstanceData_old.class);
        System.out.println(data2.toString());
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    assertTrue(data.getGamma() == 240);
}
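The test above keeps the key deserializer next to the composite key class and exposes it through a static getDeserializer() method. A hypothetical sketch of that pattern follows; the real fields and key format of TypeVMJobClassKey are not shown in the source, so the field names and the hyphen-separated string form below are assumptions:

import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.KeyDeserializer;

// Hypothetical composite key modelled on the TypeVMJobClassKey.getDeserializer() pattern;
// field names and the hyphen-separated key format are illustrative assumptions.
public class CompositeKey {
    private final String typeVM;
    private final String jobClass;

    public CompositeKey(String typeVM, String jobClass) {
        this.typeVM = typeVM;
        this.jobClass = jobClass;
    }

    @Override
    public String toString() {
        return typeVM + "-" + jobClass; // string written as the JSON field name
    }

    public static KeyDeserializer getDeserializer() {
        return new KeyDeserializer() {
            @Override
            public Object deserializeKey(String key, DeserializationContext ctxt) {
                String[] parts = key.split("-", 2);
                return new CompositeKey(parts[0], parts[1]);
            }
        };
    }
}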
From source file: io.airlift.json.ObjectMapperProvider.java

@Override
public ObjectMapper get() {
    ObjectMapper objectMapper = new ObjectMapper();

    // ignore unknown fields (for backwards compatibility)
    objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);

    // use ISO dates
    objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    // skip fields that are null instead of writing an explicit json null value
    objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);

    // disable auto detection of json properties... all properties must be explicit
    objectMapper.disable(MapperFeature.AUTO_DETECT_CREATORS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_FIELDS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_SETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_GETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_IS_GETTERS);
    objectMapper.disable(MapperFeature.USE_GETTERS_AS_SETTERS);
    objectMapper.disable(MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS);

    if (jsonSerializers != null || jsonDeserializers != null || keySerializers != null
            || keyDeserializers != null) {
        SimpleModule module = new SimpleModule(getClass().getName(), new Version(1, 0, 0, null));
        if (jsonSerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : jsonSerializers.entrySet()) {
                addSerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (jsonDeserializers != null) {
            for (Entry<Class<?>, JsonDeserializer<?>> entry : jsonDeserializers.entrySet()) {
                addDeserializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keySerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : keySerializers.entrySet()) {
                addKeySerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keyDeserializers != null) {
            for (Entry<Class<?>, KeyDeserializer> entry : keyDeserializers.entrySet()) {
                module.addKeyDeserializer(entry.getKey(), entry.getValue());
            }
        }
        modules.add(module);
    }

    for (Module module : modules) {
        objectMapper.registerModule(module);
    }

    return objectMapper;
}
From source file: com.proofpoint.json.ObjectMapperProvider.java

@Override
public ObjectMapper get() {
    ObjectMapper objectMapper = new ObjectMapper();

    // ignore unknown fields (for backwards compatibility)
    objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);

    // use ISO dates
    objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    // skip fields that are null instead of writing an explicit json null value
    objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);

    // disable auto detection of json properties... all properties must be explicit
    objectMapper.disable(MapperFeature.AUTO_DETECT_CREATORS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_FIELDS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_SETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_GETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_IS_GETTERS);
    objectMapper.disable(MapperFeature.USE_GETTERS_AS_SETTERS);
    objectMapper.disable(MapperFeature.INFER_PROPERTY_MUTATORS);
    objectMapper.disable(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS);

    if (jsonSerializers != null || jsonDeserializers != null || keySerializers != null
            || keyDeserializers != null) {
        SimpleModule module = new SimpleModule(getClass().getName(), new Version(1, 0, 0, null, null, null));
        if (jsonSerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : jsonSerializers.entrySet()) {
                addSerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (jsonDeserializers != null) {
            for (Entry<Class<?>, JsonDeserializer<?>> entry : jsonDeserializers.entrySet()) {
                addDeserializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keySerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : keySerializers.entrySet()) {
                addKeySerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keyDeserializers != null) {
            for (Entry<Class<?>, KeyDeserializer> entry : keyDeserializers.entrySet()) {
                module.addKeyDeserializer(entry.getKey(), entry.getValue());
            }
        }
        modules.add(module);
    }

    for (Module module : modules) {
        objectMapper.registerModule(module);
    }

    return objectMapper;
}
From source file: org.apache.drill.exec.store.parquet.metadata.Metadata.java

/**
 * Read the parquet metadata from a file
 *
 * @param path to metadata file
 * @param dirsOnly true for {@link Metadata#METADATA_DIRECTORIES_FILENAME}
 *                 or false for {@link Metadata#METADATA_FILENAME} files reading
 * @param metaContext current metadata context
 */
private void readBlockMeta(Path path, boolean dirsOnly, MetadataContext metaContext, FileSystem fs) {
    Stopwatch timer = logger.isDebugEnabled() ? Stopwatch.createStarted() : null;
    Path metadataParentDir = Path.getPathWithoutSchemeAndAuthority(path.getParent());
    String metadataParentDirPath = metadataParentDir.toUri().getPath();
    ObjectMapper mapper = new ObjectMapper();

    final SimpleModule serialModule = new SimpleModule();
    serialModule.addDeserializer(SchemaPath.class, new SchemaPath.De());
    serialModule.addKeyDeserializer(Metadata_V2.ColumnTypeMetadata_v2.Key.class,
            new Metadata_V2.ColumnTypeMetadata_v2.Key.DeSerializer());
    serialModule.addKeyDeserializer(ColumnTypeMetadata_v3.Key.class,
            new ColumnTypeMetadata_v3.Key.DeSerializer());

    AfterburnerModule module = new AfterburnerModule();
    module.setUseOptimizedBeanDeserializer(true);

    mapper.registerModule(serialModule);
    mapper.registerModule(module);
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    try (FSDataInputStream is = fs.open(path)) {
        boolean alreadyCheckedModification;
        boolean newMetadata = false;
        alreadyCheckedModification = metaContext.getStatus(metadataParentDirPath);

        if (dirsOnly) {
            parquetTableMetadataDirs = mapper.readValue(is, ParquetTableMetadataDirs.class);
            if (timer != null) {
                logger.debug("Took {} ms to read directories from directory cache file",
                        timer.elapsed(TimeUnit.MILLISECONDS));
                timer.stop();
            }
            parquetTableMetadataDirs.updateRelativePaths(metadataParentDirPath);
            if (!alreadyCheckedModification && tableModified(parquetTableMetadataDirs.getDirectories(), path,
                    metadataParentDir, metaContext, fs)) {
                parquetTableMetadataDirs = (createMetaFilesRecursively(
                        Path.getPathWithoutSchemeAndAuthority(path.getParent()).toString(), fs)).getRight();
                newMetadata = true;
            }
        } else {
            parquetTableMetadata = mapper.readValue(is, ParquetTableMetadataBase.class);
            if (timer != null) {
                logger.debug("Took {} ms to read metadata from cache file",
                        timer.elapsed(TimeUnit.MILLISECONDS));
                timer.stop();
            }
            if (new MetadataVersion(parquetTableMetadata.getMetadataVersion())
                    .compareTo(new MetadataVersion(3, 0)) >= 0) {
                ((ParquetTableMetadata_v3) parquetTableMetadata).updateRelativePaths(metadataParentDirPath);
            }
            if (!alreadyCheckedModification && tableModified(parquetTableMetadata.getDirectories(), path,
                    metadataParentDir, metaContext, fs)) {
                parquetTableMetadata = (createMetaFilesRecursively(
                        Path.getPathWithoutSchemeAndAuthority(path.getParent()).toString(), fs)).getLeft();
                newMetadata = true;
            }

            // DRILL-5009: Remove the RowGroup if it is empty
            List<? extends ParquetFileMetadata> files = parquetTableMetadata.getFiles();
            for (ParquetFileMetadata file : files) {
                List<? extends RowGroupMetadata> rowGroups = file.getRowGroups();
                for (Iterator<? extends RowGroupMetadata> iter = rowGroups.iterator(); iter.hasNext();) {
                    RowGroupMetadata r = iter.next();
                    if (r.getRowCount() == 0) {
                        iter.remove();
                    }
                }
            }
        }
        if (newMetadata) {
            // if new metadata files were created, invalidate the existing metadata context
            metaContext.clear();
        }
    } catch (IOException e) {
        logger.error("Failed to read '{}' metadata file", path, e);
        metaContext.setMetadataCacheCorrupted(true);
    }
}
From source file: org.apache.drill.exec.store.parquet.Metadata.java

/**
 * Read the parquet metadata from a file
 *
 * @param path
 * @return
 * @throws IOException
 */
private void readBlockMeta(String path, boolean dirsOnly, MetadataContext metaContext) throws IOException {
    Stopwatch timer = Stopwatch.createStarted();
    Path p = new Path(path);
    Path parentDir = p.getParent(); // parent directory of the metadata file
    ObjectMapper mapper = new ObjectMapper();

    final SimpleModule serialModule = new SimpleModule();
    serialModule.addDeserializer(SchemaPath.class, new SchemaPath.De());
    serialModule.addKeyDeserializer(ColumnTypeMetadata_v2.Key.class,
            new ColumnTypeMetadata_v2.Key.DeSerializer());
    serialModule.addKeyDeserializer(ColumnTypeMetadata_v3.Key.class,
            new ColumnTypeMetadata_v3.Key.DeSerializer());

    AfterburnerModule module = new AfterburnerModule();
    module.setUseOptimizedBeanDeserializer(true);

    mapper.registerModule(serialModule);
    mapper.registerModule(module);
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    FSDataInputStream is = fs.open(p);

    boolean alreadyCheckedModification = false;
    boolean newMetadata = false;

    if (metaContext != null) {
        alreadyCheckedModification = metaContext.getStatus(parentDir.toString());
    }

    if (dirsOnly) {
        parquetTableMetadataDirs = mapper.readValue(is, ParquetTableMetadataDirs.class);
        logger.info("Took {} ms to read directories from directory cache file",
                timer.elapsed(TimeUnit.MILLISECONDS));
        timer.stop();
        if (!alreadyCheckedModification
                && tableModified(parquetTableMetadataDirs.getDirectories(), p, parentDir, metaContext)) {
            parquetTableMetadataDirs = (createMetaFilesRecursively(
                    Path.getPathWithoutSchemeAndAuthority(p.getParent()).toString())).getRight();
            newMetadata = true;
        }
    } else {
        parquetTableMetadata = mapper.readValue(is, ParquetTableMetadataBase.class);
        logger.info("Took {} ms to read metadata from cache file", timer.elapsed(TimeUnit.MILLISECONDS));
        timer.stop();
        if (!alreadyCheckedModification
                && tableModified(parquetTableMetadata.getDirectories(), p, parentDir, metaContext)) {
            parquetTableMetadata = (createMetaFilesRecursively(
                    Path.getPathWithoutSchemeAndAuthority(p.getParent()).toString())).getLeft();
            newMetadata = true;
        }

        // DRILL-5009: Remove the RowGroup if it is empty
        List<? extends ParquetFileMetadata> files = parquetTableMetadata.getFiles();
        for (ParquetFileMetadata file : files) {
            List<? extends RowGroupMetadata> rowGroups = file.getRowGroups();
            for (Iterator<? extends RowGroupMetadata> iter = rowGroups.iterator(); iter.hasNext();) {
                RowGroupMetadata r = iter.next();
                if (r.getRowCount() == 0) {
                    iter.remove();
                }
            }
        }
    }

    if (newMetadata && metaContext != null) {
        // if new metadata files were created, invalidate the existing metadata context
        metaContext.clear();
    }
}