Example usage for com.fasterxml.jackson.databind ObjectMapper registerModule

Introduction

This page collects example usages of com.fasterxml.jackson.databind.ObjectMapper#registerModule from real-world source files.

Prototype

public ObjectMapper registerModule(Module module) 

Document

Method for registering a module that can extend functionality provided by this mapper; for example, by adding providers for custom serializers and deserializers.
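
A minimal, self-contained sketch of the call (the Money value type, its serializer, and the class name RegisterModuleExample are hypothetical, introduced only for illustration):

import java.io.IOException;
import java.math.BigDecimal;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class RegisterModuleExample {

    // Hypothetical value type, used only to demonstrate a custom serializer.
    static class Money {
        final BigDecimal amount;
        Money(BigDecimal amount) { this.amount = amount; }
    }

    public static void main(String[] args) throws IOException {
        SimpleModule module = new SimpleModule();
        // Write Money values as a plain decimal string instead of as a bean.
        module.addSerializer(Money.class, new JsonSerializer<Money>() {
            @Override
            public void serialize(Money value, JsonGenerator gen, SerializerProvider provider)
                    throws IOException {
                gen.writeString(value.amount.toPlainString());
            }
        });

        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(module); // the serializer is now consulted for Money values

        // Prints "19.99"
        System.out.println(mapper.writeValueAsString(new Money(new BigDecimal("19.99"))));
    }
}

Registering the module once is enough; the mapper consults it for every subsequent read or write, as the examples below show.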

Usage

From source file: org.hawkular.alerts.api.JsonTest.java

@Test
public void jsonToAlertTest() throws Exception {
    String jsonAlert = "{\"tenantId\":\"jdoe\"," + "\"id\":\"trigger-test|1436964192878\","
            + "\"eventType\":\"ALERT\"," + "\"trigger\":{\"tenantId\":\"jdoe\"," + "\"id\":\"trigger-test\","
            + "\"name\":\"trigger-test\"," + "\"description\":\"trigger-test\","
            + "\"context\":{\"n1\":\"v1\",\"n2\":\"v2\"}" + "}," + "\"ctime\":1436964192878,"
            + "\"context\":{\"n1\":\"v1\",\"n2\":\"v2\"}," + "\"text\":\"trigger-test\"," + "\"evalSets\":["
            + "[{\"evalTimestamp\":1436964294055," + "\"dataTimestamp\":2," + "\"type\":\"THRESHOLD\","
            + "\"condition\":{\"tenantId\":\"jdoe\"," + "\"triggerId\":\"trigger-test\","
            + "\"triggerMode\":\"FIRING\"," + "\"type\":\"THRESHOLD\","
            + "\"conditionId\":\"my-organization-trigger-test-FIRING-1-1\"," + "\"dataId\":\"Default\","
            + "\"operator\":\"LTE\"," + "\"threshold\":50.0" + "}," + "\"value\":25.5},"
            + "{\"evalTimestamp\":1436964284965," + "\"dataTimestamp\":1," + "\"type\":\"AVAILABILITY\","
            + "\"condition\":{\"tenantId\":\"jdoe\"," + "\"triggerId\":\"trigger-test\","
            + "\"triggerMode\":\"FIRING\"," + "\"type\":\"AVAILABILITY\","
            + "\"conditionId\":\"my-organization-trigger-test-FIRING-1-1\"," + "\"dataId\":\"Default\","
            + "\"operator\":\"UP\"" + "}," + "\"value\":\"UP\"}]" + "]," + "\"severity\":\"MEDIUM\","
            + "\"status\":\"OPEN\"," + "\"ackTime\":0," + "\"ackBy\":null," + "\"resolvedTime\":0,"
            + "\"resolvedBy\":null," + "\"notes\":[{\"user\":\"user1\",\"ctime\":1,\"text\":\"The comment 1\"},"
            + "{\"user\":\"user2\",\"ctime\":2,\"text\":\"The comment 2\"}" + "],"
            + "\"context\":{\"n1\":\"v1\",\"n2\":\"v2\"}}";

    ObjectMapper mapper = new ObjectMapper();
    Alert alert = mapper.readValue(jsonAlert, Alert.class);
    assertNotNull(alert);
    assertNotNull(alert.getEvalSets());
    assertEquals(1, alert.getEvalSets().size());
    assertEquals(2, alert.getEvalSets().get(0).size());
    assertNotNull(alert.getContext());
    assertEquals(2, alert.getContext().size());
    assertEquals("v1", alert.getContext().get("n1"));
    assertEquals("v2", alert.getContext().get("n2"));
    assertEquals("trigger-test", alert.getText());

    // Testing the thin deserializer
    SimpleModule simpleModule = new SimpleModule();
    simpleModule.setDeserializerModifier(new JacksonDeserializer.AlertThinDeserializer());
    mapper = new ObjectMapper();
    mapper.registerModule(simpleModule);
    alert = mapper.readValue(jsonAlert, Alert.class);
    assertNull(alert.getEvalSets());
}

From source file: org.apache.drill.exec.store.parquet.Metadata.java

/**
 * Serialize parquet metadata to json and write to a file
 *
 * @param parquetTableMetadata the parquet table metadata to serialize
 * @param p                    the path of the file to write
 * @throws IOException
 */
private void writeFile(ParquetTableMetadata_v3 parquetTableMetadata, Path p) throws IOException {
    JsonFactory jsonFactory = new JsonFactory();
    jsonFactory.configure(Feature.AUTO_CLOSE_TARGET, false);
    jsonFactory.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);
    ObjectMapper mapper = new ObjectMapper(jsonFactory);
    SimpleModule module = new SimpleModule();
    module.addSerializer(ColumnMetadata_v3.class, new ColumnMetadata_v3.Serializer());
    mapper.registerModule(module);
    FSDataOutputStream os = fs.create(p);
    mapper.writerWithDefaultPrettyPrinter().writeValue(os, parquetTableMetadata);
    os.flush();
    os.close();
}

From source file: io.airlift.json.ObjectMapperProvider.java

@Override
public ObjectMapper get() {
    ObjectMapper objectMapper = new ObjectMapper();

    // ignore unknown fields (for backwards compatibility)
    objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);

    // use ISO dates
    objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    // skip fields that are null instead of writing an explicit json null value
    objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);

    // disable auto detection of json properties... all properties must be explicit
    objectMapper.disable(MapperFeature.AUTO_DETECT_CREATORS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_FIELDS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_SETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_GETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_IS_GETTERS);
    objectMapper.disable(MapperFeature.USE_GETTERS_AS_SETTERS);
    objectMapper.disable(MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS);

    if (jsonSerializers != null || jsonDeserializers != null || keySerializers != null
            || keyDeserializers != null) {
        SimpleModule module = new SimpleModule(getClass().getName(), new Version(1, 0, 0, null));
        if (jsonSerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : jsonSerializers.entrySet()) {
                addSerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (jsonDeserializers != null) {
            for (Entry<Class<?>, JsonDeserializer<?>> entry : jsonDeserializers.entrySet()) {
                addDeserializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keySerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : keySerializers.entrySet()) {
                addKeySerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keyDeserializers != null) {
            for (Entry<Class<?>, KeyDeserializer> entry : keyDeserializers.entrySet()) {
                module.addKeyDeserializer(entry.getKey(), entry.getValue());
            }
        }
        modules.add(module);
    }

    for (Module module : modules) {
        objectMapper.registerModule(module);
    }

    return objectMapper;
}

From source file: org.apache.drill.exec.store.parquet.Metadata.java

/**
 * Read the parquet metadata from a file
 *
 * @param path        path of the metadata cache file to read
 * @param dirsOnly    if true, read only the directory metadata
 * @param metaContext metadata context tracking whether the modification check was already performed
 * @throws IOException
 */
private void readBlockMeta(String path, boolean dirsOnly, MetadataContext metaContext) throws IOException {
    Stopwatch timer = Stopwatch.createStarted();
    Path p = new Path(path);
    Path parentDir = p.getParent(); // parent directory of the metadata file
    ObjectMapper mapper = new ObjectMapper();

    final SimpleModule serialModule = new SimpleModule();
    serialModule.addDeserializer(SchemaPath.class, new SchemaPath.De());
    serialModule.addKeyDeserializer(ColumnTypeMetadata_v2.Key.class,
            new ColumnTypeMetadata_v2.Key.DeSerializer());
    serialModule.addKeyDeserializer(ColumnTypeMetadata_v3.Key.class,
            new ColumnTypeMetadata_v3.Key.DeSerializer());

    AfterburnerModule module = new AfterburnerModule();
    module.setUseOptimizedBeanDeserializer(true);

    mapper.registerModule(serialModule);
    mapper.registerModule(module);
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    FSDataInputStream is = fs.open(p);

    boolean alreadyCheckedModification = false;
    boolean newMetadata = false;

    if (metaContext != null) {
        alreadyCheckedModification = metaContext.getStatus(parentDir.toString());
    }

    if (dirsOnly) {
        parquetTableMetadataDirs = mapper.readValue(is, ParquetTableMetadataDirs.class);
        logger.info("Took {} ms to read directories from directory cache file",
                timer.elapsed(TimeUnit.MILLISECONDS));
        timer.stop();
        if (!alreadyCheckedModification
                && tableModified(parquetTableMetadataDirs.getDirectories(), p, parentDir, metaContext)) {
            parquetTableMetadataDirs = (createMetaFilesRecursively(
                    Path.getPathWithoutSchemeAndAuthority(p.getParent()).toString())).getRight();
            newMetadata = true;
        }
    } else {
        parquetTableMetadata = mapper.readValue(is, ParquetTableMetadataBase.class);
        logger.info("Took {} ms to read metadata from cache file", timer.elapsed(TimeUnit.MILLISECONDS));
        timer.stop();
        if (!alreadyCheckedModification
                && tableModified(parquetTableMetadata.getDirectories(), p, parentDir, metaContext)) {
            parquetTableMetadata = (createMetaFilesRecursively(
                    Path.getPathWithoutSchemeAndAuthority(p.getParent()).toString())).getLeft();
            newMetadata = true;
        }

        // DRILL-5009: Remove the RowGroup if it is empty
        List<? extends ParquetFileMetadata> files = parquetTableMetadata.getFiles();
        for (ParquetFileMetadata file : files) {
            List<? extends RowGroupMetadata> rowGroups = file.getRowGroups();
            for (Iterator<? extends RowGroupMetadata> iter = rowGroups.iterator(); iter.hasNext();) {
                RowGroupMetadata r = iter.next();
                if (r.getRowCount() == 0) {
                    iter.remove();
                }
            }
        }

    }

    if (newMetadata && metaContext != null) {
        // if new metadata files were created, invalidate the existing metadata context
        metaContext.clear();
    }

}

From source file: com.proofpoint.json.ObjectMapperProvider.java

@Override
public ObjectMapper get() {
    ObjectMapper objectMapper = new ObjectMapper();

    // ignore unknown fields (for backwards compatibility)
    objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);

    // use ISO dates
    objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    // skip fields that are null instead of writing an explicit json null value
    objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);

    // disable auto detection of json properties... all properties must be explicit
    objectMapper.disable(MapperFeature.AUTO_DETECT_CREATORS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_FIELDS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_SETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_GETTERS);
    objectMapper.disable(MapperFeature.AUTO_DETECT_IS_GETTERS);
    objectMapper.disable(MapperFeature.USE_GETTERS_AS_SETTERS);
    objectMapper.disable(MapperFeature.INFER_PROPERTY_MUTATORS);
    objectMapper.disable(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS);

    if (jsonSerializers != null || jsonDeserializers != null || keySerializers != null
            || keyDeserializers != null) {
        SimpleModule module = new SimpleModule(getClass().getName(), new Version(1, 0, 0, null, null, null));
        if (jsonSerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : jsonSerializers.entrySet()) {
                addSerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (jsonDeserializers != null) {
            for (Entry<Class<?>, JsonDeserializer<?>> entry : jsonDeserializers.entrySet()) {
                addDeserializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keySerializers != null) {
            for (Entry<Class<?>, JsonSerializer<?>> entry : keySerializers.entrySet()) {
                addKeySerializer(module, entry.getKey(), entry.getValue());
            }
        }
        if (keyDeserializers != null) {
            for (Entry<Class<?>, KeyDeserializer> entry : keyDeserializers.entrySet()) {
                module.addKeyDeserializer(entry.getKey(), entry.getValue());
            }
        }
        modules.add(module);
    }

    for (Module module : modules) {
        objectMapper.registerModule(module);
    }

    return objectMapper;
}

From source file: DataTools.ConvertObjectToJson.java

private void addCustomSerializing(ObjectMapper mapper) {
    //custom serializer to help parsing dates
    class dateSerializer extends JsonSerializer<DateTime> {
        @Override
        public void serialize(DateTime dateTime, JsonGenerator jsonGenerator,
                SerializerProvider serializerProvider) throws IOException {
            jsonGenerator.writeString(cleanupDate(dateTime));
        }
    }

    //custom serializer to help parsing dates
    class zoneDateSerializer extends JsonSerializer<ZonedDateTime> {
        @Override
        public void serialize(ZonedDateTime dateTime, JsonGenerator jsonGenerator,
                SerializerProvider serializerProvider) throws IOException {
            jsonGenerator.writeString(cleanupDate(dateTime));
        }
    }

    class utilDateSerializer extends JsonSerializer<Date> {
        @Override
        public void serialize(Date dateTime, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
                throws IOException {
            jsonGenerator.writeString(dateTime.toString());
        }
    }

    class JsonObjectSerializer extends JsonSerializer<JSONObject> {
        @Override
        public void serialize(JSONObject jsonObject, JsonGenerator jsonGenerator,
                SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
            jsonGenerator.writeString(jsonObject.toString());
        }
    }
    class JsonArraySerializer extends JsonSerializer<JSONArray> {
        @Override
        public void serialize(JSONArray jsonArray, JsonGenerator jsonGenerator,
                SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
            jsonGenerator.writeString(jsonArray.toString());
        }
    }
    class StorageSerializer extends JsonSerializer<Storage> {
        @Override
        public void serialize(Storage storage, JsonGenerator jsonGenerator,
                SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
            jsonGenerator.writeString(storage.toString());
        }
    }

    //setup new serializer
    SimpleModule simpleModule = new SimpleModule();
    simpleModule.addSerializer(DateTime.class, new dateSerializer());
    simpleModule.addSerializer(ZonedDateTime.class, new zoneDateSerializer());
    simpleModule.addSerializer(Date.class, new utilDateSerializer());
    simpleModule.addSerializer(JSONObject.class, new JsonObjectSerializer());
    simpleModule.addSerializer(JSONArray.class, new JsonArraySerializer());
    simpleModule.addSerializer(Storage.class, new StorageSerializer());
    mapper.registerModule(simpleModule);
}

From source file: sg.ncl.MainController.java

protected ZonedDateTime getZonedDateTime(String zonedDateTimeJSON) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new JavaTimeModule());
    return mapper.readValue(zonedDateTimeJSON, ZonedDateTime.class);
}