Example usage for com.fasterxml.jackson.databind ObjectMapper setSerializationInclusion

Introduction

On this page you can find example usage for com.fasterxml.jackson.databind ObjectMapper setSerializationInclusion.

Prototype

public ObjectMapper setSerializationInclusion(JsonInclude.Include incl) 

Document

Method for setting the default POJO property inclusion strategy for serialization.
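
A minimal, self-contained sketch of what this setting does; the User class and its fields below are illustrative assumptions, not taken from the examples that follow:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InclusionExample {
    // Illustrative POJO for this sketch; "email" is intentionally left null.
    static class User {
        public String name = "alice";
        public String email;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // With NON_NULL, properties whose value is null are omitted from the output.
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        System.out.println(mapper.writeValueAsString(new User()));
        // Prints: {"name":"alice"}
    }
}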

Usage

From source file:org.openmrs.module.fhir.swagger.SwaggerSpecificationCreator.java

private String createSwaggerSpecification() {
    String json = "";
    try {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
        mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, true);
        mapper.setSerializationInclusion(Include.NON_NULL);
        mapper.getSerializerProvider().setNullKeySerializer(new NullSerializer());
        json = mapper.writeValueAsString(swaggerSpecification);
    } catch (Exception exp) {
        log.error("Error while creating object mapper", exp);
    }
    return json;
}

From source file:com.netflix.dyno.queues.redis.RedisDynoQueue.java

public RedisDynoQueue(String redisKeyPrefix, String queueName, Set<String> allShards, String shardName,
        ExecutorService dynoCallExecutor) {
    this.redisKeyPrefix = redisKeyPrefix;
    this.queueName = queueName;
    this.allShards = allShards.stream().collect(Collectors.toList());
    this.shardName = shardName;
    this.messageStoreKey = redisKeyPrefix + ".MESSAGE." + queueName;
    this.myQueueShard = getQueueShardKey(queueName, shardName);

    ObjectMapper om = new ObjectMapper();
    om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    om.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false);
    om.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false);
    om.setSerializationInclusion(Include.NON_NULL);
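    // Note: the next call replaces this NON_NULL setting; only the last inclusion value (NON_EMPTY) takes effect.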
    om.setSerializationInclusion(Include.NON_EMPTY);
    om.disable(SerializationFeature.INDENT_OUTPUT);

    this.om = om;
    this.monitor = new QueueMonitor(queueName, shardName);
    this.prefetchedIds = new LinkedBlockingQueue<>();
    this.executorService = dynoCallExecutor;

    Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> processUnacks(), unackScheduleInMS,
            unackScheduleInMS, TimeUnit.MILLISECONDS);
    Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> prefetchIds(), 0, 10, TimeUnit.MILLISECONDS);

    logger.info(RedisDynoQueue.class.getName() + " is ready to serve " + queueName);

}

From source file:synapticloop.scaleway.api.ScalewayApiClient.java

/**
 * Serialize an object to JSON
 *
 * @param object The object to serialize
 * @return The object serialized as a JSON String
 * @throws JsonProcessingException if there was an error serializing
 */
private String serializeObject(Object object) throws JsonProcessingException {
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.setSerializationInclusion(Include.NON_NULL);
    return (objectMapper.writeValueAsString(object));
}

From source file:org.opencb.cellbase.app.cli.VariantAnnotationCommandExecutor.java

private boolean runAnnotation() throws Exception {

    // Build indexes for custom files and/or population frequencies file
    getIndexes();

    if (variantAnnotationCommandOptions.variant != null && !variantAnnotationCommandOptions.variant.isEmpty()) {
        List<Variant> variants = Variant.parseVariants(variantAnnotationCommandOptions.variant);
        if (local) {
            DBAdaptorFactory dbAdaptorFactory = new MongoDBAdaptorFactory(configuration);
            VariantAnnotationCalculator variantAnnotationCalculator = new VariantAnnotationCalculator(
                    this.species, this.assembly, dbAdaptorFactory);
            List<QueryResult<VariantAnnotation>> annotationByVariantList = variantAnnotationCalculator
                    .getAnnotationByVariantList(variants, queryOptions);

            ObjectMapper jsonObjectMapper = new ObjectMapper();
            jsonObjectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
            jsonObjectMapper.configure(MapperFeature.REQUIRE_SETTERS_FOR_GETTERS, true);
            ObjectWriter objectWriter = jsonObjectMapper.writer();

            Path outPath = Paths.get(variantAnnotationCommandOptions.output);
            FileUtils.checkDirectory(outPath.getParent());
            BufferedWriter bufferedWriter = FileUtils.newBufferedWriter(outPath);
            for (QueryResult queryResult : annotationByVariantList) {
                bufferedWriter.write(objectWriter.writeValueAsString(queryResult.getResult()));
                bufferedWriter.newLine();
            }
            bufferedWriter.close();
        }
        return true;
    }

    // If a variant file is provided then we annotate it. Lines in the input file can be computationally
    // expensive to parse, i.e.: multisample vcf with thousands of samples. A specific task is created to enable
    // parallel parsing of these lines
    if (input != null) {
        DataReader dataReader = new StringDataReader(input);
        List<ParallelTaskRunner.TaskWithException<String, Variant, Exception>> variantAnnotatorTaskList = getStringTaskList();
        DataWriter dataWriter = getDataWriter(output.toString());

        ParallelTaskRunner.Config config = new ParallelTaskRunner.Config(numThreads, batchSize, QUEUE_CAPACITY,
                false);
        ParallelTaskRunner<String, Variant> runner = new ParallelTaskRunner<>(dataReader,
                variantAnnotatorTaskList, dataWriter, config);
        runner.run();
        // For internal use only - will only be run when -Dpopulation-frequencies is activated
        writeRemainingPopFrequencies();
    } else {
        // This will annotate the CellBase Variation collection
        if (cellBaseAnnotation) {
            // TODO: enable this query in the parseQuery method within VariantMongoDBAdaptor
            //                    Query query = new Query("$match",
            //                            new Document("annotation.consequenceTypes", new Document("$exists", 0)));
            //                    Query query = new Query();
            QueryOptions options = new QueryOptions("include", "chromosome,start,reference,alternate,type");
            List<ParallelTaskRunner.TaskWithException<Variant, Variant, Exception>> variantAnnotatorTaskList = getVariantTaskList();
            ParallelTaskRunner.Config config = new ParallelTaskRunner.Config(numThreads, batchSize,
                    QUEUE_CAPACITY, false);

            for (String chromosome : chromosomeList) {
                logger.info("Annotating chromosome {}", chromosome);
                Query query = new Query("chromosome", chromosome);
                DataReader dataReader = new VariationDataReader(dbAdaptorFactory.getVariationDBAdaptor(species),
                        query, options);
                DataWriter dataWriter = getDataWriter(
                        output.toString() + "/" + VARIATION_ANNOTATION_FILE_PREFIX + chromosome + ".json.gz");
                ParallelTaskRunner<Variant, Variant> runner = new ParallelTaskRunner<Variant, Variant>(
                        dataReader, variantAnnotatorTaskList, dataWriter, config);
                runner.run();
            }
        }
    }

    if (customFiles != null || populationFrequenciesFile != null) {
        closeIndexes();
    }

    logger.info("Variant annotation finished.");
    return false;
}

From source file:org.redisson.config.ConfigSupport.java

private ObjectMapper createMapper(JsonFactory mapping) {
    ObjectMapper mapper = new ObjectMapper(mapping);
    mapper.addMixIn(MasterSlaveServersConfig.class, MasterSlaveServersConfigMixIn.class);
    mapper.addMixIn(SingleServerConfig.class, SingleSeverConfigMixIn.class);
    mapper.addMixIn(Config.class, ConfigMixIn.class);
    mapper.addMixIn(CodecProvider.class, ClassMixIn.class);
    mapper.addMixIn(ResolverProvider.class, ClassMixIn.class);
    mapper.addMixIn(Codec.class, ClassMixIn.class);
    mapper.addMixIn(RedissonNodeInitializer.class, ClassMixIn.class);
    mapper.addMixIn(LoadBalancer.class, ClassMixIn.class);
    FilterProvider filterProvider = new SimpleFilterProvider().addFilter("classFilter",
            SimpleBeanPropertyFilter.filterOutAllExcept());
    mapper.setFilterProvider(filterProvider);
    mapper.setSerializationInclusion(Include.NON_NULL);
    return mapper;
}

From source file:de.brendamour.jpasskit.signing.PKAbstractSIgningUtil.java

protected ObjectWriter configureObjectMapper(final ObjectMapper jsonObjectMapper) {
    jsonObjectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    jsonObjectMapper.setDateFormat(new ISO8601DateFormat());

    SimpleFilterProvider filters = new SimpleFilterProvider();

    // haven't found out, how to stack filters. Copying the validation one for now.
    filters.addFilter("validateFilter",
            SimpleBeanPropertyFilter.serializeAllExcept("valid", "validationErrors"));
    filters.addFilter("pkPassFilter", SimpleBeanPropertyFilter.serializeAllExcept("valid", "validationErrors",
            "foregroundColorAsObject", "backgroundColorAsObject", "labelColorAsObject", "passThatWasSet"));
    filters.addFilter("barcodeFilter", SimpleBeanPropertyFilter.serializeAllExcept("valid", "validationErrors",
            "messageEncodingAsString"));
    filters.addFilter("charsetFilter", SimpleBeanPropertyFilter.filterOutAllExcept("name"));
    jsonObjectMapper.setSerializationInclusion(Include.NON_NULL);
    jsonObjectMapper.addMixIn(Object.class, ValidateFilterMixIn.class);
    jsonObjectMapper.addMixIn(PKPass.class, PkPassFilterMixIn.class);
    jsonObjectMapper.addMixIn(PKBarcode.class, BarcodeFilterMixIn.class);
    jsonObjectMapper.addMixIn(Charset.class, CharsetFilterMixIn.class);
    return jsonObjectMapper.writer(filters);
}

From source file:com.nesscomputing.jackson.NessObjectMapperProvider.java

@Override
public ObjectMapper get() {
    final ObjectMapper mapper = new ObjectMapper(jsonFactory);

    // Set the features
    for (Map.Entry<Enum<?>, Boolean> entry : featureMap.entrySet()) {
        final Enum<?> key = entry.getKey();

        if (key instanceof JsonGenerator.Feature) {
            mapper.configure(((JsonGenerator.Feature) key), entry.getValue());
        } else if (key instanceof JsonParser.Feature) {
            mapper.configure(((JsonParser.Feature) key), entry.getValue());
        } else if (key instanceof SerializationFeature) {
            mapper.configure(((SerializationFeature) key), entry.getValue());
        } else if (key instanceof DeserializationFeature) {
            mapper.configure(((DeserializationFeature) key), entry.getValue());
        } else if (key instanceof MapperFeature) {
            mapper.configure(((MapperFeature) key), entry.getValue());
        } else {
            throw new IllegalArgumentException("Can not configure ObjectMapper with " + key.name());
        }
    }

    for (Module module : modules) {
        mapper.registerModule(module);
    }
    // by default, don't serialize null values.
    mapper.setSerializationInclusion(Include.NON_NULL);

    return mapper;
}

From source file:com.thinkbiganalytics.metadata.rest.client.MetadataClient.java

private ObjectMapper createObjectMapper() {
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new JodaModule());
    // TODO Module dependency is causing a conflict somehow.
    //        mapper.registerModule(new JavaTimeModule());
    mapper.setSerializationInclusion(Include.NON_NULL);
    return mapper;
}