Example usage for com.fasterxml.jackson.databind ObjectMapper enable

List of usage examples for com.fasterxml.jackson.databind ObjectMapper enable

Introduction

On this page you can find example usages of com.fasterxml.jackson.databind ObjectMapper enable.

Prototype

public ObjectMapper enable(SerializationFeature f) 

Document

Method for enabling the specified SerializationFeature.
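
A minimal, self-contained sketch (not taken from any of the projects below) showing the effect of enabling SerializationFeature.INDENT_OUTPUT: the mapper pretty-prints its JSON output instead of writing it on a single line.

import java.util.LinkedHashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

public class IndentOutputExample {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // enable() returns the mapper itself, so configuration calls can be chained
        mapper.enable(SerializationFeature.INDENT_OUTPUT);

        Map<String, Object> payload = new LinkedHashMap<>();
        payload.put("name", "example");
        payload.put("count", 1);

        // With INDENT_OUTPUT enabled, the result spans several indented lines
        System.out.println(mapper.writeValueAsString(payload));
    }
}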

Usage

From source file:test.SemanticConverter2.java

public static void main(String[] args) throws JAXBException {

    SemanticConverter2 ri = new SemanticConverter2();

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    RegisterContextRequest rcr = new RegisterContextRequest();
    try {
        rcr = objectMapper.readValue(ri.NGSI_FILE, RegisterContextRequest.class);
    } catch (Exception e) {
        // An empty catch block silently hides parse failures; at least log the error.
        e.printStackTrace();
    }
    rcr.setTimestamp(Instant.now());
    ri.createJenaModel(rcr);

}

From source file:test.SemanticConverter3.java

public static void main(String[] args) throws JAXBException {

    SemanticConverter3 ri = new SemanticConverter3();

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    RegisterContextRequest rcr = new RegisterContextRequest();
    try {
        rcr = objectMapper.readValue(ri.NGSI_FILE, RegisterContextRequest.class);
    } catch (Exception e) {
        // An empty catch block silently hides parse failures; at least log the error.
        e.printStackTrace();
    }
    rcr.setTimestamp(Instant.now());
    ri.createJenaModel(rcr);

}

From source file:com.ottogroup.bi.asap.operator.twitter.consumer.TwitterStreamConsumer.java

public static void main(String[] args) throws Exception {
    ObjectMapper m = new ObjectMapper();
    m.enable(SerializationFeature.INDENT_OUTPUT);

    ComponentConfiguration cfg = new ComponentConfiguration("twitter-stream-reader", ComponentType.SOURCE,
            "twitterStreamConsumer", "0.0.1");
    cfg.setComponentExceptionHandler(
            new ExceptionHandlerConfiguration(ExceptionHandlerType.COMPONENT_EXCEPTION_HANDLER,
                    "twitterComponentExceptionHandler", "log4jExceptionHandler", "0.0.1"));
    cfg.setExecutorExceptionHandler(
            new ExceptionHandlerConfiguration(ExceptionHandlerType.EXECUTOR_EXCEPTION_HANDLER,
                    "twitterExecutorExceptionHandler", "log4jExceptionHandler", "0.0.1"));
    cfg.setMessageWaitStrategy(new MessageWaitStrategyConfiguration("twitterExecutorWaitStrategy",
            "sleepingMessageWaitStrategy", "0.0.1"));
    cfg.setMailbox(
            new MailboxConfiguration("twitterConsumerMailbox", "oneToOneConcurrentArrayQueueMailbox", "0.0.1"));
    cfg.addSubscription("simpleFilteringOperator");

    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_CONSUMER_KEY, "<consumer_id>");
    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_CONSUMER_SECRET, "<consumer_secret>");
    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_PROFILES, "1,2,3");
    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_TOKEN_KEY, "<token_key>");
    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_TOKEN_SECRET, "<token_secret>");
    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_TWEET_LANGUAGES, "DE,FR,EN");
    cfg.addSetting(TwitterStreamConsumer.CFG_TWITTER_TWEET_SEARCH_TERMS, "SOCCER,FOOTBALL,FUSSBALL");

    System.out.println(m.writeValueAsString(cfg));
}

From source file:com.twentyn.patentScorer.ScoreMerger.java

public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());

    opts.addOption(Option.builder("r").longOpt("results").required().hasArg()
            .desc("A directory of search results to read").build());
    opts.addOption(Option.builder("s").longOpt("scores").required().hasArg()
            .desc("A directory of patent classification scores to read").build());
    opts.addOption(Option.builder("o").longOpt("output").required().hasArg()
            .desc("The output file where results will be written.").build());

    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }

    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }
    File scoresDirectory = new File(cmdLine.getOptionValue("scores"));
    if (cmdLine.getOptionValue("scores") == null || !scoresDirectory.isDirectory()) {
        LOGGER.error("Not a directory of score files: " + cmdLine.getOptionValue("scores"));
    }

    File resultsDirectory = new File(cmdLine.getOptionValue("results"));
    if (cmdLine.getOptionValue("results") == null || !resultsDirectory.isDirectory()) {
        LOGGER.error("Not a directory of results files: " + cmdLine.getOptionValue("results"));
    }

    FileWriter outputWriter = new FileWriter(cmdLine.getOptionValue("output"));

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);

    FilenameFilter jsonFilter = new FilenameFilter() {
        public final Pattern JSON_PATTERN = Pattern.compile("\\.json$");

        public boolean accept(File dir, String name) {
            return JSON_PATTERN.matcher(name).find();
        }
    };

    Map<String, PatentScorer.ClassificationResult> scores = new HashMap<>();
    LOGGER.info("Reading scores from directory at " + scoresDirectory.getAbsolutePath());
    for (File scoreFile : scoresDirectory.listFiles(jsonFilter)) {
        BufferedReader reader = new BufferedReader(new FileReader(scoreFile));
        int count = 0;
        String line;
        while ((line = reader.readLine()) != null) {
            PatentScorer.ClassificationResult res = objectMapper.readValue(line,
                    PatentScorer.ClassificationResult.class);
            scores.put(res.docId, res);
            count++;
        }
        LOGGER.info("Read " + count + " scores from " + scoreFile.getAbsolutePath());
    }

    Map<String, List<DocumentSearch.SearchResult>> synonymsToResults = new HashMap<>();
    Map<String, List<DocumentSearch.SearchResult>> inchisToResults = new HashMap<>();
    LOGGER.info("Reading results from directory at " + resultsDirectory);
    // With help from http://stackoverflow.com/questions/6846244/jackson-and-generic-type-reference.
    JavaType resultsType = objectMapper.getTypeFactory().constructCollectionType(List.class,
            DocumentSearch.SearchResult.class);

    List<File> resultsFiles = Arrays.asList(resultsDirectory.listFiles(jsonFilter));
    Collections.sort(resultsFiles, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });
    for (File resultsFile : resultsFiles) {
        BufferedReader reader = new BufferedReader(new FileReader(resultsFile));
        CharBuffer buffer = CharBuffer.allocate(Long.valueOf(resultsFile.length()).intValue());
        int bytesRead = reader.read(buffer);
        LOGGER.info("Read " + bytesRead + " bytes from " + resultsFile.getName() + " (length is "
                + resultsFile.length() + ")");
        List<DocumentSearch.SearchResult> results = objectMapper.readValue(new CharArrayReader(buffer.array()),
                resultsType);

        LOGGER.info("Read " + results.size() + " results from " + resultsFile.getAbsolutePath());

        int count = 0;
        for (DocumentSearch.SearchResult sres : results) {
            for (DocumentSearch.ResultDocument resDoc : sres.getResults()) {
                String docId = resDoc.getDocId();
                PatentScorer.ClassificationResult classificationResult = scores.get(docId);
                if (classificationResult == null) {
                    LOGGER.warn("No classification result found for " + docId);
                } else {
                    resDoc.setClassifierScore(classificationResult.getScore());
                }
            }
            if (!synonymsToResults.containsKey(sres.getSynonym())) {
                synonymsToResults.put(sres.getSynonym(), new ArrayList<DocumentSearch.SearchResult>());
            }
            synonymsToResults.get(sres.getSynonym()).add(sres);
            count++;
            if (count % 1000 == 0) {
                LOGGER.info("Processed " + count + " search result documents");
            }
        }
    }

    Comparator<DocumentSearch.ResultDocument> resultDocumentComparator = new Comparator<DocumentSearch.ResultDocument>() {
        @Override
        public int compare(DocumentSearch.ResultDocument o1, DocumentSearch.ResultDocument o2) {
            int cmp = o2.getClassifierScore().compareTo(o1.getClassifierScore());
            if (cmp != 0) {
                return cmp;
            }
            cmp = o2.getScore().compareTo(o1.getScore());
            return cmp;
        }
    };

    for (Map.Entry<String, List<DocumentSearch.SearchResult>> entry : synonymsToResults.entrySet()) {
        DocumentSearch.SearchResult newSearchRes = null;
        // Merge all result documents into a single search result.
        for (DocumentSearch.SearchResult sr : entry.getValue()) {
            if (newSearchRes == null) {
                newSearchRes = sr;
            } else {
                newSearchRes.getResults().addAll(sr.getResults());
            }
        }
        if (newSearchRes == null || newSearchRes.getResults() == null) {
            LOGGER.error("Search results for " + entry.getKey() + " are null.");
            continue;
        }
        Collections.sort(newSearchRes.getResults(), resultDocumentComparator);
        if (!inchisToResults.containsKey(newSearchRes.getInchi())) {
            inchisToResults.put(newSearchRes.getInchi(), new ArrayList<DocumentSearch.SearchResult>());
        }
        inchisToResults.get(newSearchRes.getInchi()).add(newSearchRes);
    }

    List<String> sortedKeys = new ArrayList<String>(inchisToResults.keySet());
    Collections.sort(sortedKeys);
    List<GroupedInchiResults> orderedResults = new ArrayList<>(sortedKeys.size());
    Comparator<DocumentSearch.SearchResult> synonymSorter = new Comparator<DocumentSearch.SearchResult>() {
        @Override
        public int compare(DocumentSearch.SearchResult o1, DocumentSearch.SearchResult o2) {
            return o1.getSynonym().compareTo(o2.getSynonym());
        }
    };
    for (String inchi : sortedKeys) {
        List<DocumentSearch.SearchResult> res = inchisToResults.get(inchi);
        Collections.sort(res, synonymSorter);
        orderedResults.add(new GroupedInchiResults(inchi, res));
    }

    objectMapper.writerWithView(Object.class).writeValue(outputWriter, orderedResults);
    outputWriter.close();
}

From source file:com.twentyn.patentSearch.DocumentSearch.java

public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("x").longOpt("index").hasArg().required().desc("Path to index file to read")
            .build());
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());
    opts.addOption(Option.builder("v").longOpt("verbose").desc("Print verbose log output").build());

    opts.addOption(Option.builder("f").longOpt("field").hasArg().desc("The indexed field to search").build());
    opts.addOption(
            Option.builder("q").longOpt("query").hasArg().desc("The query to use when searching").build());
    opts.addOption(Option.builder("l").longOpt("list-file").hasArg()
            .desc("A file containing a list of queries to run in sequence").build());
    opts.addOption(
            Option.builder("e").longOpt("enumerate").desc("Enumerate the documents in the index").build());
    opts.addOption(Option.builder("d").longOpt("dump").hasArg()
            .desc("Dump terms in the document index for a specified field").build());
    opts.addOption(
            Option.builder("o").longOpt("output").hasArg().desc("Write results JSON to this file.").build());
    opts.addOption(Option.builder("n").longOpt("inchi-field").hasArg()
            .desc("The index of the InChI field if an input TSV is specified.").build());
    opts.addOption(Option.builder("s").longOpt("synonym-field").hasArg()
            .desc("The index of the chemical synonym field if an input TSV is specified.").build());

    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }

    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }

    if (!(cmdLine.hasOption("enumerate") || cmdLine.hasOption("dump") || (cmdLine.hasOption("field")
            && (cmdLine.hasOption("query") || cmdLine.hasOption("list-file"))))) {
        System.out.println("Must specify one of 'enumerate', 'dump', or 'field' + {'query', 'list-file'}");
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }

    if (cmdLine.hasOption("verbose")) {
        // With help from http://stackoverflow.com/questions/23434252/programmatically-change-log-level-in-log4j2
        LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        Configuration ctxConfig = ctx.getConfiguration();
        LoggerConfig logConfig = ctxConfig.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
        logConfig.setLevel(Level.DEBUG);

        ctx.updateLoggers();
        LOGGER.debug("Verbose logging enabled");
    }

    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);

    LOGGER.info("Opening index at " + cmdLine.getOptionValue("index"));

    try (Directory indexDir = FSDirectory.open(new File(cmdLine.getOptionValue("index")).toPath());
            IndexReader indexReader = DirectoryReader.open(indexDir);) {
        if (cmdLine.hasOption("enumerate")) {
            /* Enumerate all documents in the index.
             * With help from
             * http://stackoverflow.com/questions/2311845/is-it-possible-to-iterate-through-documents-stored-in-lucene-index
             */
            for (int i = 0; i < indexReader.maxDoc(); i++) {
                Document doc = indexReader.document(i);
                LOGGER.info("Doc " + i + ":");
                LOGGER.info(doc);
            }
        } else if (cmdLine.hasOption("dump")) {
            /* Dump indexed terms for a specific field.
             * With help from http://stackoverflow.com/questions/11148036/find-list-of-terms-indexed-by-lucene */
            Terms terms = SlowCompositeReaderWrapper.wrap(indexReader).terms(cmdLine.getOptionValue("dump"));
            LOGGER.info("Has positions: " + terms.hasPositions());
            LOGGER.info("Has offsets:   " + terms.hasOffsets());
            LOGGER.info("Has freqs:     " + terms.hasFreqs());
            LOGGER.info("Stats:         " + terms.getStats());
            LOGGER.info(terms);
            TermsEnum termsEnum = terms.iterator();
            BytesRef br = null;
            while ((br = termsEnum.next()) != null) {
                LOGGER.info("  " + br.utf8ToString());
            }

        } else {
            IndexSearcher searcher = new IndexSearcher(indexReader);
            String field = cmdLine.getOptionValue("field");

            List<Pair<String, String>> queries = null;
            if (cmdLine.hasOption("query")) {
                queries = Collections.singletonList(Pair.of("", cmdLine.getOptionValue("query")));
            } else if (cmdLine.hasOption("list-file")) {
                if (!(cmdLine.hasOption("inchi-field") && cmdLine.hasOption("synonym-field"))) {
                    LOGGER.error("Must specify both inchi-field and synonym-field when using list-file.");
                    System.exit(1);
                }
                Integer inchiField = Integer.parseInt(cmdLine.getOptionValue("inchi-field"));
                Integer synonymField = Integer.parseInt(cmdLine.getOptionValue("synonym-field"));

                queries = new LinkedList<>();
                BufferedReader r = new BufferedReader(new FileReader(cmdLine.getOptionValue("list-file")));
                String line;
                while ((line = r.readLine()) != null) {
                    line = line.trim();
                    if (!line.isEmpty()) {
                        // TODO: use a proper TSV reader; this is intentionally terrible as is.
                        String[] fields = line.split("\t");
                        queries.add(Pair.of(fields[inchiField].replace("\"", ""), fields[synonymField]));
                    }
                }
                r.close();
            }

            if (queries == null || queries.size() == 0) {
                LOGGER.error("Found no queries to run.");
                return;
            }

            List<SearchResult> searchResults = new ArrayList<>(queries.size());
            for (Pair<String, String> queryPair : queries) {
                String inchi = queryPair.getLeft();
                String rawQueryString = queryPair.getRight();
                /* The Lucene query parser interprets the kind of structural annotations we see in chemical entities
                 * as query directives, which is not what we want at all.  Phrase queries seem to work adequately
                 * with the analyzer we're currently using. */
                String queryString = rawQueryString.trim().toLowerCase();
                String[] parts = queryString.split("\\s+");
                PhraseQuery query = new PhraseQuery();
                for (String p : parts) {
                    query.add(new Term(field, p));
                }
                LOGGER.info("Running query: " + query.toString());

                BooleanQuery bq = new BooleanQuery();
                bq.add(query, BooleanClause.Occur.MUST);
                bq.add(new TermQuery(new Term(field, "yeast")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "ferment")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "fermentation")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "fermentive")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "saccharomyces")), BooleanClause.Occur.SHOULD);

                LOGGER.info("  Full query: " + bq.toString());

                TopDocs topDocs = searcher.search(bq, 100);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                if (scoreDocs.length == 0) {
                    LOGGER.info("Search returned no results.");
                }
                List<ResultDocument> results = new ArrayList<>(scoreDocs.length);
                for (int i = 0; i < scoreDocs.length; i++) {
                    ScoreDoc scoreDoc = scoreDocs[i];
                    Document doc = indexReader.document(scoreDoc.doc);
                    LOGGER.info("Doc " + i + ": " + scoreDoc.doc + ", score " + scoreDoc.score + ": "
                            + doc.get("id") + ", " + doc.get("title"));
                    results.add(new ResultDocument(scoreDoc.doc, scoreDoc.score, doc.get("title"),
                            doc.get("id"), null));
                }
                LOGGER.info("----- Done with query " + query.toString());
                // TODO: reduce memory usage when not writing results to an output file.
                searchResults.add(new SearchResult(inchi, rawQueryString, bq, results));
            }

            if (cmdLine.hasOption("output")) {
                try (FileWriter writer = new FileWriter(cmdLine.getOptionValue("output"));) {
                    writer.write(objectMapper.writeValueAsString(searchResults));
                }
            }
        }
    }
}

From source file:io.fabric8.devops.projects.finder.gogs.JsonHelper.java

/**
 * Creates a configured Jackson object mapper for parsing JSON
 */
public static ObjectMapper createObjectMapper() {
    ObjectMapper mapper = new ObjectMapper();
    mapper.enable(SerializationFeature.INDENT_OUTPUT);
    return mapper;
}

From source file:org.hawkular.datamining.rest.json.JacksonConfig.java

public static void initializeObjectMapper(ObjectMapper mapper) {
    mapper.enable(SerializationFeature.INDENT_OUTPUT);
    mapper.disable(SerializationFeature.WRITE_NULL_MAP_VALUES);
    mapper.disable(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS);
    mapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);

    ObjectMapperConfig.config(mapper);
}

From source file:io.fabric8.forge.addon.utils.OutputFormatHelper.java

public static String toJson(Object result) throws JsonProcessingException {
    ObjectMapper mapper = new ObjectMapper();
    mapper.enable(SerializationFeature.INDENT_OUTPUT);
    return mapper.writeValueAsString(result);
}

From source file:at.ac.tuwien.dsg.smartcom.adapters.rest.ObjectMapperProvider.java

private static ObjectMapper createDefaultMapper() {
    final ObjectMapper result = new ObjectMapper();
    result.enable(SerializationFeature.INDENT_OUTPUT);

    return result;
}

From source file:io.fouad.jtb.core.utils.JsonUtils.java

public static <T> T toJavaObject(String json, Class<T> clazz) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    mapper.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING);
    return mapper.readValue(json, clazz);
}