Example usage for com.fasterxml.jackson.databind ObjectMapper convertValue

List of usage examples for com.fasterxml.jackson.databind ObjectMapper convertValue

Introduction

On this page you can find example usage for com.fasterxml.jackson.databind ObjectMapper convertValue.

Prototype

@SuppressWarnings("unchecked")
    public <T> T convertValue(Object fromValue, JavaType toValueType) throws IllegalArgumentException 

Source Link

Usage

From source file:com.galenframework.ide.devices.tasks.DeviceTaskParser.java

/**
 * Parses a single JSON command node into its corresponding {@code DeviceCommand} instance.
 * The node's "name" field selects the command class from the {@code commandClasses} registry;
 * the optional "parameters" sub-tree is deserialized directly into the command bean.
 *
 * @param mapper      the Jackson mapper used to bind the "parameters" sub-tree
 * @param commandNode the JSON node describing one command; must contain a "name" field
 * @return the instantiated command
 * @throws RuntimeException if the command name is not registered
 */
private static DeviceCommand parseCommand(ObjectMapper mapper, JsonNode commandNode)
        throws NoSuchMethodException, IllegalAccessException, InvocationTargetException,
        InstantiationException {
    String commandName = commandNode.get("name").asText();
    // Guard clause: fail fast on unknown command types.
    if (!commandClasses.containsKey(commandName)) {
        throw new RuntimeException("Unknown command type: " + commandName);
    }
    Class<? extends DeviceCommand> commandClass = commandClasses.get(commandName);
    if (commandNode.has("parameters")) {
        // Bind the "parameters" sub-tree to the command bean (reuse the class already looked up).
        return mapper.convertValue(commandNode.get("parameters"), commandClass);
    }
    // No parameters supplied: fall back to the command's no-arg constructor.
    return commandClass.getConstructor().newInstance();
}

From source file:models.daos.DatasetDao.java

/**
 * Upserts a dataset: updates the row if a dataset with the same URN already exists,
 * otherwise inserts a new one.
 *
 * @param dataset the JSON representation of the dataset (snake_case column names)
 * @throws Exception propagated from the underlying update/insert operations
 */
public static void setDatasetRecord(JsonNode dataset) throws Exception {
    ObjectMapper om = new ObjectMapper();
    // DB columns are snake_case; bean properties are camelCase.
    om.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
    DatasetRecord record = om.convertValue(dataset, DatasetRecord.class);

    if (record != null) {
        Map<String, Object> params = new HashMap<>();
        params.put("urn", record.getUrn());
        try {
            // Existence probe only: queryForMap throws EmptyResultDataAccessException
            // when no row matches, so the returned row itself is not needed.
            JdbcUtil.wherehowsNamedJdbcTemplate.queryForMap(GET_DATASET_BY_URN, params);
            updateDataset(dataset);
        } catch (EmptyResultDataAccessException e) {
            insertDataset(dataset);
        }
    }
}

From source file:models.daos.DatasetDao.java

/**
 * Updates an existing dict_dataset row from the given JSON dataset.
 * Resolves the referenced dataset id and the partition layout id before writing.
 *
 * @param dataset the JSON representation of the dataset (snake_case column names)
 * @throws Exception propagated from the database write
 */
public static void updateDataset(JsonNode dataset) throws Exception {
    ObjectMapper om = new ObjectMapper();
    // DB columns are snake_case; bean properties are camelCase.
    om.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
    DatasetRecord record = om.convertValue(dataset, DatasetRecord.class);
    if (record.getRefDatasetUrn() != null) {
        Map<String, Object> refDataset = getDatasetByUrn(record.getRefDatasetUrn());
        // Find ref dataset id
        if (refDataset != null) {
            record.setRefDatasetId(((Long) refDataset.get("id")).intValue());
        }
    }
    // Find layout id
    if (record.getSamplePartitionFullPath() != null) {
        PartitionPatternMatcher ppm = new PartitionPatternMatcher(PartitionLayoutDao.getPartitionLayouts());
        record.setPartitionLayoutPatternId(ppm.analyze(record.getSamplePartitionFullPath()));
    }

    DatabaseWriter dw = new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, "dict_dataset");
    try {
        dw.update(record.toUpdateDatabaseValue(), record.getUrn());
    } finally {
        // Always release the writer, even when the update throws (was leaked on failure).
        dw.close();
    }
}

From source file:models.daos.DatasetDao.java

/**
 * Inserts a new dict_dataset row from the given JSON dataset.
 * Resolves the referenced dataset id and the partition layout id before writing.
 *
 * @param dataset the JSON representation of the dataset (snake_case column names)
 * @throws Exception propagated from the database write
 */
public static void insertDataset(JsonNode dataset) throws Exception {

    ObjectMapper om = new ObjectMapper();
    // DB columns are snake_case; bean properties are camelCase.
    om.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
    DatasetRecord record = om.convertValue(dataset, DatasetRecord.class);
    if (record.getRefDatasetUrn() != null) {
        Map<String, Object> refDataset = getDatasetByUrn(record.getRefDatasetUrn());
        // Find ref dataset id
        if (refDataset != null) {
            record.setRefDatasetId(((Long) refDataset.get("id")).intValue());
        }
    }

    // Find layout id
    if (record.getSamplePartitionFullPath() != null) {
        PartitionPatternMatcher ppm = new PartitionPatternMatcher(PartitionLayoutDao.getPartitionLayouts());
        record.setPartitionLayoutPatternId(ppm.analyze(record.getSamplePartitionFullPath()));
    }

    DatabaseWriter dw = new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, "dict_dataset");
    try {
        dw.append(record);
    } finally {
        // Always release the writer, even when the append throws (was leaked on failure).
        dw.close();
    }
}

From source file:com.metamx.emitter.core.Emitters.java

/**
 * Builds an Emitter from configuration properties: the presence of the logging or
 * http property key selects which emitter type is constructed.
 *
 * @param props      emitter configuration; must define one of the two emitter property keys
 * @param httpClient the HTTP client handed to the builder
 * @param jsonMapper mapper used both to bind the config map and by the built emitter
 * @param lifecycle  lifecycle the emitter is registered with
 * @return the constructed emitter
 */
public static Emitter create(Properties props, HttpClient httpClient, ObjectMapper jsonMapper,
        Lifecycle lifecycle) {
    Map<String, Object> emitterConfig = Maps.newHashMap();

    if (props.getProperty(LOG_EMITTER_PROP) != null) {
        emitterConfig.put("logging", makeLoggingMap(props));
    } else if (props.getProperty(HTTP_EMITTER_PROP) != null) {
        emitterConfig.put("http", makeHttpMap(props));
    } else {
        throw new ISE("Unknown type of emitter. Please set [%s] or [%s]", LOG_EMITTER_PROP, HTTP_EMITTER_PROP);
    }

    // Let Jackson pick the concrete builder subtype from the single config key.
    EmitterBuilder builder = jsonMapper.convertValue(emitterConfig, EmitterBuilder.class);
    return builder.build(jsonMapper, httpClient, lifecycle);
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexConfigUtils.java

/** Converts the bucket schema into the config bean.
 *  Each section (search index / columnar / temporal) is taken from the bucket's
 *  technology override schema where present and enabled, then filled in
 *  field-by-field from the global defaults ("backup").
 * @param bucket - the bucket whose data schema is being converted
 * @param backup - provides defaults from the global configuration
 * @param mapper - Jackson mapper used to convert raw override maps into typed beans
 * @return the fully-resolved Elasticsearch index service config bean
 */
public static ElasticsearchIndexServiceConfigBean buildConfigBeanFromSchema(final DataBucketBean bucket,
        final ElasticsearchIndexServiceConfigBean backup, final ObjectMapper mapper) {

    // Search index section: the bucket's override (if the schema is enabled) else the global default.
    final SearchIndexSchemaDefaultBean search_index_bits_tmp = Optional.ofNullable(bucket.data_schema())
            .map(DataSchemaBean::search_index_schema).filter(s -> Optional.ofNullable(s.enabled()).orElse(true))
            .map(s -> s.technology_override_schema())
            .map(map -> mapper.convertValue(map, SearchIndexSchemaDefaultBean.class))
            .orElse(backup.search_technology_override());

    // The _actual_ settings technology override object is taken from either the bucket or the backup on a top-level-field by top-leve-field basis
    final SearchIndexSchemaDefaultBean search_index_bits = BeanTemplateUtils.clone(search_index_bits_tmp)
            //(target index size is a special case)
            .with(SearchIndexSchemaDefaultBean::target_index_size_mb,
                    Optionals.of(() -> bucket.data_schema().search_index_schema().target_index_size_mb())
                            .orElse(backup.search_technology_override().target_index_size_mb()))
            .with(SearchIndexSchemaDefaultBean::collide_policy,
                    Optional.ofNullable(search_index_bits_tmp.collide_policy())
                            .orElse(backup.search_technology_override().collide_policy()))
            .with(SearchIndexSchemaDefaultBean::type_name_or_prefix,
                    Optional.ofNullable(search_index_bits_tmp.type_name_or_prefix())
                            .orElse(backup.search_technology_override().type_name_or_prefix()))
            .with(SearchIndexSchemaDefaultBean::verbose,
                    Optional.ofNullable(search_index_bits_tmp.verbose())
                            .orElse(backup.search_technology_override().verbose()))
            .with(SearchIndexSchemaDefaultBean::settings,
                    Optional.ofNullable(search_index_bits_tmp.settings())
                            .orElse(backup.search_technology_override().settings()))
            .with(SearchIndexSchemaDefaultBean::aliases,
                    Optional.ofNullable(search_index_bits_tmp.aliases())
                            .orElse(backup.search_technology_override().aliases()))
            .with(SearchIndexSchemaDefaultBean::mappings,
                    Optional.ofNullable(search_index_bits_tmp.mappings())
                            .orElse(backup.search_technology_override().mappings()))
            .with(SearchIndexSchemaDefaultBean::mapping_overrides,
                    Optional.ofNullable(search_index_bits_tmp.mapping_overrides())
                            .orElse(backup.search_technology_override().mapping_overrides()))
            .with(SearchIndexSchemaDefaultBean::tokenized_string_field,
                    Optional.ofNullable(search_index_bits_tmp.tokenized_string_field())
                            .orElse(backup.search_technology_override().tokenized_string_field()))
            .with(SearchIndexSchemaDefaultBean::untokenized_string_field,
                    Optional.ofNullable(search_index_bits_tmp.untokenized_string_field())
                            .orElse(backup.search_technology_override().untokenized_string_field()))
            .with(SearchIndexSchemaDefaultBean::dual_tokenized_string_field,
                    Optional.ofNullable(search_index_bits_tmp.dual_tokenized_string_field())
                            .orElse(backup.search_technology_override().dual_tokenized_string_field()))
            .with(SearchIndexSchemaDefaultBean::dual_untokenized_string_field,
                    Optional.ofNullable(search_index_bits_tmp.dual_untokenized_string_field())
                            .orElse(backup.search_technology_override().dual_untokenized_string_field()))
            .with(SearchIndexSchemaDefaultBean::dual_tokenize_by_default,
                    Optional.ofNullable(search_index_bits_tmp.dual_tokenize_by_default())
                            .orElse(backup.search_technology_override().dual_tokenize_by_default()))
            .with(SearchIndexSchemaDefaultBean::dual_tokenization_override,
                    Optional.ofNullable(search_index_bits_tmp.dual_tokenization_override())
                            .orElse(backup.search_technology_override().dual_tokenization_override()))
            .done();

    // Columnar section: same pattern — bucket override (if enabled) else global default...
    final ColumnarSchemaDefaultBean columnar_bits_tmp = Optional.ofNullable(bucket.data_schema())
            .map(DataSchemaBean::columnar_schema).filter(s -> Optional.ofNullable(s.enabled()).orElse(true))
            .map(s -> s.technology_override_schema())
            .map(map -> mapper.convertValue(map, ColumnarSchemaDefaultBean.class))
            .orElse(backup.columnar_technology_override());

    // ...then merged field-by-field against the backup.
    final ColumnarSchemaDefaultBean columnar_bits = BeanTemplateUtils.clone(columnar_bits_tmp)
            .with(ColumnarSchemaDefaultBean::default_field_data_analyzed,
                    Optional.ofNullable(columnar_bits_tmp.default_field_data_analyzed())
                            .orElse(backup.columnar_technology_override().default_field_data_analyzed()))
            .with(ColumnarSchemaDefaultBean::default_field_data_notanalyzed,
                    Optional.ofNullable(columnar_bits_tmp.default_field_data_notanalyzed())
                            .orElse(backup.columnar_technology_override().default_field_data_notanalyzed()))
            .with(ColumnarSchemaDefaultBean::enabled_field_data_analyzed,
                    Optional.ofNullable(columnar_bits_tmp.enabled_field_data_analyzed())
                            .orElse(backup.columnar_technology_override().enabled_field_data_analyzed()))
            .with(ColumnarSchemaDefaultBean::enabled_field_data_notanalyzed,
                    Optional.ofNullable(columnar_bits_tmp.enabled_field_data_notanalyzed())
                            .orElse(backup.columnar_technology_override().enabled_field_data_notanalyzed()))
            .done();

    // Temporal section: whole-schema fallback, then the override maps are merged
    // (note the backup's entries are put last, so they win on key collisions).
    final DataSchemaBean.TemporalSchemaBean temporal_bits_tmp = Optional.ofNullable(bucket.data_schema())
            .map(DataSchemaBean::temporal_schema).filter(s -> Optional.ofNullable(s.enabled()).orElse(true))
            .orElse(backup.temporal_technology_override());

    final DataSchemaBean.TemporalSchemaBean temporal_bits = BeanTemplateUtils.clone(temporal_bits_tmp)
            .with(DataSchemaBean.TemporalSchemaBean::technology_override_schema, Lambdas.get(() -> {
                HashMap<String, Object> tmp = new HashMap<>();
                tmp.putAll(Optional.ofNullable(temporal_bits_tmp.technology_override_schema())
                        .orElse(Collections.emptyMap()));
                tmp.putAll(
                        Optional.ofNullable(backup.temporal_technology_override().technology_override_schema())
                                .orElse(Collections.emptyMap()));
                return tmp;
            })).done();

    // Assemble the final config bean from the three resolved sections.
    return BeanTemplateUtils.build(ElasticsearchIndexServiceConfigBean.class)
            .with(ElasticsearchIndexServiceConfigBean::search_technology_override, search_index_bits)
            .with(ElasticsearchIndexServiceConfigBean::columnar_technology_override, columnar_bits)
            .with(ElasticsearchIndexServiceConfigBean::temporal_technology_override, temporal_bits)
            .with(ElasticsearchIndexServiceConfigBean::document_schema_override,
                    backup.document_schema_override())
            .done().get();
}

From source file:com.ikanow.aleph2.data_model.utils.BeanTemplateUtils.java

/** Converts a bean into its Map&lt;String, Object&gt; representation (not high performance)
 * @param bean - the bean to convert
 * @return - the map form of the bean
 */
@SuppressWarnings("unchecked")
static public <T> Map<String, Object> toMap(final T bean) {
    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());
    // (unchecked: Jackson returns a raw Map here; keys are strings by construction)
    final Map<String, Object> bean_as_map = mapper.convertValue(bean, Map.class);
    return bean_as_map;
}

From source file:io.github.robwin.swagger2markup.Swagger2MarkupConverter.java

/**
 * Creates a Swagger2MarkupConverter.Builder from a given Swagger YAML or JSON String.
 *
 * @param swagger the Swagger YAML or JSON String.
 * @return a Swagger2MarkupConverter builder
 * @throws java.io.IOException if the String can not be parsed
 */
public static Builder fromString(String swagger) throws IOException {
    Validate.notEmpty(swagger, "swagger must not be null!");
    // JSON documents start with '{'; anything else is treated as YAML.
    final ObjectMapper mapper = swagger.trim().startsWith("{") ? Json.mapper() : Yaml.mapper();
    final JsonNode rootNode = mapper.readTree(swagger);

    // A valid Swagger document must carry a top-level "swagger" version node.
    if (rootNode.get("swagger") == null) {
        throw new IllegalArgumentException("Swagger String is in the wrong format");
    }

    return new Builder(mapper.convertValue(rootNode, Swagger.class));
}

From source file:com.ikanow.aleph2.data_model.utils.BeanTemplateUtils.java

/** Converts a map representation to a bean template of the specified type
 * (note: not very high performance, should only be used for management-type operations)
 * @param map_json - the map representation of the bean
 * @param clazz - the target bean class
 * @return - the bean template
 */
static public <T> BeanTemplate<T> from(final Map<String, Object> map_json, final Class<T> clazz) {
    try {
        final ObjectMapper object_mapper = BeanTemplateUtils.configureMapper(Optional.empty());
        final T converted_bean = object_mapper.convertValue(map_json, clazz);
        return BeanTemplate.of(converted_bean);
    } catch (Exception e) { // on fail returns an unchecked error
        throw new RuntimeException(e); // (this can only happen due to "static" code type issues, so unchecked exception is fine
    }
}

From source file:io.fabric8.maven.core.util.KubernetesResourceUtil.java

/**
 * Read a Kubernetes resource fragment and add meta information extracted from the filename
 * to the resource descriptor. I.e. the following elements are added if not provided in the fragment:
 *
 * <ul>/*from   w w  w  . j  ava 2 s  .  co  m*/
 *     <li>name - Name of the resource added to metadata</li>
 *     <li>kind - Resource's kind</li>
 *     <li>apiVersion - API version (given as parameter to this method)</li>
 * </ul>
 *
 *
 * @param apiVersions the API versions to add if not given.
 * @param file file to read, whose name must match {@link #FILENAME_PATTERN}.  @return map holding the fragment
 * @param appName resource name specifying resources belonging to this application
 */
public static HasMetadata getResource(ResourceVersioning apiVersions, File file, String appName)
        throws IOException {
    Map<String, Object> fragment = readAndEnrichFragment(apiVersions, file, appName);
    ObjectMapper mapper = new ObjectMapper();
    try {
        return mapper.convertValue(fragment, HasMetadata.class);
    } catch (ClassCastException exp) {
        throw new IllegalArgumentException(String.format("Resource fragment %s has an invalid syntax (%s)",
                file.getPath(), exp.getMessage()));
    }
}