List of usage examples for the com.fasterxml.jackson.databind.jsontype.NamedType constructor
public NamedType(Class<?> paramClass, String paramString)
From source file:com.netflix.suro.jackson.DefaultObjectMapper.java
@Inject public DefaultObjectMapper(final Injector injector, Set<TypeHolder> crossInjectable) { SimpleModule serializerModule = new SimpleModule("SuroServer default serializers"); serializerModule.addSerializer(ByteOrder.class, ToStringSerializer.instance); serializerModule.addDeserializer(ByteOrder.class, new JsonDeserializer<ByteOrder>() { @Override/*ww w . ja v a 2s.c o m*/ public ByteOrder deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { if (ByteOrder.BIG_ENDIAN.toString().equals(jp.getText())) { return ByteOrder.BIG_ENDIAN; } return ByteOrder.LITTLE_ENDIAN; } }); registerModule(serializerModule); registerModule(new GuavaModule()); if (injector != null) { setInjectableValues(new InjectableValues() { @Override public Object findInjectableValue(Object valueId, DeserializationContext ctxt, BeanProperty forProperty, Object beanInstance) { LOG.info("Looking for " + valueId); try { return injector.getInstance( Key.get(forProperty.getType().getRawClass(), Names.named((String) valueId))); } catch (Exception e) { try { return injector.getInstance(forProperty.getType().getRawClass()); } catch (Exception ex) { LOG.info("No implementation found, returning null"); } return null; } } }); } configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); configure(MapperFeature.AUTO_DETECT_GETTERS, false); configure(MapperFeature.AUTO_DETECT_CREATORS, false); configure(MapperFeature.AUTO_DETECT_FIELDS, false); configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false); configure(MapperFeature.AUTO_DETECT_SETTERS, false); configure(SerializationFeature.INDENT_OUTPUT, false); if (crossInjectable != null) { for (TypeHolder entry : crossInjectable) { LOG.info("Registering subtype : " + entry.getName() + " -> " + entry.getRawType().getCanonicalName()); registerSubtypes(new NamedType(entry.getRawType(), entry.getName())); } } }
From source file:com.addthis.codec.jackson.CodecIntrospector.java
/** report all non-alias plugin types */ @Override//from w ww .j av a 2 s. c om public List<NamedType> findSubtypes(Annotated a) { Pluggable pluggable = a.getAnnotation(Pluggable.class); PluginMap pluginMap; if (pluggable != null) { pluginMap = pluginRegistry.byCategory().get(pluggable.value()); } else if (pluginRegistry.byClass().containsKey(a.getRawType())) { pluginMap = pluginRegistry.byClass().get(a.getRawType()); } else { return null; } List<NamedType> result = new ArrayList<>(pluginMap.asBiMap().size()); for (Map.Entry<String, Class<?>> type : pluginMap.asBiMap().entrySet()) { result.add(new NamedType(type.getValue(), type.getKey())); } return result; }
From source file:com.flipkart.foxtrot.core.querystore.actions.spi.AnalyticsLoader.java
@Override public void start() throws Exception { Reflections reflections = new Reflections("com.flipkart.foxtrot", new SubTypesScanner()); Set<Class<? extends Action>> actions = reflections.getSubTypesOf(Action.class); if (actions.isEmpty()) { throw new Exception("No analytics actions found!!"); }//from ww w .ja va2 s.co m List<NamedType> types = new ArrayList<>(); for (Class<? extends Action> action : actions) { AnalyticsProvider analyticsProvider = action.getAnnotation(AnalyticsProvider.class); if (null == analyticsProvider.request() || null == analyticsProvider.opcode() || analyticsProvider.opcode().isEmpty() || null == analyticsProvider.response()) { throw new Exception("Invalid annotation on " + action.getCanonicalName()); } if (analyticsProvider.opcode().equalsIgnoreCase("default")) { logger.warn("Action " + action.getCanonicalName() + " does not specify cache token. " + "Using default cache."); } register(new ActionMetadata(analyticsProvider.request(), action, analyticsProvider.cacheable(), analyticsProvider.opcode())); types.add(new NamedType(analyticsProvider.request(), analyticsProvider.opcode())); types.add(new NamedType(analyticsProvider.response(), analyticsProvider.opcode())); logger.info("Registered action: " + action.getCanonicalName()); } objectMapper.getSubtypeResolver().registerSubtypes(types.toArray(new NamedType[types.size()])); }
From source file:io.dropwizard.revolver.RevolverBundle.java
/**
 * Registers every polymorphic Revolver configuration subtype with the
 * bootstrap's ObjectMapper, keyed by its JSON type name.
 */
private void registerTypes(final Bootstrap<?> bootstrap) {
    // Service transports.
    bootstrap.getObjectMapper().registerSubtypes(
            new NamedType(RevolverHttpServiceConfig.class, "http"),
            new NamedType(RevolverHttpsServiceConfig.class, "https"));
    // Authentication schemes.
    bootstrap.getObjectMapper().registerSubtypes(
            new NamedType(BasicAuthConfig.class, "basic"),
            new NamedType(TokenAuthConfig.class, "token"));
    // Endpoint resolution strategies.
    bootstrap.getObjectMapper().registerSubtypes(
            new NamedType(SimpleEndpointSpec.class, "simple"),
            new NamedType(RangerEndpointSpec.class, "ranger_sharded"));
    // Mailbox backends.
    bootstrap.getObjectMapper().registerSubtypes(
            new NamedType(InMemoryMailBoxConfig.class, "in_memory"),
            new NamedType(AerospikeMailBoxConfig.class, "aerospike"));
}
From source file:io.druid.indexer.IndexGeneratorJobTest.java
/**
 * Per-test setup: registers the shard-spec subtypes used by the fixtures, writes
 * the input data to a temp file (plain lines or a Hadoop SequenceFile depending
 * on the parameterized input format), and builds the indexer config under test.
 */
@Before
public void setUp() throws Exception {
    mapper = HadoopDruidIndexerConfig.jsonMapper;
    // Shard spec implementations are resolved polymorphically by these type names.
    mapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
    mapper.registerSubtypes(new NamedType(SingleDimensionShardSpec.class, "single"));
    dataFile = temporaryFolder.newFile();
    tmpDir = temporaryFolder.newFolder();
    HashMap<String, Object> inputSpec = new HashMap<String, Object>();
    inputSpec.put("paths", dataFile.getCanonicalPath());
    inputSpec.put("type", "static");
    if (inputFormatName != null) {
        inputSpec.put("inputFormat", inputFormatName);
    }
    // SequenceFile input needs binary key/value records; everything else is plain text.
    if (SequenceFileInputFormat.class.getName().equals(inputFormatName)) {
        writeDataToLocalSequenceFile(dataFile, data);
    } else {
        FileUtils.writeLines(dataFile, data);
    }
    config = new HadoopDruidIndexerConfig(new HadoopIngestionSpec(
            new DataSchema("website", mapper.convertValue(inputRowParser, Map.class),
                    new AggregatorFactory[] { new LongSumAggregatorFactory("visited_num", "visited_num"),
                            new HyperUniquesAggregatorFactory("unique_hosts", "host") },
                    new UniformGranularitySpec(Granularity.DAY, QueryGranularity.NONE,
                            ImmutableList.of(this.interval)),
                    mapper),
            new HadoopIOConfig(ImmutableMap.copyOf(inputSpec), null, tmpDir.getCanonicalPath()),
            // Positional tuning args; nulls take the config's defaults.
            new HadoopTuningConfig(tmpDir.getCanonicalPath(), null, null, null, null, null, false, false,
                    false, false,
                    ImmutableMap.of(JobContext.NUM_REDUCES, "0"), // verifies that set num reducers is ignored
                    false, false, false, null, null, useCombiner)));
    // Shard specs must be set after construction, then the config is re-derived
    // from its own schema (mirrors the production code path).
    config.setShardSpecs(loadShardSpecs(partitionType, shardInfoForEachSegment));
    config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
}
From source file:org.apache.druid.indexer.IndexGeneratorJobTest.java
/**
 * Per-test setup: registers the shard-spec subtypes used by the fixtures, writes
 * the input data to a temp file (plain lines or a Hadoop SequenceFile depending
 * on the parameterized input format), and builds the indexer config under test.
 */
@Before
public void setUp() throws Exception {
    mapper = HadoopDruidIndexerConfig.JSON_MAPPER;
    // Shard spec implementations are resolved polymorphically by these type names.
    mapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
    mapper.registerSubtypes(new NamedType(SingleDimensionShardSpec.class, "single"));
    dataFile = temporaryFolder.newFile();
    tmpDir = temporaryFolder.newFolder();
    HashMap<String, Object> inputSpec = new HashMap<String, Object>();
    inputSpec.put("paths", dataFile.getCanonicalPath());
    inputSpec.put("type", "static");
    if (inputFormatName != null) {
        inputSpec.put("inputFormat", inputFormatName);
    }
    // SequenceFile input needs binary key/value records; everything else is plain text.
    if (SequenceFileInputFormat.class.getName().equals(inputFormatName)) {
        writeDataToLocalSequenceFile(dataFile, data);
    } else {
        FileUtils.writeLines(dataFile, data);
    }
    config = new HadoopDruidIndexerConfig(new HadoopIngestionSpec(
            new DataSchema(datasourceName, mapper.convertValue(inputRowParser, Map.class), aggs,
                    new UniformGranularitySpec(Granularities.DAY, Granularities.NONE,
                            ImmutableList.of(this.interval)),
                    null, mapper),
            new HadoopIOConfig(ImmutableMap.copyOf(inputSpec), null, tmpDir.getCanonicalPath()),
            // Positional tuning args; nulls take the config's defaults.
            new HadoopTuningConfig(tmpDir.getCanonicalPath(), null, null, null, null, maxRowsInMemory,
                    maxBytesInMemory, false, false, false, false,
                    ImmutableMap.of(JobContext.NUM_REDUCES, "0"), // verifies that set num reducers is ignored
                    false, useCombiner, null, true, null, forceExtendableShardSpecs, false, null, null,
                    null)));
    // Shard specs must be set after construction, then the config is re-derived
    // from its own schema (mirrors the production code path).
    config.setShardSpecs(loadShardSpecs(partitionType, shardInfoForEachSegment));
    config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
}
From source file:org.apache.druid.segment.loading.HdfsDataSegmentFinderTest.java
/**
 * One-time test setup: registers the "numbered" shard-spec subtype and boots an
 * in-process MiniDFSCluster rooted at a freshly reserved temporary directory.
 */
@BeforeClass
public static void setupStatic() throws IOException {
    mapper.registerSubtypes(new NamedType(NumberedShardSpec.class, "numbered"));

    // Reserve a unique path via a temp file, then remove the placeholder so the
    // mini-cluster can create a directory of the same name.
    hdfsTmpDir = File.createTempFile("hdfsDataSource", "dir");
    boolean placeholderRemoved = hdfsTmpDir.delete();
    if (!placeholderRemoved) {
        throw new IOE("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath());
    }

    conf = new Configuration(true);
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath());
    miniCluster = new MiniDFSCluster.Builder(conf).build();
    uriBase = miniCluster.getURI();
    fs = miniCluster.getFileSystem();
}
From source file:org.deeplearning4j.nn.conf.ComputationGraphConfiguration.java
/**
 * Create a computation graph configuration from JSON.
 *
 * <p>Parsing is attempted twice: once with the default mapper, and — if that
 * fails — again after programmatically registering every {@code GraphVertex}
 * subtype found on the classpath. This lets users supply custom GraphVertex
 * implementations without manually registering JSON subtypes.
 *
 * @param json the neural net configuration as JSON
 * @return {@link org.deeplearning4j.nn.conf.ComputationGraphConfiguration}
 */
public static ComputationGraphConfiguration fromJson(String json) {
    // As per MultiLayerConfiguration.fromJson()
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    try {
        return mapper.readValue(json, ComputationGraphConfiguration.class);
    } catch (IOException e) {
        // Deliberately swallowed: retry below after registering new subtypes.
    }
    // First: collect all GraphVertex subtypes the mapper already knows about.
    AnnotatedClass ac = AnnotatedClass.construct(GraphVertex.class,
            mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
            mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    Set<Class<?>> registeredSubtypes = new HashSet<>();
    for (NamedType nt : types) {
        registeredSubtypes.add(nt.getType());
    }
    // Second: discover all GraphVertex subtypes on the classpath via reflection.
    Reflections reflections = new Reflections();
    Set<Class<? extends GraphVertex>> subTypes = reflections.getSubTypesOf(GraphVertex.class);
    // Third: register every discovered subtype that is not already registered,
    // naming inner classes "Outer$Inner" to keep the JSON type name unambiguous.
    List<NamedType> toRegister = new ArrayList<>();
    for (Class<? extends GraphVertex> c : subTypes) {
        if (!registeredSubtypes.contains(c)) {
            String name;
            if (ClassUtils.isInnerClass(c)) {
                Class<?> c2 = c.getDeclaringClass();
                name = c2.getSimpleName() + "$" + c.getSimpleName();
            } else {
                name = c.getSimpleName();
            }
            toRegister.add(new NamedType(c, name));
        }
    }
    mapper = NeuralNetConfiguration.reinitMapperWithSubtypes(toRegister);
    try {
        return mapper.readValue(json, ComputationGraphConfiguration.class);
    } catch (IOException e) {
        // Second attempt failing is a genuine error — surface it.
        throw new RuntimeException(e);
    }
}