List of usage examples for java.lang.Class#asSubclass
@SuppressWarnings("unchecked") public <U> Class<? extends U> asSubclass(Class<U> clazz)
From source file:org.sejda.core.context.XmlConfigurationStrategy.java
/** * Retrieves the value of the input xpath in the given node, creates a Class object and performs a check to ensure that the input assignableInterface is assignable by the * created Class object./*from w w w . ja v a 2s . c o m*/ * * @param <T> * * @param node * @param attributeName * @param assignableInterface * @return the retrieved class. * @throws ConfigurationException */ private <T> Class<? extends T> getClassFromNode(Node node, String attributeName, Class<T> assignableInterface) throws ConfigurationException { String attributeValue = nullSafeGetStringAttribute(node, attributeName); if (isNotBlank(attributeValue)) { Class<?> clazz; try { clazz = Class.forName(attributeValue.trim()); } catch (ClassNotFoundException e) { throw new ConfigurationException(String.format("Unable to find the configured %s", attributeValue), e); } if (assignableInterface.isAssignableFrom(clazz)) { return clazz.asSubclass(assignableInterface); } throw new ConfigurationException( String.format("The configured %s is not a subtype of %s", clazz, assignableInterface)); } throw new ConfigurationException(String.format("Missing %s configuration parameter.", attributeName)); }
From source file:com.chiorichan.plugin.loader.JavaPluginLoader.java
void setClass(final String name, final Class<?> clazz) { if (!classes.containsKey(name)) { classes.put(name, clazz);// ww w . j av a 2 s . co m if (ConfigurationSerializable.class.isAssignableFrom(clazz)) { Class<? extends ConfigurationSerializable> serializable = clazz .asSubclass(ConfigurationSerializable.class); ConfigurationSerialization.registerClass(serializable); } } }
From source file:org.gradle.api.internal.AbstractClassGenerator.java
/**
 * Generates (or returns a cached) proxy subclass of {@code type}, mixing in dynamic-object,
 * Groovy-object and convention-mapping behaviour as needed. Must be called under the
 * generator lock (per the method name); the cache itself is per concrete generator class.
 *
 * @param type the class to generate a proxy subclass for
 * @return the generated subclass of {@code type}
 * @throws GradleException if {@code type} is private or abstract, or generation fails
 */
private <T> Class<? extends T> generateUnderLock(Class<T> type) {
    Map<Class<?>, Class<?>> cache = GENERATED_CLASSES.get(getClass());
    if (cache == null) {
        // WeakHashMap won't work here. It keeps a strong reference to the mapping value, which is the generated class in this case
        // However, the generated class has a strong reference to the source class (by extending it), so the keys will always be
        // strongly reachable while this Class is strongly reachable. Use weak references for both key and value of the mapping instead.
        cache = new ReferenceMap(AbstractReferenceMap.WEAK, AbstractReferenceMap.WEAK);
        GENERATED_CLASSES.put(getClass(), cache);
    }
    Class<?> generatedClass = cache.get(type);
    if (generatedClass != null) {
        // cache hit: the stored class was generated from 'type', so the narrowing cast is safe
        return generatedClass.asSubclass(type);
    }
    if (Modifier.isPrivate(type.getModifiers())) {
        throw new GradleException(
                String.format("Cannot create a proxy class for private class '%s'.", type.getSimpleName()));
    }
    if (Modifier.isAbstract(type.getModifiers())) {
        throw new GradleException(
                String.format("Cannot create a proxy class for abstract class '%s'.", type.getSimpleName()));
    }
    Class<? extends T> subclass;
    try {
        ClassMetaData classMetaData = inspectType(type);
        ClassBuilder<T> builder = start(type, classMetaData);
        builder.startClass();
        if (!DynamicObjectAware.class.isAssignableFrom(type)) {
            if (ExtensionAware.class.isAssignableFrom(type)) {
                throw new UnsupportedOperationException(
                        "A type that implements ExtensionAware must currently also implement DynamicObjectAware.");
            }
            builder.mixInDynamicAware();
        }
        if (!GroovyObject.class.isAssignableFrom(type)) {
            builder.mixInGroovyObject();
        }
        builder.addDynamicMethods();
        if (classMetaData.conventionAware && !IConventionAware.class.isAssignableFrom(type)) {
            builder.mixInConventionAware();
        }
        // Walk up the hierarchy to find the nearest ancestor annotated @NoConventionMapping
        // (Object.class acts as the "not found" sentinel and also terminates the loop).
        Class noMappingClass = Object.class;
        for (Class<?> c = type; c != null && noMappingClass == Object.class; c = c.getSuperclass()) {
            if (c.getAnnotation(NoConventionMapping.class) != null) {
                noMappingClass = c;
            }
        }
        Set<PropertyMetaData> conventionProperties = new HashSet<PropertyMetaData>();
        for (PropertyMetaData property : classMetaData.properties.values()) {
            if (SKIP_PROPERTIES.contains(property.name)) {
                continue;
            }
            if (property.injector) {
                // service-injected properties get injection wiring instead of convention mapping
                builder.addInjectorProperty(property);
                for (Method getter : property.getters) {
                    builder.applyServiceInjectionToGetter(property, getter);
                }
                for (Method setter : property.setters) {
                    builder.applyServiceInjectionToSetter(property, setter);
                }
                continue;
            }
            // A property needs convention mapping when any of its getters is overridable
            // and declared below the @NoConventionMapping boundary.
            boolean needsConventionMapping = false;
            if (classMetaData.isExtensible()) {
                for (Method getter : property.getters) {
                    if (!Modifier.isFinal(getter.getModifiers())
                            && !getter.getDeclaringClass().isAssignableFrom(noMappingClass)) {
                        needsConventionMapping = true;
                        break;
                    }
                }
            }
            if (needsConventionMapping) {
                conventionProperties.add(property);
                builder.addConventionProperty(property);
                for (Method getter : property.getters) {
                    builder.applyConventionMappingToGetter(property, getter);
                }
            }
            if (needsConventionMapping) {
                for (Method setter : property.setters) {
                    if (!Modifier.isFinal(setter.getModifiers())) {
                        builder.applyConventionMappingToSetter(property, setter);
                    }
                }
            }
        }
        Set<Method> actionMethods = classMetaData.missingOverloads;
        for (Method method : actionMethods) {
            builder.addActionMethod(method);
        }
        // Adds a set method for each mutable property
        for (PropertyMetaData property : classMetaData.properties.values()) {
            if (property.setters.isEmpty()) {
                continue;
            }
            if (Iterable.class.isAssignableFrom(property.getType())) {
                // Currently not supported
                continue;
            }
            if (property.setMethods.isEmpty()) {
                for (Method setter : property.setters) {
                    builder.addSetMethod(property, setter);
                }
            } else if (conventionProperties.contains(property)) {
                for (Method setMethod : property.setMethods) {
                    builder.applyConventionMappingToSetMethod(property, setMethod);
                }
            }
        }
        for (Constructor<?> constructor : type.getConstructors()) {
            if (Modifier.isPublic(constructor.getModifiers())) {
                builder.addConstructor(constructor);
            }
        }
        subclass = builder.generate();
    } catch (Throwable e) {
        throw new GradleException(
                String.format("Could not generate a proxy class for class %s.", type.getName()), e);
    }
    // cache under both the source type and the generated type, so asking to generate a proxy
    // for an already-generated class returns it unchanged
    cache.put(type, subclass);
    cache.put(subclass, subclass);
    return subclass;
}
From source file:org.apache.nifi.authorization.AuthorizerFactoryBean.java
private Authorizer createAuthorizer(final String identifier, final String authorizerClassName) throws Exception { // get the classloader for the specified authorizer final ClassLoader authorizerClassLoader = ExtensionManager.getClassLoader(authorizerClassName); if (authorizerClassLoader == null) { throw new Exception(String.format("The specified authorizer class '%s' is not known to this nifi.", authorizerClassName));//w w w . jav a2s . c o m } // get the current context classloader final ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); final Authorizer instance; try { // set the appropriate class loader Thread.currentThread().setContextClassLoader(authorizerClassLoader); // attempt to load the class Class<?> rawAuthorizerClass = Class.forName(authorizerClassName, true, authorizerClassLoader); Class<? extends Authorizer> authorizerClass = rawAuthorizerClass.asSubclass(Authorizer.class); // otherwise create a new instance Constructor constructor = authorizerClass.getConstructor(); instance = (Authorizer) constructor.newInstance(); // method injection performMethodInjection(instance, authorizerClass); // field injection performFieldInjection(instance, authorizerClass); // call post construction lifecycle event instance.initialize(new StandardAuthorizerInitializationContext(identifier, this)); } finally { if (currentClassLoader != null) { Thread.currentThread().setContextClassLoader(currentClassLoader); } } return withNarLoader(instance); }
From source file:org.apache.giraph.hive.HiveGiraphRunner.java
/**
 * Loads a class by name and narrows it to the given base type.
 *
 * @param className fully qualified name of the class to find
 * @param base base class or interface the loaded class must be assignable to
 * @param <T> type of the base class
 * @return the loaded class as a subclass of {@code base}, or {@code null} if it is not assignable
 * @throws IllegalArgumentException if no class with the given name can be loaded
 */
private <T> Class<? extends T> findClass(String className, Class<T> base) {
    try {
        Class<?> cls = Class.forName(className);
        if (base.isAssignableFrom(cls)) {
            return cls.asSubclass(base);
        }
        return null;
    } catch (ClassNotFoundException e) {
        // preserve the original exception as the cause instead of discarding it
        throw new IllegalArgumentException(className + ": Invalid class name", e);
    }
}
From source file:org.pentaho.pac.server.PacServiceImpl.java
/** * NOTE: caller is responsible for closing connection * /* w w w .j ava 2 s. co m*/ * @param ds * @return * @throws DataSourceManagementException */ private static Connection getDataSourceConnection(PentahoDataSource ds) throws DataSourceManagementException { Connection conn = null; String driverClass = ds.getDriverClass(); if (StringUtils.isEmpty(driverClass)) { throw new DataSourceManagementException( Messages.getErrorString("PacService.ERROR_0024_CONNECTION_ATTEMPT_FAILED", driverClass)); //$NON-NLS-1$ } Class<?> driverC = null; try { driverC = Class.forName(driverClass); } catch (ClassNotFoundException e) { throw new DataSourceManagementException( Messages.getErrorString("PacService.ERROR_0026_DRIVER_NOT_FOUND_IN_CLASSPATH", driverClass), e); //$NON-NLS-1$ } if (!Driver.class.isAssignableFrom(driverC)) { throw new DataSourceManagementException( Messages.getErrorString("PacService.ERROR_0026_DRIVER_NOT_FOUND_IN_CLASSPATH", driverClass)); //$NON-NLS-1$ } } Driver driver = null; try { driver = driverC.asSubclass(Driver.class).newInstance(); } catch (InstantiationException e) { throw new DataSourceManagementException( Messages.getErrorString("PacService.ERROR_0027_UNABLE_TO_INSTANCE_DRIVER", driverClass), e); //$NON-NLS-1$ } catch (IllegalAccessException e) { throw new DataSourceManagementException( Messages.getErrorString("PacService.ERROR_0027_UNABLE_TO_INSTANCE_DRIVER", driverClass), e); //$NON-NLS-1$ } } try { DriverManager.registerDriver(driver); conn = DriverManager.getConnection(ds.getUrl(), ds.getUserName(), ds.getPassword()); return conn; } catch (SQLException e) { throw new DataSourceManagementException( Messages.getErrorString("PacService.ERROR_0025_UNABLE_TO_CONNECT", e.getMessage()), e); //$NON-NLS-1$ } }
From source file:eu.stratosphere.runtime.fs.hdfs.DistributedFileSystem.java
/** * Creates a new DistributedFileSystem object to access HDFS * /*from w w w .j av a 2s . c om*/ * @throws IOException * throw if the required HDFS classes cannot be instantiated */ public DistributedFileSystem() throws IOException { // Create new Hadoop configuration object this.conf = getHadoopConfiguration(); Class<? extends org.apache.hadoop.fs.FileSystem> fsClass = null; // try to get the FileSystem implementation class Hadoop 2.0.0 style { LOG.debug("Trying to load HDFS class Hadoop 2.x style."); Object fsHandle = null; try { Method newApi = org.apache.hadoop.fs.FileSystem.class.getMethod("getFileSystemClass", String.class, org.apache.hadoop.conf.Configuration.class); fsHandle = newApi.invoke(null, "hdfs", conf); } catch (Exception e) { // if we can't find the FileSystem class using the new API, // clazz will still be null, we assume we're running on an older Hadoop version } if (fsHandle != null) { if (fsHandle instanceof Class && org.apache.hadoop.fs.FileSystem.class.isAssignableFrom((Class<?>) fsHandle)) { fsClass = ((Class<?>) fsHandle).asSubclass(org.apache.hadoop.fs.FileSystem.class); if (LOG.isDebugEnabled()) { LOG.debug("Loaded '" + fsClass.getName() + "' as HDFS class."); } } else { LOG.debug( "Unexpected return type from 'org.apache.hadoop.fs.FileSystem.getFileSystemClass(String, Configuration)'."); throw new RuntimeException( "The value returned from org.apache.hadoop.fs.FileSystem.getFileSystemClass(String, Configuration) is not a valid subclass of org.apache.hadoop.fs.FileSystem."); } } } // fall back to an older Hadoop version if (fsClass == null) { // first of all, check for a user-defined hdfs class if (LOG.isDebugEnabled()) { LOG.debug( "Falling back to loading HDFS class old Hadoop style. 
Looking for HDFS class configuration entry '" + HDFS_IMPLEMENTATION_KEY + "'."); } Class<?> classFromConfig = conf.getClass(HDFS_IMPLEMENTATION_KEY, null); if (classFromConfig != null) { if (org.apache.hadoop.fs.FileSystem.class.isAssignableFrom(classFromConfig)) { fsClass = classFromConfig.asSubclass(org.apache.hadoop.fs.FileSystem.class); if (LOG.isDebugEnabled()) { LOG.debug("Loaded HDFS class '" + fsClass.getName() + "' as specified in configuration."); } } else { if (LOG.isDebugEnabled()) { LOG.debug("HDFS class specified by " + HDFS_IMPLEMENTATION_KEY + " is of wrong type."); } throw new IOException("HDFS class specified by " + HDFS_IMPLEMENTATION_KEY + " cannot be cast to a FileSystem type."); } } else { // load the default HDFS class if (LOG.isDebugEnabled()) { LOG.debug("Trying to load default HDFS implementation " + DEFAULT_HDFS_CLASS); } try { Class<?> reflectedClass = Class.forName(DEFAULT_HDFS_CLASS); if (org.apache.hadoop.fs.FileSystem.class.isAssignableFrom(reflectedClass)) { fsClass = reflectedClass.asSubclass(org.apache.hadoop.fs.FileSystem.class); } else { if (LOG.isDebugEnabled()) { LOG.debug("Default HDFS class is of wrong type."); } throw new IOException("The default HDFS class '" + DEFAULT_HDFS_CLASS + "' cannot be cast to a FileSystem type."); } } catch (ClassNotFoundException e) { if (LOG.isDebugEnabled()) { LOG.debug("Default HDFS class cannot be loaded."); } throw new IOException("No HDFS class has been configured and the default class '" + DEFAULT_HDFS_CLASS + "' cannot be loaded."); } } } this.fs = instantiateFileSystem(fsClass); }
From source file:ome.services.graphs.GraphPathBean.java
/**
 * If the given property of the given class is actually declared by an interface that it implements,
 * find the name of the interface declaring the property (breadth-first over the interface graph;
 * later, deeper matches overwrite earlier ones, preserving the original search behavior).
 * As a side effect, every {@code IObject} sub-interface encountered is recorded in
 * {@code classesBySimpleName}.
 *
 * @param className the name of an {@link IObject} class
 * @param propertyName the name of a property of the class
 * @return the interface declaring the property, or {@code null} if none
 */
private Class<? extends IObject> getInterfaceForProperty(String className, String propertyName) {
    Class<? extends IObject> interfaceForProperty = null;
    Set<Class<? extends IObject>> interfacesFrom, interfacesTo;
    try {
        interfacesFrom = ImmutableSet
                .<Class<? extends IObject>>of(Class.forName(className).asSubclass(IObject.class));
    } catch (ClassNotFoundException e) {
        log.error("could not load " + IObject.class.getName() + " subclass " + className);
        return null;
    }
    while (!interfacesFrom.isEmpty()) {
        interfacesTo = new HashSet<Class<? extends IObject>>();
        for (final Class<? extends IObject> interfaceFrom : interfacesFrom) {
            if (interfaceFrom.isInterface()
                    && BeanUtils.getPropertyDescriptor(interfaceFrom, propertyName) != null) {
                interfaceForProperty = interfaceFrom;
            }
            for (final Class<?> newInterface : interfaceFrom.getInterfaces()) {
                if (newInterface != IObject.class && IObject.class.isAssignableFrom(newInterface)) {
                    // narrow once and reuse (the original called asSubclass twice)
                    final Class<? extends IObject> narrowed = newInterface.asSubclass(IObject.class);
                    interfacesTo.add(narrowed);
                    classesBySimpleName.put(newInterface.getSimpleName(), narrowed);
                }
            }
        }
        interfacesFrom = interfacesTo;
    }
    // the original ternary "interfaceForProperty == null ? null : interfaceForProperty" was a no-op
    return interfaceForProperty;
}
From source file:net.nicholaswilliams.java.licensing.licensor.interfaces.cli.ConsoleLicenseGenerator.java
private <T> T getObjectAsClass(String className, Class<T> castClass) { try {// w w w. j a v a 2 s .c o m Class<?> objectClass = Class.forName(className); return objectClass.asSubclass(castClass).newInstance(); } catch (ClassNotFoundException e) { throw new RuntimeException("The class [" + className + "] could not be located."); } catch (ClassCastException e) { throw new RuntimeException("The class [" + className + "] does not implement interface [" + castClass.getCanonicalName() + "]."); } catch (Exception e) { throw new RuntimeException("Unable to instantiate class [" + className + "].", e); } }
From source file:org.apache.nifi.authorization.AuthorityProviderFactoryBean.java
private AuthorityProvider createAuthorityProvider(final String identifier, final String authorityProviderClassName) throws Exception { // get the classloader for the specified authority provider final ClassLoader authorityProviderClassLoader = ExtensionManager .getClassLoader(authorityProviderClassName); if (authorityProviderClassLoader == null) { throw new Exception( String.format("The specified authority provider class '%s' is not known to this nifi.", authorityProviderClassName)); }/*from w w w .j a va2 s . c om*/ // get the current context classloader final ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); final AuthorityProvider instance; try { // set the appropriate class loader Thread.currentThread().setContextClassLoader(authorityProviderClassLoader); // attempt to load the class Class<?> rawAuthorityProviderClass = Class.forName(authorityProviderClassName, true, authorityProviderClassLoader); Class<? extends AuthorityProvider> authorityProviderClass = rawAuthorityProviderClass .asSubclass(AuthorityProvider.class); // otherwise create a new instance Constructor constructor = authorityProviderClass.getConstructor(); instance = (AuthorityProvider) constructor.newInstance(); // method injection performMethodInjection(instance, authorityProviderClass); // field injection performFieldInjection(instance, authorityProviderClass); // call post construction lifecycle event instance.initialize(new StandardAuthorityProviderInitializationContext(identifier, this)); } finally { if (currentClassLoader != null) { Thread.currentThread().setContextClassLoader(currentClassLoader); } } return withNarLoader(instance); }