Example usage for org.apache.hadoop.conf Configuration getClass

List of usage examples for org.apache.hadoop.conf Configuration getClass

Introduction

On this page you can find usage examples for org.apache.hadoop.conf.Configuration.getClass, collected from real projects.

Prototype

public <U> Class<? extends U> getClass(String name, Class<? extends U> defaultValue, Class<U> xface) 

Document

Get the value of the name property as a Class implementing the interface specified by xface. If the property is not set, defaultValue is returned. If the configured class does not implement xface, a RuntimeException is thrown.
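
Before the project examples below, here is a minimal self-contained sketch of the typical pattern: resolve a class from the configuration, fall back to a default, and instantiate it reflectively. The property key my.plugin.class and the MyPlugin/DefaultPlugin types are hypothetical and used only for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class GetClassSketch {

    /** Hypothetical plugin interface, used only in this sketch. */
    public interface MyPlugin {
        void run();
    }

    /** Hypothetical default implementation. */
    public static class DefaultPlugin implements MyPlugin {
        @Override
        public void run() {
        }
    }

    public static MyPlugin loadPlugin(Configuration conf) {
        // Falls back to DefaultPlugin.class when "my.plugin.class" is unset;
        // getClass throws a RuntimeException if the configured class does not
        // implement MyPlugin.
        Class<? extends MyPlugin> clazz = conf.getClass("my.plugin.class", DefaultPlugin.class, MyPlugin.class);
        // ReflectionUtils.newInstance also calls setConf(conf) on instances
        // that implement Configurable.
        return ReflectionUtils.newInstance(clazz, conf);
    }
}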

Usage

From source file: backup.datanode.BackupFsDatasetSpiFactory.java

License: Apache License

private void setupDefaultFactory(Configuration conf) {
    if (factory == null) {
        Class<? extends Factory> defaultFactoryClass = conf.getClass(DFS_DATANODE_BACKUP_FSDATASET_FACTORY_KEY,
                FsDatasetFactory.class, Factory.class);
        factory = ReflectionUtils.newInstance(defaultFactoryClass, conf);
    }
}
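
For the lookup above to resolve anything other than the default, the submitting side must register the implementation under the same key. A one-line sketch, assuming a hypothetical MyBackupFactory class that implements Factory:

// setClass verifies that MyBackupFactory is assignable to Factory
// before storing its name under the given key.
conf.setClass(DFS_DATANODE_BACKUP_FSDATASET_FACTORY_KEY, MyBackupFactory.class, Factory.class);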

From source file: co.cask.cdap.data.stream.AbstractStreamInputFormat.java

License: Apache License

/**
 * Returns the {@link StreamEventDecoder} class as specified in the job configuration.
 *
 * @param conf The job configuration
 * @return The {@link StreamEventDecoder} class or {@code null} if it is not set.
 */
@SuppressWarnings("unchecked")
public static <K, V> Class<? extends StreamEventDecoder<K, V>> getDecoderClass(Configuration conf) {
    return (Class<? extends StreamEventDecoder<K, V>>) conf.getClass(DECODER_TYPE, null,
            StreamEventDecoder.class);
}
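
Because the default passed to getClass here is null, a caller can tell "not configured" apart from a configured decoder. A caller-side sketch (the helper below is illustrative, not part of CDAP):

static <K, V> StreamEventDecoder<K, V> newDecoder(Configuration conf) {
    Class<? extends StreamEventDecoder<K, V>> decoderClass = getDecoderClass(conf);
    if (decoderClass == null) {
        // getClass returned the null default, so the property was never set
        throw new IllegalArgumentException("No stream event decoder configured");
    }
    return ReflectionUtils.newInstance(decoderClass, conf);
}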

From source file: co.cask.cdap.internal.app.runtime.batch.dataset.partitioned.DynamicPartitionerWriterWrapper.java

License: Apache License

DynamicPartitionerWriterWrapper(TaskAttemptContext job) {
    this.job = job;
    this.outputName = DynamicPartitioningOutputFormat.getOutputName(job);

    Configuration configuration = job.getConfiguration();
    Class<? extends DynamicPartitioner> partitionerClass = configuration.getClass(
            PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);
    this.dynamicPartitioner = new InstantiatorFactory(false).get(TypeToken.of(partitionerClass)).create();

    MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
    this.taskContext = classLoader.getTaskContextProvider().get(job);

    String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
    PartitionedFileSet outputDataset = taskContext.getDataset(outputDatasetName);
    this.partitioning = outputDataset.getPartitioning();

    this.dynamicPartitioner.initialize(taskContext);
}

From source file: co.cask.cdap.internal.app.runtime.batch.dataset.partitioned.DynamicPartitioningOutputFormat.java

License: Apache License

/**
 * Create a composite record writer that can write key/value data to different output files.
 *
 * @return a composite record writer
 * @throws IOException
 */
@Override
public RecordWriter<K, V> getRecordWriter(final TaskAttemptContext job) throws IOException {
    final String outputName = FileOutputFormat.getOutputName(job);

    Configuration configuration = job.getConfiguration();
    Class<? extends DynamicPartitioner> partitionerClass = configuration.getClass(
            PartitionedFileSetArguments.DYNAMIC_PARTITIONER_CLASS_NAME, null, DynamicPartitioner.class);

    @SuppressWarnings("unchecked")
    final DynamicPartitioner<K, V> dynamicPartitioner = new InstantiatorFactory(false)
            .get(TypeToken.of(partitionerClass)).create();

    MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(configuration);
    final BasicMapReduceTaskContext<K, V> taskContext = classLoader.getTaskContextProvider().get(job);

    String outputDatasetName = configuration.get(Constants.Dataset.Partitioned.HCONF_ATTR_OUTPUT_DATASET);
    PartitionedFileSet outputDataset = taskContext.getDataset(outputDatasetName);
    final Partitioning partitioning = outputDataset.getPartitioning();

    dynamicPartitioner.initialize(taskContext);

    return new RecordWriter<K, V>() {

        // a cache storing the record writers for different output files.
        Map<PartitionKey, RecordWriter<K, V>> recordWriters = new HashMap<>();

        @Override
        public void write(K key, V value) throws IOException, InterruptedException {
            PartitionKey partitionKey = dynamicPartitioner.getPartitionKey(key, value);
            RecordWriter<K, V> rw = this.recordWriters.get(partitionKey);
            if (rw == null) {
                String relativePath = PartitionedFileSetDataset.getOutputPath(partitionKey, partitioning);
                String finalPath = relativePath + "/" + outputName;

                // if we don't have the record writer yet for the final path, create one and add it to the cache
                rw = getBaseRecordWriter(getTaskAttemptContext(job, finalPath));
                this.recordWriters.put(partitionKey, rw);
            }
            rw.write(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException, InterruptedException {
            try {
                List<RecordWriter<?, ?>> recordWriters = new ArrayList<>();
                recordWriters.addAll(this.recordWriters.values());
                MultipleOutputs.closeRecordWriters(recordWriters, context);

                taskContext.flushOperations();
            } catch (Exception e) {
                throw new IOException(e);
            } finally {
                dynamicPartitioner.destroy();
            }
        }
    };
}

From source file: co.cask.cdap.internal.app.runtime.batch.MapReduceRuntimeService.java

License: Apache License

/**
 * Returns a resolved {@link TypeToken} of the given super type by reading a class from the job configuration that
 * extends the super type.
 *
 * @param conf the job configuration
 * @param typeAttr The job configuration attribute for getting the user class
 * @param superType Super type of the class to get from the configuration
 * @param <V> Type of the super type
 * @return A resolved {@link TypeToken}, or {@code null} if no such class is set in the job configuration
 */
@SuppressWarnings("unchecked")
@VisibleForTesting
@Nullable
static <V> TypeToken<V> resolveClass(Configuration conf, String typeAttr, Class<V> superType) {
    Class<? extends V> userClass = conf.getClass(typeAttr, null, superType);
    if (userClass == null) {
        return null;
    }
    return resolveClass(userClass, superType);
}

From source file: com.ailk.oci.ocnosql.tools.load.csvbulkload.PhoenixCsvToKeyValueMapper.java

License: Apache License

/**
 * Load the configured ImportPreUpsertKeyValueProcessor, or supply a dummy
 * processor.
 */
@VisibleForTesting
static ImportPreUpsertKeyValueProcessor loadPreUpsertProcessor(Configuration conf) {
    Class<? extends ImportPreUpsertKeyValueProcessor> processorClass = null;
    try {
        processorClass = conf.getClass(UPSERT_HOOK_CLASS_CONFKEY, DefaultImportPreUpsertKeyValueProcessor.class,
                ImportPreUpsertKeyValueProcessor.class);
    } catch (Exception e) {
        throw new IllegalStateException("Couldn't load upsert hook class", e);
    }

    return ReflectionUtils.newInstance(processorClass, conf);
}

From source file: com.alexholmes.hadooputils.combine.common.mapred.SplitMetricsCombineInputFormat.java

License: Apache License

protected void writeSplitsToSink(Configuration conf, Map<String, List<CombineFileSplitAdapter>> splits) {

    Class<? extends MetricsSink> theClass = conf.getClass("hadooputils.combine.sink.class", LoggerSink.class,
            MetricsSink.class);

    MetricsSink sink = ReflectionUtils.newInstance(theClass, conf);

    for (Map.Entry<String, List<CombineFileSplitAdapter>> entry : splits.entrySet()) {
        sink.pushLocation(entry.getKey(), entry.getValue());
    }
}

From source file: com.alibaba.wasp.master.balancer.LoadBalancerFactory.java

License: Apache License

/**
 * Create a load balancer from the given conf.
 * @param conf the configuration to read the balancer class from
 * @return A {@link com.alibaba.wasp.master.LoadBalancer}
 */
public static LoadBalancer getLoadBalancer(Configuration conf) {
    // Create the balancer
    Class<? extends LoadBalancer> balancerClass = conf.getClass(FConstants.WASP_MASTER_LOADBALANCER_CLASS,
            DefaultLoadBalancer.class, LoadBalancer.class);
    return ReflectionUtils.newInstance(balancerClass, conf);
}

From source file: com.asakusafw.runtime.stage.output.StageOutputDriver.java

License: Apache License

private ResultOutput<?> buildSink(String name) throws IOException, InterruptedException {
    assert name != null;
    Configuration conf = context.getConfiguration();
    @SuppressWarnings("rawtypes")
    Class<? extends OutputFormat> formatClass = conf.getClass(getPropertyName(K_FORMAT_PREFIX, name), null,
            OutputFormat.class);
    Class<?> keyClass = conf.getClass(getPropertyName(K_KEY_PREFIX, name), null);
    Class<?> valueClass = conf.getClass(getPropertyName(K_VALUE_PREFIX, name), null);

    if (formatClass == null) {
        throw new IllegalStateException(
                MessageFormat.format("OutputFormat is not declared for output \"{0}\"", name));
    }
    if (keyClass == null) {
        throw new IllegalStateException(
                MessageFormat.format("Output key type is not declared for output \"{0}\"", name));
    }
    if (valueClass == null) {
        throw new IllegalStateException(
                MessageFormat.format("Output value type is not declared for output \"{0}\"", name));
    }

    List<Counter> counters = getCounters(name);
    if (TemporaryOutputFormat.class.isAssignableFrom(formatClass)) {
        return buildTemporarySink(name, valueClass, counters);
    } else {
        return buildNormalSink(name, formatClass, keyClass, valueClass, counters);
    }
}
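
Note that buildSink mixes two overloads: the three-argument getClass enforces that the configured class is assignable to OutputFormat, while the two-argument getClass(String, Class<?>) used for the key and value types performs no such check and simply returns Class<?>. A short sketch of the difference, with hypothetical property names:

// Checked: throws a RuntimeException if the class does not extend OutputFormat.
Class<? extends OutputFormat> format = conf.getClass("example.format", null, OutputFormat.class);
// Unchecked: any loadable class is returned as Class<?>.
Class<?> keyType = conf.getClass("example.key.type", null);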

From source file: com.basho.riak.hadoop.config.RiakConfig.java

License: Apache License

/**
 * @param conf
 *            the {@link Configuration} to query
 * @return the {@link KeyLister} the job was configured with
 * @throws RuntimeException
 *             if an {@link IllegalAccessException} or
 *             {@link InstantiationException} is thrown while creating a
 *             {@link KeyLister}
 */
public static KeyLister getKeyLister(Configuration conf) throws IOException {
    Class<? extends KeyLister> clazz = conf.getClass(KEY_LISTER_CLASS_PROPERTY, BucketKeyLister.class,
            KeyLister.class);
    try {
        KeyLister lister = clazz.newInstance();
        lister.init(conf.get(KEY_LISTER_INIT_STRING_PROPERTY));
        return lister;
    } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
    } catch (InstantiationException e) {
        throw new RuntimeException(e);
    }
}
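
Unlike the other examples on this page, this one instantiates the class with Class.newInstance(), which requires an accessible no-argument constructor and never sees the Configuration. Where the listed class might implement Configurable, ReflectionUtils is the more common companion to getClass; a sketch of an alternative try-block body, under that assumption:

// ReflectionUtils.newInstance makes the constructor accessible and calls
// setConf(conf) if the new instance implements Configurable.
KeyLister lister = ReflectionUtils.newInstance(clazz, conf);
lister.init(conf.get(KEY_LISTER_INIT_STRING_PROPERTY));
return lister;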