Example usage for org.apache.hadoop.util ReflectionUtils setConf

List of usage examples for org.apache.hadoop.util ReflectionUtils setConf

Introduction

On this page you can find example usage for org.apache.hadoop.util ReflectionUtils setConf.

Prototype

public static void setConf(Object theObject, Configuration conf) 

Document

Check and set 'configuration' if necessary.
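
Before the examples, a minimal sketch of what the call does: setConf inspects the target object and, if it implements Configurable (or the legacy mapred JobConfigurable interface), hands it the supplied Configuration. The MyConfigurable class below is hypothetical and exists only to show the hook being invoked.

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class MyConfigurable implements Configurable {
    private Configuration conf;

    @Override
    public void setConf(Configuration conf) {
        // ReflectionUtils.setConf(...) lands here when the target is Configurable
        this.conf = conf;
    }

    @Override
    public Configuration getConf() {
        return conf;
    }

    public static void main(String[] args) {
        MyConfigurable target = new MyConfigurable();
        ReflectionUtils.setConf(target, new Configuration());
        System.out.println(target.getConf() != null); // prints: true
    }
}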

Usage

From source file:com.asakusafw.bridge.hadoop.directio.DirectFileInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    StageInfo stage = getStageInfo(conf);
    DirectFileInputInfo<?> info = extractInfo(context);
    DirectDataSourceRepository repository = getDataSourceRepository(context);
    String containerPath = repository.getContainerPath(info.basePath);
    List<DirectInputFragment> fragments = findFragments(info, repository);
    List<InputSplit> results = new ArrayList<>();
    for (DirectInputFragment fragment : fragments) {
        DirectFileInputSplit split = new DirectFileInputSplit(containerPath, info.definition, fragment,
                stage.getBatchArguments());
        ReflectionUtils.setConf(split, conf);
        results.add(split);
    }
    if (results.isEmpty()) {
        results.add(new NullInputSplit());
    }
    return results;
}
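
Note the placement of the call: each DirectFileInputSplit is constructed by user code rather than by the framework, so setConf is used to hand it the job Configuration explicitly before it is returned (assuming the split type accepts configuration via the Configurable contract).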

From source file:com.cloudera.sqoop.TestCompression.java

License:Apache License

public void runTextCompressionTest(CompressionCodec codec, int expectedNum) throws IOException {

    String[] columns = HsqldbTestServer.getFieldNames();
    String[] argv = getArgv(true, columns, codec, "--as-textfile");
    runImport(argv);

    Configuration conf = new Configuration();
    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
        conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
    }
    FileSystem fs = FileSystem.get(conf);

    if (codec == null) {
        codec = new GzipCodec();
    }
    ReflectionUtils.setConf(codec, getConf());
    Path p = new Path(getDataFilePath().toString() + codec.getDefaultExtension());
    InputStream is = codec.createInputStream(fs.open(p));
    BufferedReader r = new BufferedReader(new InputStreamReader(is));
    int numLines = 0;
    while (true) {
        String ln = r.readLine();
        if (ln == null) {
            break;
        }
        numLines++;
    }
    r.close();
    assertEquals(expectedNum, numLines);
}
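
If no codec is supplied, the test falls back to GzipCodec. The setConf call matters here because the built-in compression codecs implement Configurable and may read settings such as buffer sizes from the Configuration; a codec constructed directly via new would otherwise never see it.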

From source file:eu.stratosphere.hadoopcompatibility.HadoopInputFormatWrapper.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    hadoopInputFormatName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.hadoopInputFormat = (org.apache.hadoop.mapred.InputFormat<K, V>) Class
                .forName(this.hadoopInputFormatName).newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop input format", e);
    }
    ReflectionUtils.setConf(hadoopInputFormat, jobConf);
    converter = (HadoopTypeConverter<K, V>) in.readObject();
}
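
This readObject implementation illustrates a pattern repeated throughout these wrapper classes: deserialization reconstructs the Hadoop input format reflectively via newInstance(), which leaves it unconfigured, so setConf is called afterwards to attach the restored JobConf. For formats that implement JobConfigurable, ReflectionUtils also invokes their configure(JobConf) hook at this point.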

From source file:eu.stratosphere.hadoopcompatibility.HadoopOutputFormatWrapper.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    hadoopOutputFormatName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.hadoopOutputFormat = (org.apache.hadoop.mapred.OutputFormat<K, V>) Class
                .forName(this.hadoopOutputFormatName).newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop output format", e);
    }
    ReflectionUtils.setConf(hadoopOutputFormat, jobConf);
    converter = (StratosphereTypeConverter<K, V>) in.readObject();
    fileOutputCommitterWrapper = (FileOutputCommitterWrapper) in.readObject();
}

From source file:eu.stratosphere.hadoopcompatibility.mapred.HadoopInputFormat.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    String hadoopInputFormatClassName = in.readUTF();
    String keyClassName = in.readUTF();
    String valueClassName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.mapredInputFormat = (org.apache.hadoop.mapred.InputFormat<K, V>) Class
                .forName(hadoopInputFormatClassName, true, Thread.currentThread().getContextClassLoader())
                .newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop input format", e);
    }
    try {
        this.keyClass = (Class<K>) Class.forName(keyClassName, true,
                Thread.currentThread().getContextClassLoader());
    } catch (Exception e) {
        throw new RuntimeException("Unable to find key class.", e);
    }
    try {
        this.valueClass = (Class<V>) Class.forName(valueClassName, true,
                Thread.currentThread().getContextClassLoader());
    } catch (Exception e) {
        throw new RuntimeException("Unable to find value class.", e);
    }
    ReflectionUtils.setConf(mapredInputFormat, jobConf);
}

From source file:eu.stratosphere.hadoopcompatibility.mapred.HadoopOutputFormat.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    String hadoopOutputFormatName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.mapredOutputFormat = (org.apache.hadoop.mapred.OutputFormat<K, V>) Class
                .forName(hadoopOutputFormatName, true, Thread.currentThread().getContextClassLoader())
                .newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop output format", e);
    }
    ReflectionUtils.setConf(mapredOutputFormat, jobConf);
}

From source file:eu.stratosphere.hadoopcompatibility.mapred.record.HadoopRecordOutputFormat.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    hadoopOutputFormatName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.hadoopOutputFormat = (org.apache.hadoop.mapred.OutputFormat<K, V>) Class
                .forName(this.hadoopOutputFormatName).newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop output format", e);
    }
    ReflectionUtils.setConf(hadoopOutputFormat, jobConf);
    converter = (StratosphereTypeConverter<K, V>) in.readObject();
    fileOutputCommitterWrapper = (HadoopFileOutputCommitter) in.readObject();
}

From source file:org.apache.avro.hadoop.io.AvroSerialization.java

License:Apache License

private static GenericData newDataModelInstance(Class<? extends GenericData> modelClass, Configuration conf) {
    GenericData dataModel;
    try {
        Constructor<? extends GenericData> ctor = modelClass.getDeclaredConstructor(ClassLoader.class);
        ctor.setAccessible(true);
        dataModel = ctor.newInstance(conf.getClassLoader());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    ReflectionUtils.setConf(dataModel, conf);
    return dataModel;
}
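
Here the data model cannot go through ReflectionUtils.newInstance(...) because it needs a ClassLoader-taking constructor, so the code instantiates it manually and then calls setConf separately, preserving the chance for a Configurable data model to pick up the Configuration.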

From source file:org.apache.flink.api.java.hadoop.mapred.HadoopInputFormatBase.java

License:Apache License

public HadoopInputFormatBase(org.apache.hadoop.mapred.InputFormat<K, V> mapredInputFormat, Class<K> key,
        Class<V> value, JobConf job) {
    super(job.getCredentials());
    this.mapredInputFormat = mapredInputFormat;
    this.keyClass = key;
    this.valueClass = value;
    HadoopUtils.mergeHadoopConf(job);
    this.jobConf = job;
    ReflectionUtils.setConf(mapredInputFormat, jobConf);
}

From source file:org.apache.flink.api.java.hadoop.mapred.HadoopInputFormatBase.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    super.read(in);

    String hadoopInputFormatClassName = in.readUTF();
    String keyClassName = in.readUTF();
    String valueClassName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.mapredInputFormat = (org.apache.hadoop.mapred.InputFormat<K, V>) Class
                .forName(hadoopInputFormatClassName, true, Thread.currentThread().getContextClassLoader())
                .newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop input format", e);
    }
    try {
        this.keyClass = (Class<K>) Class.forName(keyClassName, true,
                Thread.currentThread().getContextClassLoader());
    } catch (Exception e) {
        throw new RuntimeException("Unable to find key class.", e);
    }
    try {
        this.valueClass = (Class<V>) Class.forName(valueClassName, true,
                Thread.currentThread().getContextClassLoader());
    } catch (Exception e) {
        throw new RuntimeException("Unable to find value class.", e);
    }
    ReflectionUtils.setConf(mapredInputFormat, jobConf);

    jobConf.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobConf.getCredentials().addAll(currentUserCreds);
    }
}