Example usage for org.apache.hadoop.conf.Configuration.readFields

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.readFields(DataInput), drawn from open-source projects.

Prototype

@Override
public void readFields(DataInput in) throws IOException
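
Configuration implements Hadoop's Writable interface, so readFields(DataInput) is the deserializing half of a symmetric pair with write(DataOutput). As a minimal round-trip sketch (not taken from the listings below; the key name is illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;

public class ConfigurationRoundTrip {
    public static void main(String[] args) throws IOException {
        Configuration original = new Configuration(false);
        original.set("example.key", "example.value");

        // serialize via the Writable write(DataOutput) counterpart
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // deserialize into a fresh instance via readFields(DataInput)
        Configuration copy = new Configuration(false);
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get("example.key")); // prints "example.value"
    }
}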


Usage

From source file: com.bah.culvert.adapter.DatabaseAdapter.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    Configuration conf = new Configuration();
    conf.readFields(in);
    setConf(conf); // store the deserialized configuration before verifying
    this.verify();
}

From source file: com.bah.culvert.adapter.TableAdapter.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    Configuration conf = new Configuration();
    conf.readFields(in);
    setConf(conf);
}

From source file: com.bah.culvert.data.index.Index.java

License: Apache License

@Override
public void readFields(DataInput arg0) throws IOException {
    Configuration conf = new Configuration();
    conf.readFields(arg0);
    setConf(conf);
}

From source file: com.bah.culvert.hive.CulvertHiveUtils.java

License: Apache License

/**
 * Get the culvert configuration. Checks to see if we should just treat the
 * properties as the configuration, or if we should load an external file.
 * Finally, returns the configuration based off of the properties.
 *
 * @param props The properties to examine and possibly turn into a
 *        configuration.
 * @return The new configuration.
 * @see CulvertHiveUtils#isCulvertConfigurationEmbedded(Properties)
 * @see #propsToConf(Properties)
 * @see #setCulvertConfiguration(Properties, Configuration)
 * @see #setCulvertConfiguration(Properties, String)
 */
public static Configuration getCulvertConfiguration(Properties props) {
    if (isCulvertConfigurationEmbedded(props)) {

        return propsToConf(props);
    } else {
        String fileName = props.getProperty(CULVERT_HIVE_EXTERNAL_CONF_FILE_CONF_KEY);
        Path fileLocation = new Path(fileName);
        FSDataInputStream input = null;
        try {
            FileSystem fs = fileLocation.getFileSystem(new Configuration());
            Configuration conf = new Configuration(false);
            input = fs.open(fileLocation);
            conf.readFields(input);
            return conf;
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (input != null)
                try {
                    input.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
        }
    }
}
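
For context, here is a hedged sketch of the write side this method expects: persisting a Configuration to the external file location so it can be read back as above. The helper name writeCulvertConfigurationToFile is hypothetical and not part of CulvertHiveUtils:

public static void writeCulvertConfigurationToFile(Configuration conf, Path fileLocation) {
    FSDataOutputStream output = null;
    try {
        FileSystem fs = fileLocation.getFileSystem(new Configuration());
        output = fs.create(fileLocation);
        // Writable counterpart of the readFields call in getCulvertConfiguration
        conf.write(output);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        if (output != null)
            try {
                output.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
    }
}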

From source file: com.bah.culvert.hive.CulvertInputSplit.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    Configuration conf = new Configuration();
    conf.readFields(in);
    String constraintClassName = in.readUTF();
    try {
        this.queryConstraint = Constraint.class.cast(Class.forName(constraintClassName).newInstance());
    } catch (InstantiationException e) {
        throw new IOException("Unable to instantiate " + constraintClassName, e);
    } catch (IllegalAccessException e) {
        throw new IOException(
                "Illegal access, make sure constructor and class are public: " + constraintClassName, e);
    } catch (ClassNotFoundException e) {
        throw new IOException("Unable to find class " + constraintClassName, e);
    }
    this.queryConstraint.readFields(in);
    int numLocations = in.readInt();
    this.locations = new ArrayList<String>(numLocations);
    for (int i = 0; i < numLocations; i++)
        locations.add(in.readUTF());
}
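
A hypothetical write(DataOutput) counterpart, mirroring the read order above (configuration, constraint class name, constraint fields, location count, locations); the actual CulvertInputSplit implementation may differ:

@Override
public void write(DataOutput out) throws IOException {
    // the read side consumes a Configuration first, so one must be written
    new Configuration().write(out);
    out.writeUTF(queryConstraint.getClass().getName());
    queryConstraint.write(out);
    out.writeInt(locations.size());
    for (String location : locations)
        out.writeUTF(location);
}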

From source file: datafu.hourglass.jobs.AbstractJob.java

License: Apache License

/**
 * Updates the Hadoop configuration using the provided properties.
 *
 * @param props the properties to apply to the configuration
 */
private void updateConfigurationFromProps(Properties props) {
    Configuration config = getConf();

    if (config == null) {
        config = new Configuration();
    }

    // to enable unit tests to inject configuration  
    if (props.containsKey("test.conf")) {
        try {
            byte[] decoded = Base64.decodeBase64(props.getProperty("test.conf"));
            ByteArrayInputStream byteInput = new ByteArrayInputStream(decoded);
            DataInputStream inputStream = new DataInputStream(byteInput);
            config.readFields(inputStream);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    } else {
        for (String key : props.stringPropertyNames()) {
            String newKey = key;
            String value = props.getProperty(key);

            if (key.toLowerCase().startsWith(HADOOP_PREFIX)) {
                newKey = key.substring(HADOOP_PREFIX.length());
                config.set(newKey, value);
            }
        }
    }

    setConf(config); // retain the configuration in case a fresh instance was created above
}
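
For reference, a hedged sketch of how a unit test might produce the Base64-encoded "test.conf" property this branch consumes, using the same org.apache.commons.codec.binary.Base64 class and an illustrative setting:

Configuration testConf = new Configuration(false);
testConf.set("mapreduce.task.timeout", "600000"); // illustrative setting

// Configuration implements Writable, so write() produces the bytes
// that config.readFields() above will consume after Base64 decoding
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
testConf.write(new DataOutputStream(bytes));

Properties props = new Properties();
props.setProperty("test.conf", Base64.encodeBase64String(bytes.toByteArray()));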

From source file: gaffer.accumulostore.operation.spark.handler.AbstractGetRDDOperationHandler.java

License: Apache License

protected Configuration getConfiguration(final GetOperation<?, ?> operation) throws OperationException {
    final Configuration conf = new Configuration();
    final String serialisedConf = operation.getOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY);
    if (serialisedConf != null) {
        try {
            final ByteArrayInputStream bais = new ByteArrayInputStream(
                    serialisedConf.getBytes(CommonConstants.UTF_8));
            conf.readFields(new DataInputStream(bais));
        } catch (final IOException e) {
            throw new OperationException("Exception decoding Configuration from options", e);
        }
    }
    return conf;
}
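
A hedged sketch of the producer side (the addOption call on the operation is assumed): the configuration is serialized to bytes and stored as a string option. Raw Writable bytes are not generally safe to round-trip through a UTF-8 String, so in practice an encoding such as Base64 would be more robust:

final ByteArrayOutputStream baos = new ByteArrayOutputStream();
conf.write(new DataOutputStream(baos));
operation.addOption(AbstractGetRDDOperationHandler.HADOOP_CONFIGURATION_KEY,
        new String(baos.toByteArray(), CommonConstants.UTF_8));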

From source file: org.apache.flink.formats.sequencefile.SerializableHadoopConfiguration.java

License: Apache License

private void readObject(ObjectInputStream in) throws IOException {
    final Configuration config = new Configuration();
    config.readFields(in);

    if (this.hadoopConfig == null) {
        this.hadoopConfig = config;
    }
}
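
The matching writeObject is the serializing half; a one-line sketch, assuming it simply delegates to Configuration.write:

private void writeObject(ObjectOutputStream out) throws IOException {
    this.hadoopConfig.write(out);
}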

From source file: org.apache.flink.hcatalog.HCatInputFormatBase.java

License: Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    this.fieldNames = new String[in.readInt()];
    for (int i = 0; i < this.fieldNames.length; i++) {
        this.fieldNames[i] = in.readUTF();
    }

    Configuration configuration = new Configuration();
    configuration.readFields(in);

    if (this.configuration == null) {
        this.configuration = configuration;
    }

    this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat();
    this.outputSchema = (HCatSchema) HCatUtil
            .deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema"));
}

From source file: org.apache.flink.orc.OrcRowInputFormat.java

License: Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    batchSize = in.readInt();
    org.apache.hadoop.conf.Configuration configuration = new org.apache.hadoop.conf.Configuration();
    configuration.readFields(in);

    if (this.conf == null) {
        this.conf = configuration;
    }
    this.schema = TypeDescription.fromString(in.readUTF());

    this.selectedFields = new int[in.readInt()];
    for (int i = 0; i < selectedFields.length; i++) {
        this.selectedFields[i] = in.readInt();
    }

    this.conjunctPredicates = new ArrayList<>();
    int numPreds = in.readInt();
    for (int i = 0; i < numPreds; i++) {
        conjunctPredicates.add((Predicate) in.readObject());
    }
}
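
A hypothetical writeObject mirroring the read order above (batch size, configuration, schema string, selected fields, predicate count, predicates); the actual implementation may differ:

private void writeObject(ObjectOutputStream out) throws IOException {
    out.writeInt(batchSize);
    this.conf.write(out);
    out.writeUTF(this.schema.toString());

    out.writeInt(this.selectedFields.length);
    for (int field : this.selectedFields) {
        out.writeInt(field);
    }

    out.writeInt(this.conjunctPredicates.size());
    for (Predicate pred : this.conjunctPredicates) {
        out.writeObject(pred);
    }
}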