Example usage for org.apache.hadoop.io DefaultStringifier load

Introduction

On this page you can find example usages of org.apache.hadoop.io.DefaultStringifier#load.

Prototype

public static <K> K load(Configuration conf, String keyName, Class<K> itemClass) throws IOException 

Document

Restores the object from the configuration.
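For orientation, here is a minimal round-trip sketch. It pairs load with its counterpart DefaultStringifier.store; the key name "demo.key" and the Text payload are arbitrary choices for illustration, not part of any real job.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.Text;

public class DefaultStringifierRoundTrip {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // store() serializes the Writable and saves it in the
        // configuration under the given key as an encoded string.
        DefaultStringifier.store(conf, new Text("hello"), "demo.key");
        // load() reads that string back and deserializes it into a
        // fresh instance of the requested class.
        Text restored = DefaultStringifier.load(conf, "demo.key", Text.class);
        System.out.println(restored); // prints "hello"
    }
}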

Usage

From source file: com.marklogic.contentpump.RDFInputFormat.java

License: Apache License

protected LinkedMapWritable getRoleMap(TaskAttemptContext context) throws IOException {
    //Restores the object from the configuration.
    Configuration conf = context.getConfiguration();
    LinkedMapWritable fhmap = null;
    if (conf.get(ConfigConstants.CONF_ROLE_MAP) != null) {
        fhmap = DefaultStringifier.load(conf, ConfigConstants.CONF_ROLE_MAP, LinkedMapWritable.class);
    }
    return fhmap;
}
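Note that getRoleMap() returns null unless the submitting side has already placed the map in the configuration. A hypothetical sketch of that driver-side store call, assuming mlcp populates the key symmetrically (the map entry shown is made up):

// Hypothetical driver-side counterpart, assuming the role map is
// stored with the symmetric store() call; the entry is illustrative.
LinkedMapWritable roleMap = new LinkedMapWritable();
roleMap.put(new Text("app-user"), new Text("12345"));
DefaultStringifier.store(conf, roleMap, ConfigConstants.CONF_ROLE_MAP);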

From source file: com.marklogic.contentpump.RDFInputFormat.java

License: Apache License

protected String getServerVersion(TaskAttemptContext context) throws IOException {
    //Restores the object from the configuration.
    Configuration conf = context.getConfiguration();
    Text version = DefaultStringifier.load(conf, ConfigConstants.CONF_ML_VERSION, Text.class);
    return version.toString();
}

From source file: com.marklogic.contentpump.TransformOutputFormat.java

License: Apache License

/**
 * Initialize the mimetype map if it has not been initialized yet, and
 * return it.
 *
 * @return the mimetype map
 * @throws IOException if the map cannot be loaded or queried
 */
private LinkedMapWritable getMimetypesMap() throws IOException {
    if (mimetypeMap != null)
        return mimetypeMap;
    String mtmap = conf.get(ConfigConstants.CONF_MIMETYPES);
    if (mtmap != null) {
        mimetypeMap = DefaultStringifier.load(conf, ConfigConstants.CONF_MIMETYPES, LinkedMapWritable.class);
        return mimetypeMap;
    }
    String host = conf.get(OUTPUT_HOST);
    Session session = null;
    ResultSequence result = null;
    try {
        ContentSource cs = InternalUtilities.getOutputContentSource(conf, host);
        session = cs.newSession();
        AdhocQuery query = session.newAdhocQuery(MIMETYPES_QUERY);
        RequestOptions options = new RequestOptions();
        options.setDefaultXQueryVersion("1.0-ml");
        query.setOptions(options);
        result = session.submitRequest(query);
        if (!result.hasNext())
            throw new IOException("Server-side transform requires MarkLogic 7 or later");
        mimetypeMap = new LinkedMapWritable();
        while (result.hasNext()) {
            String suffs = result.next().asString();
            Text format = new Text(result.next().asString());
            // some extensions are in a space separated string
            for (String s : suffs.split(" ")) {
                Text suff = new Text(s);
                mimetypeMap.put(suff, format);
            }
        }
        return mimetypeMap;
    } catch (Exception e) {
        LOG.error(e.getMessage(), e);
        throw new IOException(e);
    } finally {
        if (result != null) {
            result.close();
        }
        if (session != null) {
            session.close();
        }
    }
}

From source file: com.marklogic.mapreduce.ContentOutputFormat.java

License: Apache License

protected LinkedMapWritable getForestStatusMap(Configuration conf) throws IOException {
    String forestHost = conf.get(OUTPUT_FOREST_HOST);
    if (forestHost != null) {
        //Restores the object from the configuration.
        LinkedMapWritable fhmap = DefaultStringifier.load(conf, OUTPUT_FOREST_HOST, LinkedMapWritable.class);
        // must be in fast load mode, otherwise won't reach here
        String s = conf.get(ASSIGNMENT_POLICY);
        //EXECUTION_MODE must have a value in mlcp;
        //default is "distributed" in hadoop connector
        String mode = conf.get(EXECUTION_MODE, MODE_DISTRIBUTED);
        if (MODE_DISTRIBUTED.equals(mode)) {
            AssignmentPolicy.Kind policy = AssignmentPolicy.Kind.forName(s);
            am.initialize(policy, fhmap, conf.getInt(BATCH_SIZE, 10));
        }
        return fhmap;
    } else {
        try {
            // try getting a connection
            ContentSource cs = InternalUtilities.getOutputContentSource(conf, conf.get(OUTPUT_HOST));
            //get policy
            initialize(cs.newSession());
            // query forest status mapping
            return queryForestInfo(cs);
        } catch (Exception ex) {
            throw new IOException(ex);
        }
    }
}

From source file: com.marklogic.mapreduce.MarkLogicOutputFormat.java

License: Apache License

protected TextArrayWritable getHosts(Configuration conf) throws IOException {
    String forestHost = conf.get(OUTPUT_FOREST_HOST);
    if (forestHost != null) {
        // Restores the object from the configuration.
        TextArrayWritable hosts = DefaultStringifier.load(conf, OUTPUT_FOREST_HOST, TextArrayWritable.class);
        return hosts;
    } else {
        try {
            // try getting a connection
            ContentSource cs = InternalUtilities.getOutputContentSource(conf, conf.get(OUTPUT_HOST));
            // query hosts
            return queryHosts(cs);
        } catch (Exception ex) {
            throw new IOException(ex);
        }
    }
}

From source file: org.apache.gora.util.IOUtils.java

License: Apache License

/**
 * Loads the object stored by {@link #storeToConf(Object, Configuration, String)}
 * method from the configuration under the given dataKey.
 * @param conf the configuration to read from
 * @param dataKey the key to get the data from
 * @return the stored object
 */
@SuppressWarnings("unchecked")
public static <T> T loadFromConf(Configuration conf, String dataKey) throws IOException {
    String classKey = dataKey + "._class";
    String className = conf.get(classKey);
    try {
        T obj = (T) DefaultStringifier.load(conf, dataKey, ClassLoadingUtils.loadClass(className));
        return obj;
    } catch (Exception ex) {
        throw new IOException(ex);
    }
}
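The load path above reads the class name from dataKey + "._class", which implies the companion storeToConf writes both entries. A hedged sketch of that counterpart, inferred from the load code rather than copied from Gora:

// Sketch of the storeToConf counterpart implied by loadFromConf:
// record the concrete class under "<dataKey>._class", then
// serialize the object itself under dataKey.
public static <T> void storeToConf(T obj, Configuration conf, String dataKey) throws IOException {
    conf.set(dataKey + "._class", obj.getClass().getName());
    DefaultStringifier.store(conf, obj, dataKey);
}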

From source file: org.apache.mahout.text.LuceneStorageConfiguration.java

License: Apache License

/**
 * Deserializes a {@link LuceneStorageConfiguration} from a {@link Configuration}.
 *
 * @param conf the {@link Configuration} object with a serialized {@link LuceneStorageConfiguration}
 * @throws IOException if deserialization fails
 */
public LuceneStorageConfiguration(Configuration conf) throws IOException {
    Preconditions.checkNotNull(conf, "Parameter 'configuration' cannot be null");

    String serializedConfigString = conf.get(KEY);

    if (serializedConfigString == null) {
        throw new IllegalArgumentException(
                "Parameter 'configuration' does not contain a serialized " + this.getClass());
    }

    LuceneStorageConfiguration luceneStorageConf = DefaultStringifier.load(conf, KEY,
            LuceneStorageConfiguration.class);

    this.configuration = conf;
    this.indexPaths = luceneStorageConf.getIndexPaths();
    this.sequenceFilesOutputPath = luceneStorageConf.getSequenceFilesOutputPath();
    this.idField = luceneStorageConf.getIdField();
    this.fields = luceneStorageConf.getFields();
    this.query = luceneStorageConf.getQuery();
    this.maxHits = luceneStorageConf.getMaxHits();
}

From source file: org.apache.sqoop.mapreduce.AvroExportMapper.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {

    super.setup(context);

    Configuration conf = context.getConfiguration();

    // Instantiate a copy of the user's class to hold and parse the record.
    String recordClassName = conf.get(ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
    if (null == recordClassName) {
        throw new IOException(
                "Export table class name (" + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY + ") is not set!");
    }

    try {
        Class<?> cls = Class.forName(recordClassName, true, Thread.currentThread().getContextClassLoader());
        recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }

    if (null == recordImpl) {
        throw new IOException("Could not instantiate object of type " + recordClassName);
    }

    columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP, MapWritable.class);
}

From source file: org.apache.sqoop.mapreduce.GenericRecordExportMapper.java

License: Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);

    Configuration conf = context.getConfiguration();

    // Instantiate a copy of the user's class to hold and parse the record.
    String recordClassName = conf.get(ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
    if (null == recordClassName) {
        throw new IOException(
                "Export table class name (" + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY + ") is not set!");
    }

    try {
        Class<?> cls = Class.forName(recordClassName, true, Thread.currentThread().getContextClassLoader());
        recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }

    if (null == recordImpl) {
        throw new IOException("Could not instantiate object of type " + recordClassName);
    }

    columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP, MapWritable.class);

    // Add decimal support
    GenericData.get().addLogicalTypeConversion(new Conversions.DecimalConversion());
}

From source file: org.apache.sqoop.mapreduce.hcat.SqoopHCatExportHelper.java

License: Apache License

public SqoopHCatExportHelper(Configuration conf) throws IOException, InterruptedException {

    colTypesJava = DefaultStringifier.load(conf, SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_JAVA,
            MapWritable.class);
    colTypesSql = DefaultStringifier.load(conf, SqoopHCatUtilities.HCAT_DB_OUTPUT_COLTYPES_SQL,
            MapWritable.class);
    // Instantiate a copy of the user's class to hold and parse the record.

    String recordClassName = conf.get(ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
    if (null == recordClassName) {
        throw new IOException(
                "Export table class name (" + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY + ") is not set!");
    }
    debugHCatExportMapper = conf.getBoolean(SqoopHCatUtilities.DEBUG_HCAT_EXPORT_MAPPER_PROP, false);
    try {
        Class<?> cls = Class.forName(recordClassName, true, Thread.currentThread().getContextClassLoader());
        sqoopRecord = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }

    if (null == sqoopRecord) {
        throw new IOException("Could not instantiate object of type " + recordClassName);
    }

    String inputJobInfoStr = conf.get(HCatConstants.HCAT_KEY_JOB_INFO);
    jobInfo = (InputJobInfo) HCatUtil.deserialize(inputJobInfoStr);
    HCatSchema tableSchema = jobInfo.getTableInfo().getDataColumns();
    HCatSchema partitionSchema = jobInfo.getTableInfo().getPartitionColumns();
    hCatFullTableSchema = new HCatSchema(tableSchema.getFields());
    for (HCatFieldSchema hfs : partitionSchema.getFields()) {
        hCatFullTableSchema.append(hfs);
    }
}