Example usage for org.apache.hadoop.io DefaultStringifier load

Introduction

On this page you can find example usage for org.apache.hadoop.io.DefaultStringifier.load.

Prototype

public static <K> K load(Configuration conf, String keyName, Class<K> itemClass) throws IOException 

Document

Restores the object from the configuration.
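
The counterpart method is DefaultStringifier.store(conf, item, keyName), which serializes an object into the configuration under the given key; load reverses it. A minimal round-trip sketch (the key name "my.text.key" is arbitrary):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.Text;

public class DefaultStringifierRoundTrip {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();

        // Serialize a Writable into the configuration under a chosen key.
        DefaultStringifier.store(conf, new Text("hello"), "my.text.key");

        // Restore it later, e.g. inside a mapper's setup().
        Text restored = DefaultStringifier.load(conf, "my.text.key", Text.class);
        System.out.println(restored); // prints "hello"
    }
}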

Usage

From source file:org.apache.sqoop.mapreduce.odps.ParquetExportMapper.java

License:Apache License

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);

    Configuration conf = context.getConfiguration();

    // Instantiate a copy of the user's class to hold and parse the record.
    String recordClassName = conf.get(ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
    if (null == recordClassName) {
        throw new IOException(
                "Export table class name (" + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY + ") is not set!");
    }

    try {
        Class<?> cls = Class.forName(recordClassName, true, Thread.currentThread().getContextClassLoader());
        recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }

    if (null == recordImpl) {
        throw new IOException("Could not instantiate object of type " + recordClassName);
    }

    columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP, MapWritable.class);
}
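
The load call above presupposes that the job-submission side stored the column-type map under the same key with DefaultStringifier.store. A hypothetical sketch of that producer side (the key string and map entries are illustrative stand-ins, not Sqoop's actual values):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class ColumnTypesSetup {
    // Stand-in for the AVRO_COLUMN_TYPES_MAP constant used above.
    static final String AVRO_COLUMN_TYPES_MAP = "example.avro.column.types.map";

    public static void storeColumnTypes(Configuration conf) throws IOException {
        MapWritable columnTypes = new MapWritable();
        columnTypes.put(new Text("id"), new Text("INT"));      // illustrative entries
        columnTypes.put(new Text("name"), new Text("STRING"));
        DefaultStringifier.store(conf, columnTypes, AVRO_COLUMN_TYPES_MAP);
    }
}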

From source file:org.apache.sqoop.mapreduce.TestJdbcExportJob.java

License:Apache License

@Test
public void testAvroWithNoColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name", "Gender"), DefaultStringifier
            .load(job.getConfiguration(), AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
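
The asSetOfText helper is local to the test class and not shown in the excerpt; a plausible sketch, assuming it wraps each string in a Hadoop Text and collects the results into a Set:

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.io.Text;

// Plausible reconstruction of the test-local helper (an assumption,
// since the original is not part of the excerpt).
private static Set<Text> asSetOfText(String... strings) {
    Set<Text> set = new HashSet<Text>();
    for (String s : strings) {
        set.add(new Text(s));
    }
    return set;
}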

From source file:org.apache.sqoop.mapreduce.TestJdbcExportJob.java

License:Apache License

@Test
public void testAvroWithAllColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Age", "Name", "Gender" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name", "Gender"), DefaultStringifier
            .load(job.getConfiguration(), AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}

From source file:org.apache.sqoop.mapreduce.TestJdbcExportJob.java

License:Apache License

@Test
public void testAvroWithOneColumnSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Gender" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Gender"), DefaultStringifier
            .load(job.getConfiguration(), AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}

From source file:org.apache.sqoop.mapreduce.TestJdbcExportJob.java

License:Apache License

@Test
public void testAvroWithSomeColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Age", "Name" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name"), DefaultStringifier
            .load(job.getConfiguration(), AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}

From source file:org.apache.sqoop.mapreduce.TestJdbcExportJob.java

License:Apache License

@Test
public void testAvroWithMoreColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Age", "Name", "Gender", "Address" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name", "Gender"), DefaultStringifier
            .load(job.getConfiguration(), AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}

From source file:tachyon.hadoop.ConfUtils.java

License:Apache License

/**
 * Loads {@link TachyonConf} from a Hadoop {@link org.apache.hadoop.conf.Configuration} source.
 *
 * @param source the {@link org.apache.hadoop.conf.Configuration} to load from
 * @return the loaded {@link TachyonConf} instance, or {@code null} if none is present
 */
public static TachyonConf loadFromHadoopConfiguration(Configuration source) {
    // Load TachyonConf if any and merge to the one in TachyonFS
    // Push TachyonConf to the Job conf
    if (source.get(Constants.TACHYON_CONF_SITE) != null) {
        LOG.info("Found TachyonConf site from Job configuration for Tachyon");
        Properties tachyonConfProperties = null;
        try {
            tachyonConfProperties = DefaultStringifier.load(source, Constants.TACHYON_CONF_SITE,
                    Properties.class);
        } catch (IOException e) {
            LOG.error("Unable to load TachyonConf from Hadoop configuration", e);
            throw new RuntimeException(e);
        }

        return tachyonConfProperties != null ? new TachyonConf(tachyonConfProperties) : null;
    }
    return null;
}
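
Note that java.util.Properties is not a Writable, so DefaultStringifier serializes it through Hadoop's pluggable serialization framework, which means JavaSerialization must be registered when the properties are stored. A hedged sketch of that producer side (the key string is a stand-in for Constants.TACHYON_CONF_SITE):

import java.io.IOException;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;

public class TachyonConfStore {
    // Stand-in for the Constants.TACHYON_CONF_SITE key used above.
    static final String TACHYON_CONF_SITE = "tachyon.conf.site";

    public static void storeSiteConf(Configuration target, Properties siteProps) throws IOException {
        // Properties is serialized via Java serialization, so register
        // JavaSerialization alongside the default WritableSerialization.
        target.setStrings("io.serializations",
                "org.apache.hadoop.io.serializer.JavaSerialization",
                "org.apache.hadoop.io.serializer.WritableSerialization");
        DefaultStringifier.store(target, siteProps, TACHYON_CONF_SITE);
    }
}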

From source file:tachyon.util.ConfUtils.java

License:Apache License

/**
 * Loads {@link TachyonConf} from a Hadoop {@link org.apache.hadoop.conf.Configuration} source.
 *
 * @param source the {@link org.apache.hadoop.conf.Configuration} to load from
 * @return the loaded {@link TachyonConf} instance, or {@code null} if none is present
 */
public static TachyonConf loadFromHadoopConfiguration(Configuration source) {
    // Load TachyonConf if any and merge to the one in TachyonFS
    // Push TachyonConf to the Job conf
    if (source.get(Constants.TACHYON_CONF_SITE) != null) {
        LOG.info("Found TachyonConf site from Job configuration for Tachyon");
        Properties tachyonConfProperties = null;
        try {
            tachyonConfProperties = DefaultStringifier.load(source, Constants.TACHYON_CONF_SITE,
                    Properties.class);
        } catch (IOException ex) {
            LOG.error("Unable to load TachyonConf from Hadoop configuration", ex);
            throw new RuntimeException(ex);
        }
        if (tachyonConfProperties != null) {
            return new TachyonConf(tachyonConfProperties);
        } else {
            return null;
        }
    } else {
        return null;
    }
}