Example usage for org.apache.hadoop.mapreduce.lib.db DBConfiguration INPUT_ORDER_BY_PROPERTY

Introduction

On this page you can find example usages of the org.apache.hadoop.mapreduce.lib.db DBConfiguration field INPUT_ORDER_BY_PROPERTY.

Prototype

String INPUT_ORDER_BY_PROPERTY

Document

The ORDER BY clause in the input SELECT statement. The constant's value is the configuration key mapreduce.jdbc.input.orderby.
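
A minimal sketch of setting this property when configuring a DBInputFormat job. The JDBC driver, connection details, the employees table with columns id and name, and the MyRowWritable class are hypothetical placeholders:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;

public static Job configureOrderedInput() throws IOException {
    Configuration conf = new Configuration();
    // Hypothetical connection details, for illustration only.
    DBConfiguration.configureDB(conf, "org.postgresql.Driver",
            "jdbc:postgresql://localhost:5432/testdb", "user", "password");

    Job job = Job.getInstance(conf);
    // MyRowWritable stands in for a DBWritable implementation; setInput stores
    // the orderBy argument under INPUT_ORDER_BY_PROPERTY.
    DBInputFormat.setInput(job, MyRowWritable.class, "employees",
            null /* conditions */, "id ASC", "id", "name");

    // Equivalent direct form:
    job.getConfiguration().set(DBConfiguration.INPUT_ORDER_BY_PROPERTY, "id ASC");
    return job;
}

DBInputFormat appends this clause to the generated SELECT statement, which keeps the assignment of rows to splits stable when the table is paged through with LIMIT/OFFSET.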

Usage

From source file: co.cask.hydrator.plugin.db.batch.source.DBSource.java

License: Apache License

@Override
public void prepareRun(BatchSourceContext context) throws Exception {
    sourceConfig.substituteMacros(context);
    LOG.debug(
            "pluginType = {}; pluginName = {}; connectionString = {}; importQuery = {}; "
                    + "boundingQuery = {}",
            sourceConfig.jdbcPluginType, sourceConfig.jdbcPluginName, sourceConfig.connectionString,
            sourceConfig.getImportQuery(), sourceConfig.getBoundingQuery());
    Configuration hConf = new Configuration();
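    // Start from an empty Configuration: clear() drops the default properties,
    // so the job receives only what is explicitly set below.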
    hConf.clear();

    // Load the plugin class to make sure it is available.
    Class<? extends Driver> driverClass = context.loadPluginClass(getJDBCPluginId());
    if (sourceConfig.user == null && sourceConfig.password == null) {
        DBConfiguration.configureDB(hConf, driverClass.getName(), sourceConfig.connectionString);
    } else {
        DBConfiguration.configureDB(hConf, driverClass.getName(), sourceConfig.connectionString,
                sourceConfig.user, sourceConfig.password);
    }
    DataDrivenETLDBInputFormat.setInput(hConf, DBRecord.class, sourceConfig.getImportQuery(),
            sourceConfig.getBoundingQuery(), sourceConfig.getEnableAutoCommit());
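    // DataDrivenDBInputFormat reads its split-by column from INPUT_ORDER_BY_PROPERTY,
    // so it only needs to be set when the job may produce more than one split.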
    if (sourceConfig.numSplits == null || sourceConfig.numSplits != 1) {
        hConf.set(DBConfiguration.INPUT_ORDER_BY_PROPERTY, sourceConfig.splitBy);
    }
    if (sourceConfig.numSplits != null) {
        hConf.setInt(MRJobConfig.NUM_MAPS, sourceConfig.numSplits);
    }
    context.setInput(Input.of(sourceConfig.referenceName,
            new SourceInputFormatProvider(DataDrivenETLDBInputFormat.class, hConf)));
}

From source file: org.apache.beam.sdk.io.hadoop.format.HadoopFormatIOIT.java

License: Apache License

private static void setupHadoopConfiguration(PostgresIOTestPipelineOptions options) {
    Configuration conf = new Configuration();
    DBConfiguration.configureDB(conf, "org.postgresql.Driver", DatabaseTestHelper.getPostgresDBUrl(options),
            options.getPostgresUsername(), options.getPostgresPassword());

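    // Read side: source table, columns, a deterministic ORDER BY, and the row class.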
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tableName);
    conf.setStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, "id", "name");
    conf.set(DBConfiguration.INPUT_ORDER_BY_PROPERTY, "id ASC");
    conf.setClass(DBConfiguration.INPUT_CLASS_PROPERTY, TestRowDBWritable.class, DBWritable.class);

    conf.setClass("key.class", LongWritable.class, Object.class);
    conf.setClass("value.class", TestRowDBWritable.class, Object.class);
    conf.setClass("mapreduce.job.inputformat.class", DBInputFormat.class, InputFormat.class);

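    // Write side: the same table and columns are used as the output target.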
    conf.set(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tableName);
    conf.set(DBConfiguration.OUTPUT_FIELD_COUNT_PROPERTY, "2");
    conf.setStrings(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, "id", "name");

    conf.setClass(HadoopFormatIO.OUTPUT_KEY_CLASS, TestRowDBWritable.class, Object.class);
    conf.setClass(HadoopFormatIO.OUTPUT_VALUE_CLASS, NullWritable.class, Object.class);
    conf.setClass(HadoopFormatIO.OUTPUT_FORMAT_CLASS_ATTR, DBOutputFormat.class, OutputFormat.class);
    conf.set(HadoopFormatIO.JOB_ID, String.valueOf(1));

    hadoopConfiguration = new SerializableConfiguration(conf);
}

From source file: org.apache.beam.sdk.io.hadoop.inputformat.HadoopInputFormatIOIT.java

License: Apache License

private static void setupHadoopConfiguration(PostgresIOTestPipelineOptions options) {
    Configuration conf = new Configuration();
    DBConfiguration.configureDB(conf, "org.postgresql.Driver", DatabaseTestHelper.getPostgresDBUrl(options),
            options.getPostgresUsername(), options.getPostgresPassword());
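    // Read-side setup mirrors HadoopFormatIOIT; ordering by "id ASC" keeps the
    // contents of each split deterministic across runs.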
    conf.set(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tableName);
    conf.setStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, "id", "name");
    conf.set(DBConfiguration.INPUT_ORDER_BY_PROPERTY, "id ASC");
    conf.setClass(DBConfiguration.INPUT_CLASS_PROPERTY, TestRowDBWritable.class, DBWritable.class);

    conf.setClass("key.class", LongWritable.class, Object.class);
    conf.setClass("value.class", TestRowDBWritable.class, Object.class);
    conf.setClass("mapreduce.job.inputformat.class", DBInputFormat.class, InputFormat.class);

    hadoopConfiguration = new SerializableConfiguration(conf);
}