Example usage for org.apache.hadoop.mapreduce.lib.db DBConfiguration setInputClass

Introduction

On this page you can find example usage of org.apache.hadoop.mapreduce.lib.db DBConfiguration setInputClass.

Prototype

public void setInputClass(Class<? extends DBWritable> inputClass) 
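In practice the method is called on a DBConfiguration that wraps a job's Configuration. The following is a minimal sketch for orientation only; MyRecord is a hypothetical user-defined class implementing DBWritable, and the job setup around the call is assumed rather than taken from the examples below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;

// Minimal sketch: MyRecord (a DBWritable implementation) is hypothetical.
Job job = Job.getInstance(new Configuration(), "db-read");
DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
// Register the class whose readFields(ResultSet)/write(PreparedStatement)
// methods map each fetched row to a Java object.
dbConf.setInputClass(MyRecord.class);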

Usage

From source file: co.cask.hydrator.plugin.db.batch.source.DataDrivenETLDBInputFormat.java

License: Apache License

public static void setInput(Configuration conf, Class<? extends DBWritable> inputClass, String inputQuery,
        String inputBoundingQuery, boolean enableAutoCommit) {
    DBConfiguration dbConf = new DBConfiguration(conf);
    dbConf.setInputClass(inputClass);
    dbConf.setInputQuery(inputQuery);
    dbConf.setInputBoundingQuery(inputBoundingQuery);
    conf.setBoolean(AUTO_COMMIT_ENABLED, enableAutoCommit);
}
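A hedged sketch of how this helper might be invoked; the Configuration, the MyRecord class, and both query strings below are hypothetical placeholders, not taken from the plugin itself.

Configuration conf = new Configuration();
// Hypothetical DBWritable implementation and placeholder queries; the
// bounding query supplies the value range used to generate input splits.
DataDrivenETLDBInputFormat.setInput(conf, MyRecord.class,
        "SELECT id, name FROM employees WHERE $CONDITIONS",
        "SELECT MIN(id), MAX(id) FROM employees",
        false);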

From source file: com.cloudera.sqoop.mapreduce.db.DBInputFormat.java

License: Apache License

/**
 * Initializes the map-part of the job with the appropriate input settings.
 *
 * @param job The map-reduce job
 * @param inputClass the class object implementing DBWritable, which is the
 * Java object holding tuple fields.
 * @param tableName The table to read data from
 * @param conditions The condition which to select data with,
 * e.g. '(updated > 20070101 AND length > 0)'
 * @param orderBy the fieldNames in the orderBy clause.
 * @param fieldNames The field names in the table
 * @see #setInput(Job, Class, String, String)
 */
public static void setInput(Job job, Class<? extends DBWritable> inputClass, String tableName,
        String conditions, String orderBy, String... fieldNames) {
    job.setInputFormatClass(DBInputFormat.class);
    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
    dbConf.setInputClass(inputClass);
    dbConf.setInputTableName(tableName);
    dbConf.setInputFieldNames(fieldNames);
    dbConf.setInputConditions(conditions);
    dbConf.setInputOrderBy(orderBy);
}
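A hedged example call for the table-based overload above; the table, condition, order-by column, and field names are placeholders chosen to mirror the javadoc, and MyRecord is a hypothetical DBWritable implementation.

Job job = Job.getInstance(new Configuration(), "db-read");
// Placeholders throughout; arguments map to (tableName, conditions, orderBy, fieldNames).
DBInputFormat.setInput(job, MyRecord.class,
        "employees",                    // tableName
        "(updated > 20070101)",         // conditions (optional WHERE clause)
        "id",                           // orderBy
        "id", "name", "updated");       // fieldNames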

From source file: com.cloudera.sqoop.mapreduce.db.DBInputFormat.java

License: Apache License

/**
 * Initializes the map-part of the job with the appropriate input settings.
 *
 * @param job The map-reduce job
 * @param inputClass the class object implementing DBWritable, which is the
 * Java object holding tuple fields.
 * @param inputQuery the input query to select fields. Example :
 * "SELECT f1, f2, f3 FROM Mytable ORDER BY f1"
 * @param inputCountQuery the input query that returns
 * the number of records in the table.
 * Example : "SELECT COUNT(f1) FROM Mytable"
 * @see #setInput(Job, Class, String, String, String, String...)
 */
public static void setInput(Job job, Class<? extends DBWritable> inputClass, String inputQuery,
        String inputCountQuery) {
    job.setInputFormatClass(DBInputFormat.class);
    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
    dbConf.setInputClass(inputClass);
    dbConf.setInputQuery(inputQuery);
    dbConf.setInputCountQuery(inputCountQuery);
}
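A hedged example call for the query-based overload, reusing the queries from the javadoc above; MyRecord is again a hypothetical DBWritable implementation.

Job job = Job.getInstance(new Configuration(), "db-read");
// The count query lets the framework determine the total row count when sizing splits.
DBInputFormat.setInput(job, MyRecord.class,
        "SELECT f1, f2, f3 FROM Mytable ORDER BY f1",   // inputQuery
        "SELECT COUNT(f1) FROM Mytable");               // inputCountQuery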