Example usage for org.apache.hadoop.mapred.lib.db DBConfiguration getInputConditions

Introduction

On this page you can find example usages of org.apache.hadoop.mapred.lib.db.DBConfiguration.getInputConditions().

Prototype

public String getInputConditions() 
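
The conditions string returned by getInputConditions() is whatever was stored in the job configuration when the database input was set up. The sketch below shows one way that value gets there using the old mapred API; the driver class, connection URL, table, columns, and record class are hypothetical placeholders, and the conditions argument passed to DBInputFormat.setInput(...) is the value the input format later reads back through getInputConditions().

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapred.lib.db.DBWritable;

public class InputConditionsSetup {

    /** Hypothetical record type for a table with a single VARCHAR column. */
    public static class NameRecord implements Writable, DBWritable {
        private String name;

        public void readFields(ResultSet rs) throws SQLException {
            name = rs.getString(1);
        }

        public void write(PreparedStatement ps) throws SQLException {
            ps.setString(1, name);
        }

        public void readFields(DataInput in) throws IOException {
            name = in.readUTF();
        }

        public void write(DataOutput out) throws IOException {
            out.writeUTF(name == null ? "" : name);
        }
    }

    public static void configure(JobConf job) {
        // Register the JDBC driver class and connection URL (credentials omitted).
        DBConfiguration.configureDB(job,
                "com.mysql.jdbc.Driver",                // hypothetical driver
                "jdbc:mysql://localhost/exampledb");    // hypothetical URL

        // The "conditions" argument is stored in the configuration and is the
        // value DBConfiguration.getInputConditions() returns inside the input format.
        DBInputFormat.setInput(job,
                NameRecord.class,   // DBWritable record class
                "employees",        // input table name (hypothetical)
                "active = 1",       // input conditions
                "name",             // order-by column
                "name");            // input field names

        // Same value, read straight from the underlying property:
        String conditions = job.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY);
        System.out.println("Configured input conditions: " + conditions);
    }
}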

Usage

From source file: co.nubetech.apache.hadoop.mapred.DataDrivenDBInputFormat.java

License: Apache License

protected RecordReader<LongWritable, T> createDBRecordReader(
        org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit split, Configuration conf)
        throws IOException {

    org.apache.hadoop.mapreduce.lib.db.DBConfiguration dbConf = getDBConf();
    @SuppressWarnings("unchecked")
    Class<T> inputClass = (Class<T>) (dbConf.getInputClass());
    String dbProductName = getDBProductName();

    LOG.debug("Creating db record reader for db product: " + dbProductName);

    try {
        // use database product name to determine appropriate record reader.
        if (dbProductName.startsWith("MYSQL")) {
            // use MySQL-specific db reader.
            return new MySQLDataDrivenDBRecordReader<T>(split, inputClass, conf, getConnection(), dbConf,
                    dbConf.getInputConditions(), dbConf.getInputFieldNames(), dbConf.getInputTableName());
        } else {
            // Generic reader.
            return new DataDrivenDBRecordReader<T>(split, inputClass, conf, getConnection(), dbConf,
                    dbConf.getInputConditions(), dbConf.getInputFieldNames(), dbConf.getInputTableName(),
                    dbProductName);
        }
    } catch (SQLException ex) {
        throw new IOException(ex.getMessage());
    }
}
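
The reader created above combines the configured field names, table name, and conditions with the split's bounding clauses when it builds its SELECT statement. The following is only an illustrative sketch of that assembly, not the actual co.nubetech or Hadoop reader code; the table, columns, and clause strings are hypothetical.

/**
 * Illustrative sketch only: shows how a data-driven record reader typically
 * folds the configured conditions and the split's bounding clauses into one
 * SELECT statement. Not the actual DataDrivenDBRecordReader implementation.
 */
public class SelectQuerySketch {

    public static String buildSelectQuery(String tableName, String[] fieldNames,
            String conditions, String lowerClause, String upperClause) {
        StringBuilder query = new StringBuilder("SELECT ");
        query.append(String.join(", ", fieldNames));
        query.append(" FROM ").append(tableName);

        // The per-split bounds and the user-supplied conditions
        // (DBConfiguration.getInputConditions()) are ANDed in the WHERE clause.
        query.append(" WHERE (").append(lowerClause).append(")");
        query.append(" AND (").append(upperClause).append(")");
        if (conditions != null && !conditions.isEmpty()) {
            query.append(" AND (").append(conditions).append(")");
        }
        return query.toString();
    }

    public static void main(String[] args) {
        // Hypothetical inputs, mirroring what the reader above would receive.
        String sql = buildSelectQuery("employees",
                new String[] { "id", "name", "salary" },
                "active = 1",          // dbConf.getInputConditions()
                "id >= 1000",          // split lower bound
                "id < 2000");          // split upper bound
        // Prints: SELECT id, name, salary FROM employees WHERE (id >= 1000) AND (id < 2000) AND (active = 1)
        System.out.println(sql);
    }
}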

From source file: co.nubetech.apache.hadoop.mapred.DBQueryInputFormat.java

License: Apache License

@Override
protected RecordReader<LongWritable, GenericDBWritable> createDBRecordReader(
        org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit split, Configuration conf)
        throws IOException {

    org.apache.hadoop.mapreduce.lib.db.DBConfiguration dbConf = getDBConf();
    @SuppressWarnings("unchecked")
    // Class<T> inputClass = (Class<T>) (dbConf.getInputClass());
    String dbProductName = getDBProductName();

    logger.debug("Creating db record reader for db product: " + dbProductName);
    ArrayList params = null;
    try {
        if (conf.get(HIHOConf.QUERY_PARAMS) != null) {
            logger.debug("creating stringifier in DBQueryInputFormat");
            DefaultStringifier<ArrayList> stringifier = new DefaultStringifier<ArrayList>(conf,
                    ArrayList.class);
            logger.debug("created stringifier");

            params = stringifier.fromString(conf.get(HIHOConf.QUERY_PARAMS));
            logger.debug("created params");
        }
        // use database product name to determine appropriate record reader.
        if (dbProductName.startsWith("MYSQL")) {
            // use MySQL-specific db reader.
            return new MySQLQueryRecordReader(split, conf, getConnection(), dbConf, dbConf.getInputConditions(),
                    dbConf.getInputFieldNames(), dbConf.getInputTableName(), params);
        } else {
            // Generic reader.
            return new DBQueryRecordReader(split, conf, getConnection(), dbConf, dbConf.getInputConditions(),
                    dbConf.getInputFieldNames(), dbConf.getInputTableName(), dbProductName, params);
        }
    } catch (SQLException ex) {
        throw new IOException(ex.getMessage());
    }
}
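
For the stringifier.fromString(...) call above to succeed, the driver must first serialize the parameter list into the configuration under HIHOConf.QUERY_PARAMS with a DefaultStringifier. A minimal driver-side sketch follows; the HIHOConf import path and the choice of JavaSerialization are assumptions, and the parameter values are hypothetical.

import java.io.IOException;
import java.util.ArrayList;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;

import co.nubetech.hiho.common.HIHOConf;   // assumed location of the HIHOConf constants

public class QueryParamsSetup {

    public static void storeParams(Configuration conf, ArrayList<Object> params)
            throws IOException {
        // DefaultStringifier needs a registered serializer for ArrayList;
        // enabling JavaSerialization alongside the default WritableSerialization
        // is one way to do that (assumption, not dictated by the listing above).
        conf.setStrings("io.serializations",
                "org.apache.hadoop.io.serializer.JavaSerialization",
                "org.apache.hadoop.io.serializer.WritableSerialization");

        // Serialize the parameter list into the configuration; the record reader
        // recovers it with stringifier.fromString(...), as shown in the listing.
        DefaultStringifier.store(conf, params, HIHOConf.QUERY_PARAMS);
    }

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        ArrayList<Object> params = new ArrayList<Object>();
        params.add(Integer.valueOf(42));    // hypothetical query parameter
        params.add("2015-01-01");           // hypothetical query parameter
        storeParams(conf, params);
    }
}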