Example usage for org.apache.hadoop.mapreduce.lib.db DBConfiguration DBConfiguration

Introduction

On this page you can find example usage for the org.apache.hadoop.mapreduce.lib.db DBConfiguration constructor, DBConfiguration(Configuration job).

Prototype

public DBConfiguration(Configuration job) 
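
The constructor simply wraps an existing Hadoop Configuration. A minimal sketch of typical use (the driver class, JDBC URL, and credentials below are placeholders): the connection settings are first written into the Configuration with the static DBConfiguration.configureDB helper, and the wrapper is then built on top of it.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;

public class DBConfigurationSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Store the JDBC connection settings in the Configuration
        // (driver class, URL, and credentials are placeholders).
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://localhost/mydb", "user", "password");
        // Wrap the Configuration for typed access to those settings.
        DBConfiguration dbConf = new DBConfiguration(conf);
        System.out.println(dbConf.getConf().get(DBConfiguration.URL_PROPERTY));
    }
}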

Usage

From source file: co.cask.cdap.template.etl.common.ETLDBOutputFormat.java

License: Apache License

@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    Configuration conf = context.getConfiguration();
    DBConfiguration dbConf = new DBConfiguration(conf);
    String tableName = dbConf.getOutputTableName();
    String[] fieldNames = dbConf.getOutputFieldNames();

    if (fieldNames == null) {
        fieldNames = new String[dbConf.getOutputFieldCount()];
    }

    try {
        Connection connection = getConnection(conf);
        PreparedStatement statement = connection.prepareStatement(constructQuery(tableName, fieldNames));
        return new DBRecordWriter(connection, statement) {
            @Override
            public void close(TaskAttemptContext context) throws IOException {
                super.close(context);
                try {
                    DriverManager.deregisterDriver(driverShim);
                } catch (SQLException e) {
                    throw new IOException(e);
                }
            }
        };
    } catch (Exception ex) {
        throw new IOException(ex.getMessage());
    }
}
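
This variant overrides close() only to deregister the driverShim wrapper from DriverManager after the normal close, presumably so that a JDBC driver loaded through the job's own classloader does not stay registered once the task finishes.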

From source file: co.cask.hydrator.plugin.db.batch.sink.ETLDBOutputFormat.java

License: Apache License

@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    Configuration conf = context.getConfiguration();
    DBConfiguration dbConf = new DBConfiguration(conf);
    String tableName = dbConf.getOutputTableName();
    String[] fieldNames = dbConf.getOutputFieldNames();

    if (fieldNames == null) {
        fieldNames = new String[dbConf.getOutputFieldCount()];
    }

    try {
        Connection connection = getConnection(conf);
        PreparedStatement statement = connection.prepareStatement(constructQuery(tableName, fieldNames));
        return new DBRecordWriter(connection, statement) {

            private boolean emptyData = true;

            // The close method below matches the implementation in DBOutputFormat, except that
            // we check whether there is any data to be written and, if not, skip the executeBatch call.
            // Some reducers may not receive any data, so this check is necessary to prevent an
            // empty batch from being committed (some databases don't support that).
            @Override
            public void close(TaskAttemptContext context) throws IOException {
                try {
                    if (!emptyData) {
                        getStatement().executeBatch();
                        getConnection().commit();
                    }
                } catch (SQLException e) {
                    try {
                        getConnection().rollback();
                    } catch (SQLException ex) {
                        LOG.warn(StringUtils.stringifyException(ex));
                    }
                    throw new IOException(e.getMessage());
                } finally {
                    try {
                        getStatement().close();
                        getConnection().close();
                    } catch (SQLException ex) {
                        throw new IOException(ex.getMessage());
                    }
                }

                try {
                    DriverManager.deregisterDriver(driverShim);
                } catch (SQLException e) {
                    throw new IOException(e);
                }
            }

            @Override
            public void write(K key, V value) throws IOException {
                super.write(key, value);
                emptyData = false;
            }
        };
    } catch (Exception ex) {
        throw new IOException(ex.getMessage());
    }
}
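
The emptyData flag is the notable difference from the stock DBOutputFormat writer: write() flips it to false on the first record, so a reducer that received no input skips both executeBatch() and commit() instead of committing an empty batch.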

From source file: co.cask.hydrator.plugin.db.batch.source.DataDrivenETLDBInputFormat.java

License: Apache License

public static void setInput(Configuration conf, Class<? extends DBWritable> inputClass, String inputQuery,
        String inputBoundingQuery, boolean enableAutoCommit) {
    DBConfiguration dbConf = new DBConfiguration(conf);
    dbConf.setInputClass(inputClass);
    dbConf.setInputQuery(inputQuery);
    dbConf.setInputBoundingQuery(inputBoundingQuery);
    conf.setBoolean(AUTO_COMMIT_ENABLED, enableAutoCommit);
}
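
A sketch of how a pipeline driver might call this helper; MyRecord and the queries are illustrative placeholders, and $CONDITIONS is the token that data-driven input formats substitute with per-split bounds.

// assumes: import org.apache.hadoop.conf.Configuration;
// assumes: import org.apache.hadoop.mapreduce.Job;
// MyRecord is a hypothetical DBWritable implementation.
Job job = Job.getInstance(new Configuration(), "etl-db-source");
DataDrivenETLDBInputFormat.setInput(job.getConfiguration(), MyRecord.class,
        "SELECT id, name FROM employees WHERE $CONDITIONS", // input query
        "SELECT MIN(id), MAX(id) FROM employees",           // bounding query
        false);                                             // leave auto-commit off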

From source file: hadoop.MysqlDBOutputFormat.java

License: Apache License

/** {@inheritDoc} */
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
    DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
    String tableName = dbConf.getOutputTableName();
    String[] fieldNames = dbConf.getOutputFieldNames();

    if (fieldNames == null) {
        fieldNames = new String[dbConf.getOutputFieldCount()];
    }

    try {
        Connection connection = dbConf.getConnection();
        PreparedStatement statement = connection.prepareStatement(constructQuery(tableName, fieldNames));
        return new DBRecordWriter(connection, statement);
    } catch (Exception ex) {
        throw new IOException(ex.getMessage());
    }
}

From source file: hadoop.MysqlDBOutputFormat.java

License: Apache License

private static DBConfiguration setOutput(Job job, String tableName) throws IOException {
    job.setOutputFormatClass(MysqlDBOutputFormat.class);
    job.setReduceSpeculativeExecution(false);

    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());

    dbConf.setOutputTableName(tableName);
    return dbConf;
}

From source file: link.neolink.datamonster.Datamonster.java

License: Apache License

/**
 * Set the SQL credentials
 *
 * @param URL
 *    The SQL URI, in JDBC format
 * @param user
 *    The SQL username
 * @param password
 *    The SQL password
 */
public void setSQLCredentials(String URL, String user, String password) {
    DBConfiguration.configureDB(job.getConfiguration(), this.sqlDriver, URL, user, password);
    this.sqlConfig = new DBConfiguration(job.getConfiguration());
}
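
For illustration, assuming an already-constructed Datamonster instance, a caller might wire in the credentials like this (the URI and credentials are placeholders):

// monster is a hypothetical, already-constructed Datamonster instance.
monster.setSQLCredentials("jdbc:mysql://db.example.com:3306/stats", "etl", "secret");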

From source file: link.neolink.datamonster.DBUpdateOutputFormat.java

License: Apache License

private static DBConfiguration setOutput(Job job, String tableName) throws IOException {
    job.setOutputFormatClass(DBUpdateOutputFormat.class);
    job.setReduceSpeculativeExecution(false);

    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());

    dbConf.setOutputTableName(tableName);
    return dbConf;
}

From source file: net.mooncloud.mapreduce.lib.db.DBInputFormat.java

License: Apache License

/** {@inheritDoc} */
public void setConf(Configuration conf) {

    dbConf = new DBConfiguration(conf);

    try {
        getConnection();

        DatabaseMetaData dbMeta = connection.getMetaData();
        this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }

    tableName = dbConf.getInputTableName();
    fieldNames = dbConf.getInputFieldNames();
    conditions = dbConf.getInputConditions();
}

From source file: net.mooncloud.mapreduce.lib.db.DBInputFormat.java

License: Apache License

/**
 * Initializes the map-part of the job with the appropriate input settings.
 *
 * @param job
 *            The map-reduce job
 * @param inputClass
 *            the class object implementing DBWritable, which is the Java
 *            object holding tuple fields.
 * @param tableName
 *            The table to read data from
 * @param conditions
 *            The condition with which to select data, e.g. '(updated >
 *            20070101 AND length > 0)'
 * @param orderBy
 *            the fieldNames in the orderBy clause.
 * @param fieldNames
 *            The field names in the table
 * @see #setInput(Job, Class, String, String)
 */
public static void setInput(Job job, Class<? extends DBWritable> inputClass, String tableName,
        String conditions, String orderBy, String... fieldNames) {
    job.setInputFormatClass(DBInputFormat.class);
    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
    dbConf.setInputClass(inputClass);
    dbConf.setInputTableName(tableName);
    dbConf.setInputFieldNames(fieldNames);
    dbConf.setInputConditions(conditions);
    dbConf.setInputOrderBy(orderBy);
}
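
A sketch of a driver using this table-based overload; MyRecord stands in for a real DBWritable implementation.

// assumes: import org.apache.hadoop.conf.Configuration;
// assumes: import org.apache.hadoop.mapreduce.Job;
Job job = Job.getInstance(new Configuration(), "db-table-import");
DBInputFormat.setInput(job, MyRecord.class,
        "employees",             // table to read from
        "salary > 0",            // WHERE conditions
        "id",                    // ORDER BY field
        "id", "name", "salary"); // field names to select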

From source file: net.mooncloud.mapreduce.lib.db.DBInputFormat.java

License: Apache License

/**
 * Initializes the map-part of the job with the appropriate input settings.
 *
 * @param job
 *            The map-reduce job
 * @param inputClass
 *            the class object implementing DBWritable, which is the Java
 *            object holding tuple fields.
 * @param inputQuery
 *            the input query to select fields. Example :
 *            "SELECT f1, f2, f3 FROM Mytable ORDER BY f1"
 * @param inputCountQuery
 *            the input query that returns the number of records in the
 *            table. Example : "SELECT COUNT(f1) FROM Mytable"
 * @see #setInput(Job, Class, String, String, String, String...)
 */
public static void setInput(Job job, Class<? extends DBWritable> inputClass, String inputQuery,
        String inputCountQuery) {
    job.setInputFormatClass(DBInputFormat.class);
    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
    dbConf.setInputClass(inputClass);
    dbConf.setInputQuery(inputQuery);
    dbConf.setInputCountQuery(inputCountQuery);
}