Example usage for org.apache.hadoop.mapred.lib.db DBConfiguration OUTPUT_FIELD_NAMES_PROPERTY

List of usage examples for org.apache.hadoop.mapred.lib.db DBConfiguration OUTPUT_FIELD_NAMES_PROPERTY

Introduction

On this page you can find an example usage for org.apache.hadoop.mapred.lib.db DBConfiguration OUTPUT_FIELD_NAMES_PROPERTY.

Prototype

String OUTPUT_FIELD_NAMES_PROPERTY

Click the Source Link below to view the source code for org.apache.hadoop.mapred.lib.db DBConfiguration OUTPUT_FIELD_NAMES_PROPERTY.

Click Source Link

Document

Field names in the Output table

Usage

From source file:com.vertica.hivestoragehandler.VerticaStorageHandler.java

License:Apache License

/**
 * Copies the Hive table metadata needed by the JDBC input/output formats into
 * the MapReduce job properties, and forwards any {@code mapred.jdbc.*} table
 * properties (also mirrored under the newer {@code mapreduce.jdbc.*} prefix).
 *
 * @param tableDesc     descriptor of the Hive table being read/written
 * @param jobProperties mutable map of job properties to populate
 */
private void configureJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("tableDesc: " + tableDesc);
        LOG.debug("jobProperties: " + jobProperties);
    }

    Properties tableProperties = tableDesc.getProperties();
    String tableName = tableDesc.getTableName();
    String columnNames = tableProperties.getProperty(Constants.LIST_COLUMNS);

    // Wire table name and column list into both the input and output sides
    // of the DBConfiguration.
    jobProperties.put(DBConfiguration.INPUT_CLASS_PROPERTY, DbRecordWritable.class.getName());
    jobProperties.put(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, tableName);
    jobProperties.put(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tableName);
    jobProperties.put(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY, columnNames);
    jobProperties.put(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, columnNames);

    // Forward every "mapred.jdbc.*" property; when the "mapreduce."-prefixed
    // name differs, publish the value under that name as well.
    for (String propertyName : tableProperties.stringPropertyNames()) {
        if (!propertyName.startsWith("mapred.jdbc.")) {
            continue;
        }
        String propertyValue = tableProperties.getProperty(propertyName);
        LOG.info("JSH key = " + propertyName + ", value = " + propertyValue);
        jobProperties.put(propertyName, propertyValue);

        String mapreduceName = propertyName.replace("mapred.", "mapreduce.");
        if (!propertyName.equalsIgnoreCase(mapreduceName)) {
            LOG.info("JSH key = " + mapreduceName + ", value = " + propertyValue);
            jobProperties.put(mapreduceName, propertyValue);
        }
    }
}

From source file:infinidb.hadoop.db.InfiniDBConfiguration.java

License:Apache License

/**
 * Returns the configured output field names.
 *
 * <p>Delegates to {@code Configuration#getStrings} on the
 * {@link DBConfiguration#OUTPUT_FIELD_NAMES_PROPERTY} key.
 */
String[] getOutputFieldNames() {
    String[] fieldNames = job.getStrings(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY);
    return fieldNames;
}

From source file:infinidb.hadoop.db.InfiniDBConfiguration.java

License:Apache License

/**
 * Stores the given output field names under
 * {@link DBConfiguration#OUTPUT_FIELD_NAMES_PROPERTY} in the job configuration.
 *
 * @param fieldNames the output table's field names, in column order
 */
void setOutputFieldNames(String... fieldNames) {
    String key = DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY;
    job.setStrings(key, fieldNames);
}

From source file:org.apache.sqoop.mapreduce.sqlserver.SqlServerUpsertOutputFormatTest.java

License:Apache License

/**
 * Verifies that {@code SqlServerUpsertRecordWriter} builds the expected
 * parameterized MERGE statement (including IDENTITY_INSERT handling) from the
 * configured table name, column list, and update-key columns.
 *
 * Fixes over the previous version: removed the dead writes that set
 * SQOOP_EXPORT_UPDATE_COL_KEY and OUTPUT_FIELD_NAMES_PROPERTY to "" only to
 * overwrite them immediately, and replaced raw Class/Constructor types (and
 * the blanket @SuppressWarnings) with wildcard generics.
 */
@Test
public void Merge_statement_is_parameterized_correctly() throws Exception {
    // Arrange a configuration that mimics a Sqoop upsert export to SQL Server.
    Configuration conf = new Configuration();
    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, org.hsqldb.jdbcDriver.class.getName());
    conf.set(DBConfiguration.URL_PROPERTY, "jdbc:hsqldb:.");
    String tableName = "#myTable";
    String[] columnNames = { "FirstColumn", "SecondColumn", "ThirdColumn" };
    String[] updateKeyColumns = { "FirstColumn" };
    conf.set(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tableName);
    conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, StringUtils.join(columnNames, ','));
    conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY, StringUtils.join(updateKeyColumns, ','));
    conf.set(SQLServerManager.IDENTITY_INSERT_PROP, "true");

    // The concrete TaskAttemptContext type differs across Hadoop versions;
    // resolve whichever implementation is on the classpath reflectively.
    Class<?> contextClass = null;
    try {
        contextClass = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
    } catch (ClassNotFoundException ignored) {
        // Not Hadoop 2.0; fall through to the older class name below.
    }
    if (contextClass == null) {
        try {
            contextClass = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContext");
        } catch (ClassNotFoundException ignored) {
            // Neither variant is available; assertNotNull below fails loudly.
        }
    }
    assertNotNull(contextClass);
    Constructor<?> contextConstructor = contextClass.getConstructor(Configuration.class, TaskAttemptID.class);
    TaskAttemptContext context = (TaskAttemptContext) contextConstructor.newInstance(conf, new TaskAttemptID());

    SqlServerUpsertOutputFormat outputFormat = new SqlServerUpsertOutputFormat();
    SqlServerUpsertRecordWriter recordWriter = outputFormat.new SqlServerUpsertRecordWriter(context);
    assertEquals(
            "SET IDENTITY_INSERT #myTable ON " + "MERGE INTO #myTable AS _target USING ( VALUES ( ?, ?, ? ) )"
                    + " AS _source ( FirstColumn, SecondColumn, ThirdColumn ) ON "
                    + "_source.FirstColumn = _target.FirstColumn"
                    + "  WHEN MATCHED THEN UPDATE SET _target.SecondColumn = "
                    + "_source.SecondColumn, _target.ThirdColumn = _source.ThirdColumn"
                    + "  WHEN NOT MATCHED THEN INSERT ( FirstColumn, SecondColumn," + " ThirdColumn ) VALUES "
                    + "( _source.FirstColumn, _source.SecondColumn, _source.ThirdColumn );",
            recordWriter.getUpdateStatement());
}