Example usage for org.apache.hadoop.mapreduce Job getConfiguration

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.Job.getConfiguration().

Prototype

public Configuration getConfiguration() 

Document

Return the configuration for the job.
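
Before the examples below, here is a minimal sketch of the call itself: Job.getInstance copies the Configuration passed to it, so properties must be set through the object returned by getConfiguration() for them to affect the job. The property name "my.example.key" is a hypothetical placeholder used only for illustration, not a real Hadoop setting.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class GetConfigurationSketch {
    public static void main(String[] args) throws Exception {
        // Configuration(false) skips loading the default resources.
        Job job = Job.getInstance(new Configuration(false));
        // getConfiguration() returns the job's own copy of the configuration;
        // changes made through it apply to this job.
        Configuration conf = job.getConfiguration();
        conf.set("my.example.key", "value"); // hypothetical property, for illustration
        System.out.println(conf.get("my.example.key")); // prints "value"
    }
}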

Usage

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

/**
 * Tests that, when given an input directory, {@link SSTableInputFormat} correctly
 * expands all subdirectories and picks up all the data tables corresponding to a specific
 * column family when a SNAP directory exists. The SST tables should be skipped.
 */
@Test
public void testListStatusWithColumnFamilyNameSkipSST() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    SSTableInputFormat.setColumnFamilyName("col_fam", job);
    List<FileStatus> result = testListStatus(conf, "./src/test/resources/backup_input");
    assertEquals(NUM_TABLES, result.size());
}

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

@Test
public void testSetComparatorClass() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    String comparator = "my_comparator";
    SSTableInputFormat.setComparatorClass(comparator, job);
    assertEquals(comparator, conf.get(PropertyConstants.COLUMN_COMPARATOR.txt));
}

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

@Test
public void testSetSubComparatorClass() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    String subComparator = "my_subcomparator";
    SSTableInputFormat.setSubComparatorClass(subComparator, job);
    assertEquals(subComparator, conf.get(PropertyConstants.COLUMN_SUBCOMPARATOR.txt));
}

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

@Test
public void testPartitionerClass() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    String partitioner = "my_partitioner";
    SSTableInputFormat.setPartitionerClass(partitioner, job);
    assertEquals(partitioner, conf.get(PropertyConstants.PARTITIONER.txt));
}

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

@Test
public void testColumnFamilyType() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    String cfType = "my_cftype";
    SSTableInputFormat.setColumnFamilyType(cfType, job);
    assertEquals(cfType, conf.get(PropertyConstants.COLUMN_FAMILY_TYPE.txt));
}

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

@Test
public void testSetColumnFamilyName() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    String cfName = "my_cfName";
    SSTableInputFormat.setColumnFamilyName(cfName, job);
    assertEquals(cfName, conf.get(PropertyConstants.COLUMN_FAMILY_NAME.txt));
}

From source file: com.knewton.mapreduce.io.SSTableInputFormatTest.java

License: Apache License

@Test
public void testSetKeyspaceName() throws Exception {
    Job job = Job.getInstance(new Configuration(false));
    Configuration conf = job.getConfiguration();
    String keyspaceName = "my_keyspaceName";
    SSTableInputFormat.setKeyspaceName(keyspaceName, job);
    assertEquals(keyspaceName, conf.get(PropertyConstants.KEYSPACE_NAME.txt));
}

From source file: com.korrelate.pig.hbase.HBaseStorage.java

License: Apache License

@Override
public void setLocation(String location, Job job) throws IOException {
    Properties udfProps = getUDFProperties();
    job.getConfiguration().setBoolean("pig.noSplitCombination", true);

    initialiseHBaseClassLoaderResources(job);
    m_conf = initializeLocalJobConfig(job);
    String delegationTokenSet = udfProps.getProperty(HBASE_TOKEN_SET);
    if (delegationTokenSet == null) {
        addHBaseDelegationToken(m_conf, job);
        udfProps.setProperty(HBASE_TOKEN_SET, "true");
    }

    String tablename = location;
    if (location.startsWith("hbase://")) {
        tablename = location.substring(8);
    }
    if (m_table == null) {
        m_table = new HTable(m_conf, tablename);
    }
    m_table.setScannerCaching(caching_);
    m_conf.set(TableInputFormat.INPUT_TABLE, tablename);

    String projectedFields = udfProps.getProperty(projectedFieldsName());
    if (projectedFields != null) {
        // update columnInfo_
        pushProjection((RequiredFieldList) ObjectSerializer.deserialize(projectedFields));
    }

    for (ColumnInfo columnInfo : columnInfo_) {
        // do we have a column family, or a column?
        if (columnInfo.isColumnMap()) {
            scan.addFamily(columnInfo.getColumnFamily());
        } else {
            scan.addColumn(columnInfo.getColumnFamily(), columnInfo.getColumnName());
        }

    }
    if (requiredFieldList != null) {
        Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass(),
                new String[] { contextSignature });
        p.setProperty(contextSignature + "_projectedFields", ObjectSerializer.serialize(requiredFieldList));
    }
    m_conf.set(TableInputFormat.SCAN, convertScanToString(scan));
}

From source file: com.korrelate.pig.hbase.HBaseStorage.java

License: Apache License

private void initialiseHBaseClassLoaderResources(Job job) throws IOException {
    // Make sure the HBase, ZooKeeper, and Guava jars get shipped.
    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), org.apache.hadoop.hbase.client.HTable.class,
            com.google.common.collect.Lists.class, org.apache.zookeeper.ZooKeeper.class);

}

From source file: com.korrelate.pig.hbase.HBaseStorage.java

License: Apache License

@Override
public void setStoreLocation(String location, Job job) throws IOException {
    if (location.startsWith("hbase://")) {
        job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, location.substring(8));
    } else {
        job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, location);
    }

    String serializedSchema = getUDFProperties().getProperty(contextSignature + "_schema");
    if (serializedSchema != null) {
        schema_ = (ResourceSchema) ObjectSerializer.deserialize(serializedSchema);
    }

    initialiseHBaseClassLoaderResources(job);
    m_conf = initializeLocalJobConfig(job);
    // Unlike setLocation, we do not use a UDF property to fetch the HBase
    // delegation token only once: setStoreLocation is called with a different
    // Job object each time, and only the last Job passed in is the one that
    // is launched, so we end up fetching multiple HBase delegation tokens.
    addHBaseDelegationToken(m_conf, job);
}