Example usage for org.apache.hadoop.mapreduce JobContext getConfiguration

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce JobContext.getConfiguration.

Prototype

public Configuration getConfiguration();

Document

Return the configuration for the job.
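
Before the examples, a minimal sketch of the typical call pattern, assuming a hypothetical property name (my.example.key) that does not appear in any of the sources below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;

public class GetConfigurationSketch {
    // Reads an illustrative property from the job's configuration,
    // falling back to a default when the key is unset.
    public static String getExampleValue(JobContext context) {
        Configuration conf = context.getConfiguration();
        return conf.get("my.example.key", "default-value"); // hypothetical key
    }
}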

Usage

From source file:com.cloudera.recordservice.examples.terasort.TeraSort.java

License:Apache License

public static boolean getUseSimplePartitioner(JobContext job) {
    return job.getConfiguration().getBoolean(SIMPLE_PARTITIONER, false);
}

From source file:com.cloudera.recordservice.examples.terasort.TeraSort.java

License:Apache License

public static int getOutputReplication(JobContext job) {
    return job.getConfiguration().getInt(OUTPUT_REPLICATION, 1);
}
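
Both getters above read TeraSort settings straight from the job configuration. A hedged sketch of the matching setters, the usual companion pattern (not shown in the source file above):

public static void setUseSimplePartitioner(Job job, boolean value) {
    // Writes the same SIMPLE_PARTITIONER key the getter reads.
    job.getConfiguration().setBoolean(SIMPLE_PARTITIONER, value);
}

public static void setOutputReplication(Job job, int replication) {
    // Writes the same OUTPUT_REPLICATION key the getter reads.
    job.getConfiguration().setInt(OUTPUT_REPLICATION, replication);
}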

From source file:com.cloudera.recordservice.hcatalog.mapreduce.HCatRSBaseInputFormat.java

License:Apache License

/**
 * Logically split the set of input files for the job. Returns the
 * underlying InputFormat's splits.
 * @param jobContext the job context object
 * @return the splits, a RecordServiceSplit wrapper over the storage
 *         handler InputSplits
 * @throws IOException if the job info cannot be read from the configuration
 * @throws InterruptedException if split computation is interrupted
 */
@Override
public List<InputSplit> getSplits(JobContext jobContext) throws IOException, InterruptedException {
    Configuration conf = jobContext.getConfiguration();
    // Get the job info from the configuration;
    // throws an exception if not initialized.
    InputJobInfo inputJobInfo;
    try {
        inputJobInfo = getJobInfo(conf);
    } catch (Exception e) {
        throw new IOException(e);
    }

    List<InputSplit> splits = new ArrayList<InputSplit>();
    List<PartInfo> partitionInfoList = inputJobInfo.getPartitions();
    if (partitionInfoList == null) {
        // No partitions match the specified partition filter
        return splits;
    }

    JobConf jobConf = HCatUtil.getJobConfFromContext(jobContext);
    Credentials credentials = jobContext.getCredentials();
    PlanUtil.SplitsInfo splitsInfo = PlanUtil.getSplits(jobConf, credentials);
    return splitsInfo.splits;
}

From source file:com.cloudera.recordservice.mapreduce.RecordServiceInputFormatBase.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    return PlanUtil.getSplits(context.getConfiguration(), context.getCredentials()).splits;
}

From source file:com.cloudera.recordservice.mapreduce.TextInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    PlanUtil.SplitsInfo splits = PlanUtil.getSplits(context.getConfiguration(), context.getCredentials());
    verifyTextSchema(splits.schema);
    return splits.splits;
}

From source file:com.cloudera.sa.hbase.to.hdfs.utils.NMapInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    int count = getNumMapTasks(context.getConfiguration());
    List<InputSplit> splits = new ArrayList<InputSplit>(count);
    for (int i = 0; i < count; i++) {
        splits.add(new NullInputSplit());
    }
    return splits;
}
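
The snippet depends on a getNumMapTasks helper that is not shown. A minimal sketch of how such a pair of helpers is commonly written against the Configuration; the key name nmapinputformat.num.maps is an assumption for illustration:

private static final String NMAPS_KEY = "nmapinputformat.num.maps"; // assumed key

// Reads the requested number of map tasks, defaulting to 1 when unset.
public static int getNumMapTasks(Configuration conf) {
    return conf.getInt(NMAPS_KEY, 1);
}

// Records the requested number of map tasks in the configuration.
public static void setNumMapTasks(Configuration conf, int numTasks) {
    conf.setInt(NMAPS_KEY, numTasks);
}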

From source file:com.cloudera.sqoop.mapreduce.DelegatingOutputFormat.java

License:Apache License

@Override
/** {@inheritDoc} */
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();

    if (null == conf.get(DELEGATE_CLASS_KEY)) {
        throw new IOException("Delegate FieldMapProcessor class is not set.");
    }
}
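
For this check to pass, the delegate processor class must be placed in the configuration before job submission. A hedged sketch, assuming DELEGATE_CLASS_KEY is an accessible constant on DelegatingOutputFormat and MyFieldMapProcessor is a hypothetical implementation:

// Driver-side setup (sketch): register the delegate processor class.
Configuration conf = job.getConfiguration();
conf.set(DelegatingOutputFormat.DELEGATE_CLASS_KEY,
        MyFieldMapProcessor.class.getName()); // hypothetical processor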

From source file:com.cloudera.sqoop.mapreduce.UpdateOutputFormat.java

License:Apache License

@Override
/** {@inheritDoc} */
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    DBConfiguration dbConf = new DBConfiguration(conf);

    // Sanity check all the configuration values we need.
    if (null == conf.get(DBConfiguration.URL_PROPERTY)) {
        throw new IOException("Database connection URL is not set.");
    } else if (null == dbConf.getOutputTableName()) {
        throw new IOException("Table name is not set for export.");
    } else if (null == dbConf.getOutputFieldNames()) {
        throw new IOException("Output field names are null.");
    } else if (null == conf.get(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY)) {
        throw new IOException("Update key column is not set for export.");
    }
}

From source file:com.cloudera.sqoop.shims.Apache22HadoopShim.java

License:Apache License

@Override
public int getJobNumMaps(JobContext job) {
    return job.getConfiguration().getInt(JobContext.NUM_MAPS, 1);
}

From source file:com.cloudera.sqoop.shims.CDH3Shim.java

License:Apache License

@Override
public int getJobNumMaps(JobContext job) {
    return job.getConfiguration().getInt("mapred.map.tasks", 1);
}
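
Taken together, the two shims perform the same lookup against different property names: the Apache 0.22 shim reads the JobContext.NUM_MAPS constant, while the CDH3 shim reads the legacy "mapred.map.tasks" key; both default to 1 when the property is unset.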