Example usage for org.apache.hadoop.mapred JobContextImpl JobContextImpl

List of usage examples for org.apache.hadoop.mapred JobContextImpl JobContextImpl

Introduction

This page shows example usage of org.apache.hadoop.mapred JobContextImpl JobContextImpl.

Prototype

public JobContextImpl(Configuration conf, JobID jobId) 

Source Link

Usage

From source file:org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2Job.java

License:Open Source License

/**
 * Creates a Hadoop v2 job wrapper: builds the Hadoop {@link JobConf}, copies the
 * job properties into it, and sets up the job context and resource manager.
 *
 * @param jobId Job ID.
 * @param jobInfo Job info.
 * @param log Logger.
 */
public GridHadoopV2Job(GridHadoopJobId jobId, final GridHadoopDefaultJobInfo jobInfo, GridLogger log) {
    assert jobId != null;
    assert jobInfo != null;

    this.jobId = jobId;
    this.jobInfo = jobInfo;

    hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId());

    GridHadoopClassLoader clsLdr = (GridHadoopClassLoader) getClass().getClassLoader();

    // JobConf resolves Hadoop classes through the thread context class loader,
    // so it must point at the Hadoop class loader while JobConf is created.
    Thread.currentThread().setContextClassLoader(clsLdr);

    try {
        jobConf = new JobConf();

        GridHadoopFileSystemsUtils.setupFileSystems(jobConf);
    } finally {
        // Restore the context class loader even if JobConf creation or file system
        // setup throws; otherwise the modified loader leaks onto the calling thread.
        Thread.currentThread().setContextClassLoader(null);
    }

    for (Map.Entry<String, String> e : jobInfo.properties().entrySet())
        jobConf.set(e.getKey(), e.getValue());

    jobCtx = new JobContextImpl(jobConf, hadoopJobID);

    rsrcMgr = new GridHadoopV2JobResourceManager(jobId, jobCtx, log);
}

From source file:org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2TaskContext.java

License:Open Source License

/**
 * Creates a context for executing a Hadoop v2 task: deserializes the job
 * configuration from the given input and derives the job context and the
 * new-API mapper/reducer/combiner flags from it.
 *
 * @param taskInfo Task info.
 * @param job Job.
 * @param jobId Job ID.
 * @param locNodeId Local node ID.
 * @param jobConfDataInput DataInput for read JobConf.
 * @throws GridException If the serialized job configuration cannot be read.
 */
public GridHadoopV2TaskContext(GridHadoopTaskInfo taskInfo, GridHadoopJob job, GridHadoopJobId jobId,
        @Nullable UUID locNodeId, DataInput jobConfDataInput) throws GridException {
    super(taskInfo, job);

    this.locNodeId = locNodeId;

    // JobConf construction resolves Hadoop classes via the thread context class loader.
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

    try {
        JobConf conf = new JobConf();

        try {
            conf.readFields(jobConfDataInput);
        }
        catch (IOException e) {
            throw new GridException(e);
        }

        // For map-reduce jobs prefer local writes.
        conf.setBooleanIfUnset(PARAM_GGFS_PREFER_LOCAL_WRITES, true);

        useNewMapper = conf.getUseNewMapper();
        useNewReducer = conf.getUseNewReducer();
        useNewCombiner = conf.getCombinerClass() == null;

        jobCtx = new JobContextImpl(conf, new JobID(jobId.globalId().toString(), jobId.localId()));
    }
    finally {
        // Always put the original (null) context class loader back on the thread.
        Thread.currentThread().setContextClassLoader(null);
    }
}