Example usage for org.apache.hadoop.mapreduce Mapper run

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.Mapper.run, collected from open-source projects.

Prototype

public void run(Context context) throws IOException, InterruptedException 
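
For reference, the default implementation simply iterates over the input split, invoking map() once per key/value pair; in current Hadoop releases it is essentially:

public void run(Context context) throws IOException, InterruptedException {
    setup(context);
    try {
        while (context.nextKeyValue()) {
            map(context.getCurrentKey(), context.getCurrentValue(), context);
        }
    } finally {
        cleanup(context);
    }
}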

Document

Expert users can override this method for more complete control over the execution of the Mapper.
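
A minimal sketch of such an override, assuming nothing beyond the stock Mapper API: the hypothetical TimedMapper below reproduces the default record loop while tallying record count and wall-clock time into task counters. The class name and counter strings are illustrative, not taken from any project on this page.

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class TimedMapper extends Mapper<LongWritable, Text, LongWritable, Text> {

    @Override
    public void run(Context context) throws IOException, InterruptedException {
        setup(context);
        long start = System.currentTimeMillis();
        long records = 0;
        try {
            // Same record loop as the default run(), plus bookkeeping.
            while (context.nextKeyValue()) {
                map(context.getCurrentKey(), context.getCurrentValue(), context);
                records++;
            }
        } finally {
            // Illustrative counter group/names; they show up in the job UI.
            context.getCounter("TimedMapper", "RECORDS").increment(records);
            context.getCounter("TimedMapper", "MILLIS")
                    .increment(System.currentTimeMillis() - start);
            cleanup(context);
        }
    }
}

Because run() owns the whole record loop, this same hook is what Hadoop's own MultithreadedMapper overrides to fan records out to several worker threads.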

Usage

From source file: org.apache.tez.mapreduce.processor.map.MapProcessor.java

License: Apache License

Here Tez instantiates the job's mapper class via reflection, adapts its own input/output to the mapreduce RecordReader/RecordWriter interfaces, builds a Mapper.Context through WrappedMapper, and then hands control to mapper.run():

private void runNewMapper(final JobConf job, MRTaskReporter reporter, final MRInputLegacy in,
        KeyValueWriter out) throws IOException, InterruptedException {

    // Initialize input in-line since it sets parameters which may be used by the processor.
    // Done only for MRInput.
    // TODO use new method in MRInput to get required info
    //in.initialize(job, master);

    // make a task context so we can get the classes
    org.apache.hadoop.mapreduce.TaskAttemptContext taskContext = getTaskAttemptContext();

    // make a mapper
    org.apache.hadoop.mapreduce.Mapper mapper;
    try {
        mapper = (org.apache.hadoop.mapreduce.Mapper) ReflectionUtils.newInstance(taskContext.getMapperClass(),
                job);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }

    org.apache.hadoop.mapreduce.RecordReader input = new NewRecordReader(in);

    org.apache.hadoop.mapreduce.RecordWriter output = new NewOutputCollector(out);

    org.apache.hadoop.mapreduce.InputSplit split = in.getNewInputSplit();

    updateJobWithSplit(job, split);

    org.apache.hadoop.mapreduce.MapContext mapContext = new MapContextImpl(job, taskAttemptId, input, output,
            committer, processorContext, split, reporter);

    org.apache.hadoop.mapreduce.Mapper.Context mapperContext = new WrappedMapper().getMapContext(mapContext);

    input.initialize(split, mapperContext);
    mapper.run(mapperContext);
    // Set progress to 1.0f if there was no exception.
    reporter.setProgress(1.0f);

    this.statusUpdate();
    input.close();
    output.close(mapperContext);
}

From source file: org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2MapTask.java

License: Open Source License

GridGain's map task resolves the native InputSplit, creates and initializes the RecordReader, instantiates the mapper via reflection, and runs it inside a WrappedMapper context; output is committed on success and aborted if any exception escapes:

/** {@inheritDoc} */
@SuppressWarnings({ "ConstantConditions", "unchecked" })
@Override
public void run0(GridHadoopV2TaskContext taskCtx) throws GridException {
    GridHadoopInputSplit split = info().inputSplit();

    InputSplit nativeSplit;

    if (split instanceof GridHadoopFileBlock) {
        GridHadoopFileBlock block = (GridHadoopFileBlock) split;

        nativeSplit = new FileSplit(new Path(block.file().toString()), block.start(), block.length(), null);
    } else
        nativeSplit = (InputSplit) taskCtx.getNativeSplit(split);

    assert nativeSplit != null;

    OutputFormat outputFormat = null;
    Exception err = null;

    JobContextImpl jobCtx = taskCtx.jobContext();

    try {
        InputFormat inFormat = ReflectionUtils.newInstance(jobCtx.getInputFormatClass(),
                hadoopContext().getConfiguration());

        RecordReader reader = inFormat.createRecordReader(nativeSplit, hadoopContext());

        reader.initialize(nativeSplit, hadoopContext());

        hadoopContext().reader(reader);

        GridHadoopJobInfo jobInfo = taskCtx.job().info();

        outputFormat = jobInfo.hasCombiner() || jobInfo.hasReducer() ? null : prepareWriter(jobCtx);

        Mapper mapper = ReflectionUtils.newInstance(jobCtx.getMapperClass(),
                hadoopContext().getConfiguration());

        try {
            mapper.run(new WrappedMapper().getMapContext(hadoopContext()));
        } finally {
            closeWriter();
        }

        commit(outputFormat);
    } catch (InterruptedException e) {
        err = e;

        Thread.currentThread().interrupt();

        throw new GridInterruptedException(e);
    } catch (Exception e) {
        err = e;

        throw new GridException(e);
    } finally {
        if (err != null)
            abort(outputFormat);
    }
}

From source file: org.warcbase.mapreduce.lib.Chain.java

License: Apache License

This chained-mapper helper wraps the enclosing task's context in chain-specific reader/writer adapters so that each mapper in the chain can be driven through its own run() call:

@SuppressWarnings("unchecked")
void runMapper(TaskInputOutputContext context, int index) throws IOException, InterruptedException {
    Mapper mapper = mappers.get(index);
    RecordReader rr = new ChainRecordReader(context);
    RecordWriter rw = new ChainRecordWriter(context);
    Mapper.Context mapperContext = createMapContext(rr, rw, context, getConf(index));
    mapper.run(mapperContext);
    rr.close();
    rw.close(context);
}