Example usage for org.apache.hadoop.mapreduce TaskAttemptContext getReducerClass

List of usage examples for org.apache.hadoop.mapreduce TaskAttemptContext getReducerClass

Introduction

In this page you can find the example usage for org.apache.hadoop.mapreduce TaskAttemptContext getReducerClass.

Prototype

public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException;

Source Link

Document

Get the Reducer class for the job.

Usage

From source file: org.apache.tez.mapreduce.processor.reduce.ReduceProcessor.java

License: Apache License

/**
 * Runs the user's new-API ({@code org.apache.hadoop.mapreduce}) Reducer over the
 * ordered, grouped input, writing its output through {@code out}.
 *
 * <p>The reducer class is resolved from the task context via
 * {@code TaskAttemptContext#getReducerClass()} and instantiated reflectively.
 * The raw input iterator is wrapped so that every {@code next()} also reports
 * progress to {@code reporter}.
 *
 * @param job        job configuration used to configure the reflected reducer
 * @param reporter   receives per-record and final progress updates
 * @param input      source of the ordered, grouped key/value pairs
 * @param comparator grouping comparator passed to the reduce context
 * @param keyClass   reduce input key class
 * @param valueClass reduce input value class
 * @param out        sink for the reducer's output records
 * @throws IOException            on input/output failure
 * @throws InterruptedException   if the reduce is interrupted
 * @throws ClassNotFoundException if the configured reducer class cannot be loaded
 * @throws TezException           on Tez framework failure
 */
void runNewReducer(JobConf job, final MRTaskReporter reporter, OrderedGroupedInputLegacy input,
        RawComparator comparator, Class keyClass, Class valueClass, final KeyValueWriter out)
        throws IOException, InterruptedException, ClassNotFoundException, TezException {

    // Make a task context so we can resolve the job's configured Reducer class.
    org.apache.hadoop.mapreduce.TaskAttemptContext taskContext = getTaskAttemptContext();

    // Instantiate the user's Reducer via reflection, configured with the job conf.
    org.apache.hadoop.mapreduce.Reducer reducer = (org.apache.hadoop.mapreduce.Reducer) ReflectionUtils
            .newInstance(taskContext.getReducerClass(), job);

    // Wrap the raw iterator so each advance also pushes progress to the reporter.
    final TezRawKeyValueIterator rawIter = input.getIterator();
    TezRawKeyValueIterator rIter = new TezRawKeyValueIterator() {
        @Override
        public void close() throws IOException {
            rawIter.close();
        }

        @Override
        public DataInputBuffer getKey() throws IOException {
            return rawIter.getKey();
        }

        @Override
        public Progress getProgress() {
            return rawIter.getProgress();
        }

        @Override
        public boolean isSameKey() throws IOException {
            return rawIter.isSameKey();
        }

        @Override
        public DataInputBuffer getValue() throws IOException {
            return rawIter.getValue();
        }

        @Override
        public boolean next() throws IOException {
            boolean ret = rawIter.next();
            // Report the underlying iterator's progress on every record advance.
            reporter.setProgress(rawIter.getProgress().getProgress());
            return ret;
        }
    };

    // Adapt the Tez KeyValueWriter to the mapreduce RecordWriter interface.
    // close() is intentionally a no-op: the underlying writer's lifecycle is
    // managed outside this method.
    org.apache.hadoop.mapreduce.RecordWriter trackedRW = new org.apache.hadoop.mapreduce.RecordWriter() {

        @Override
        public void write(Object key, Object value) throws IOException, InterruptedException {
            out.write(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        }
    };

    org.apache.hadoop.mapreduce.Reducer.Context reducerContext = createReduceContext(reducer, job,
            taskAttemptId, rIter, reduceInputKeyCounter, reduceInputValueCounter, trackedRW, committer,
            reporter, comparator, keyClass, valueClass);

    reducer.run(reducerContext);

    // Set progress to 1.0f only when run() completed without throwing.
    reporter.setProgress(1.0f);

    trackedRW.close(reducerContext);
}