Example usage for the org.apache.hadoop.mapreduce.task.ReduceContextImpl constructor

Introduction

This page collects example usages of the org.apache.hadoop.mapreduce.task.ReduceContextImpl constructor, taken from open-source projects.

Prototype

public ReduceContextImpl(Configuration conf, TaskAttemptID taskid, RawKeyValueIterator input,
            Counter inputKeyCounter, Counter inputValueCounter, RecordWriter<KEYOUT, VALUEOUT> output,
            OutputCommitter committer, StatusReporter reporter, RawComparator<KEYIN> comparator,
            Class<KEYIN> keyClass, Class<VALUEIN> valueClass) throws InterruptedException, IOException 
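
The constructor takes the task configuration and attempt id, a raw iterator over the sorted reduce input, counters for consumed keys and values, the output record writer, an output committer, a status reporter, the grouping comparator, and the input key/value classes. As a minimal sketch of the common pattern (the names conf, attemptId, iterator, writer, committer, reporter, and comparator are placeholders for objects the caller already has), the raw context is normally wrapped via WrappedReducer, as every example below does:

ReduceContext<Text, IntWritable, Text, IntWritable> context =
        new ReduceContextImpl<>(conf, attemptId, iterator,
                reporter.getCounter("example", "inputKey"),   // counts keys consumed
                reporter.getCounter("example", "inputValue"), // counts values consumed
                writer, committer, reporter, comparator,
                Text.class, IntWritable.class);
Reducer<Text, IntWritable, Text, IntWritable>.Context reducerContext =
        new WrappedReducer<Text, IntWritable, Text, IntWritable>()
                .getReducerContext(context);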

Usage

From source file: com.asakusafw.runtime.compatibility.hadoop2.JobCompatibilityHadoop2.java

License: Apache License
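This compatibility layer builds a reducer context for Hadoop 2 using mock counters obtained from a MockStatusReporter, then wraps it in a WrappedReducer before returning it to the caller.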

@Override
public <KEYIN, VALUEIN, KEYOUT, VALUEOUT> Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context newReducerContext(
        Configuration configuration, TaskAttemptID id, RawKeyValueIterator reader, Class<KEYIN> inputKeyClass,
        Class<VALUEIN> inputValueClass, RecordWriter<KEYOUT, VALUEOUT> writer, OutputCommitter committer,
        RawComparator<KEYIN> comparator) throws IOException, InterruptedException {
    StatusReporter reporter = new MockStatusReporter();
    ReduceContext<KEYIN, VALUEIN, KEYOUT, VALUEOUT> context = new ReduceContextImpl<>(configuration, id, reader,
            reporter.getCounter("asakusafw", "inputKey"), //$NON-NLS-1$ //$NON-NLS-2$
            reporter.getCounter("asakusafw", "inputValue"), //$NON-NLS-1$ //$NON-NLS-2$
            writer, committer, reporter, comparator, inputKeyClass, inputValueClass);
    return new WrappedReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>().getReducerContext(context);
}

From source file: edu.uci.ics.hyracks.dataflow.hadoop.util.MRContextUtil.java

License: Apache License
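This utility wraps the raw constructor call in a try/catch so that any checked exception surfaces as a HyracksDataException.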

@SuppressWarnings({ "rawtypes", "unchecked" })
public Reducer.Context createReduceContext(Configuration conf, TaskAttemptID taskid, RawKeyValueIterator input,
        Counter inputKeyCounter, Counter inputValueCounter, RecordWriter output, OutputCommitter committer,
        StatusReporter reporter, RawComparator comparator, Class keyClass, Class valueClass)
        throws HyracksDataException {
    try {
        return new WrappedReducer()
                .getReducerContext(new ReduceContextImpl(conf, taskid, input, inputKeyCounter,
                        inputValueCounter, output, committer, reporter, comparator, keyClass, valueClass));
    } catch (Exception e) {
        throw new HyracksDataException(e);
    }
}

From source file: it.crs4.pydoop.mapreduce.pipes.TestPipeApplication.java

License: Apache License
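This JUnit test drives a PipesReducer through a hand-built reduce context; note that the counter, committer, reporter, and comparator arguments are all passed as null.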

/**
 * Test for org.apache.hadoop.mapreduce.pipes.PipesReducer:
 * verifies the transfer of key and value data.
 *
 * @throws Exception
 */
@Test
public void testPipesReducer() throws Exception {
    System.err.println("testPipesReducer");

    File[] psw = cleanTokenPasswordFile();
    try {
        JobID jobId = new JobID("201408272347", 0);
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID taskAttemptid = new TaskAttemptID(taskId, 0);

        Job job = new Job(new Configuration());
        job.setJobID(jobId);
        Configuration conf = job.getConfiguration();
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptid.toString());
        FileSystem fs = new RawLocalFileSystem();
        fs.setConf(conf);

        File fCommand = getFileCommand("it.crs4.pydoop.mapreduce.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        System.err.println("fCommand" + fCommand.getAbsolutePath());

        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(),
                "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, job.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);

        TestReporter reporter = new TestReporter();
        DummyInputFormat input_format = new DummyInputFormat();
        List<InputSplit> isplits = input_format.getSplits(job);
        InputSplit isplit = isplits.get(0);
        TaskAttemptContextImpl tcontext = new TaskAttemptContextImpl(conf, taskAttemptid);

        RecordWriter<IntWritable, Text> writer = new TestRecordWriter(
                new FileOutputStream(workSpace.getAbsolutePath() + File.separator + "outfile"));

        BooleanWritable bw = new BooleanWritable(true);
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));

        DummyRawKeyValueIterator kvit = new DummyRawKeyValueIterator();

        ReduceContextImpl<BooleanWritable, Text, IntWritable, Text> context = new ReduceContextImpl<BooleanWritable, Text, IntWritable, Text>(
                conf, taskAttemptid, kvit, null, null, writer, null, null, null, BooleanWritable.class,
                Text.class);

        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.setup(context);

        initStdOut(conf);
        reducer.reduce(bw, texts, context);
        reducer.cleanup(context);
        String stdOut = readStdOut(conf);

        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));

    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }

}

From source file: org.apache.tez.mapreduce.combine.MRCombiner.java

License: Apache License
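Tez first adapts its own TezRawKeyValueIterator to Hadoop's RawKeyValueIterator interface, then uses the adapter to construct the combiner's reduce context (with a null committer).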

private static <KEYIN, VALUEIN, KEYOUT, VALUEOUT> org.apache.hadoop.mapreduce.Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context createReduceContext(
        Configuration conf, TaskAttemptID mrTaskAttemptID, final TezRawKeyValueIterator rawIter,
        Counter combineInputKeyCounter, Counter combineInputValueCounter,
        RecordWriter<KEYOUT, VALUEOUT> recordWriter, MRTaskReporter reporter, RawComparator<KEYIN> comparator,
        Class<KEYIN> keyClass, Class<VALUEIN> valClass) throws InterruptedException, IOException {

    RawKeyValueIterator r = new RawKeyValueIterator() {

        @Override
        public boolean next() throws IOException {
            return rawIter.next();
        }

        @Override
        public DataInputBuffer getValue() throws IOException {
            return rawIter.getValue();
        }

        @Override
        public Progress getProgress() {
            return rawIter.getProgress();
        }

        @Override
        public DataInputBuffer getKey() throws IOException {
            return rawIter.getKey();
        }

        @Override
        public void close() throws IOException {
            rawIter.close();
        }
    };

    ReduceContext<KEYIN, VALUEIN, KEYOUT, VALUEOUT> rContext = new ReduceContextImpl<KEYIN, VALUEIN, KEYOUT, VALUEOUT>(
            conf, mrTaskAttemptID, r, combineInputKeyCounter, combineInputValueCounter, recordWriter, null,
            reporter, comparator, keyClass, valClass);

    org.apache.hadoop.mapreduce.Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context reducerContext = new WrappedReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>()
            .getReducerContext(rContext);
    return reducerContext;
}

From source file: org.apache.tez.mapreduce.processor.MRTask.java

License: Apache License
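The same adapter pattern as in MRCombiner, here with fully qualified Hadoop types and a debug log of the key and value classes in use.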

protected static <INKEY, INVALUE, OUTKEY, OUTVALUE> org.apache.hadoop.mapreduce.Reducer<INKEY, INVALUE, OUTKEY, OUTVALUE>.Context createReduceContext(
        org.apache.hadoop.mapreduce.Reducer<INKEY, INVALUE, OUTKEY, OUTVALUE> reducer, Configuration job,
        TaskAttemptID taskId, final TezRawKeyValueIterator rIter,
        org.apache.hadoop.mapreduce.Counter inputKeyCounter,
        org.apache.hadoop.mapreduce.Counter inputValueCounter,
        org.apache.hadoop.mapreduce.RecordWriter<OUTKEY, OUTVALUE> output,
        org.apache.hadoop.mapreduce.OutputCommitter committer,
        org.apache.hadoop.mapreduce.StatusReporter reporter, RawComparator<INKEY> comparator,
        Class<INKEY> keyClass, Class<INVALUE> valueClass) throws IOException, InterruptedException {
    RawKeyValueIterator r = new RawKeyValueIterator() {

        @Override
        public boolean next() throws IOException {
            return rIter.next();
        }

        @Override
        public DataInputBuffer getValue() throws IOException {
            return rIter.getValue();
        }

        @Override
        public Progress getProgress() {
            return rIter.getProgress();
        }

        @Override
        public DataInputBuffer getKey() throws IOException {
            return rIter.getKey();
        }

        @Override
        public void close() throws IOException {
            rIter.close();
        }
    };
    org.apache.hadoop.mapreduce.ReduceContext<INKEY, INVALUE, OUTKEY, OUTVALUE> reduceContext = new ReduceContextImpl<INKEY, INVALUE, OUTKEY, OUTVALUE>(
            job, taskId, r, inputKeyCounter, inputValueCounter, output, committer, reporter, comparator,
            keyClass, valueClass);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using key class: " + keyClass + ", valueClass: " + valueClass);
    }

    org.apache.hadoop.mapreduce.Reducer<INKEY, INVALUE, OUTKEY, OUTVALUE>.Context reducerContext = new WrappedReducer<INKEY, INVALUE, OUTKEY, OUTVALUE>()
            .getReducerContext(reduceContext);

    return reducerContext;
}