Example usage for org.apache.hadoop.mapreduce Reducer Reducer

Introduction

This page lists example usages of the org.apache.hadoop.mapreduce Reducer constructor, Reducer().

Prototype

Reducer()
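
The no-argument constructor builds the base Reducer class itself. Its default reduce() writes each input value back out under the same key, so a plain new Reducer() acts as an identity reducer. A minimal illustrative sketch (the Text/IntWritable type parameters are arbitrary, not taken from any example below):

// The base class is a usable identity reducer: every (key, value)
// pair it receives is emitted unchanged.
Reducer<Text, IntWritable, Text, IntWritable> identity = new Reducer<>();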

Usage

From source file: com.asakusafw.runtime.compatibility.hadoop1.JobCompatibilityHadoop1.java

License: Apache License

@Override
public <KEYIN, VALUEIN, KEYOUT, VALUEOUT> Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context newReducerContext(
        Configuration configuration, TaskAttemptID id, RawKeyValueIterator reader, Class<KEYIN> inputKeyClass,
        Class<VALUEIN> inputValueClass, RecordWriter<KEYOUT, VALUEOUT> writer, OutputCommitter committer,
        RawComparator<KEYIN> comparator) throws IOException, InterruptedException {
    Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> reducer = new Reducer<>();
    StatusReporter reporter = new MockStatusReporter();
    return reducer.new Context(configuration, id, reader, reporter.getCounter("asakusafw", "inputKey"), //$NON-NLS-1$ //$NON-NLS-2$
            reporter.getCounter("asakusafw", "inputValue"), //$NON-NLS-1$ //$NON-NLS-2$
            writer, committer, reporter, comparator, inputKeyClass, inputValueClass);
}
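
Context is a non-static inner class of Reducer, hence the qualified reducer.new Context(...) instantiation syntax. This direct construction only works on Hadoop 1, where Reducer.Context is a concrete class; it became abstract in Hadoop 2, which is what this compatibility shim papers over.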

From source file: com.avira.couchdoop.AppTest.java

License: Apache License

@Before
public void setUp() {
    // Setup mapper.
    Mapper<LongWritable, Text, LongWritable, Text> mapper = new Mapper<LongWritable, Text, LongWritable, Text>();
    mapDriver = MapDriver.newMapDriver(mapper);

    // Setup combiner.
    Reducer<LongWritable, Text, LongWritable, Text> combiner = new Reducer<LongWritable, Text, LongWritable, Text>();
    combineDriver = ReduceDriver.newReduceDriver(combiner);

    // Setup reducer.
    Reducer<LongWritable, Text, LongWritable, Text> reducer = new Reducer<LongWritable, Text, LongWritable, Text>();
    reduceDriver = ReduceDriver.newReduceDriver(reducer);

    // Setup MapReduce job.
    mrDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer, combiner);
}
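
The drivers above are only configured; an actual test feeds them input and asserts on output. A hypothetical test method (not part of AppTest, assuming JUnit's @Test and java.util.Arrays are imported) exercising reduceDriver, which works because the no-op Reducer is the identity reducer:

// Hypothetical test: the default Reducer passes every value through,
// so each input value reappears in the output under the same key.
@Test
public void reducerPassesValuesThrough() throws IOException {
    reduceDriver.withInput(new LongWritable(1L), Arrays.asList(new Text("a"), new Text("b")))
            .withOutput(new LongWritable(1L), new Text("a"))
            .withOutput(new LongWritable(1L), new Text("b"))
            .runTest();
}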

From source file: com.toddbodnar.simpleHive.subQueries.printString.java

@Override
public Reducer getReducer() {
    // The base Reducer is a working identity implementation, so returning
    // it passes each key/value pair through unchanged.
    return new Reducer();
}

From source file: org.schedoscope.export.jdbc.JdbcExportJobMRArrayTest.java

License: Apache License

@Override
@SuppressWarnings("deprecation")
@Before
public void setUp() throws Exception {
    super.setUp();
    JdbcExportMapper mapper = new JdbcExportMapper();
    mapDriver = MapDriver.newMapDriver(mapper);
    mapDriver.setConfiguration(conf);

    Reducer<LongWritable, JdbcOutputWritable, LongWritable, JdbcOutputWritable> reducer = new Reducer<>();
    reduceDriver = ReduceDriver.newReduceDriver(reducer);
    reduceDriver.setConfiguration(conf);

    mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
    mapReduceDriver.setConfiguration(conf);

    setUpHiveServer("src/test/resources/test_array_data.txt", "src/test/resources/test_array.hql",
            "test_array");
}

From source file: org.schedoscope.export.jdbc.JdbcExportJobMRMapTest.java

License: Apache License

@Override
@SuppressWarnings("deprecation")
@Before
public void setUp() throws Exception {
    super.setUp();
    JdbcExportMapper mapper = new JdbcExportMapper();
    mapDriver = MapDriver.newMapDriver(mapper);
    mapDriver.setConfiguration(conf);

    Reducer<LongWritable, JdbcOutputWritable, LongWritable, JdbcOutputWritable> reducer = new Reducer<>();
    reduceDriver = ReduceDriver.newReduceDriver(reducer);
    reduceDriver.setConfiguration(conf);

    mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
    mapReduceDriver.setConfiguration(conf);

    setUpHiveServer("src/test/resources/test_map_data.txt", "src/test/resources/test_map.hql", "test_map");
}

From source file: org.springframework.hadoop.configuration.ConversionTests.java

License: Apache License

@SuppressWarnings({ "rawtypes", "unchecked" })
@Before
public void init() throws Exception {
    context = new StandardEvaluationContext();
    context.setTypeConverter(new StandardTypeConverter(conversionService()));
    parser = new SpelExpressionParser();
    Reducer reducer = new Reducer();
    writer = reducer.new Context(new Configuration(), new TaskAttemptID(),
            Mockito.mock(RawKeyValueIterator.class), null, null, null, null, null, null, Text.class,
            IntWritable.class) {

        private Map<Object, Object> map = new HashMap<Object, Object>();

        @Override
        public void write(Object key, Object value) throws IOException, InterruptedException {
            map.put(key, value);
        }

        @Override
        public String toString() {
            return map.toString();
        }

    };
}
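
Overriding write(...) in the anonymous Context subclass collects the reducer's output pairs in an in-memory map instead of a real RecordWriter, and the overridden toString() lets assertions inspect what was written. The next example uses the same capture trick.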

From source file: org.springframework.hadoop.mapreduce.ExpressionEvaluatingReducerTests.java

License: Apache License

private Reducer<Writable, Writable, Writable, Writable>.Context getContextForWritingToMap(
        final Map<Writable, Writable> map) throws Exception {
    Reducer<Text, IntWritable, Text, IntWritable> dummy = new Reducer<Text, IntWritable, Text, IntWritable>();
    @SuppressWarnings("rawtypes")
    Context other = dummy.new Context(new Configuration(), new TaskAttemptID(),
            Mockito.mock(RawKeyValueIterator.class), null, null, null, null, null, null, Text.class,
            IntWritable.class) {
@Override
        public void write(Text key, IntWritable value) throws IOException, InterruptedException {
            map.put(key, value);
        }
    };
    @SuppressWarnings("unchecked")
    Reducer<Writable, Writable, Writable, Writable>.Context context = other;
    return context;
}

From source file: org.springframework.hadoop.test.GenericConfiguration.java

License: Apache License

@Bean
@Override
public Reducer<?, ?, ?, ?> reducer() {
    return new Reducer<Writable, IntWritable, Writable, Writable>() {
        private IntWritable result = new IntWritable();

        public void reduce(Writable key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    };
}

From source file: org.springframework.hadoop.test.StatelessConfiguration.java

License: Apache License

@Bean
@Override
public Reducer<?, ?, ?, ?> reducer() {
    return new Reducer<Text, IntWritable, Text, IntWritable>() {
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            IntWritable result = new IntWritable(sum);
            context.write(key, result);
        }
    };
}

From source file: org.springframework.hadoop.test.VanillaConfiguration.java

License: Apache License

@Bean
@Override
public Reducer<?, ?, ?, ?> reducer() {
    return new Reducer<Text, IntWritable, Text, IntWritable>() {
        private IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    };
}
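
Of these three Spring configurations, GenericConfiguration and VanillaConfiguration reuse a single IntWritable field across reduce() calls (the common Hadoop idiom, safe because each written value is serialized before the next call mutates it), while StatelessConfiguration allocates a fresh IntWritable per key, trading a little extra garbage for a reducer that holds no mutable state.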