Example usage for org.apache.hadoop.mapreduce Reducer (subclass usage)

Introduction

On this page you can find examples of how org.apache.hadoop.mapreduce.Reducer is subclassed in open-source projects.
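
Before the project-specific listings, here is a minimal, self-contained sketch of the pattern every class below follows: extend Reducer with four type parameters (input key, input value, output key, output value), override reduce(), and write results through the Context. The class name and logic are illustrative only and are not taken from any of the listed projects.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    private final IntWritable result = new IntWritable();

    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // Sum every count the mappers emitted for this key.
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        result.set(sum);
        context.write(key, result);
    }
}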

Usage

From source file co.nubetech.hiho.merge.MergeValueReducer.java

public class MergeValueReducer<K, V> extends Reducer<HihoTuple, HihoValue, K, V> {
    @Override
    public void reduce(HihoTuple hihoTuple, Iterable<HihoValue> hihoValues, Context context)
            throws IOException, InterruptedException {
        context.getCounter(MergeRecordCounter.OUTPUT).increment(1L);
        V val = (V) hihoTuple.getKey();
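
The getCounter call above uses Hadoop's enum-based counter API: any enum constant names a counter, and the framework aggregates the increments across all tasks. A hedged sketch of that pattern in isolation follows; the enum below is hypothetical and unrelated to HiHo's MergeRecordCounter.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class CountingReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

    // Hypothetical counter group; any enum constant can be used as a counter name.
    public enum RecordCounter {
        GROUPS_SEEN,
        VALUES_SEEN
    }

    @Override
    public void reduce(Text key, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        context.getCounter(RecordCounter.GROUPS_SEEN).increment(1L);
        long sum = 0;
        for (LongWritable value : values) {
            context.getCounter(RecordCounter.VALUES_SEEN).increment(1L);
            sum += value.get();
        }
        context.write(key, new LongWritable(sum));
    }
}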

From source file co.nubetech.hiho.similarity.ngram.NGramReducer.java

public class NGramReducer extends Reducer<Text, Text, ValuePair, IntWritable> {
    final static Logger logger = Logger.getLogger(co.nubetech.hiho.similarity.ngram.NGramReducer.class);

    @Override
    public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
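
NGramReducer receives, for each n-gram key, the set of inputs containing that n-gram; a common way such a reducer feeds a similarity score is to emit every pair of values that share the key. The sketch below shows that pairwise-emission idea using plain Text output in place of the project's ValuePair type; it illustrates the general technique, not necessarily HiHo's implementation.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class PairEmittingReducer extends Reducer<Text, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);

    @Override
    public void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // Materialize the values first: the reducer's Iterable can only be traversed once.
        List<String> members = new ArrayList<>();
        for (Text value : values) {
            members.add(value.toString());
        }
        // Emit one record per unordered pair of values sharing this n-gram.
        for (int i = 0; i < members.size(); i++) {
            for (int j = i + 1; j < members.size(); j++) {
                context.write(new Text(members.get(i) + "," + members.get(j)), ONE);
            }
        }
    }
}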

From source file co.nubetech.hiho.similarity.ngram.ScoreReducer.java

public class ScoreReducer extends Reducer<ValuePair, IntWritable, ValuePair, LongWritable> {
    final static Logger logger = Logger.getLogger(co.nubetech.hiho.similarity.ngram.ScoreReducer.class);

    @Override
    public void reduce(ValuePair key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {

From source file com.ailk.oci.ocnosql.tools.load.mutiple.MutipleColumnReducer.java

/**
 * Emits sorted Puts.
 * Reads in all Puts from passed Iterator, sorts them, then emits
 * Puts in sorted order.  If lots of columns per row, it will use lots of
 * memory sorting.
 * @see HFileOutputFormat

From source file com.ailk.oci.ocnosql.tools.load.single.SingleColumnReducer.java

/**
 * Emits sorted Puts.
 * Reads in all Puts from passed Iterator, sorts them, then emits
 * Puts in sorted order.  If lots of columns per row, it will use lots of
 * memory sorting.
 * @see HFileOutputFormat
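
Both comments above describe the same buffer-sort-emit technique: collect everything for one key in memory, sort it, then write it out, at the cost of memory when a single row is very wide. The sketch below shows that technique with plain Text values instead of HBase's Put/KeyValue types so it stays self-contained; it illustrates the pattern the comments describe, not the actual MutipleColumnReducer or SingleColumnReducer code.

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class SortingReducer extends Reducer<Text, Text, Text, Text> {

    @Override
    public void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // Buffer all values for this key; a key with very many values uses a lot of memory here.
        List<String> buffered = new ArrayList<>();
        for (Text value : values) {
            buffered.add(value.toString());
        }
        // Sort, then emit in sorted order.
        Collections.sort(buffered);
        for (String value : buffered) {
            context.write(key, new Text(value));
        }
    }
}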

From source file com.alectenharmsel.research.FileCombineReducer.java

public class FileCombineReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
    public void reduce(Text key, Iterable<LongWritable> vals, Context context)
            throws IOException, InterruptedException {
        for (LongWritable tmp : vals) {
            context.write(key, tmp);
        }
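
Every reducer on this page is attached to a job the same way, through Job.setReducerClass. A hedged driver sketch using FileCombineReducer from the listing above follows; the mapper and the command-line paths are placeholders and are not part of the project.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import com.alectenharmsel.research.FileCombineReducer;

public class FileCombineDriver {

    // Placeholder mapper so the sketch compiles: emits each input line with a count of 1,
    // matching FileCombineReducer's Text/LongWritable input types.
    public static class LineMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        private static final LongWritable ONE = new LongWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            context.write(value, ONE);
        }
    }

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "file-combine");
        job.setJarByClass(FileCombineDriver.class);

        job.setMapperClass(LineMapper.class);
        job.setReducerClass(FileCombineReducer.class);

        // Output types; the map output types match these, so no extra calls are needed.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}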

From source file com.alectenharmsel.research.LineCountReducer.java

public class LineCountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
    public void reduce(Text key, Iterable<LongWritable> counts, Context context)
            throws IOException, InterruptedException {
        long total = 0;

        for (LongWritable tmp : counts) {

From source file com.alectenharmsel.research.MoabLicensesReducer.java

public class MoabLicensesReducer extends Reducer<Text, Text, Text, Text> {
    public void reduce(Text key, Iterable<Text> counts, Context context) throws IOException, InterruptedException {
        int sum = 0;
        int num = 0;
        int total = 0;
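
The sum/num/total accumulators above are only shown being declared, so what follows is a guess at the general shape rather than MoabLicensesReducer's actual logic: a hedged sketch of a Text-to-Text reducer that averages numeric values per key using the same kind of accumulators.

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class AverageReducer extends Reducer<Text, Text, Text, Text> {

    @Override
    public void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        int num = 0;
        // Assumes every value is an integer rendered as text.
        for (Text value : values) {
            sum += Integer.parseInt(value.toString());
            num++;
        }
        if (num > 0) {
            context.write(key, new Text(String.valueOf((double) sum / num)));
        }
    }
}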

From source file com.alectenharmsel.research.MoabLogSearchReducer.java

public class MoabLogSearchReducer extends Reducer<LongWritable, Text, LongWritable, Text> {
    public void reduce(LongWritable key, Iterable<Text> counts, Context context)
            throws IOException, InterruptedException {
        for (Text tmp : counts) {
            context.write(key, tmp);
        }

From source file com.alectenharmsel.research.SrcTokReducer.java

public class SrcTokReducer extends Reducer<Text, LongWritable, Text, Text> {
    public void reduce(Text key, Iterable<LongWritable> counts, Context context)
            throws IOException, InterruptedException {
        long sum = 0;
        for (LongWritable tmp : counts) {
            sum += tmp.get();
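
The listing cuts off inside the summation loop. Since SrcTokReducer declares Text as its output value type, the accumulated long has to be converted before it is written; the sketch below shows that shape with the same declared types, as an assumption rather than the project's actual code.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class TokenTotalReducer extends Reducer<Text, LongWritable, Text, Text> {

    @Override
    public void reduce(Text key, Iterable<LongWritable> counts, Context context)
            throws IOException, InterruptedException {
        long sum = 0;
        for (LongWritable tmp : counts) {
            sum += tmp.get();
        }
        // Convert the long total to Text to match the declared output value type.
        context.write(key, new Text(Long.toString(sum)));
    }
}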