Example usage for org.apache.hadoop.mapreduce Reducer subclass-usage

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce Reducer, i.e. real-world classes that subclass Reducer.
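
For orientation, here is a minimal sketch of what such a subclass typically looks like. The class name WordCountReducer and the word-count logic are illustrative only and are not taken from any of the files listed below.

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/** Illustrative reducer: sums the integer counts emitted by a mapper for each key. */
public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    private final IntWritable result = new IntWritable();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();          // accumulate all counts for this key
        }
        result.set(sum);
        context.write(key, result);      // emit (word, total count)
    }
}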

Usage

From source file gov.llnl.ontology.mapreduce.stats.WordSumReducer.java

/**
 * This {@link Reducer} counts the number of times that the {@code key} occurs
 * with each of its {@code value}s.  It emits a tuple of the form:
 *   (key,other_item), co-occurrence_count
 * where the first item is emitted as a single {@link Text} object and the count
 * is an {@link IntWritable} object.
 */
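
A hedged sketch of the behavior that comment describes, assuming the values arrive as co-occurring items of type Text; the actual WordSumReducer's generic types and helper classes are not shown in this excerpt, so the class and variable names below are illustrative.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class CoOccurrenceCountReducer extends Reducer<Text, Text, Text, IntWritable> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // Count how often each item co-occurs with this key.
        Map<String, Integer> counts = new HashMap<String, Integer>();
        for (Text value : values) {
            String item = value.toString();
            Integer seen = counts.get(item);
            counts.put(item, seen == null ? 1 : seen + 1);
        }
        // Emit ("key,other_item", co-occurrence_count) for every distinct item.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            context.write(new Text(key.toString() + "," + entry.getKey()),
                    new IntWritable(entry.getValue()));
        }
    }
}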

From source file gov.va.research.red.ex.hadoop.BioCReducer.java

/**
 * @author doug
 *
 */
public class BioCReducer extends Reducer<Text, MatchedElementWritable, Text, NullWritable> {
    private BioCFactory biocFactory;

From source file gr.ntua.ece.cslab.modissense.queries.clients.mr.GeneralHotIntQueryCombiner.java

/**
 *
 * @author giannis
 */
public class GeneralHotIntQueryCombiner
        extends Reducer<LongWritable, HotnessInterestWritable, LongWritable, HotnessInterestWritable> {
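
Classes like this one are combiners: ordinary Reducer subclasses whose input and output key/value types are identical, so the framework can apply them to partial map output before the shuffle. A hedged sketch of how such a class is typically registered on a job, reusing the illustrative WordCountReducer from the introduction (WordCountMapper and the driver class are hypothetical, not taken from the source above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(WordCountDriver.class);
        job.setMapperClass(WordCountMapper.class);       // hypothetical mapper emitting (word, 1)
        job.setCombinerClass(WordCountReducer.class);    // same class reused as the combiner
        job.setReducerClass(WordCountReducer.class);     // reducer produces the final totals
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

The reuse as a combiner only works because the reducer's input and output types match, exactly the property the combiner classes above exhibit.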

From source file gr.ntua.h2rdf.byteImport.Combiner.java

public class Combiner extends
        Reducer<ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable, ImmutableBytesWritable> {

    private byte[] non = Bytes.toBytes("");

    public void reduce(ImmutableBytesWritable key, Iterable<ImmutableBytesWritable> values, Context context)

From source file gr.ntua.h2rdf.inputFormat.SimpleReducer.java

public class SimpleReducer extends Reducer<ImmutableBytesWritable, Text, Text, Text> {
    protected void reduce(ImmutableBytesWritable c, Iterable<Text> hashes,
            org.apache.hadoop.mapreduce.Reducer<ImmutableBytesWritable, Text, Text, Text>.Context context)
            throws java.io.IOException, InterruptedException {
        for (Text curhash : hashes) {
            context.write(curhash, new Text(""));

From source file gr.ntua.h2rdf.loadTriples.HexaStoreHistogramsReduce.java

public class HexaStoreHistogramsReduce
        extends Reducer<ImmutableBytesWritable, NullWritable, ImmutableBytesWritable, KeyValue> {
    private long[] countJoin, prev;
    private IndexHistogramm hist1, hist2, hist3;
    private boolean first;
    private byte[] lastkey;

From source file gr.ntua.h2rdf.loadTriples.HexaStoreReduce.java

public class HexaStoreReduce
        extends Reducer<ImmutableBytesWritable, NullWritable, ImmutableBytesWritable, KeyValue> {
    private byte[] prev2firstByte, prevfirstByte;
    private ImmutableBytesWritable lastKey;
    private long[] prev2first;
    private long[] prevfirst;

From source file gr.ntua.h2rdf.partialJoin.HbaseJoinBGPReducer.java

public class HbaseJoinBGPReducer extends Reducer<Text, Text, Text, Text> {
    private Text outKey = new Text();
    private Text outValue = new Text("");
    private static Configuration hconf = HBaseConfiguration.create();
    private static HTable table;
    private String joinVars;
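
Reducers such as this one hold external resources (an HBase table) in fields; the usual pattern is to open them in setup() and release them in cleanup(), which run once per reduce task rather than once per reduce() call. A hedged sketch of that lifecycle using the older HTable API seen above (the class name and table name are illustrative, not taken from the source):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class HBaseLookupReducer extends Reducer<Text, Text, Text, Text> {
    private HTable table;   // illustrative: a table consulted during reduce()

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Open the HBase table once per task.
        Configuration hconf = HBaseConfiguration.create(context.getConfiguration());
        table = new HTable(hconf, "example_table");
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        table.close();      // release the connection when the task finishes
    }
}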

From source file gr.ntua.h2rdf.partialJoin.IdReducer.java

public class IdReducer extends Reducer<Text, Text, Text, Text> {
    private Text outKey = new Text();
    private Text outValue = new Text("");

    public void reduce(Text key, Iterable<Text> values, Context context) throws IOException {

From source file gr.ntua.h2rdf.partialJoin.IdReducer1.java

public class IdReducer1 extends Reducer<ImmutableBytesWritable, Text, Text, Text> {
    private Text outKey = new Text();
    private Text outValue = new Text("");

    public void reduce(ImmutableBytesWritable key, Iterable<Text> values, Context context) throws IOException {