Example usage for org.apache.hadoop.mapreduce.Reducer (subclass usage)

Introduction

This page collects usage examples of org.apache.hadoop.mapreduce.Reducer subclasses; each entry shows a short excerpt from the source file it is taken from.
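
As a point of reference before the excerpts, a minimal self-contained Reducer subclass looks roughly like this (a generic word-count-style sum reducer; the class and field names are illustrative and not taken from any of the projects listed below):

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    private final IntWritable result = new IntWritable();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // Sum all values emitted for this key and write a single total.
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        result.set(sum);
        context.write(key, result);
    }
}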

Usage

From source file fi.tkk.ics.hadoop.bam.cli.plugins.VCFSort.java

final class VCFSortReducer
        extends Reducer<LongWritable, VariantContextWritable, NullWritable, VariantContextWritable> {
    @Override
    protected void reduce(LongWritable ignored, Iterable<VariantContextWritable> records,
            Reducer<LongWritable, VariantContextWritable, NullWritable, VariantContextWritable>.Context ctx)
            throws IOException, InterruptedException {
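
The method body is truncated in this excerpt. Given that the key parameter is named ignored and the output key type is NullWritable, the reduce step presumably just forwards each record, relying on the shuffle having already sorted them by the LongWritable key; a sketch of that forwarding loop (an assumption, not verbatim Hadoop-BAM code):

        // Records arrive ordered by the LongWritable sort key; emit them as-is.
        for (final VariantContextWritable record : records)
            ctx.write(NullWritable.get(), record);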

From source file fm.last.darling.mapred.ZohmgCombiner.java

public class ZohmgCombiner extends Reducer<NSpacePoint, IntWritable, NSpacePoint, IntWritable> {
    public void reduce(NSpacePoint k, Iterator<IntWritable> values,
            OutputCollector<NSpacePoint, IntWritable> output, Reporter reporter) throws IOException {
        int sum = 0;
        while (values.hasNext())
            sum += values.next().get();
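
Note that this excerpt (like the ZohmgReducer excerpt below) extends the new-API org.apache.hadoop.mapreduce.Reducer but declares reduce with old-API parameter types (Iterator, OutputCollector, Reporter). A method with that signature does not override Reducer.reduce, so the new-API framework would fall back to the default identity reduce. The new-API form of the same sum would look roughly like this (a sketch, not the project's actual code):

    @Override
    protected void reduce(NSpacePoint key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // Sum all partial counts for this point and emit one total.
        int sum = 0;
        for (IntWritable value : values)
            sum += value.get();
        context.write(key, new IntWritable(sum));
    }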

From source file fm.last.darling.mapred.ZohmgReducer.java

public class ZohmgReducer extends Reducer<NSpacePoint, IntWritable, ImmutableBytesWritable, Put> {
    public void reduce(NSpacePoint point, Iterator<IntWritable> values,
            OutputCollector<ImmutableBytesWritable, Put> output, Reporter reporter) throws IOException {
        // sum the values.
        int sum = 0;
        while (values.hasNext())

From source file fr.ens.biologie.genomique.eoulsan.modules.expression.hadoop.HTSeqCountReducer.java

/**
 * Reducer for expression estimation with htseq-count.
 * @since 1.2
 * @author Claire Wallon
 */
public class HTSeqCountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

From source file fr.ens.biologie.genomique.eoulsan.modules.expression.hadoop.PreTreatmentExpressionReducer.java

/**
 * This class defines a reducer for the pretreatment of paired-end data before
 * the expression estimation step.
 * @since 1.2
 * @author Claire Wallon
 */

From source file fr.ens.biologie.genomique.eoulsan.modules.mapping.hadoop.PreTreatmentReducer.java

/**
 * This class defines a reducer for the pretreatment of paired-end data before
 * the reads filtering step.
 * @since 1.2
 * @author Claire Wallon
 */

From source file fr.ens.biologie.genomique.eoulsan.modules.mapping.hadoop.SAMFilterReducer.java

/**
 * This class defines a reducer for alignment filtering.
 * @since 1.0
 * @author Laurent Jourdren
 */
public class SAMFilterReducer extends Reducer<Text, Text, Text, Text> {

From source file full_MapReduce.AttributeInfoReducer.java

public class AttributeInfoReducer extends Reducer<Text, AttributeCounterWritable, Text, MapWritable> {

    public void reduce(Text key, Iterable<AttributeCounterWritable> values, Context context)
            throws IOException, InterruptedException {
        MapWritable res = new MapWritable();
        Text value;

From source file full_MapReduce.FindBestAttributeReducer.java

public class FindBestAttributeReducer
        extends Reducer<NullWritable, AttributeGainRatioWritable, NullWritable, Text> {

    public void reduce(NullWritable key, Iterable<AttributeGainRatioWritable> values, Context context)
            throws IOException, InterruptedException {
        int nb_attributes_left = -1;

From source file full_MapReduce.SummarizeReducer.java

public class SummarizeReducer extends Reducer<TextArrayWritable, IntWritable, TextArrayWritable, IntWritable> {

    public void reduce(TextArrayWritable key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {

        int count = 0;