Example usage for org.apache.hadoop.mapreduce Reducer (subclass usage)

Introduction

This page collects usage examples for subclasses of org.apache.hadoop.mapreduce.Reducer. Each entry shows the opening lines of a Reducer (or Combiner) implementation, taken from the named source file.
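
Before the individual examples, here is a minimal, self-contained sketch of the pattern every entry below follows: a Reducer subclass binds four type parameters (input key, input value, output key, output value) and overrides reduce(), which is called once per distinct key with all values grouped under that key. The class name and the per-key summing logic are illustrative only, not drawn from any of the files listed on this page.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    private final IntWritable result = new IntWritable();

    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        // Sum all values grouped under this key and emit one output record.
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        result.set(sum);
        context.write(key, result);
    }
}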

Usage

From source file edu.indiana.d2i.htrc.corpus.wordset.ComposeWordsetReducer.java

public class ComposeWordsetReducer extends Reducer<Text, NullWritable, Text, NullWritable> {

    private NullWritable value = NullWritable.get();

    @Override
    public void reduce(Text key, Iterable<NullWritable> values, Context context)
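
The excerpt cuts off at the reduce signature. A plausible completion, assuming the reducer only deduplicates words: the NullWritable value field above suggests the grouped values carry no information, so each distinct key is emitted exactly once. This body is a sketch, not the original code.

    public void reduce(Text key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        // Grouping by key already removed duplicates; ignore the values
        // and write the word once, reusing the NullWritable field.
        context.write(key, value);
    }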

From source file edu.indiana.d2i.htrc.kmeans.MemKMeansReducer.java

public class MemKMeansReducer extends Reducer<Text, ClusterObservations, Text, Cluster> {
    private Map<String, Cluster> clusterMap;
    private double convergenceDelta;
    private KMeansClusterer clusterer;

    private Text identifier = new Text();
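
Fields such as clusterMap and convergenceDelta are typically initialized in setup(), which Hadoop calls once per task before any reduce() call. A hedged sketch of what that could look like here; the configuration key name is hypothetical, and the clusterer construction is omitted because its arguments are not visible in the excerpt.

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        // "kmeans.convergence.delta" is a hypothetical key; the real job
        // may configure the threshold under a different name.
        convergenceDelta = conf.getFloat("kmeans.convergence.delta", 0.001f);
        clusterMap = new HashMap<String, Cluster>();
        // clusterer = new KMeansClusterer(...); // omitted: constructor args not shown
    }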

From source file edu.indiana.d2i.htrc.skmeans.StreamingKMeansReducer.java

class StreamingKMeansReducer extends Reducer<IntWritable, VectorWritable, Text, StreamingKMeansCluster> {
    private StreamingKMeansAdapter skmeans = null;
    private DistanceMeasure distance = null;

    @Override
    public void reduce(IntWritable key, Iterable<VectorWritable> values, Context context)
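
A common shape for a streaming k-means reducer is to fold each incoming vector into the sketch inside reduce() and emit the resulting clusters from cleanup() once all input has been consumed. The sketch below assumes that shape; cluster() and getClusters() are hypothetical methods on StreamingKMeansAdapter, not confirmed by the excerpt.

    @Override
    public void reduce(IntWritable key, Iterable<VectorWritable> values, Context context)
            throws IOException, InterruptedException {
        for (VectorWritable value : values) {
            skmeans.cluster(value.get()); // hypothetical: fold one vector into the sketch
        }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Hypothetical accessor: emit each surviving cluster once, at the end.
        for (StreamingKMeansCluster cluster : skmeans.getClusters()) {
            context.write(new Text(cluster.toString()), cluster);
        }
    }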

From source file edu.indiana.soic.ts.mapreduce.pwd.SWGReduce.java

public class SWGReduce extends Reducer<LongWritable, SWGWritable, LongWritable, SWGWritable> {
    private static final Logger LOG = LoggerFactory.getLogger(SWGReduce.class);

    public void reduce(LongWritable key, Iterable<SWGWritable> values, Context context) throws IOException {
        long startTime = System.nanoTime();
        Configuration conf = context.getConfiguration();
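
The body opens by recording System.nanoTime(), a common way to log per-call wall time in a reducer. A hedged continuation of that instrumentation pattern, restating the shown opening for readability; the actual pairwise-alignment work over the SWGWritable values is omitted, and the log format is a placeholder.

    public void reduce(LongWritable key, Iterable<SWGWritable> values, Context context) throws IOException {
        long startTime = System.nanoTime();
        Configuration conf = context.getConfiguration();
        // ... the real work over the grouped SWGWritable values happens here
        //     (not shown in the excerpt) ...
        long elapsedMs = (System.nanoTime() - startTime) / 1000000;
        LOG.info("reduce({}) finished in {} ms", key.get(), elapsedMs);
    }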

From source file edu.nyu.vida.data_polygamy.feature_identification.IndexCreationReducer.java

public class IndexCreationReducer extends
        Reducer<AttributeResolutionWritable, SpatioTemporalFloatWritable, AttributeResolutionWritable, TopologyTimeSeriesWritable> {
    //public class IndexCreationReducer extends Reducer<AttributeResolutionWritable, SpatioTemporalFloatWritable, Text, Text> {

    public static FrameworkUtils utils = new FrameworkUtils();
    boolean s3 = true;

From source file edu.nyu.vida.data_polygamy.pre_processing.PreProcessingCombiner.java

/**
 * 
 * @author fchirigati
 *
 */
public class PreProcessingCombiner extends

From source file edu.nyu.vida.data_polygamy.pre_processing.PreProcessingReducer.java

/**
 * 
 * @author fchirigati
 *
 */
public class PreProcessingReducer extends

From source file edu.nyu.vida.data_polygamy.relationship_computation.CorrelationReducer.java

public class CorrelationReducer extends Reducer<PairAttributeWritable, TopologyTimeSeriesWritable, Text, Text> {

    public static FrameworkUtils utils = new FrameworkUtils();

    Configuration conf;
    int dataset1, dataset2, spatial, temporal;

From source file edu.nyu.vida.data_polygamy.scalar_function_computation.AggregationCombiner.java

public class AggregationCombiner extends
        Reducer<SpatioTemporalWritable, AggregationArrayWritable, SpatioTemporalWritable, AggregationArrayWritable> {

    AggregationArrayWritable valueWritable = new AggregationArrayWritable();

    @Override
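
The excerpt ends at an @Override annotation, presumably for reduce(). Because this class is used as a combiner, its input and output types must match (SpatioTemporalWritable keys, AggregationArrayWritable values), since its output feeds the same reduce path again. A sketch of the usual shape of such a reduce(); set() and merge() are hypothetical methods on AggregationArrayWritable standing in for the real combination logic.

    @Override
    public void reduce(SpatioTemporalWritable key, Iterable<AggregationArrayWritable> values,
            Context context) throws IOException, InterruptedException {
        boolean first = true;
        for (AggregationArrayWritable value : values) {
            if (first) {
                // Copy rather than alias: Hadoop reuses the value object
                // across iterations of this loop.
                valueWritable.set(value); // hypothetical copy method
                first = false;
            } else {
                valueWritable.merge(value); // hypothetical: fold one partial aggregate in
            }
        }
        context.write(key, valueWritable); // combiner contract: same types in and out
    }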

From source file edu.nyu.vida.data_polygamy.scalar_function_computation.AggregationReducer.java

public class AggregationReducer extends
        Reducer<SpatioTemporalWritable, AggregationArrayWritable, SpatioTemporalWritable, FloatArrayWritable> {

    public static FrameworkUtils utils = new FrameworkUtils();

    HashMap<Integer, String> idToDataset = new HashMap<Integer, String>();
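
The idToDataset map suggests numeric dataset ids are resolved to names once per task. A hedged sketch of how setup() might populate it from the job configuration; the "datasets" key and the id,name;id,name encoding are assumptions made for illustration.

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        // Hypothetical encoding: "0,taxi;1,weather" -> {0=taxi, 1=weather}.
        for (String entry : conf.get("datasets", "").split(";")) {
            if (entry.isEmpty())
                continue;
            String[] parts = entry.split(",");
            idToDataset.put(Integer.parseInt(parts[0]), parts[1]);
        }
    }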