Example usage for org.apache.hadoop.mapreduce Mapper subclasses

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.Mapper subclasses, collected from a variety of open-source projects. Each entry quotes the opening lines of the source file in which the subclass is defined.
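All of the excerpts below follow the same basic shape: extend Mapper with four type parameters (input key, input value, output key, output value), override map(), and emit results through the Context. As a minimal, self-contained sketch of that pattern (not taken from any of the listed sources; the class name WordLengthMapper is made up for illustration):

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordLengthMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private final Text word = new Text();
    private final IntWritable length = new IntWritable();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Tokenize the input line and emit a (token, token length) pair per word.
        for (String token : value.toString().split("\\s+")) {
            if (!token.isEmpty()) {
                word.set(token);
                length.set(token.length());
                context.write(word, length);
            }
        }
    }
}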

Usage

From source file com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaXYMapper.java

public class SigmaXYMapper<K> extends Mapper<K, Text, Text, DoubleWritable> {
    protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
        // TODO Auto-generated method stub
        String[] values = value.toString().split("\t");

        double x = Double.parseDouble(values[0]);

From source file com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SortMapper.java

public class SortMapper<K> extends Mapper<K, Text, DoubleWritable, DoubleWritable> {
    protected void map(K key, Text value, Context context) throws IOException {
        // TODO Auto-generated method stub
        double doubleValue = Double.parseDouble(value.toString());
        try {
            context.write(new DoubleWritable(doubleValue), new DoubleWritable(doubleValue));

From source file com.zinnia.nectar.regression.hadoop.primitive.mapreduce.YDiffMapper.java

public class YDiffMapper<K, V> extends Mapper<K, V, Text, DoubleWritable> {
    private String[] paramValues;

    protected void map(K key, V value, Context context) throws IOException, InterruptedException {
        // TODO Auto-generated method stub
        String columnValues[] = value.toString().split("\t");

From source file com.zinnia.nectar.util.hadoop.IdentityMapper.java

public class IdentityMapper<K, V> extends Mapper<K, V, K, V> {

    @Override
    protected void map(K key, V value, Context context) throws IOException, InterruptedException {
        context.write(key, value);
    }
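Because IdentityMapper simply forwards each record unchanged, it is typically used in jobs whose real work happens in the reducer. A minimal driver sketch showing how it might be wired into a Job (the driver class, paths, and input format choice here are illustrative, not from the Nectar sources):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class IdentityMapperDriver {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "identity-map example");
        job.setJarByClass(IdentityMapperDriver.class);

        // KeyValueTextInputFormat makes both K and V of IdentityMapper Text.
        job.setInputFormatClass(KeyValueTextInputFormat.class);
        job.setMapperClass(IdentityMapper.class);
        // A real job would also call job.setReducerClass(...) here; records
        // pass through the mapper untouched.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}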

From source file com.zqh.hadoop.mr.Financial.HighLowDayMapper.java

/**
 * Hadoop MapReduce example showing high and low for a day across all stock symbols
 * 
 */
public class HighLowDayMapper extends Mapper<LongWritable, Text, Text, DoubleWritable> {
    /**
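The excerpt cuts off before the map method. Given the class description, the mapper presumably keys each price record by its trading date so the reducer can find that day's high and low across all symbols. A rough sketch under an assumed comma-separated layout of exchange,symbol,date,open,high,low,close,volume (the real input format is not shown in the excerpt):

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Column positions below are assumptions about the input file.
        String[] fields = value.toString().split(",");
        String date = fields[2];
        double high = Double.parseDouble(fields[4]);
        double low = Double.parseDouble(fields[5]);

        // Key by trading date so the reducer sees every symbol's prices for
        // that day and can keep the overall high and low.
        context.write(new Text(date), new DoubleWritable(high));
        context.write(new Text(date), new DoubleWritable(low));
    }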

From source file com.zqh.hadoop.mr.Financial.HighLowStockMapper.java

/**
 * Hadoop MapReduce example showing high and low for a stock symbol
 *
 */
public class HighLowStockMapper extends Mapper<LongWritable, Text, Text, DoubleWritable> {
    /**

From source file com.zqh.hadoop.mr.Financial.HighLowWritableMapper.java

/**
 * Hadoop MapReduce example showing a custom Writable
 *
 */
public class HighLowWritableMapper extends Mapper<LongWritable, Text, Text, StockWritable> {
    /**
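This mapper emits a custom StockWritable rather than a built-in value type. The excerpt does not show that class; a minimal sketch of what such a custom Writable generally looks like (the field names here are assumptions) implements write() and readFields() so that fields are serialized and deserialized in the same order:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Illustrative only: the real StockWritable's fields are not shown above.
public class StockWritable implements Writable {

    private String symbol = "";
    private double high;
    private double low;

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(symbol);
        out.writeDouble(high);
        out.writeDouble(low);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Must read back in exactly the order written above.
        symbol = in.readUTF();
        high = in.readDouble();
        low = in.readDouble();
    }

    @Override
    public String toString() {
        return symbol + "\t" + high + "\t" + low;
    }
}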

From source file cosmos.mapred.MediawikiMapper.java

/**
 * 
 */
public class MediawikiMapper extends Mapper<LongWritable, Text, Text, Mutation> {
    private static final Text tableName = new Text("sortswiki");
    private static final Text empty = new Text("");

From source file crunch.MaxTemperature.java

public class MaxTemperatureMapper
  extends Mapper<LongWritable, Text, Text, IntWritable> { // XXX extends Mapper<I1, I2, O1, O2>

  private static final int MISSING = 9999;
  
  @Override
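This is the MaxTemperatureMapper familiar from Hadoop: The Definitive Guide; the excerpt stops at the @Override. A typical completion of the map method, reconstructed from the well-known NCDC fixed-width record layout rather than copied from this exact source file:

  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    String line = value.toString();
    String year = line.substring(15, 19);

    // Temperatures are fixed-width; a leading '+' sign is skipped.
    int airTemperature;
    if (line.charAt(87) == '+') {
      airTemperature = Integer.parseInt(line.substring(88, 92));
    } else {
      airTemperature = Integer.parseInt(line.substring(87, 92));
    }
    String quality = line.substring(92, 93);

    // Drop missing readings and records that fail the quality-code check.
    if (airTemperature != MISSING && quality.matches("[01459]")) {
      context.write(new Text(year), new IntWritable(airTemperature));
    }
  }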

From source file DAAL.CovarianceDenseStep1Mapper.java

public class CovarianceDenseStep1Mapper extends Mapper<Object, Text, IntWritable, WriteableData> {

    private static final int nFeatures = 10;
    private static final int nVectorsInBlock = 50;

    /* Index is supposed to be a sequence number for the split */