Example usage for org.apache.hadoop.mapreduce Mapper subclass-usage

Introduction

On this page you can find examples of subclassing org.apache.hadoop.mapreduce.Mapper, collected from a variety of real projects.
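
To set the stage, here is a minimal, self-contained sketch of the pattern every example below follows: extend Mapper with four type parameters (input key, input value, output key, output value) and override map(), which is called once per input record and writes key/value pairs through the Context. The class name and the emitted values are illustrative only, not taken from any of the projects listed below.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Illustrative skeleton: the input key is the byte offset of a line, the input
// value is the line itself; the mapper emits each line with its length in bytes.
public class ExampleMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    @Override
    protected void map(LongWritable offset, Text line, Context context)
            throws IOException, InterruptedException {
        context.write(line, new IntWritable(line.getLength()));
    }
}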

Usage

From source file com.yassergonzalez.pagerank.PageRankMatrixMapper.java

public class PageRankMatrixMapper extends Mapper<LongWritable, Text, ShortArrayWritable, ShortArrayWritable> {

    @Override
    public void map(LongWritable inKey, Text inValue, Context context) throws IOException, InterruptedException {

        // This task gets a line from links-simple-sorted.txt that contains the
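
The comment above is cut off in this excerpt. The mapper reads lines of links-simple-sorted.txt; assuming the conventional "source: target1 target2 ..." layout of that file (an assumption, since the rest of the class is not shown), the parsing step inside map() might look roughly like this before the link IDs are packed into ShortArrayWritable blocks:

        // Hypothetical parsing of one "source: target1 target2 ..." line.
        String line = inValue.toString().trim();
        int colon = line.indexOf(':');
        int source = Integer.parseInt(line.substring(0, colon).trim());
        String rest = line.substring(colon + 1).trim();
        String[] targets = rest.isEmpty() ? new String[0] : rest.split("\\s+");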

From source file com.yassergonzalez.pagerank.PageRankTopNMapper.java

public class PageRankTopNMapper extends Mapper<ShortWritable, FloatArrayWritable, FloatWritable, IntWritable> {

    // TODO: Create base classes TopN{Mapper,Reducer} to avoid duplicate
    // code in {PageRank,InLinks}TopN{Mapper,Reducer}.

    private PriorityQueue<Map.Entry<Float, Integer>> topN;
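
The topN field above points at a common mapper-side pattern: keep a bounded priority queue of the N best entries seen locally and emit them only in cleanup(), so each mapper sends at most N records to the reducer. A generic sketch of that pattern follows, using simplified, hypothetical input (lines of "pageId score") rather than the ShortWritable/FloatArrayWritable blocks the real class consumes:

import java.io.IOException;
import java.util.AbstractMap;
import java.util.Map;
import java.util.PriorityQueue;

import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical simplified top-N mapper; the real PageRankTopNMapper works on
// packed score blocks, but the queue handling is the same idea.
public class TopNMapperSketch extends Mapper<LongWritable, Text, FloatWritable, IntWritable> {

    private static final int N = 100;
    private PriorityQueue<Map.Entry<Float, Integer>> topN;

    @Override
    protected void setup(Context context) {
        // Smallest score at the head, so evicting when the queue is full is cheap.
        topN = new PriorityQueue<Map.Entry<Float, Integer>>(N, Map.Entry.comparingByKey());
    }

    @Override
    protected void map(LongWritable key, Text value, Context context) {
        String[] fields = value.toString().split("\\s+");
        int pageId = Integer.parseInt(fields[0]);
        float score = Float.parseFloat(fields[1]);
        topN.add(new AbstractMap.SimpleEntry<Float, Integer>(score, pageId));
        if (topN.size() > N) {
            topN.poll();  // drop the current minimum
        }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Emit only the local top N; a single reducer merges the per-mapper lists.
        for (Map.Entry<Float, Integer> entry : topN) {
            context.write(new FloatWritable(entry.getKey()), new IntWritable(entry.getValue()));
        }
    }
}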

From source file com.yosanai.tutorial.hadoop.hellohadoop.WordCountMapper.java

/**
 * @author Saravana P Shanmugam
 * 
 */
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
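
Only the class declaration survives in this excerpt. The body of such a word-count mapper is usually the classic one below (a sketch, not necessarily this author's exact code; it additionally needs java.util.StringTokenizer and the standard org.apache.hadoop.io imports):

    private final static IntWritable one = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        StringTokenizer tokenizer = new StringTokenizer(value.toString());
        while (tokenizer.hasMoreTokens()) {
            word.set(tokenizer.nextToken());
            context.write(word, one);  // emit (word, 1); the reducer sums the ones
        }
    }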

From source file com.yourcompany.hadoop.mapreduce.aggregate.UnionMapper.java

/**
 * Sample Mapper
 *
 * @author Edward KIM
 * @version 0.1
 */
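
The excerpt shows only the Javadoc. Going purely by the class name (an assumption, since no code is shown), a union mapper typically just forwards every input record unchanged so that several input directories can be concatenated into one output; an identity-style sketch:

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical pass-through mapper: each line becomes a key with no value.
public class UnionMapperSketch extends Mapper<LongWritable, Text, Text, NullWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        context.write(value, NullWritable.get());
    }
}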

From source file com.yourcompany.hadoop.mapreduce.KoreanWordcountMapper.java

/**
 * Korean Wordcount Mapper
 *
 * @author Edward KIM
 * @version 0.1
 */
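
Here, too, only the Javadoc survives. One detail worth sketching for a word-count mapper of this kind is how per-job settings reach it: setup() reads them from the job Configuration. The fragment below assumes the usual LongWritable/Text input types and standard imports; the parameter name and the whitespace tokenizer are placeholders, since the real class presumably delegates tokenization to a Korean morphological analyzer.

    // Hypothetical: read a minimum token length in setup() and filter in map().
    private int minLength;

    @Override
    protected void setup(Context context) {
        minLength = context.getConfiguration().getInt("wordcount.min.token.length", 1);
    }

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        for (String token : value.toString().split("\\s+")) {  // placeholder tokenizer
            if (token.length() >= minLength) {
                context.write(new Text(token), new IntWritable(1));
            }
        }
    }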

From source file com.yourcompany.hadoop.mapreduce.lexical.LexicalAnalyzerMapper.java

/**
 * Lexical Analyzer Mapper
 *
 * @author Edward KIM
 * @version 0.1
 */

From source file com.yourcompany.hadoop.mapreduce.WordCount.KoreanWordcountMapper.java

/**
 * Korean Wordcount Mapper
 *
 * @author Edward KIM
 * @version 0.1
 */

From source file com.zinnia.nectar.regression.hadoop.primitive.mapreduce.MeanMapper.java

public class MeanMapper<K> extends Mapper<K, Text, Text, DoubleWritable> {
    private int n;

    protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
        // Each input line is expected to hold a single numeric value.
        double doubleValue = Double.parseDouble(value.toString());
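
The excerpt stops right after the parse. A plausible completion (an assumption about code not shown here, with invented output keys) emits each value under a fixed key and counts records in n, so that a reducer can later divide the accumulated sum by the total count:

        context.write(new Text("sum"), new DoubleWritable(doubleValue));
        n++;
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Emit this mapper's record count once, so the reducer can compute sum / n.
        context.write(new Text("n"), new DoubleWritable(n));
    }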

From source file com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaMapper.java

public class SigmaMapper<K> extends Mapper<K, Text, Text, DoubleWritable> {
    protected void map(K key, Text value, Context context) throws IOException {
        // Each input line holds a single numeric value; emit it for summation.
        double doubleValue = Double.parseDouble(value.toString());
        try {
            context.write(new Text("sigmax"), new DoubleWritable(doubleValue));
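
Note that this map() declares only IOException, so the InterruptedException that context.write can throw has to be handled inside the try block the excerpt opens. A plausible continuation:

        } catch (InterruptedException e) {
            // context.write may be interrupted; rethrow (or log) since this
            // method signature does not declare InterruptedException.
            throw new IOException(e);
        }
    }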

From source file com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaSqMapper.java

public class SigmaSqMapper<K> extends Mapper<K, Text, Text, DoubleWritable> {
    protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
        // Parse the value, square it, and emit the square under a fixed key.
        double doubleValue = Double.parseDouble(value.toString());
        double square = doubleValue * doubleValue;
        context.write(new Text("SigmaxSquare"), new DoubleWritable(square));
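
All of the mappers on this page are wired into a job the same way. A minimal, hypothetical driver for the SigmaSqMapper case is sketched below; the job name, paths, and the absence of an explicit reducer are placeholders, not details taken from the original project:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaSqMapper;

public class SigmaSqDriver {

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "sigma-x-square");
        job.setJarByClass(SigmaSqDriver.class);
        job.setMapperClass(SigmaSqMapper.class);
        // A reducer that sums the DoubleWritable values would be configured here.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}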