Example usage for org.apache.hadoop.mapreduce Mapper subclass-usage

A list of usage examples for subclasses of org.apache.hadoop.mapreduce Mapper.

Introduction

On this page you can find example usages of the org.apache.hadoop.mapreduce Mapper class, showing how real projects subclass it.

Usage

From source file com.sreejithpillai.excel.mapreduce.ExcelMapper.java

public class ExcelMapper extends Mapper<LongWritable, Text, Text, Text> {

    private static Logger LOG = LoggerFactory.getLogger(ExcelMapper.class);

    /**
     * Excel Spreadsheet is supplied in string form to the mapper.

From source file com.sreejithpillai.hbase.bulkload.HBaseMapper.java

public class HBaseMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> {
    final static byte[] COL_FAMILY = "bookFamily".getBytes();

    List<String> columnList = new ArrayList<String>();
    ParseXml parseXml = new ParseXml();
    ImmutableBytesWritable hKey = new ImmutableBytesWritable();

From source file com.streamsets.pipeline.hadoop.PipelineMapper.java

public class PipelineMapper extends Mapper {
    private static final Logger LOG = LoggerFactory.getLogger(PipelineMapper.class);

    @Override
    protected void setup(Mapper.Context context) throws IOException, InterruptedException {
        throw new UnsupportedOperationException();

From source file com.streamsets.pipeline.stage.destination.mapreduce.jobtype.avroconvert.AvroConversionBaseMapper.java

public abstract class AvroConversionBaseMapper extends Mapper<String, String, NullWritable, NullWritable> {

    private static final Logger LOG = LoggerFactory.getLogger(AvroConversionBaseMapper.class);

    public enum Counters {
        PROCESSED_RECORDS

From source file com.sudarmuthu.hadoop.countwords.CountWordsMapper.java

/**
 * Mapper Class
 * 
 */
public class CountWordsMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

From source file com.synerzip.analytics.commoncrawl.googleads.counter.GoogleAdsCounterMapper.java

/**
 * Mapper for emitting Google Ads Information in HTML Pages from Common Crawl
 * Warc files
 * 
 * @author Rohit Ghatol
 *

From source file com.talis.hadoop.rdf.collation.CollationMapper.java

public class CollationMapper extends Mapper<LongWritable, QuadWritable, Text, QuadWritable> {

    private static final Logger LOG = LoggerFactory.getLogger(CollationMapper.class);

    @Override
    public void map(LongWritable key, QuadWritable value, Context context)

From source file com.talis.labs.pagerank.mapreduce.CheckConvergenceMapper.java

public class CheckConvergenceMapper extends Mapper<LongWritable, Text, Text, DoubleWritable> {

    private Text KEY_NAME = new Text("tolerance");

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

From source file com.talis.labs.pagerank.mapreduce.CheckingDataMapper.java

public class CheckingDataMapper extends Mapper<LongWritable, Text, Text, Text> {

    public static final Text NONE = new Text("dangling");

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

From source file com.talis.labs.pagerank.mapreduce.CountPagesMapper.java

public class CountPagesMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

    private Text KEY_NAME = new Text("count");
    private LongWritable ONE = new LongWritable(1);

    @Override