Example usage for org.apache.hadoop.mapreduce RecordReader: subclass usage

Introduction

On this page you can find usage examples of subclassing org.apache.hadoop.mapreduce.RecordReader, drawn from public source files; each entry shows the file, its Javadoc, and the class declaration.
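
All of the classes below fill in the same abstract contract. As a reference point, here is a minimal sketch of a RecordReader subclass with all six required methods; the single-record "source" and its end bound are hypothetical stand-ins for real split-reading logic.

// A minimal sketch of the contract every subclass on this page implements.
// RecordReader<K, V> is abstract; a subclass must provide these six methods.
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class SkeletonRecordReader extends RecordReader<LongWritable, Text> {
    private final LongWritable key = new LongWritable();
    private final Text value = new Text();
    private long pos;
    private long end = 1; // hypothetical single-record split

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context)
            throws IOException, InterruptedException {
        // Open the underlying resource described by the split here.
    }

    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        if (pos >= end) {
            return false; // no more records in this split
        }
        key.set(pos++);
        value.set("record"); // placeholder payload
        return true;
    }

    @Override
    public LongWritable getCurrentKey() { return key; }

    @Override
    public Text getCurrentValue() { return value; }

    @Override
    public float getProgress() { return end == 0 ? 1.0f : pos / (float) end; }

    @Override
    public void close() throws IOException {
        // Release the underlying resource here.
    }
}

The framework drives the reader by calling nextKeyValue() in a loop, fetching the current pair after each true return.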

Usage

From source file com.google.appengine.tools.mapreduce.DatastoreRecordReader.java

/**
 * DatastoreRecordReader is a RecordReader for the AppEngine Datastore.
 * It is AppEngine compatible by way of implementing Writable.
 */
public class DatastoreRecordReader extends RecordReader<Key, Entity> implements Writable {
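
Note the extra `implements Writable`: App Engine MapReduce serializes readers so that reader state can be persisted and resumed, which plain Hadoop does not require of a RecordReader. A hedged sketch of what that serialization shape looks like, with a hypothetical `position` field standing in for the reader's real cursor state:

// Hedged sketch: why a RecordReader might also implement Writable.
// Serializing a resume point lets the framework checkpoint and restore
// the reader. The `position` field here is hypothetical.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

public class CheckpointableState implements Writable {
    private long position; // hypothetical resume point

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(position); // persisted when the reader is checkpointed
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        position = in.readLong(); // restored when the reader resumes
    }
}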

From source file com.google.appengine.tools.mapreduce.RangeRecordReader.java

/**
 * The record reader class for {@link RangeInputFormat}.
 *
 */
public class RangeRecordReader extends RecordReader<Long, NullWritable> implements Writable {
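
A reader with this signature typically just counts through a numeric range. A hedged sketch of that idea, with hypothetical start/end fields in place of the values that RangeInputFormat's split would supply:

// Hedged sketch of a range reader: each nextKeyValue() advances a counter,
// the current Long is the key, and the value is always NullWritable.
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class SimpleRangeReader extends RecordReader<Long, NullWritable> {
    private long start;     // inclusive; hypothetical split field
    private long end = 10;  // exclusive; hypothetical split field
    private long current;

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) {
        current = start - 1; // a real reader would read start/end from the split
    }

    @Override
    public boolean nextKeyValue() {
        current++;
        return current < end;
    }

    @Override
    public Long getCurrentKey() { return current; }

    @Override
    public NullWritable getCurrentValue() { return NullWritable.get(); }

    @Override
    public float getProgress() {
        return end == start ? 1.0f : (current - start) / (float) (end - start);
    }

    @Override
    public void close() { }
}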

From source file com.google.appengine.tools.mapreduce.StubRecordReader.java

/**
 * Reader that just passes through the values from a {@link StubInputSplit}.
 */
public class StubRecordReader extends RecordReader<IntWritable, IntWritable> implements Writable {
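
Stub readers like this exist for testing: they replay canned values so job plumbing can be exercised without real input. A hedged sketch of the same pattern backed by a plain in-memory list (StubInputSplit's actual API is not shown above, so the split is ignored here):

// Hedged sketch of the stub/test pattern: replay an in-memory list of values.
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class InMemoryStubReader extends RecordReader<IntWritable, IntWritable> {
    private final List<Integer> values = Arrays.asList(1, 2, 3); // canned data
    private int index = -1;

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) { }

    @Override
    public boolean nextKeyValue() {
        index++;
        return index < values.size();
    }

    @Override
    public IntWritable getCurrentKey() { return new IntWritable(index); }

    @Override
    public IntWritable getCurrentValue() { return new IntWritable(values.get(index)); }

    @Override
    public float getProgress() { return (index + 1) / (float) values.size(); }

    @Override
    public void close() { }
}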

From source file com.hadoop.mapreduce.FourMcLineRecordReader.java

/**
 * Reads lines from a 4mc-compressed text file. Treats the key as the offset
 * in the file and the value as the line.
 */
public class FourMcLineRecordReader extends RecordReader<LongWritable, Text> {

From source file com.hadoop.mapreduce.FourMzLineRecordReader.java

/**
 * Reads lines from a 4mz-compressed text file. Treats the key as the offset
 * in the file and the value as the line.
 */
public class FourMzLineRecordReader extends RecordReader<LongWritable, Text> {

From source file com.hadoop.mapreduce.LzoLineRecordReader.java

/**
 * Reads lines from an LZO-compressed text file. Treats the key as the offset
 * in the file and the value as the line.
 */
public class LzoLineRecordReader extends RecordReader<LongWritable, Text> {
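
These three readers share one core loop: decompress the stream, then emit (byte offset, line) pairs. A hedged sketch of that loop using Hadoop's org.apache.hadoop.util.LineReader; opening and seeking the compressed stream itself is elided:

// Hedged sketch of the shared offset-key / line-value loop over an
// already-opened, already-decompressed stream.
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;

public class OffsetLinePump {
    /** Reads lines, emitting the running byte offset as the key. */
    static void pump(InputStream decompressed) throws IOException {
        LineReader in = new LineReader(decompressed);
        LongWritable key = new LongWritable();
        Text value = new Text();
        long pos = 0;
        int size;
        while ((size = in.readLine(value)) > 0) { // 0 means end of stream
            key.set(pos);   // key: offset where this line began
            pos += size;    // advance past line plus newline bytes
            // a real RecordReader would return true here with (key, value)
        }
        in.close();
    }
}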

From source file com.HadoopDemo.inputFormat.db.DBRecordReader.java

/**
 * A RecordReader that reads records from a SQL table.
 * Emits LongWritables containing the record number as
 * key and DBWritables as value.
 */
@InterfaceAudience.Public
public class DBRecordReader<T extends DBWritable> extends RecordReader<LongWritable, T> {
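
The Javadoc describes the whole mechanism: walk a JDBC ResultSet, use the running row count as the key, and let the DBWritable value populate itself from the current row. A hedged sketch of that cursor step, with connection and query setup elided:

// Hedged sketch of a DB-backed nextKeyValue(): the key is the record number,
// the value reads its own columns from the current ResultSet row.
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

public class ResultSetCursor<T extends DBWritable> {
    private final ResultSet results;   // positioned by an elided SQL query
    private final LongWritable key = new LongWritable();
    private long rowCount;

    ResultSetCursor(ResultSet results) {
        this.results = results;
    }

    /** One step of the cursor: false when the rows run out. */
    boolean next(T value) throws IOException {
        try {
            if (!results.next()) {
                return false;
            }
            key.set(rowCount++);       // key is the record number
            value.readFields(results); // value populates itself from the row
            return true;
        } catch (SQLException e) {
            throw new IOException(e);
        }
    }

    LongWritable currentKey() { return key; }
}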

From source file com.ifesdjeen.cascading.cassandra.hadoop.ColumnFamilyRecordReader.java

public class ColumnFamilyRecordReader extends RecordReader<ByteBuffer, SortedMap<ByteBuffer, IColumn>>
        implements org.apache.hadoop.mapred.RecordReader<ByteBuffer, SortedMap<ByteBuffer, IColumn>> {
    private static final Logger logger = LoggerFactory.getLogger(ColumnFamilyRecordReader.class);

    public static final int CASSANDRA_HADOOP_MAX_KEY_SIZE_DEFAULT = 8192;
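
This class is unusual in that it serves both Hadoop APIs at once: it extends the new-style (mapreduce) abstract class and implements the old-style (mapred) interface. A hedged sketch of that bridge pattern, with types simplified to Text and the Cassandra-specific logic replaced by a hypothetical single-record source:

// Hedged sketch of the dual-API bridge: the old API fills caller-supplied
// key/value objects, so next() delegates to nextKeyValue() and copies.
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class DualApiReader extends org.apache.hadoop.mapreduce.RecordReader<Text, Text>
        implements org.apache.hadoop.mapred.RecordReader<Text, Text> {

    private final Text key = new Text();
    private final Text value = new Text();
    private boolean done; // hypothetical single-record source

    // --- new API (org.apache.hadoop.mapreduce) ---
    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) { }

    @Override
    public boolean nextKeyValue() {
        if (done) return false;
        key.set("k");
        value.set("v");
        done = true;
        return true;
    }

    @Override
    public Text getCurrentKey() { return key; }

    @Override
    public Text getCurrentValue() { return value; }

    // --- old API (org.apache.hadoop.mapred) ---
    @Override
    public boolean next(Text k, Text v) throws IOException {
        if (!nextKeyValue()) return false;
        k.set(key);   // old API fills caller-supplied objects
        v.set(value);
        return true;
    }

    @Override
    public Text createKey() { return new Text(); }

    @Override
    public Text createValue() { return new Text(); }

    @Override
    public long getPos() { return done ? 1 : 0; }

    // --- shared by both APIs ---
    @Override
    public float getProgress() { return done ? 1.0f : 0.0f; }

    @Override
    public void close() { }
}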

From source file com.ikanow.aleph2.analytics.hadoop.assets.BeFileInputReader.java

/** The input reader specific to batch enrichment modules
 * @author Alex
 */
public class BeFileInputReader extends RecordReader<String, Tuple2<Long, IBatchRecord>>
        implements IBeJobConfigurable {

From source file com.ikanow.aleph2.analytics.r.assets.BeFileInputReader.java

/** The input reader specific to batch enrichment modules
 * @author Alex
 */
public class BeFileInputReader extends RecordReader<String, Tuple2<Long, IBatchRecord>>
        implements IBeJobConfigurable {