Example usage for org.apache.hadoop.mapreduce RecordReader: subclass usage

List of usage examples showing how open-source projects subclass org.apache.hadoop.mapreduce RecordReader.

Introduction

On this page you can find example usage of org.apache.hadoop.mapreduce RecordReader. Each entry below names a source file and shows the opening of a class that subclasses RecordReader.
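
Every class listed below fills in the same abstract methods of RecordReader: initialize(), nextKeyValue(), getCurrentKey(), getCurrentValue(), getProgress(), and close(). As a minimal sketch of that contract (the class name PathOnlyRecordReader is illustrative and not taken from any project on this page), a reader that emits a single record per file split could look like this:

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

/**
 * Emits exactly one record per split: the split's file path as the key,
 * with a NullWritable value. Illustrates the contract every subclass
 * on this page implements.
 */
public class PathOnlyRecordReader extends RecordReader<Text, NullWritable> {

    private Path path;                 // path of the file backing this split
    private Text currentKey;           // key of the record we are positioned on
    private boolean consumed = false;  // true once the single record was returned

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context)
            throws IOException, InterruptedException {
        // Called once before any nextKeyValue(); gives access to the split and job configuration.
        path = ((FileSplit) split).getPath();
    }

    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        // Advance to the next record; return false once the split is exhausted.
        if (consumed) {
            return false;
        }
        currentKey = new Text(path.toString());
        consumed = true;
        return true;
    }

    @Override
    public Text getCurrentKey() {
        return currentKey;
    }

    @Override
    public NullWritable getCurrentValue() {
        return NullWritable.get();
    }

    @Override
    public float getProgress() {
        // Fraction of the split consumed so far, between 0.0f and 1.0f.
        return consumed ? 1.0f : 0.0f;
    }

    @Override
    public void close() throws IOException {
        // Release any streams opened in initialize(); nothing to close here.
    }
}

A RecordReader never runs on its own: an InputFormat's createRecordReader() method returns it, so each of the classes below is normally paired with an InputFormat in its project.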

Usage

From source file com.bigdata.mapreduce.seqtotext.beta.ZipFileRecordReader.java

/**
 * This RecordReader implementation extracts individual files from a ZIP
 * file and hands them over to the Mapper. The "key" is the decompressed
 * file name, and the "value" is the file contents.
 */
public class ZipFileRecordReader extends RecordReader<Text, BytesWritable> {
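
A minimal sketch of how the extraction loop described above might be written, assuming each split covers one whole, unsplit ZIP archive (the class name SimpleZipEntryRecordReader is ours, and this is not the project's actual code):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

public class SimpleZipEntryRecordReader extends RecordReader<Text, BytesWritable> {

    private ZipInputStream zip;                      // stream over the whole ZIP archive
    private final Text currentKey = new Text();
    private final BytesWritable currentValue = new BytesWritable();
    private boolean done = false;

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        Path path = ((FileSplit) split).getPath();
        FileSystem fs = path.getFileSystem(context.getConfiguration());
        FSDataInputStream in = fs.open(path);
        zip = new ZipInputStream(in);
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        ZipEntry entry = zip.getNextEntry();
        while (entry != null && entry.isDirectory()) {
            entry = zip.getNextEntry();              // skip directory entries
        }
        if (entry == null) {
            done = true;
            return false;
        }
        currentKey.set(entry.getName());             // key: name of the file inside the archive
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        byte[] buffer = new byte[8192];
        int read;
        while ((read = zip.read(buffer)) != -1) {
            bytes.write(buffer, 0, read);            // decompress this entry fully into memory
        }
        byte[] contents = bytes.toByteArray();
        currentValue.set(contents, 0, contents.length);  // value: decompressed file contents
        return true;
    }

    @Override
    public Text getCurrentKey() {
        return currentKey;
    }

    @Override
    public BytesWritable getCurrentValue() {
        return currentValue;
    }

    @Override
    public float getProgress() {
        return done ? 1.0f : 0.0f;
    }

    @Override
    public void close() throws IOException {
        if (zip != null) {
            zip.close();
        }
    }
}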

From source file com.blackberry.logdriver.mapreduce.avro.AvroBlockRecordReader.java

public class AvroBlockRecordReader extends RecordReader<AvroFileHeader, BytesWritable> {
    private static final Logger LOG = LoggerFactory.getLogger(AvroBlockRecordReader.class);

    private static final BytesWritable EMPTY_BYTES = new BytesWritable();

    private FileSystem fs = null;

From source file com.blackberry.logdriver.mapreduce.boom.BoomIndividualRecordReader.java

public class BoomIndividualRecordReader extends RecordReader<LogLineData, Text> {
    private static final Logger LOG = LoggerFactory.getLogger(BoomIndividualRecordReader.class);

    private FileSplit split;

    private long start = 0;

From source file com.blackberry.logdriver.mapreduce.boom.BoomRecordReader.java

public class BoomRecordReader extends RecordReader<LogLineData, Text> {
    private static final Logger LOG = LoggerFactory.getLogger(BoomRecordReader.class);

    private CombineFileSplit split;
    private TaskAttemptContext context;

From source file com.blackberry.logdriver.mapreduce.gzip.GzipLineRecordReader.java

public class GzipLineRecordReader extends RecordReader<LongWritable, Text> {
    private static final Logger LOG = LoggerFactory.getLogger(GzipLineRecordReader.class);

    private BufferedReader in = null;

    private long pos = 0;

From source file com.bonc.mr_roamRecognition_hjpt.comm.PathRecordReader.java

/**
 * Treats keys as offset in file and value as line.
 */
@InterfaceAudience.LimitedPrivate({ "MapReduce", "Pig" })
@InterfaceStability.Evolving
public class PathRecordReader extends RecordReader<Text, Text> {

From source file com.ci.backports.avro.mapreduce.AvroRecordReader.java

/**
 * A {@link org.apache.hadoop.mapreduce.RecordReader} for Avro container files.
 */
public class AvroRecordReader<T> extends RecordReader<AvroKey<T>, NullWritable> {

    /** An Avro file reader that knows how to seek to record boundaries. */
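
The boundary seeking mentioned in the comment is usually built on Avro's DataFileReader, whose sync() and pastSync() methods align reads with the container file's sync markers. A hedged sketch of that pattern (the class name and the use of SpecificDatumReader are assumptions, not the project's code):

import java.io.IOException;

import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.SeekableInput;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.FsInput;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

public class SimpleAvroSyncRecordReader<T> extends RecordReader<AvroKey<T>, NullWritable> {

    private DataFileReader<T> reader;
    private T currentRecord;
    private long start;    // first byte of this split
    private long end;      // first byte past this split

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        FileSplit fileSplit = (FileSplit) split;
        start = fileSplit.getStart();
        end = start + fileSplit.getLength();
        SeekableInput in = new FsInput(fileSplit.getPath(), context.getConfiguration());
        reader = new DataFileReader<T>(in, new SpecificDatumReader<T>());
        reader.sync(start);                          // seek to the first sync marker at or after the split start
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        // Stop once the reader has passed the sync marker that begins the next
        // split, so every record is consumed by exactly one task.
        if (!reader.hasNext() || reader.pastSync(end)) {
            return false;
        }
        currentRecord = reader.next(currentRecord);
        return true;
    }

    @Override
    public AvroKey<T> getCurrentKey() {
        return new AvroKey<T>(currentRecord);
    }

    @Override
    public NullWritable getCurrentValue() {
        return NullWritable.get();
    }

    @Override
    public float getProgress() throws IOException {
        return end == start ? 1.0f : Math.min(1.0f, (reader.tell() - start) / (float) (end - start));
    }

    @Override
    public void close() throws IOException {
        if (reader != null) {
            reader.close();
        }
    }
}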

From source file com.clojurewerkz.cascading.cassandra.hadoop.ColumnFamilyRecordReader.java

public class ColumnFamilyRecordReader extends RecordReader<ByteBuffer, SortedMap<ByteBuffer, IColumn>>
        implements org.apache.hadoop.mapred.RecordReader<ByteBuffer, SortedMap<ByteBuffer, IColumn>> {
    private static final Logger logger = LoggerFactory.getLogger(ColumnFamilyRecordReader.class);

    public static final int CASSANDRA_HADOOP_MAX_KEY_SIZE_DEFAULT = 8192;

From source file com.cloudera.bigdata.analysis.dataload.mapreduce.SplitableRecordReader.java

/**
 * Treats keys as offset in file and value as line.
 */
public abstract class SplitableRecordReader extends RecordReader<LongWritable, Text> {
    private static final Log LOG = LogFactory.getLog(SplitableRecordReader.class);
    public static final int PHONE_NUM_LENGTH = 24;
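
The offset-as-key, line-as-value pattern described in the javadoc is commonly built on org.apache.hadoop.util.LineReader. A minimal sketch that ignores split boundaries and any record-specific handling (the class name is illustrative, not the project's code):

import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.util.LineReader;

/** Reads a whole (unsplit) file line by line: key = byte offset, value = line. */
public class SimpleOffsetLineRecordReader extends RecordReader<LongWritable, Text> {

    private LineReader in;
    private long pos = 0;                            // byte offset of the line about to be read
    private long length = 0;                         // total bytes in the file, for getProgress()
    private final LongWritable key = new LongWritable();
    private final Text value = new Text();

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        FileSplit fileSplit = (FileSplit) split;
        length = fileSplit.getLength();
        Path path = fileSplit.getPath();
        FileSystem fs = path.getFileSystem(context.getConfiguration());
        FSDataInputStream stream = fs.open(path);
        in = new LineReader(stream, context.getConfiguration());
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        key.set(pos);                                // key: offset at which this line starts
        int bytesRead = in.readLine(value);          // value: the line itself, terminator stripped
        if (bytesRead == 0) {
            return false;                            // end of file
        }
        pos += bytesRead;                            // readLine() counts the line terminator too
        return true;
    }

    @Override
    public LongWritable getCurrentKey() {
        return key;
    }

    @Override
    public Text getCurrentValue() {
        return value;
    }

    @Override
    public float getProgress() {
        return length == 0 ? 1.0f : Math.min(1.0f, pos / (float) length);
    }

    @Override
    public void close() throws IOException {
        if (in != null) {
            in.close();
        }
    }
}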

From source file com.cloudera.ByteBufferRecordReader.java

/**
 * A reader that returns the split as a single ByteBuffer.
 * <p>
 * Borrowed heavily from FixedLengthRecordReader.
 */
@InterfaceAudience.Private
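
A sketch of the whole-split-into-a-ByteBuffer idea from the javadoc above; the NullWritable key and plain java.nio.ByteBuffer value types are assumptions, since the actual class declaration is not shown here:

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

/** Returns the entire split as one record whose value is a single ByteBuffer. */
public class WholeSplitByteBufferRecordReader extends RecordReader<NullWritable, ByteBuffer> {

    private FileSplit split;
    private TaskAttemptContext context;
    private ByteBuffer value;
    private boolean consumed = false;

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) {
        this.split = (FileSplit) split;
        this.context = context;
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        if (consumed) {
            return false;                            // the one-and-only record was already emitted
        }
        Path path = split.getPath();
        FileSystem fs = path.getFileSystem(context.getConfiguration());
        // Buffer the whole split in memory; this assumes splits are small enough to fit.
        byte[] contents = new byte[(int) split.getLength()];
        FSDataInputStream in = fs.open(path);
        try {
            in.seek(split.getStart());               // position at the start of this split
            IOUtils.readFully(in, contents, 0, contents.length);
        } finally {
            in.close();
        }
        value = ByteBuffer.wrap(contents);
        consumed = true;
        return true;
    }

    @Override
    public NullWritable getCurrentKey() {
        return NullWritable.get();
    }

    @Override
    public ByteBuffer getCurrentValue() {
        return value;
    }

    @Override
    public float getProgress() {
        return consumed ? 1.0f : 0.0f;
    }

    @Override
    public void close() {
        // The input stream is opened and closed inside nextKeyValue(); nothing to do here.
    }
}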