Example usage for org.apache.hadoop.mapreduce RecordReader subclass-usage

List of usage examples for org.apache.hadoop.mapreduce RecordReader subclass-usage

Introduction

On this page you can find example usages of subclassing org.apache.hadoop.mapreduce RecordReader.

Usage

From source file com.facebook.hiveio.input.RecordReaderImpl.java

/**
 * RecordReader for Hive data
 */
public class RecordReaderImpl extends RecordReader<WritableComparable, HiveReadableRecord> {
    // CHECKSTYLE: stop LineLength
    /** Base record reader */

From source file com.facebook.hiveio.mapreduce.output.SplitReader.java

/**
 * InputSplit reader
 */
public class SplitReader extends RecordReader<NullWritable, MapWritable> {
    /** Iterator over input */
    private final Iterator<MapWritable> iter;

From source file com.flipkart.fdp.migration.distcp.core.MirrorFileRecordReader.java

public class MirrorFileRecordReader extends RecordReader<Text, Text> {

    private String srcPath = null;
    private long progressByteCount = 0;

    private MirrorInputSplit fSplit = null;

From source file com.fullcontact.sstable.hadoop.mapreduce.SSTableRecordReader.java

/**
 * Handles reading individual records from a Cassandra SSTable.
 *
 * Uses an SSTableSplit in combination with a CompressedRandomAccessReader to read a section of each SSTable.
 *
 * @author ben <ben.vanberg@fullcontact.com>

From source file com.gemstone.gemfire.cache.hdfs.internal.hoplog.mapreduce.AbstractGFRecordReader.java

public class AbstractGFRecordReader extends RecordReader<GFKey, PersistedEventImpl> {

    // constant overhead of each KV in hfile. This is used in computing the
    // progress of record reader
    protected long RECORD_OVERHEAD = 8;

From source file com.geneix.bottle.WordRecordReader.java

/**
 * Treats keys as offset in file and value as line.
 */
@InterfaceAudience.LimitedPrivate({ "MapReduce", "Pig" })
@InterfaceStability.Evolving
public class WordRecordReader extends RecordReader<LongWritable, Word> {

From source file com.github.bskaggs.avro_json_hadoop.AvroAsJsonRecordReader.java

/**
 * Modified from {@link AvroRecordReaderBase}
 * 
 * Abstract base class for <code>RecordReader</code>s that read Avro container files.
 *
 * @param <K> The type of key the record reader should generate.

From source file com.github.bskaggs.mapreduce.flowfile.AbstractFlowFileV3RecordReader.java

public abstract class AbstractFlowFileV3RecordReader<V> extends RecordReader<Map<String, String>, V> {
    public static final byte[] MAGIC_HEADER = { 'N', 'i', 'F', 'i', 'F', 'F', '3' };
    private final byte[] headerBuffer = new byte[MAGIC_HEADER.length];

    private FSDataInputStream fileStream;
    private final Map<String, String> key = new HashMap<>();

From source file com.github.jmabuin.blaspark.io.RowPerLineRecordReader.java

/**
 * This class define a custom RecordReader for RowPerLine files for the
 * Hadoop MapReduce framework.
    
 * @author José M. Abuín
 */

From source file com.google.appengine.tools.mapreduce.BlobstoreRecordReader.java

/**
 * BlobstoreRecordReader is a RecordReader for the AppEngine Blobstore.
 * It's AppEngine compatible by way of implementing Writable.
 *
 */
class BlobstoreRecordReader extends RecordReader<BlobstoreRecordKey, byte[]> implements Writable {