Example usage for the org.apache.hadoop.mapred RecordReader interface

List of usage examples for the org.apache.hadoop.mapred RecordReader interface

Introduction

On this page you can find usage examples for the org.apache.hadoop.mapred RecordReader interface.

Usage

From source file DeprecatedBAMBaseRecordReader.java

public class DeprecatedBAMBaseRecordReader implements RecordReader<LongWritable, SAMBaseRecord> {
    private final RecordReader<LongWritable, SAMRecordWritable> samRR;

    private SAMRecordWritable current;

    public DeprecatedBAMBaseRecordReader(RecordReader<LongWritable, SAMRecordWritable> rr) {

From source file DeprecatedBAMRecordReader.java

public class DeprecatedBAMRecordReader implements RecordReader<LongWritable, SAMRecordWritable> {
    private final BAMRecordReader rr = new BAMRecordReader();

    private final long splitLength;

    public DeprecatedBAMRecordReader(InputSplit split, final JobConf job, Reporter reporter) throws IOException {

From source file Job1RecordReader.java

/** An {@link InputFormat} for plain text files.  Files are broken into lines.
 * Either linefeed or carriage-return is used to signal end of line.  Keys are
 * the position in the file, and values are the line of text. */

class Job1RecordReader implements RecordReader<Text, Text> {
    private LineRecordReader lineReader;

From source file ca.sparkera.adapters.mapred.MainframeVBRecordReader.java

/**
 * A reader to read VB length records from a split. Record offset is returned as
 * key and the record as bytes is returned in value.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving

From source file cascading.hbase.helper.TableRecordReader.java

/**
 * Iterate over an HBase table data, return (Text, RowResult) pairs
 */
public class TableRecordReader implements RecordReader<ImmutableBytesWritable, Result> {

    private TableRecordReaderImpl recordReaderImpl = new TableRecordReaderImpl();

From source file cascading.tap.hadoop.io.CombineFileRecordReaderWrapper.java

/**
 * A wrapper class for a record reader that handles a single file split. It delegates most of the
 * methods to the wrapped instance. We need this wrapper to satisfy the constructor requirement to
 * be used with hadoop's CombineFileRecordReader class.
 *
 * @see org.apache.hadoop.mapred.lib.CombineFileRecordReader

From source file cascading.tap.hadoop.util.MeasuredRecordReader.java

/**
 *
 */
public class MeasuredRecordReader implements RecordReader {
    private final FlowProcess flowProcess;
    private final Enum counter;

From source file co.cask.cdap.hive.stream.StreamRecordReader.java

/**
 * A {@link org.apache.hadoop.mapred.RecordReader} for reading stream events in hive queries. This is different
 * enough from the mapreduce version that there is not a common class for the two.
 */
final class StreamRecordReader implements RecordReader<Void, ObjectWritable> {
    private static final Logger LOG = LoggerFactory.getLogger(StreamRecordReader.class);

From source file co.nubetech.hiho.mapred.input.FileStreamRecordReader.java

/**
 * Updated version of {@link co.nubetech.hiho.mapreduce.lib.input.FileStreamInputFormat}.
 *
 */
public class FileStreamRecordReader implements RecordReader<Text, FSDataInputStream> {
    private FileSplit split;

From source file com.aerospike.hadoop.mapreduce.AerospikeRecordReader.java

public class AerospikeRecordReader extends RecordReader<AerospikeKey, AerospikeRecord>
        implements org.apache.hadoop.mapred.RecordReader<AerospikeKey, AerospikeRecord> {

    private class KeyRecPair {
        public AerospikeKey key;
        public AerospikeRecord rec;