List of usage examples for subclasses of org.apache.hadoop.mapreduce.RecordReader
From source file org.apache.tinkerpop.gremlin.hadoop.structure.io.gryo.GryoRecordReader.java
/**
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public final class GryoRecordReader extends RecordReader<NullWritable, VertexWritable> {

    private FSDataInputStream inputStream;
From source file org.apache.tinkerpop.gremlin.hadoop.structure.io.script.ScriptRecordReader.java
/**
 * @author Daniel Kuppitz (http://gremlin.guru)
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public final class ScriptRecordReader extends RecordReader<NullWritable, VertexWritable> {
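Both TinkerPop readers above follow the contract that every org.apache.hadoop.mapreduce.RecordReader subclass implements: initialize, nextKeyValue, getCurrentKey, getCurrentValue, getProgress and close. A minimal sketch of that contract, using a hypothetical SimpleLineRecordReader with LongWritable/Text types instead of TinkerPop's VertexWritable:

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

// Hypothetical reader that delegates to a LineRecordReader; illustrates the
// six methods every RecordReader subclass overrides.
public final class SimpleLineRecordReader extends RecordReader<LongWritable, Text> {

    private final LineRecordReader delegate = new LineRecordReader();

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        delegate.initialize(split, context);   // open the split's file and seek to its start
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        return delegate.nextKeyValue();        // advance to the next record; false at end of split
    }

    @Override
    public LongWritable getCurrentKey() {
        return delegate.getCurrentKey();       // byte offset of the current line
    }

    @Override
    public Text getCurrentValue() {
        return delegate.getCurrentValue();     // contents of the current line
    }

    @Override
    public float getProgress() throws IOException {
        return delegate.getProgress();         // fraction of the split consumed, 0.0f..1.0f
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }
}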
From source file org.apache.trevni.avro.mapreduce.AvroTrevniRecordReaderBase.java
/**
* Abstract base class for <code>RecordReader</code>s that read Trevni container files.
*
* @param <K> The type of key the record reader should generate.
* @param <V> The type of value the record reader should generate.
* @param <T> The type of the entries within the Trevni container file being read.
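The three type parameters separate the Hadoop key/value types from the container file's native entry type. A generic sketch of that base-class shape (not the actual Trevni implementation; the readNextEntry/keyFor/valueFor hooks are placeholders invented for illustration):

import java.io.IOException;
import org.apache.hadoop.mapreduce.RecordReader;

// Shape of a base reader parameterized over key (K), value (V) and the
// container file's native entry type (T); subclasses decide how an entry
// of type T maps onto the key/value pair handed to the mapper.
public abstract class ContainerFileRecordReaderBase<K, V, T> extends RecordReader<K, V> {

    private T currentEntry;

    /** Read the next entry from the underlying container file, or null at end (placeholder). */
    protected abstract T readNextEntry() throws IOException;

    /** Derive the mapper key from the current entry (placeholder). */
    protected abstract K keyFor(T entry);

    /** Derive the mapper value from the current entry (placeholder). */
    protected abstract V valueFor(T entry);

    @Override
    public boolean nextKeyValue() throws IOException {
        currentEntry = readNextEntry();
        return currentEntry != null;
    }

    @Override
    public K getCurrentKey() {
        return keyFor(currentEntry);
    }

    @Override
    public V getCurrentValue() {
        return valueFor(currentEntry);
    }

    // initialize, getProgress and close are left to concrete subclasses.
}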
From source file org.archive.hadoop.mapreduce.LineDereferencingRecordReader.java
/**
* RecordReader which reads pointers to actual files from an internal
* LineRecordReader, producing a LineRecordReader for the files pointed to by
* the actual input.
*
* @author brad
From source file org.archive.wayback.hadoop.LineDereferencingRecordReader.java
/**
* RecordReader which reads pointers to actual files from an internal
* LineRecordReader, producing a LineRecordReader for the files pointed to by
* the actual input.
*
* @author brad
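Both LineDereferencingRecordReader variants describe the same two-level pattern: an inner LineRecordReader supplies file paths, and a fresh LineRecordReader is opened over each referenced file. A condensed sketch of that dereferencing loop, assuming whole-file splits and omitting error handling:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

// Sketch of the "dereferencing" pattern: the outer reader's input lines are
// paths, and each path is handed to an inner LineRecordReader that produces
// the records actually emitted to the mapper.
public class DereferencingLineRecordReader extends RecordReader<LongWritable, Text> {

    private final LineRecordReader pathReader = new LineRecordReader();  // reads paths
    private LineRecordReader fileReader;                                 // reads the referenced file
    private TaskAttemptContext context;

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context)
            throws IOException, InterruptedException {
        this.context = context;
        pathReader.initialize(split, context);
    }

    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        // Keep pulling from the current inner reader; when it runs out,
        // dereference the next path supplied by the outer reader.
        while (fileReader == null || !fileReader.nextKeyValue()) {
            if (fileReader != null) {
                fileReader.close();
                fileReader = null;
            }
            if (!pathReader.nextKeyValue()) {
                return false;                              // no more paths to dereference
            }
            Path target = new Path(pathReader.getCurrentValue().toString());
            FileSystem fs = target.getFileSystem(context.getConfiguration());
            long length = fs.getFileStatus(target).getLen();
            fileReader = new LineRecordReader();
            fileReader.initialize(new FileSplit(target, 0, length, null), context);
        }
        return true;
    }

    @Override
    public LongWritable getCurrentKey() { return fileReader.getCurrentKey(); }

    @Override
    public Text getCurrentValue() { return fileReader.getCurrentValue(); }

    @Override
    public float getProgress() throws IOException { return pathReader.getProgress(); }

    @Override
    public void close() throws IOException {
        if (fileReader != null) { fileReader.close(); }
        pathReader.close();
    }
}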
From source file org.bdgenomics.adam.io.FastqRecordReader.java
/**
* A record reader for the interleaved FASTQ format.
*
* Reads over an input file and parses interleaved FASTQ read pairs into
* a single Text output. This is then fed into the FastqConverter, which
* converts the single Text instance into two AlignmentRecords.
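The interleaved format stores a read pair as two consecutive four-line FASTQ records, so one output record here spans eight input lines. A simplified sketch of that grouping, ignoring split boundaries, compression and malformed input (all of which the real reader handles); the key handling is illustrative only:

import java.io.IOException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.util.LineReader;

// Sketch: group eight consecutive lines (two four-line FASTQ records, i.e. one
// interleaved read pair) into a single Text value.
public class InterleavedFastqPairReader extends RecordReader<Text, Text> {

    private FSDataInputStream in;
    private LineReader lines;
    private final Text key = new Text();
    private final Text value = new Text();

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        Path path = ((FileSplit) split).getPath();
        FileSystem fs = path.getFileSystem(context.getConfiguration());
        in = fs.open(path);
        lines = new LineReader(in, context.getConfiguration());
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        StringBuilder pair = new StringBuilder();
        Text line = new Text();
        for (int i = 0; i < 8; i++) {                 // 2 reads x 4 FASTQ lines each
            if (lines.readLine(line) == 0) {
                return false;                          // end of file (or truncated pair)
            }
            if (i == 0) {
                key.set(line);                         // header of the first read names the pair
            }
            pair.append(line.toString()).append('\n');
        }
        value.set(pair.toString());
        return true;
    }

    @Override
    public Text getCurrentKey() { return key; }

    @Override
    public Text getCurrentValue() { return value; }

    @Override
    public float getProgress() { return 0.0f; }        // progress reporting omitted in this sketch

    @Override
    public void close() throws IOException {
        if (in != null) { in.close(); }
    }
}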
From source file org.bgi.flexlab.gaea.data.mapreduce.input.adaptor.AdaptorRecordReader.java
public class AdaptorRecordReader extends RecordReader<Text, Text> {

    protected static final Log LOG = LogFactory.getLog(AdaptorRecordReader.class.getName());
    protected CompressionCodecFactory compressionCodecs = null;
    private long start;
    protected long pos;
    protected long end;
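The CompressionCodecFactory field hints at the usual initialize() step for text readers: detect a codec from the file extension and wrap the raw stream when one is found. A hedged sketch of just that part (CodecAwareOpen is an illustrative helper, not part of AdaptorRecordReader):

import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

// Illustrative helper: open a split's file, decompressing transparently when
// the file extension maps to a registered compression codec.
public final class CodecAwareOpen {

    public static InputStream open(FileSplit split, TaskAttemptContext context) throws IOException {
        Configuration conf = context.getConfiguration();
        Path file = split.getPath();
        FileSystem fs = file.getFileSystem(conf);
        FSDataInputStream raw = fs.open(file);

        CompressionCodecFactory codecs = new CompressionCodecFactory(conf);
        CompressionCodec codec = codecs.getCodec(file);   // null if the file is not compressed
        return (codec == null) ? raw : codec.createInputStream(raw);
    }
}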
From source file org.bgi.flexlab.gaea.data.mapreduce.input.bam.GaeaBamRecordReader.java
public class GaeaBamRecordReader extends RecordReader<LongWritable, SamRecordWritable> {

    private final LongWritable key = new LongWritable();
    private final SamRecordWritable record = new SamRecordWritable();
    private ValidationStringency stringency;
From source file org.bgi.flexlab.gaea.data.mapreduce.input.cram.GaeaCombineCramFileRecordReader.java
public class GaeaCombineCramFileRecordReader extends RecordReader<LongWritable, SAMRecordWritable> {

    protected GaeaCramRecordReader currentReader = null;
    protected CombineFileSplit split;
    protected int fileIndex;
    protected TaskAttemptContext context;
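These fields outline the standard CombineFileSplit pattern: keep the split and a file cursor, and swap in a fresh per-file reader whenever the current one is exhausted. A sketch of that delegation using a LineRecordReader as the per-file reader, since GaeaCramRecordReader's internals are not shown here:

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

// Sketch of the CombineFileSplit delegation pattern; the real class delegates
// to a per-file GaeaCramRecordReader instead of a LineRecordReader.
public class CombineFileDelegatingReader extends RecordReader<LongWritable, Text> {

    private CombineFileSplit split;
    private TaskAttemptContext context;
    private LineRecordReader currentReader;
    private int fileIndex = 0;

    @Override
    public void initialize(InputSplit inputSplit, TaskAttemptContext context)
            throws IOException, InterruptedException {
        this.split = (CombineFileSplit) inputSplit;
        this.context = context;
        openNextReader();
    }

    private boolean openNextReader() throws IOException, InterruptedException {
        if (currentReader != null) {
            currentReader.close();
            currentReader = null;
        }
        if (fileIndex >= split.getNumPaths()) {
            return false;                               // every packed file has been read
        }
        FileSplit fileSplit = new FileSplit(
                split.getPath(fileIndex),
                split.getOffset(fileIndex),
                split.getLength(fileIndex),
                split.getLocations());
        currentReader = new LineRecordReader();
        currentReader.initialize(fileSplit, context);
        fileIndex++;
        return true;
    }

    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        while (currentReader == null || !currentReader.nextKeyValue()) {
            if (!openNextReader()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public LongWritable getCurrentKey() { return currentReader.getCurrentKey(); }

    @Override
    public Text getCurrentValue() { return currentReader.getCurrentValue(); }

    @Override
    public float getProgress() {
        return split.getNumPaths() == 0 ? 1.0f : (float) fileIndex / split.getNumPaths();
    }

    @Override
    public void close() throws IOException {
        if (currentReader != null) { currentReader.close(); }
    }
}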
From source file org.bgi.flexlab.gaea.data.mapreduce.input.cram.GaeaCramRecordReader.java
public class GaeaCramRecordReader extends RecordReader<LongWritable, SAMRecordWritable> {

    public final static String INPUTFORMAT_REFERENCE = "inputformat.reference";
    public final static String CRAM_FILE_SPLITABLE = "cram.file.splitable";
    protected final LongWritable key = new LongWritable();
    protected final SAMRecordWritable record = new SAMRecordWritable();
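The two public constants are plain Configuration keys, so driver code sets them on the job and the reader's initialize() reads them back. A small hedged sketch of both sides, with all CRAM-specific plumbing omitted:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Illustrative only: how job-level settings such as a reference path and a
// splitability flag are typically passed to a RecordReader via Configuration.
public final class CramReaderConfigExample {

    public static final String INPUTFORMAT_REFERENCE = "inputformat.reference";
    public static final String CRAM_FILE_SPLITABLE = "cram.file.splitable";

    // Driver side: stash the values on the job's Configuration.
    public static void configure(Job job, String referencePath) {
        job.getConfiguration().set(INPUTFORMAT_REFERENCE, referencePath);
        job.getConfiguration().setBoolean(CRAM_FILE_SPLITABLE, false);
    }

    // Reader side: what an initialize(...) implementation would read back.
    public static void readBack(TaskAttemptContext context) {
        Configuration conf = context.getConfiguration();
        String reference = conf.get(INPUTFORMAT_REFERENCE);                 // may be null if unset
        boolean splitable = conf.getBoolean(CRAM_FILE_SPLITABLE, false);    // default: not splitable
        System.out.println("reference=" + reference + ", splitable=" + splitable);
    }
}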