Example usage for org.apache.hadoop.mapreduce RecordReader subclass-usage

List of usage examples for org.apache.hadoop.mapreduce RecordReader subclass-usage

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce RecordReader subclass-usage.

Usage

From source file org.mrgeo.mapreduce.ingestvector.IngestVectorRecordReader.java

public class IngestVectorRecordReader extends RecordReader<LongWritable, GeometryWritable> {
    private static Logger log = LoggerFactory.getLogger(IngestVectorRecordReader.class);

    private GeotoolsVectorReader reader;

    private LongWritable key = new LongWritable(0);

From source file org.msgpack.hadoop.mapreduce.input.MessagePackRecordReader.java

public class MessagePackRecordReader extends RecordReader<LongWritable, MessagePackWritable> {
    private static final Log LOG = LogFactory.getLog(MessagePackRecordReader.class.getName());

    private MessageUnpacker unpacker_;

    private final LongWritable key_ = new LongWritable(0);

From source file org.oclc.firefly.hadoop.backup.BackupRecordReader.java

/**
 * Sequence File record reader for backup utility.
 * Copied from SequenceFileRecordReader, but modified for this utility. There is some code in HRegionInfo
 * that doesn't allow HRegionInfo objects to be reused, so this class instantiates a new object
 * every time it reads a new key/value pair.
 */

From source file org.ojai.json.mapreduce.JSONFileRecordReader.java

public class JSONFileRecordReader extends RecordReader<LongWritable, Document> {

    private FSDataInputStream inputStream;
    private DocumentStream<Document> documentStream;
    private Iterator<Document> it;
    private long documentCount;

From source file org.pig.storage.TFileRecordReader.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the

From source file org.pooledtimeseries.seqfile.FullFileRecordReader.java

public class FullFileRecordReader extends RecordReader<Text, BytesWritable> {
    public static final byte[] VECTOR_SEPERATOR = PoTConstants.VECTOR_SEPERATOR.getBytes();

    private FileSplit fileSplit;
    private Configuration conf;
    private BytesWritable value = new BytesWritable();

From source file org.position.parse.ZipFileRecordReader.java

/**
 * This RecordReader implementation extracts individual files from a ZIP
 * file and hands them over to the Mapper. The "key" is the decompressed
 * file name, the "value" is the file contents.
 */
public class ZipFileRecordReader extends RecordReader<Text, BytesWritable> {

From source file org.rassee.omniture.hadoop.mapreduce.OmnitureDataFileRecordReader.java

/**
 * A record reader for splits generated by an Omniture hit_data.tsv daily data file.
 *
 * @author Mike Sukmanowsky (<a href="mailto:mike.sukmanowsky@gmail.com">mike.sukmanowsky@gmail.com</a>)
 */
public class OmnitureDataFileRecordReader extends RecordReader<LongWritable, Text> {

From source file org.sample.ZipFileRecordReader.java

/**
 * This RecordReader implementation extracts individual files from a ZIP
 * file and hands them over to the Mapper. The "key" is the decompressed
 * file name, the "value" is the file contents.
 */
public class ZipFileRecordReader extends RecordReader<Text, BytesWritable> {

From source file org.seqdoop.hadoop_bam.BAMRecordReader.java

/** The key is the bitwise OR of the reference sequence ID in the upper 32 bits
 * and the 0-based leftmost coordinate in the lower.
 */
public class BAMRecordReader extends RecordReader<LongWritable, SAMRecordWritable> {
    private final LongWritable key = new LongWritable();
    private final SAMRecordWritable record = new SAMRecordWritable();