Example usage for the org.apache.hadoop.mapred InputSplit interface

Introduction

This page lists usage examples for the org.apache.hadoop.mapred InputSplit interface; each entry below is an excerpt from a source file that implements it.
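
Before the excerpts, a minimal sketch of what the interface requires (illustrative only, not taken from any of the files below): org.apache.hadoop.mapred.InputSplit extends Writable, so an implementation supplies getLength() and getLocations() plus the write()/readFields() pair used to ship the split to task processes. The field names here are hypothetical.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.mapred.InputSplit;

public class ExampleSplit implements InputSplit {

    private String resource; // region of input this split covers
    private long length;     // size of that region in bytes

    public ExampleSplit() {
        // no-arg constructor so the framework can instantiate and deserialize the split
    }

    public ExampleSplit(String resource, long length) {
        this.resource = resource;
        this.length = length;
    }

    public long getLength() throws IOException {
        return length; // consulted by the framework when planning and ordering splits
    }

    public String[] getLocations() throws IOException {
        return new String[0]; // no locality hints in this sketch
    }

    public void write(DataOutput out) throws IOException {
        out.writeUTF(resource); // splits travel to task processes as Writables
        out.writeLong(length);
    }

    public void readFields(DataInput in) throws IOException {
        resource = in.readUTF();
        length = in.readLong();
    }
}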

Usage

From source file DeprecatedFileVirtualSplit.java

public class DeprecatedFileVirtualSplit implements InputSplit {
    public final FileVirtualSplit vs;

    public DeprecatedFileVirtualSplit(FileVirtualSplit v) {
        vs = v;
    }

From source file cascading.tap.hadoop.io.MultiInputSplit.java

/** Class MultiInputSplit is used by MultiInputFormat */
public class MultiInputSplit implements InputSplit, JobConfigurable {
    public static final String CASCADING_SOURCE_PATH = "cascading.source.path";
    private static final Logger LOG = LoggerFactory.getLogger(MultiInputSplit.class);

    /** Field jobConf */

From source file cascading.tap.hadoop.MultiInputSplit.java

/** Class MultiInputSplit is used by MultiInputFormat */
public class MultiInputSplit implements InputSplit, JobConfigurable {
    private static final Logger LOG = Logger.getLogger(MultiInputSplit.class);

    /** Field jobConf */
    private transient JobConf jobConf;

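Both MultiInputSplit variants above implement JobConfigurable alongside InputSplit: when Hadoop instantiates a split through ReflectionUtils, a JobConfigurable object typically receives the JobConf via configure(JobConf) before readFields() runs, so the configuration can be consulted during deserialization. A hedged sketch of that pattern follows; the class and member names are illustrative, not Cascading's.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.util.ReflectionUtils;

public class ConfigurableSplit implements InputSplit, JobConfigurable {

    private transient JobConf jobConf; // not serialized; supplied on each side through configure()
    private InputSplit child;          // the wrapped concrete split

    public ConfigurableSplit() {
    }

    public ConfigurableSplit(InputSplit child) {
        this.child = child;
    }

    public void configure(JobConf jobConf) {
        // called when the framework instantiates this split via ReflectionUtils
        this.jobConf = jobConf;
    }

    public long getLength() throws IOException {
        return child.getLength();
    }

    public String[] getLocations() throws IOException {
        return child.getLocations();
    }

    public void write(DataOutput out) throws IOException {
        out.writeUTF(child.getClass().getName()); // record the concrete split class
        child.write(out);
    }

    public void readFields(DataInput in) throws IOException {
        String splitType = in.readUTF();
        try {
            // jobConf was provided via configure() before readFields() is invoked
            Class<?> splitClass = jobConf.getClassByName(splitType);
            child = (InputSplit) ReflectionUtils.newInstance(splitClass, jobConf);
            child.readFields(in);
        } catch (ClassNotFoundException exception) {
            throw new IOException("unable to load split class: " + splitType, exception);
        }
    }
}
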
From source file com.acme.extensions.data.SeededSplit.java

/**
 * 
 */
public class SeededSplit implements InputSplit {

    private long seed;

From source file com.aerospike.hadoop.mapreduce.AerospikeSplit.java

public class AerospikeSplit extends InputSplit implements org.apache.hadoop.mapred.InputSplit {

    private String type;
    private String node;
    private String host;
    private int port;

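AerospikeSplit above extends the new-API abstract class org.apache.hadoop.mapreduce.InputSplit while also implementing the old org.apache.hadoop.mapred.InputSplit interface, so one split type can be handed to either API. A hedged sketch of that dual-API shape; the fields echo the excerpt, everything else is illustrative.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

public class DualApiSplit extends org.apache.hadoop.mapreduce.InputSplit
        implements org.apache.hadoop.mapred.InputSplit {

    private String node; // illustrative: the cluster node this split should read from
    private String host;
    private int port;

    public DualApiSplit() {
        // needed for Writable deserialization
    }

    public DualApiSplit(String node, String host, int port) {
        this.node = node;
        this.host = host;
        this.port = port;
    }

    // Declaring "throws IOException" satisfies both APIs: the new-API abstract method
    // allows IOException/InterruptedException, the old interface allows IOException.
    public long getLength() throws IOException {
        return 1; // placeholder; a real split reports its size or record count
    }

    public String[] getLocations() throws IOException {
        return new String[] { host }; // locality hint for the scheduler
    }

    public void write(DataOutput out) throws IOException {
        out.writeUTF(node);
        out.writeUTF(host);
        out.writeInt(port);
    }

    public void readFields(DataInput in) throws IOException {
        node = in.readUTF();
        host = in.readUTF();
        port = in.readInt();
    }
}
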
From source file com.aliyun.openservices.tablestore.hive.TableStoreInputSplit.java

public class TableStoreInputSplit extends FileSplit implements InputSplit, Writable {
    private static final Logger logger = LoggerFactory.getLogger(TableStoreInputSplit.class);
    private com.aliyun.openservices.tablestore.hadoop.TableStoreInputSplit delegated;

    public TableStoreInputSplit() {
        super(null, 0, 0, (String[]) null);

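TableStoreInputSplit above extends FileSplit but keeps the real split information in a delegated object, and its no-argument constructor passes placeholder values to the FileSplit constructor so the framework can build an empty instance before readFields(). A hedged sketch of that shape; the delegated payload here is just a string, whereas the real class delegates to a TableStore-specific split.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;

public class FileSplitFacade extends FileSplit {

    private String delegatedState; // stand-in for the delegated split's data

    public FileSplitFacade() {
        // placeholder arguments; this constructor is only used before deserialization
        super(null, 0, 0, (String[]) null);
    }

    public FileSplitFacade(Path path, String delegatedState) {
        // path must be non-null here because FileSplit serializes it in write()
        super(path, 0, 0, (String[]) null);
        this.delegatedState = delegatedState;
    }

    public void write(DataOutput out) throws IOException {
        super.write(out);             // serialize the FileSplit part first
        out.writeUTF(delegatedState); // then the delegated payload
    }

    public void readFields(DataInput in) throws IOException {
        super.readFields(in);
        delegatedState = in.readUTF();
    }
}
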
From source file com.bah.culvert.hive.CulvertInputSplit.java

/**
 * Store an InputSplit to read from an index table. Used then to create a
 * scanner over the data table
 */
@SuppressWarnings("deprecation")
public class CulvertInputSplit implements InputSplit, Writable {

From source file com.cloudera.recordservice.mapred.RecordServiceInputSplit.java

/**
 * Wrapper around mapreduce.InputSplit which implements the mapred interface.
 */
public class RecordServiceInputSplit implements InputSplit {
    private com.cloudera.recordservice.mapreduce.RecordServiceInputSplit split_;

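RecordServiceInputSplit, like DeprecatedFileVirtualSplit earlier, wraps a new-API split and re-exposes it through the old mapred interface. A hedged sketch of that wrapper pattern follows; the wrapped type is the new-API FileSplit purely because it is a concrete Writable split, not because either project wraps it.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

public class DeprecatedSplitWrapper implements InputSplit {

    private FileSplit wrapped;

    public DeprecatedSplitWrapper() {
        this.wrapped = new FileSplit(); // empty instance to be filled by readFields()
    }

    public DeprecatedSplitWrapper(FileSplit wrapped) {
        this.wrapped = wrapped;
    }

    public long getLength() throws IOException {
        return wrapped.getLength(); // delegate straight to the new-API split
    }

    public String[] getLocations() throws IOException {
        return wrapped.getLocations();
    }

    public void write(DataOutput out) throws IOException {
        wrapped.write(out); // the wrapped FileSplit is itself a Writable
    }

    public void readFields(DataInput in) throws IOException {
        wrapped.readFields(in);
    }
}
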
From source file com.davidgildeh.hadoop.input.simpledb.SimpleDBInputSplit.java

/**
 * InputSplit represents the slice of data to be processed by a Record Reader which in
 * turn sends each row in the slice to the Mappers.
 * 
 * Typically, it presents a byte-oriented view on the input and is the responsibility of 
 * RecordReader of the job to process this and present a record-oriented view.

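The SimpleDBInputSplit comment above describes the division of labor: the split only names a slice of the input, and the job's RecordReader turns that slice into key/value records for the mapper. A hedged sketch of the RecordReader side, iterating over a row range taken from an old-API FileSplit; the class name and the fake row values are illustrative, not from the SimpleDB code.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.RecordReader;

public class RangeRecordReader implements RecordReader<LongWritable, Text> {

    private final long start;
    private final long end;
    private long pos;

    public RangeRecordReader(FileSplit split) {
        this.start = split.getStart(); // byte-oriented view handed over by the split
        this.end = split.getStart() + split.getLength();
        this.pos = start;
    }

    public boolean next(LongWritable key, Text value) throws IOException {
        if (pos >= end) {
            return false; // slice exhausted
        }
        key.set(pos);
        value.set("row-" + pos); // stand-in for fetching the real record
        pos++;
        return true;
    }

    public LongWritable createKey() {
        return new LongWritable();
    }

    public Text createValue() {
        return new Text();
    }

    public long getPos() throws IOException {
        return pos;
    }

    public float getProgress() throws IOException {
        return end == start ? 1.0f : (pos - start) / (float) (end - start);
    }

    public void close() throws IOException {
    }
}
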
From source file com.facebook.hiveio.input.HInputSplit.java

/**
 * InputSplit for Hive
 */
class HInputSplit extends InputSplit implements org.apache.hadoop.mapred.InputSplit, Configurable {
    /** Logger */
    private static final Logger LOG = LoggerFactory.getLogger(HInputSplit.class);