List of usage examples for the org.apache.hadoop.io Writable interface.
From source file com.google.appengine.tools.mapreduce.BlobstoreRecordReader.java
/**
 * BlobstoreRecordReader is a RecordReader for the AppEngine Blobstore.
 * It's AppEngine compatible by way of implementing Writable.
 */
class BlobstoreRecordReader extends RecordReader<BlobstoreRecordKey, byte[]> implements Writable {
From source file com.google.appengine.tools.mapreduce.DatastoreInputSplit.java
/**
 * Represents an {@code InputSplit} over AppEngine datastore entities.
 * Represents the range between a start key inclusive and an end key exclusive.
 * Also stores a batch size to be used by the RecordReader.
 */
public class DatastoreInputSplit extends InputSplit implements Writable {
From source file com.google.appengine.tools.mapreduce.DatastoreRecordReader.java
/**
 * DatastoreReader is a RecordReader for the AppEngine Datastore.
 * It's AppEngine compatible by way of implementing Writable.
 */
public class DatastoreRecordReader extends RecordReader<Key, Entity> implements Writable {
From source file com.google.appengine.tools.mapreduce.RangeInputSplit.java
/**
* The input split class for {@link RangeInputFormat}.
*
* The inputs generated by this split are inclusive of start and exclusive of
* end (i.e. [start, end) ).
*
From source file com.google.appengine.tools.mapreduce.RangeRecordReader.java
/**
 * The record reader class for {@link RangeInputFormat}.
 */
public class RangeRecordReader extends RecordReader<Long, NullWritable> implements Writable {
From source file com.google.appengine.tools.mapreduce.StubInputSplit.java
/**
 * Stub input split. Contains the info for generating a predetermined series
 * of records.
 */
public class StubInputSplit extends InputSplit implements Writable {
From source file com.google.appengine.tools.mapreduce.StubRecordReader.java
/**
 * Reader that just passes through the values from a {@link StubInputSplit}.
 */
public class StubRecordReader extends RecordReader<IntWritable, IntWritable> implements Writable {
From source file com.google.mr4c.hadoop.DataKeyList.java
public class DataKeyList implements Writable {

    private List<DataKey> m_keys = new ArrayList<DataKey>();
    private DatasetSerializer m_serializer = SerializerFactories.getSerializerFactory("application/json")
            .createDatasetSerializer();
From source file com.gsvic.csmr.DocumentWritable.java
public final class DocumentWritable implements Writable {

    private Text key;
    private VectorWritable value;

    public DocumentWritable() {
        key = new Text();
From source file com.hortonworks.hbase.replication.bridge.ConnectionHeader.java
/**
 * The IPC connection header sent by the client to the server
 * on connection establishment.
 */
class ConnectionHeader implements Writable {

    protected String protocol;