Example usage for the org.apache.hadoop.io Writable interface

Introduction

On this page you can find usage examples of the org.apache.hadoop.io Writable interface, taken from the source files listed below.
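
Before the project examples below, here is a minimal, hypothetical sketch of what implementing Writable typically involves: the interface defines write(DataOutput) and readFields(DataInput), and deserialization relies on a public no-argument constructor. The class PointWritable and its fields are illustrative only and are not taken from any of the projects listed.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Illustrative only: a simple value class serialized with Writable.
public class PointWritable implements Writable {

    private long id;       // example fields, not from the sources below
    private double value;

    // Writable implementations need a no-arg constructor so the framework
    // can instantiate them before calling readFields().
    public PointWritable() {
    }

    public PointWritable(long id, double value) {
        this.id = id;
        this.value = value;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(id);
        out.writeDouble(value);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        id = in.readLong();
        value = in.readDouble();
    }
}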

Usage

From source file com.splicemachine.mrio.api.serde.ExecRowWritable.java

public class ExecRowWritable implements Writable {

    DescriptorSerializer[] serializers = null;
    MultiFieldDecoder decoder = MultiFieldDecoder.create();
    MultiFieldEncoder encoder = null;

From source file com.splicemachine.mrio.api.serde.RowLocationWritable.java

public class RowLocationWritable implements Writable, WritableComparable {
    HBaseRowLocation rowLocation;

    public RowLocationWritable() {

    }
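
RowLocationWritable also implements WritableComparable, which adds compareTo() on top of the Writable serialization methods so instances can be used as MapReduce keys and sorted during the shuffle. The snippet below is a generic, hypothetical sketch of that pattern (the class OffsetKey is invented for illustration), not the Splice Machine implementation.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

// Hypothetical key type: WritableComparable = Writable + compareTo().
public class OffsetKey implements WritableComparable<OffsetKey> {

    private long offset;

    public OffsetKey() {
        // required no-arg constructor
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(offset);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        offset = in.readLong();
    }

    @Override
    public int compareTo(OffsetKey other) {
        return Long.compare(offset, other.offset);
    }
}

Key types used with the default HashPartitioner should also override hashCode() and equals() consistently with compareTo().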

From source file com.splicemachine.si.impl.region.ActiveTxnFilter.java

/**
 * @author Scott Fines
 *         Date: 8/18/14
 */
public class ActiveTxnFilter extends FilterBase implements Writable {
    protected final long beforeTs;

From source file com.taobao.adfs.distributed.DistributedData.java

/**
 * @author <a href=mailto:zhangwei.yangjie@gmail.com/jiwan@taobao.com>zhangwei/jiwan</a>
 */
public abstract class DistributedData implements Writable {
    public static final Logger logger = LoggerFactory.getLogger(DistributedData.class);
    public Configuration conf = null;

From source file com.taobao.adfs.distributed.DistributedLocker.java

/**
 * @author <a href=mailto:zhangwei.yangjie@gmail.com/jiwan@taobao.com>zhangwei/jiwan</a>
 */
public class DistributedLocker implements Writable {
    public static final Logger logger = LoggerFactory.getLogger(DistributedLocker.class);
    Map<DeepArray, Object[]> locks = new HashMap<DeepArray, Object[]>();

From source file com.taobao.adfs.distributed.rpc.ObjectWritable.java

/**
 * @author <a href=mailto:zhangwei.yangjie@gmail.com/jiwan@taobao.com>zhangwei/jiwan</a>
 */
public class ObjectWritable implements Writable {
    private Class<?> declaredClass;
    private Object instance;
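
This class appears to follow the same pattern as Hadoop's own org.apache.hadoop.io.ObjectWritable: record the declared class alongside the value so the reader knows how to decode what follows. A much-simplified, hypothetical sketch of that idea, handling only two types and not reflecting the actual Taobao implementation, might look like this.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Hypothetical, much-simplified wrapper: writes the class name first,
// then the value, so the reader knows how to decode what follows.
public class SimpleObjectWritable implements Writable {

    private Class<?> declaredClass;
    private Object instance;

    public SimpleObjectWritable() {
    }

    public SimpleObjectWritable(Class<?> declaredClass, Object instance) {
        this.declaredClass = declaredClass;
        this.instance = instance;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(declaredClass.getName());
        if (declaredClass == Long.class) {
            out.writeLong((Long) instance);
        } else if (declaredClass == String.class) {
            out.writeUTF((String) instance);
        } else {
            throw new IOException("unsupported type: " + declaredClass);
        }
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        String name = in.readUTF();
        if (name.equals(Long.class.getName())) {
            declaredClass = Long.class;
            instance = in.readLong();
        } else if (name.equals(String.class.getName())) {
            declaredClass = String.class;
            instance = in.readUTF();
        } else {
            throw new IOException("unsupported type: " + name);
        }
    }
}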

From source file com.telefonica.iot.tidoop.apiext.hadoop.ckan.CKANInputSplit.java

/**
 * Custom InputSplit for CKAN data.
 * 
 * @author frb
 */
public class CKANInputSplit extends InputSplit implements Writable {
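
In the new MapReduce API, org.apache.hadoop.mapreduce.InputSplit does not itself extend Writable, so custom splits like this one implement it so the framework can ship the split description to the task that processes it. The following is a hypothetical minimal split; the class and field names are illustrative and are not taken from the CKAN connector.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;

// Hypothetical split: describes a range of records in some remote resource.
public class RangeSplit extends InputSplit implements Writable {

    private String resourceId;
    private long firstRecord;
    private long numRecords;

    public RangeSplit() {
        // required for deserialization
    }

    public RangeSplit(String resourceId, long firstRecord, long numRecords) {
        this.resourceId = resourceId;
        this.firstRecord = firstRecord;
        this.numRecords = numRecords;
    }

    @Override
    public long getLength() {
        return numRecords;
    }

    @Override
    public String[] getLocations() {
        // no data locality information for a remote source
        return new String[0];
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(resourceId);
        out.writeLong(firstRecord);
        out.writeLong(numRecords);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        resourceId = in.readUTF();
        firstRecord = in.readLong();
        numRecords = in.readLong();
    }
}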

From source file com.toshiba.mwcloud.gs.hadoop.io.GSRowWritable.java

/**
 * <div lang="en">
 * RowWritable class for GridDB.<br/>
 * Passed to Map tasks by GSRowRecordReader so that MapReduce jobs can read
 * data from GridDB.<br/>
 * Column types are described by a GSType[] array.

From source file com.toshiba.mwcloud.gs.hadoop.mapreduce.GSContainerSplit.java

/**
 * <div lang="en">
 * InputSplit of GridDB connector.
 * </div>

From source file com.trace.hadoop.hadoopinternal.ser.Block.java

/**************************************************
 * A Block is a Hadoop FS primitive, identified by a long.
 * 
 **************************************************/
public class Block implements Writable, Comparable<Block>, Serializable {
    private static final long serialVersionUID = -1765160555754675629L;
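
A common way to exercise any of these Writable implementations is a round-trip through a byte buffer: write the object, read the bytes back into a fresh instance, and compare. The helper below is a generic, hypothetical sketch and is not part of any of the projects above.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public final class WritableRoundTrip {

    // Serialize 'source' and deserialize the bytes into 'target'.
    public static <T extends Writable> T copy(T source, T target) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            source.write(out);
        }
        try (DataInputStream in =
                new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            target.readFields(in);
        }
        return target;
    }

    private WritableRoundTrip() {
    }
}

For a class like Block above, the deserialized copy should compare equal to the original (assuming the class defines equals()), which makes this a convenient basis for unit tests.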