Example usage for org.apache.hadoop.io BytesWritable readFields

List of usage examples for org.apache.hadoop.io BytesWritable readFields

Introduction

On this page you can find an example usage for org.apache.hadoop.io BytesWritable readFields.

Prototype

@Override
    public void readFields(DataInput in) throws IOException 

Source Link

Usage

From source file: com.cloudera.sqoop.lib.BlobRef.java

License: Apache License

@Override
public void readFieldsInternal(DataInput in) throws IOException {
    // An internally-stored BLOB is serialized as a single BytesWritable
    // holding the raw payload; reuse the existing buffer when one exists.
    BytesWritable bytes = getDataObj();
    if (bytes == null) {
        // No buffer yet on this instance — allocate a fresh one.
        bytes = new BytesWritable();
    }
    bytes.readFields(in);
    setDataObj(bytes);
}

From source file: edu.yale.cs.hadoopdb.connector.DBInputSplit.java

License: Apache License

/**
 * Deserializes a {@code DBChunk} that was written as a length-prefixed
 * {@code BytesWritable} containing a Java-serialized object.
 *
 * @param in stream positioned at the serialized chunk
 * @return the reconstructed chunk
 * @throws IOException if reading fails or the chunk's class is unavailable
 */
private DBChunk deserializeChunk(DataInput in) throws IOException {
    BytesWritable br = new BytesWritable();
    br.readFields(in);
    // getBytes() may return a backing array longer than the valid data, so
    // bound the stream to getLength() bytes. try-with-resources closes the
    // ObjectInputStream, which the original code leaked.
    // SECURITY NOTE(review): native Java deserialization of untrusted input
    // is unsafe; this assumes the split data comes from a trusted source.
    try (ObjectInputStream objectStream = new ObjectInputStream(
            new ByteArrayInputStream(br.getBytes(), 0, br.getLength()))) {
        return (DBChunk) objectStream.readObject();
    } catch (ClassNotFoundException e) {
        throw new IOException(e);
    }
}

From source file: in.dream_lab.goffish.ControlMessage.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    // Wire format: transmission type, then a counted list of extra-info
    // byte blobs, then a type-dependent trailing payload.
    transmissionType = WritableUtils.readEnum(in, IControlMessage.TransmissionType.class);
    extraInfo = Lists.newArrayList();
    final int count = in.readInt();
    for (int i = 0; i < count; i++) {
        BytesWritable entry = new BytesWritable();
        entry.readFields(in);
        extraInfo.add(entry);
    }
    // Trailing payload depends on the message kind.
    if (isPartitionMessage()) {
        partitionID = in.readInt();
    } else if (isVertexMessage()) {
        vertexValues.readFields(in);
    }
}

From source file: in.dream_lab.goffish.hama.ControlMessage.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    // Wire format: transmission type, then a counted list of extra-info
    // byte blobs, then a type-dependent trailing payload.
    transmissionType = WritableUtils.readEnum(in, IControlMessage.TransmissionType.class);
    extraInfo = Lists.newArrayList();
    final int count = in.readInt();
    for (int i = 0; i < count; i++) {
        BytesWritable entry = new BytesWritable();
        entry.readFields(in);
        extraInfo.add(entry);
    }
    // Trailing payload depends on the message kind.
    if (isPartitionMessage()) {
        sourcePartitionID = in.readInt();
    } else if (isVertexMessage()) {
        vertexValues.readFields(in);
    }
}

From source file: tachyon.client.keyvalue.hadoop.KeyValueRecordReader.java

License: Apache License

@Override
public synchronized boolean next(BytesWritable keyWritable, BytesWritable valueWritable) throws IOException {
    // No more pairs: signal end of the record stream.
    if (!mKeyValuePairIterator.hasNext()) {
        return false;
    }

    KeyValuePair pair;
    try {
        pair = mKeyValuePairIterator.next();
    } catch (TachyonException e) {
        throw new IOException(e);
    }

    // TODO(cc): Implement a ByteBufferInputStream which is backed by a ByteBuffer so we could
    // benefit from zero-copy.
    try (DataInputStream keyStream = new DataInputStream(
            new ByteArrayInputStream(BufferUtils.newByteArrayFromByteBuffer(pair.getKey())))) {
        keyWritable.readFields(keyStream);
    }

    try (DataInputStream valueStream = new DataInputStream(
            new ByteArrayInputStream(BufferUtils.newByteArrayFromByteBuffer(pair.getValue())))) {
        valueWritable.readFields(valueStream);
    }

    // Track progress counters for reporting.
    mKeyValuePairsBytesRead += keyWritable.getLength() + valueWritable.getLength();
    mNumVisitedKeyValuePairs++;
    return true;
}