Example usage for org.apache.hadoop.io BytesWritable get

Introduction

On this page you can find example usage for org.apache.hadoop.io.BytesWritable.get().

Prototype

@Deprecated
public byte[] get() 

Document

Get the data from the BytesWritable.
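
The method is deprecated in favor of getBytes(), and the array it returns is the writable's backing buffer, which may be longer than the stored data. Below is a minimal sketch (the class and method names are illustrative, not part of Hadoop) that copies only the valid bytes by pairing get() with getLength():

import java.util.Arrays;

import org.apache.hadoop.io.BytesWritable;

public class BytesWritableGetExample {

    // Copies only the valid portion of the backing buffer returned by get();
    // bytes beyond getLength() are undefined padding.
    public static byte[] copyValidBytes(BytesWritable writable) {
        return Arrays.copyOf(writable.get(), writable.getLength());
    }
}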

Usage

From source file: voldemort.store.readwrite.mr.HadoopRWStoreBuilderReducer.java

License: Apache License

public void reduce(BytesWritable key, final Iterator<BytesWritable> values,
        OutputCollector<BytesWritable, BytesWritable> output, final Reporter reporter) throws IOException {
    this.transferStartTime = System.nanoTime();

    Iterator<Pair<ByteArray, Versioned<byte[]>>> iterator = new AbstractIterator<Pair<ByteArray, Versioned<byte[]>>>() {

        @Override
        protected Pair<ByteArray, Versioned<byte[]>> computeNext() {
            while (values.hasNext()) {
                BytesWritable keyValue = values.next();
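                // get() returns the writable's backing buffer; take a private copy before
                // parsing, since the framework may reuse the writable on the next iteration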
                byte[] keyValueBytes = new byte[keyValue.get().length];
                System.arraycopy(keyValue.get(), 0, keyValueBytes, 0, keyValue.get().length);

                // Reading key
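                // sizeInt is a field of the enclosing reducer: the width in bytes of each
                // serialized length prefix (presumably ByteUtils.SIZE_OF_INT, i.e. 4)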
                int keyBytesLength = ByteUtils.readInt(keyValueBytes, 0);
                byte[] keyBytes = new byte[keyBytesLength];
                System.arraycopy(keyValueBytes, sizeInt, keyBytes, 0, keyBytesLength);

                // Reading value
                int valueBytesLength = ByteUtils.readInt(keyValueBytes, sizeInt + keyBytesLength);
                byte[] valueBytes = new byte[valueBytesLength];
                System.arraycopy(keyValueBytes, sizeInt + sizeInt + keyBytesLength, valueBytes, 0,
                        valueBytesLength);

                // Reading vector clock
                int vectorClockBytesLength = ByteUtils.readInt(keyValueBytes,
                        sizeInt + sizeInt + keyBytesLength + valueBytesLength);
                byte[] vectorClockBytes = new byte[vectorClockBytesLength];
                System.arraycopy(keyValueBytes, sizeInt + sizeInt + sizeInt + keyBytesLength + valueBytesLength,
                        vectorClockBytes, 0, vectorClockBytesLength);
                VectorClock vectorClock = new VectorClock(vectorClockBytes);

                totalBytes += (keyBytesLength + valueBytesLength + vectorClockBytesLength);

                // Generating output
                ByteArray key = new ByteArray(keyBytes);
                Versioned<byte[]> versioned = Versioned.value(valueBytes, vectorClock);

                reporter.incrCounter(RecordCounter.RECORDS_STREAMED, 1);
                return new Pair<ByteArray, Versioned<byte[]>>(key, versioned);
            }
            return endOfData();
        }
    };
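    // Stream the parsed (key, versioned value) pairs to the target node through the Voldemort admin client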
    logger.info("Connecting to admin client on " + this.nodeId + " - chunk id - " + this.chunkId);
    this.client.updateEntries(this.nodeId, getStoreName(), iterator, null);
    logger.info("Completed transfer of chunk id " + this.chunkId + " to node " + this.nodeId);
}
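
Each value consumed by this reducer packs a key, a value, and a vector clock as length-prefixed byte runs: [key length][key][value length][value][clock length][clock]. A minimal sketch of producing one such record follows; buildRecord is a hypothetical helper, not part of Voldemort, and it assumes the 4-byte length prefixes read by ByteUtils.readInt are big-endian, which matches ByteBuffer's default byte order.

import java.nio.ByteBuffer;

import org.apache.hadoop.io.BytesWritable;

public class PackedRecordSketch {

    // Packs [keyLength][key][valueLength][value][clockLength][clock] into a single
    // BytesWritable, mirroring the layout the reducer above unpacks.
    public static BytesWritable buildRecord(byte[] keyBytes, byte[] valueBytes, byte[] clockBytes) {
        // 3 * 4 accounts for the three 4-byte int length prefixes.
        ByteBuffer buffer = ByteBuffer
                .allocate(3 * 4 + keyBytes.length + valueBytes.length + clockBytes.length);
        buffer.putInt(keyBytes.length).put(keyBytes);
        buffer.putInt(valueBytes.length).put(valueBytes);
        buffer.putInt(clockBytes.length).put(clockBytes);
        return new BytesWritable(buffer.array());
    }
}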