Example usage for org.apache.hadoop.io WritableUtils readVInt

Introduction

On this page you can find example usages for org.apache.hadoop.io WritableUtils readVInt.

Prototype

public static int readVInt(DataInput stream) throws IOException 

Document

Reads a zero-compressed encoded integer from the input stream and returns it.
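
A minimal round-trip sketch illustrates the contract: WritableUtils.writeVInt produces the zero-compressed encoding that readVInt consumes, so small values occupy a single byte on the wire. The class name ReadVIntDemo below is our own, not part of Hadoop.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class ReadVIntDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        WritableUtils.writeVInt(out, 42);      // small value: encodes to a single byte
        WritableUtils.writeVInt(out, 1000000); // larger value: encodes to multiple bytes

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        System.out.println(WritableUtils.readVInt(in)); // prints 42
        System.out.println(WritableUtils.readVInt(in)); // prints 1000000
    }
}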

Usage

From source file:org.apache.phoenix.util.ByteUtil.java

License:Apache License

public static int[] deserializeVIntArray(DataInput in, int length) throws IOException {
    int i = 0;
    int[] intArray = new int[length];
    while (i < length) {
        intArray[i++] = WritableUtils.readVInt(in);
    }
    return intArray;
}
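
For context, the inverse operation just writes each element with WritableUtils.writeVInt. The sketch below (assuming java.io.DataOutput and org.apache.hadoop.io.WritableUtils imports) is our own illustration of that layout, not necessarily Phoenix's actual serializer:

public static void serializeVIntArray(DataOutput out, int[] intArray) throws IOException {
    // Mirrors deserializeVIntArray above: no length prefix is written,
    // so the reader must already know the element count.
    for (int value : intArray) {
        WritableUtils.writeVInt(out, value);
    }
}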

From source file:org.apache.phoenix.util.IndexUtil.java

License:Apache License

public static ColumnReference[] deserializeDataTableColumnsToJoin(Scan scan) {
    byte[] columnsBytes = scan.getAttribute(BaseScannerRegionObserver.DATA_TABLE_COLUMNS_TO_JOIN);
    if (columnsBytes == null)
        return null;
    ByteArrayInputStream stream = new ByteArrayInputStream(columnsBytes); // TODO: size?
    try {
        DataInputStream input = new DataInputStream(stream);
        int numColumns = WritableUtils.readVInt(input);
        ColumnReference[] dataColumns = new ColumnReference[numColumns];
        for (int i = 0; i < numColumns; i++) {
            dataColumns[i] = new ColumnReference(Bytes.readByteArray(input), Bytes.readByteArray(input));
        }
        return dataColumns;
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            stream.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}

From source file:org.apache.phoenix.util.IndexUtil.java

License:Apache License

public static byte[][] deserializeViewConstantsFromScan(Scan scan) {
    byte[] bytes = scan.getAttribute(BaseScannerRegionObserver.VIEW_CONSTANTS);
    if (bytes == null)
        return null;
    ByteArrayInputStream stream = new ByteArrayInputStream(bytes); // TODO: size?
    try {
        DataInputStream input = new DataInputStream(stream);
        int numConstants = WritableUtils.readVInt(input);
        byte[][] viewConstants = new byte[numConstants][];
        for (int i = 0; i < numConstants; i++) {
            viewConstants[i] = Bytes.readByteArray(input);
        }
        return viewConstants;
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            stream.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}

From source file:org.apache.phoenix.util.PrefixByteDecoder.java

License:Apache License

/**
 * Decodes bytes encoded with {@link PrefixByteEncoder}.
 * @param in Input from which bytes are read.
 * @return Pointer containing bytes that were decoded. Note that the
 * same pointer will be returned with each call, so it must be consumed
 * prior to calling decode again.
 * @throws IOException
 */
public ImmutableBytesWritable decode(DataInput in) throws IOException {
    int prefixLen = WritableUtils.readVInt(in);
    int suffixLen = WritableUtils.readVInt(in);
    int length = prefixLen + suffixLen;
    byte[] b;
    if (maxLength == -1) { // Allocate new byte array each time
        b = new byte[length];
        System.arraycopy(previous.get(), previous.getOffset(), b, 0, prefixLen);
    } else { // Reuse same buffer each time
        b = previous.get();
    }
    in.readFully(b, prefixLen, suffixLen);
    previous.set(b, 0, length);
    return previous;
}
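
The decoder above implies a simple wire format: a vint prefix length, a vint suffix length, and then only the suffix bytes, with the prefix recovered from the previously decoded value. A hand-rolled writer for that format might look like the sketch below (writeEntry is our own name, not the actual PrefixByteEncoder API):

static void writeEntry(DataOutput out, byte[] previous, byte[] current) throws IOException {
    // Length of the prefix shared with the previously written value.
    int prefixLen = 0;
    int maxPrefix = Math.min(previous.length, current.length);
    while (prefixLen < maxPrefix && previous[prefixLen] == current[prefixLen]) {
        prefixLen++;
    }
    int suffixLen = current.length - prefixLen;
    WritableUtils.writeVInt(out, prefixLen);  // vint prefix length
    WritableUtils.writeVInt(out, suffixLen);  // vint suffix length
    out.write(current, prefixLen, suffixLen); // only the novel suffix bytes
}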

From source file:org.apache.pig.backend.hadoop.executionengine.spark_streaming.Text.java

License:Apache License

/** Deserializes this Text: a vint length followed by that many bytes. */
public void readFields(DataInput in) throws IOException {
    int newLength = WritableUtils.readVInt(in);
    setCapacity(newLength, false);
    in.readFully(bytes, 0, newLength);
    length = newLength;
}

From source file:org.apache.pig.backend.hadoop.executionengine.spark_streaming.Text.java

License:Apache License

/** Skips over one Text in the input. */
public static void skip(DataInput in) throws IOException {
    int length = WritableUtils.readVInt(in);
    WritableUtils.skipFully(in, length);
}

From source file:org.apache.pig.backend.hadoop.executionengine.spark_streaming.Text.java

License:Apache License

/** Reads a UTF8-encoded string from in. */
public static String readString(DataInput in) throws IOException {
    int length = WritableUtils.readVInt(in);
    byte[] bytes = new byte[length];
    in.readFully(bytes, 0, length);
    return decode(bytes);
}

From source file:org.apache.sqoop.job.io.Data.java

License:Apache License

private int readType(DataInput in) throws IOException {
    return WritableUtils.readVInt(in);
}

From source file:org.apache.sysml.runtime.transform.DistinctValue.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    // read word 
    _length = WritableUtils.readVInt(in);
    _bytes = new byte[_length];
    in.readFully(_bytes, 0, _length);
    // read count
    _count = in.readLong();
}

From source file:org.apache.tez.common.counters.AbstractCounterGroup.java

License:Apache License

@Override
public synchronized void readFields(DataInput in) throws IOException {
    displayName = Text.readString(in);
    counters.clear();
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
        T counter = newCounter();
        counter.readFields(in);
        counters.put(counter.getName(), counter);
        limits.incrCounters();
    }
}