Example usage for org.apache.hadoop.io WritableUtils readVInt

Introduction

On this page you can find example usages of org.apache.hadoop.io.WritableUtils.readVInt.

Prototype

public static int readVInt(DataInput stream) throws IOException 

Document

Reads a zero-compressed encoded integer from input stream and returns it.
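
The method pairs with WritableUtils.writeVInt, which produces the zero-compressed encoding that readVInt consumes. A minimal round-trip sketch (the class name and in-memory streams are illustrative, not part of Hadoop):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
    public static void main(String[] args) throws IOException {
        // Encode a value with the matching variable-length writer.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        WritableUtils.writeVInt(out, 12345);
        out.flush();

        // Decode it again; small magnitudes take as little as one byte on the wire.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        int value = WritableUtils.readVInt(in);
        System.out.println(value + " read back from " + buffer.size() + " byte(s)");
    }
}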

Usage

From source file: org.deephacks.confit.internal.hbase.MultiKeyValueComparisonFilter.java

License: Apache License

@Override
public void readFields(DataInput input) throws IOException {
    int sidSize = WritableUtils.readVInt(input);
    sid = new byte[sidSize];
    input.readFully(sid);
    maxResults = WritableUtils.readVInt(input);
    int restrictionLength = WritableUtils.readVInt(input);
    for (int i = 0; i < restrictionLength; i++) {
        QualifierRestriction restriction = RestrictionType.values()[WritableUtils.readVInt(input)]
                .newInstance();
        restriction.readFields(input);
        restrictions.add(restriction);
    }
}
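
The write side of this filter is not shown here. A hedged sketch of what a symmetric write(DataOutput) could look like for the fields read above (the getType() accessor and the ordinal-based encoding of RestrictionType are assumptions, not taken from the project source):

// Hypothetical counterpart to the readFields above; field names follow the snippet,
// the type accessor is assumed.
public void write(DataOutput output) throws IOException {
    WritableUtils.writeVInt(output, sid.length);
    output.write(sid);
    WritableUtils.writeVInt(output, maxResults);
    WritableUtils.writeVInt(output, restrictions.size());
    for (QualifierRestriction restriction : restrictions) {
        WritableUtils.writeVInt(output, restriction.getType().ordinal()); // assumed accessor
        restriction.write(output);
    }
}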

From source file: org.goldenorb.io.input.RawSplit.java

License: Apache License

/**
 * Reads the fields from the given DataInput.
 *
 * @param in
 *          the DataInput to read from
 */
public void readFields(DataInput in) throws IOException {
    splitClass = Text.readString(in);
    dataLength = in.readLong();
    bytes.readFields(in);
    int len = WritableUtils.readVInt(in);
    locations = new String[len];
    for (int i = 0; i < len; ++i) {
        locations[i] = Text.readString(in);
    }
}
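
A symmetric write(DataOutput) is implied by this layout; the sketch below mirrors the reads one-for-one and is not taken from the GoldenOrb source:

// Hypothetical write-side counterpart to the readFields above.
public void write(DataOutput out) throws IOException {
    Text.writeString(out, splitClass);
    out.writeLong(dataLength);
    bytes.write(out);
    WritableUtils.writeVInt(out, locations.length);
    for (String location : locations) {
        Text.writeString(out, location);
    }
}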

From source file: org.htuple.SerializationUtils.java

License: Apache License

public static Object read(DataInput stream) throws IOException {

    int type = WritableUtils.readVInt(stream);

    TupleElementSerializer serializer = staticElementIdWriters.get(type);
    if (serializer == null) {
        throw new IllegalArgumentException("Unsupported type: " + type);
    }

    return serializer.read(stream);
}

From source file: org.htuple.Tuple.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    fields.clear();
    int elementCount = WritableUtils.readVInt(in);
    while (elementCount-- > 0) {
        fields.add(SerializationUtils.read(in));
    }
}
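
The matching serializer would write the element count with writeVInt before the elements; the sketch below assumes a SerializationUtils.write counterpart to the SerializationUtils.read call above, which is not shown on this page:

// Hypothetical symmetric write(DataOutput); SerializationUtils.write is assumed.
@Override
public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, fields.size());
    for (Object field : fields) {
        SerializationUtils.write(out, field); // assumed helper
    }
}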

From source file: org.hypertable.hadoop.mapreduce.KeyWritable.java

License: Open Source License

@Override
public void readFields(DataInput in) throws IOException {
    int len = WritableUtils.readVInt(in);

    if (m_input_buffer == null || len > m_input_buffer.capacity()) {
        m_input_buffer = ByteBuffer.allocate(len + 16);
        m_input_buffer.order(ByteOrder.LITTLE_ENDIAN);
    } else {
        m_input_buffer.clear();
        m_input_buffer.limit(len);
    }

    in.readFully(m_input_buffer.array(), 0, len);

    short flagVal = m_input_buffer.getShort();
    flag = org.hypertable.thriftgen.KeyFlag.findByValue((int) flagVal);
    if (flag == null)
        throw new IOException("Can't convert KeyFlag value " + flagVal + " to enum");

    len = readVInt(m_input_buffer);
    if (len > 0) {
        row_buffer = m_input_buffer.array();
        row_buffer_offset = m_input_buffer.position();
        m_input_buffer.position(row_buffer_offset + len);
    } else {
        row_buffer = null;
        row_buffer_offset = 0;
    }
    row_buffer_length = len;
    row = null;

    len = readVInt(m_input_buffer);
    if (len > 0) {
        column_family_buffer = m_input_buffer.array();
        column_family_buffer_offset = m_input_buffer.position();
        m_input_buffer.position(column_family_buffer_offset + len);
    } else {
        column_family_buffer = null;
        column_family_buffer_offset = 0;
    }
    column_family_buffer_length = len;
    column_family = null;

    len = readVInt(m_input_buffer);
    if (len > 0) {
        column_qualifier_buffer = m_input_buffer.array();
        column_qualifier_buffer_offset = m_input_buffer.position();
        m_input_buffer.position(column_qualifier_buffer_offset + len);
    } else {
        column_qualifier_buffer = null;
        column_qualifier_buffer_offset = 0;
    }
    column_qualifier_buffer_length = len;
    column_qualifier = null;

    timestamp = m_input_buffer.getLong();
    setTimestampIsSet(true);

    revision = m_input_buffer.getLong();
    setRevisionIsSet(true);

    m_dirty = false;
}

From source file: org.imageterrier.termpayload.NNTermPayloadCoordinator.java

License: Mozilla Public License

@Override
public int[] readPayload(DataInput in) throws IOException {
    int numNeighbours = WritableUtils.readVInt(in);
    int[] neighbours = new int[numNeighbours];

    for (int j = 0; j < numNeighbours; j++)
        neighbours[j] = WritableUtils.readVInt(in);

    return neighbours;
}
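
The payload is a length-prefixed list of vints, so the write side can mirror it directly; a hedged sketch (the writePayload name and signature are assumptions):

// Hypothetical write-side counterpart to readPayload above.
public void writePayload(DataOutput out, int[] neighbours) throws IOException {
    WritableUtils.writeVInt(out, neighbours.length);
    for (int neighbour : neighbours) {
        WritableUtils.writeVInt(out, neighbour);
    }
}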

From source file: org.imageterrier.termpayload.PositionTermPayloadCoordinator.java

License: Mozilla Public License

@Override
public int[] readPayload(DataInput in) throws IOException {
    int[] positionBits = positionSpec.getPositionBits();
    int[] position = new int[positionBits.length];

    for (int i = 0; i < positionBits.length; i++)
        position[i] = WritableUtils.readVInt(in);

    return position;
}

From source file: org.kiji.hive.io.KijiRowDataWritable.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    EntityIdWritable entityIdWritable = (EntityIdWritable) WritableFactories
            .newInstance(EntityIdWritable.class);
    entityIdWritable.readFields(in);
    mEntityId = entityIdWritable;

    int numDecodedData = WritableUtils.readVInt(in);

    // We need to dirty the decoded data so that these objects can be reused.
    mDecodedData = null;

    mWritableData = Maps.newHashMap();
    for (int c = 0; c < numDecodedData; c++) {
        String columnText = WritableUtils.readString(in);
        KijiColumnName column = new KijiColumnName(columnText);

        NavigableMap<Long, KijiCellWritable> data = Maps.newTreeMap();
        int numCells = WritableUtils.readVInt(in);
        for (int d = 0; d < numCells; d++) {
            long ts = WritableUtils.readVLong(in);
            KijiCellWritable cellWritable = (KijiCellWritable) WritableFactories
                    .newInstance(KijiCellWritable.class);
            cellWritable.readFields(in);
            data.put(ts, cellWritable);
        }

        mWritableData.put(column, data);
    }

    mSchemas = Maps.newHashMap();
    int numSchemas = WritableUtils.readVInt(in);
    for (int c = 0; c < numSchemas; c++) {
        String columnText = WritableUtils.readString(in);
        KijiColumnName column = new KijiColumnName(columnText);
        String schemaString = WritableUtils.readString(in);
        Schema schema = new Schema.Parser().parse(schemaString);
        mSchemas.put(column, schema);
    }
}

From source file: org.lilyproject.mapreduce.RecordIdWritable.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    int length = WritableUtils.readVInt(in);
    byte[] bytes = new byte[length];
    in.readFully(bytes, 0, length);
    this.recordId = ID_GENERATOR.fromBytes(bytes);
}
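
Writing follows the same length-prefixed layout; the sketch below assumes the record id can expose its serialized bytes (the toBytes() accessor is an assumption):

// Hypothetical symmetric write(DataOutput); the toBytes() accessor is assumed.
@Override
public void write(DataOutput out) throws IOException {
    byte[] bytes = recordId.toBytes(); // assumed accessor
    WritableUtils.writeVInt(out, bytes.length);
    out.write(bytes);
}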

From source file: org.terrier.compression.integer.ByteInputStream.java

License: Mozilla Public License

@Override
public final int readVInt() throws IOException {

    int i = WritableUtils.readVInt(di);
    byteOffset += WritableUtils.getVIntSize(i);

    return i;
}
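
This last example tracks how many bytes each call consumed via WritableUtils.getVIntSize. A small standalone check of that pairing (the class name and in-memory streams are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VIntSizeCheck {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        WritableUtils.writeVInt(out, 300); // needs more than one byte to encode
        out.flush();

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        int value = WritableUtils.readVInt(in);
        // getVIntSize reports the encoded length, which is what the
        // ByteInputStream example uses to advance its byteOffset.
        System.out.println(value + " encoded in " + WritableUtils.getVIntSize(value) + " byte(s)");
    }
}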