Example usage for org.apache.hadoop.io WritableUtils readVInt

Introduction

This page lists example usages of org.apache.hadoop.io.WritableUtils.readVInt, drawn from open source projects.

Prototype

public static int readVInt(DataInput stream) throws IOException 

Document

Reads a zero-compressed encoded integer from the input stream and returns it.
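
The counterpart is WritableUtils.writeVInt, which produces the zero-compressed encoding that readVInt consumes. A minimal, self-contained round trip (class and variable names here are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a value in the variable-length encoding; small values take as little as one byte.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        WritableUtils.writeVInt(out, 12345);

        // Read it back from the same bytes.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        System.out.println(WritableUtils.readVInt(in)); // prints 12345
    }
}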

Usage

From source file: fi.tkk.ics.hadoop.bam.SequencedFragment.java

License: Open Source License

public void readFields(DataInput in) throws IOException {
    // TODO:  reimplement with a serialization system (e.g. Avro)

    // serialization order:
    // 1) sequence
    // 2) quality
    // 3) int with flags indicating which fields are defined (see *_Present flags)
    // 4..end) the rest of the fields

    this.clear();

    sequence.readFields(in);
    quality.readFields(in);

    int presentFlags = WritableUtils.readVInt(in);
    if ((presentFlags & Instrument_Present) != 0)
        instrument = WritableUtils.readString(in);
    if ((presentFlags & RunNumber_Present) != 0)
        runNumber = WritableUtils.readVInt(in);
    if ((presentFlags & FlowcellId_Present) != 0)
        flowcellId = WritableUtils.readString(in);
    if ((presentFlags & Lane_Present) != 0)
        lane = WritableUtils.readVInt(in);
    if ((presentFlags & Tile_Present) != 0)
        tile = WritableUtils.readVInt(in);
    if ((presentFlags & Xpos_Present) != 0)
        xpos = WritableUtils.readVInt(in);
    if ((presentFlags & Ypos_Present) != 0)
        ypos = WritableUtils.readVInt(in);
    if ((presentFlags & Read_Present) != 0)
        read = WritableUtils.readVInt(in);
    if ((presentFlags & FilterPassed_Present) != 0)
        filterPassed = WritableUtils.readVInt(in) == 1;
    if ((presentFlags & ControlNumber_Present) != 0)
        controlNumber = WritableUtils.readVInt(in);
    if ((presentFlags & IndexSequence_Present) != 0)
        indexSequence = WritableUtils.readString(in);
}
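
The example above reads an optional-field layout: a vint bitmask announces which fields follow. The matching write side is not shown in this source; a minimal sketch of what it could look like, assuming the same *_Present constants and that the optional numeric fields are boxed (Integer) so absence can be represented as null:

public void write(DataOutput out) throws IOException {
    sequence.write(out);
    quality.write(out);

    // Build the presence bitmask first, then write only the defined fields,
    // in exactly the order readFields consumes them.
    int presentFlags = 0;
    if (instrument != null)
        presentFlags |= Instrument_Present;
    if (runNumber != null)
        presentFlags |= RunNumber_Present;
    // ... same test for each remaining optional field ...

    WritableUtils.writeVInt(out, presentFlags);
    if (instrument != null)
        WritableUtils.writeString(out, instrument);
    if (runNumber != null)
        WritableUtils.writeVInt(out, runNumber);
    // ... and so on for the remaining fields ...
}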

From source file: gaffer.statistics.impl.CappedMinuteCount.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    minuteToCount.clear();
    maxEntries = WritableUtils.readVInt(in);
    full = in.readBoolean();
    if (!full) {
        int numberOfEntries = WritableUtils.readVInt(in);
        int smallestInt = -1;
        for (int i = 0; i < numberOfEntries; i++) {
            if (i == 0) {
                smallestInt = WritableUtils.readVInt(in);
                minuteToCount.put(smallestInt, WritableUtils.readVLong(in));
            } else {
                minuteToCount.put(WritableUtils.readVInt(in) + smallestInt, WritableUtils.readVLong(in));
            }
        }
    }
}
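
Note the delta trick in this example: only the first (smallest) minute is stored verbatim, and later keys are stored as offsets from it, so writeVInt emits short encodings. A sketch of the matching write side, assuming minuteToCount iterates in ascending key order (e.g., a TreeMap<Integer, Long>):

public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, maxEntries);
    out.writeBoolean(full);
    if (!full) {
        WritableUtils.writeVInt(out, minuteToCount.size());
        int smallestInt = -1;
        boolean first = true;
        for (Map.Entry<Integer, Long> entry : minuteToCount.entrySet()) {
            if (first) {
                // The smallest key is written verbatim ...
                smallestInt = entry.getKey();
                WritableUtils.writeVInt(out, smallestInt);
                first = false;
            } else {
                // ... and every later key as a small delta from it.
                WritableUtils.writeVInt(out, entry.getKey() - smallestInt);
            }
            WritableUtils.writeVLong(out, entry.getValue());
        }
    }
}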

From source file: gaffer.statistics.impl.DailyCount.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    int numberOfEntries = WritableUtils.readVInt(in);
    dayToCount.clear();
    for (int i = 0; i < numberOfEntries; i++) {
        dayToCount.put(WritableUtils.readVInt(in), WritableUtils.readVLong(in));
    }
}

From source file: gaffer.statistics.impl.HourlyCount.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {
    int numberOfEntries = WritableUtils.readVInt(in);
    hourToCount.clear();
    for (int i = 0; i < numberOfEntries; i++) {
        hourToCount.put(WritableUtils.readVInt(in), WritableUtils.readVLong(in));
    }
}
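
DailyCount and HourlyCount share the same layout: a vint entry count followed by alternating vint key / vlong value pairs. The write counterpart is the mirror image; a sketch, assuming hourToCount is a Map<Integer, Long>:

@Override
public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, hourToCount.size());
    for (Map.Entry<Integer, Long> entry : hourToCount.entrySet()) {
        WritableUtils.writeVInt(out, entry.getKey());
        WritableUtils.writeVLong(out, entry.getValue());
    }
}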

From source file: ilps.hadoop.ResultObject.java

License: Apache License

@Override
public void readFields(DataInput in) throws IOException {

    try {

        score = in.readLong();
        topic = in.readUTF();
        stream_id = in.readUTF();

        int length = WritableUtils.readVInt(in);
        byte[] bytes = new byte[length];
        in.readFully(bytes, 0, length);

        deserializer.deserialize(document, bytes);

    } catch (TException e) {
        e.printStackTrace();
        throw new IOException(e);
    }
}

From source file: ilps.hadoop.StreamItemWritable.java

License: Apache License

/**
 * Deserializes this object from a vint length-prefixed Thrift payload.
 */
@Override
public void readFields(DataInput in) throws IOException {

    try {

        int length = WritableUtils.readVInt(in);
        byte[] bytes = new byte[length];
        in.readFully(bytes, 0, length);

        deserializer.deserialize(this, bytes);

    } catch (TException e) {
        e.printStackTrace();
        throw new IOException(e);
    }

}
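
Both Thrift-backed examples above rely on a vint length prefix to know how many payload bytes to read. The write side would serialize first and prefix the byte count; a sketch, assuming a TSerializer field named serializer that mirrors the deserializer:

@Override
public void write(DataOutput out) throws IOException {
    try {
        byte[] bytes = serializer.serialize(this);
        WritableUtils.writeVInt(out, bytes.length);
        out.write(bytes);
    } catch (TException e) {
        throw new IOException(e);
    }
}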

From source file: io.bfscan.data.TermStatistics.java

License: Apache License

/**
 * Creates a {@code TermStatistics} object.
 *
 * @param file collection frequency data file
 * @param fs FileSystem to read from
 * @throws IOException if the statistics files cannot be read
 */
public TermStatistics(Path file, FileSystem fs) throws IOException {
    Preconditions.checkNotNull(file);
    Preconditions.checkNotNull(fs);

    // First pass: collection frequencies, one vlong per term id.
    FSDataInputStream in = fs.open(new Path(file, BuildDictionary.CF_BY_ID_DATA));
    this.numTerms = in.readInt();

    cfs = new long[numTerms];

    for (int i = 0; i < numTerms; i++) {
        long cf = WritableUtils.readVLong(in);

        cfs[i] = cf;
        collectionSize += cf;

        if (cf > maxCf) {
            maxCf = cf;
            maxCfTerm = i + 1;
        }
    }

    in.close();

    // Second pass: document frequencies, one vint per term id.
    in = fs.open(new Path(file, BuildDictionary.DF_BY_ID_DATA));
    if (numTerms != in.readInt()) {
        throw new IOException("df data and cf data should have the same number of entries!");
    }

    dfs = new int[numTerms];

    for (int i = 0; i < numTerms; i++) {
        int df = WritableUtils.readVInt(in);

        dfs[i] = df;

        if (df > maxDf) {
            maxDf = df;
            maxDfTerm = i + 1;
        }
    }

    in.close();
}

From source file: io.bfscan.data.VByteDocVector.java

License: Apache License

public static void fromBytesWritable(BytesWritable bytes, VByteDocVector doc) {
    try {
        ByteArrayInputStream bytesIn = new ByteArrayInputStream(bytes.getBytes());
        DataInputStream data = new DataInputStream(bytesIn);

        int length = WritableUtils.readVInt(data);
        doc.termids = new int[length];
        for (int i = 0; i < length; i++) {
            doc.termids[i] = WritableUtils.readVInt(data);
        }
    } catch (IOException e) {
        // Malformed input: fall back to an empty document vector.
        doc.termids = new int[0];
    }
}
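
The symmetric encoder writes the array length as a vint, then each term id as a vint. A sketch with a hypothetical toBytesWritable helper (not part of the source above):

public static BytesWritable toBytesWritable(int[] termids) {
    try {
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        DataOutputStream data = new DataOutputStream(bytesOut);

        WritableUtils.writeVInt(data, termids.length);
        for (int termid : termids) {
            WritableUtils.writeVInt(data, termid);
        }

        return new BytesWritable(bytesOut.toByteArray());
    } catch (IOException e) {
        return new BytesWritable();
    }
}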

From source file: io.fluo.accumulo.data.WriteUtilImpl.java

License: Apache License

@Override
public int readVInt(DataInput stream) throws IOException {
    return WritableUtils.readVInt(stream);
}

From source file: io.fluo.api.data.Bytes.java

License: Apache License

/**
 * Reads a vint length-prefixed byte array from the input and wraps it as Bytes.
 *
 * @param in DataInput to read from
 * @return the wrapped Bytes
 * @throws IOException if the underlying read fails
 */
public static Bytes read(DataInput in) throws IOException {
    int len = WritableUtils.readVInt(in);
    byte[] b = new byte[len];
    in.readFully(b);
    return wrap(b);
}
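
A matching write helper (not in the source shown) would emit the vint length prefix before the payload. A sketch, where toArray() is an assumed accessor returning the backing bytes as an array:

public static void write(DataOutput out, Bytes b) throws IOException {
    byte[] data = b.toArray(); // assumed accessor; adapt to the actual Bytes API
    WritableUtils.writeVInt(out, data.length);
    out.write(data);
}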