Example usage for org.apache.hadoop.io WritableUtils readVInt

List of usage examples for org.apache.hadoop.io WritableUtils readVInt

Introduction

On this page you can find example usages of org.apache.hadoop.io WritableUtils readVInt.

Prototype

public static int readVInt(DataInput stream) throws IOException 

Document

Reads a zero-compressed encoded integer from the input stream and returns it.
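
For context, the following is a minimal, self-contained sketch (not taken from the sources below) showing how readVInt pairs with writeVInt in a round trip over an in-memory stream; the class name VIntRoundTrip is illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a few integers using the zero-compressed (variable-length) encoding.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        WritableUtils.writeVInt(out, 42);      // small values take a single byte
        WritableUtils.writeVInt(out, 1000000); // larger values take more bytes
        out.flush();

        // Read them back in the same order with readVInt.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(WritableUtils.readVInt(in)); // 42
        System.out.println(WritableUtils.readVInt(in)); // 1000000
    }
}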

Usage

From source file:org.apache.hama.pipes.protocol.UplinkReader.java

License:Apache License

public void seqFileAppend() throws IOException {
    int fileID = WritableUtils.readVInt(this.inStream);
    LOG.debug("GOT MessageType.SEQFILE_APPEND - FileID: " + fileID);

    boolean result = false;

    // check if fileID is available in sequenceFileWriter
    if (this.sequenceFileWriters.containsKey(fileID)) {

        Writable sequenceKeyWritable = sequenceFileWriters.get(fileID).getValue().getKey();
        Writable sequenceValueWritable = sequenceFileWriters.get(fileID).getValue().getValue();

        // try to read key and value
        readObject(sequenceKeyWritable);
        readObject(sequenceValueWritable);

        if ((sequenceKeyWritable != null) && (sequenceValueWritable != null)) {

            // append to sequenceFile
            this.sequenceFileWriters.get(fileID).getKey().append(sequenceKeyWritable, sequenceValueWritable);

            LOG.debug("Stored data: Key: "
                    + ((sequenceKeyWritable.toString().length() < 10) ? sequenceKeyWritable.toString()
                            : sequenceKeyWritable.toString().substring(0, 9) + "...")
                    + " Value: "
                    + ((sequenceValueWritable.toString().length() < 10) ? sequenceValueWritable.toString()
                            : sequenceValueWritable.toString().substring(0, 9) + "..."));

            result = true;
        }
    } else { // no fileID stored

        // Skip written data from InputStream
        int availableBytes = this.inStream.available();
        this.inStream.skip(availableBytes);
        LOG.debug("MessageType.SEQFILE_APPEND: skip " + availableBytes + " bytes");
        LOG.error("MessageType.SEQFILE_APPEND: FileID " + fileID + " not found!");
    }

    // RESPOND
    WritableUtils.writeVInt(this.outStream, MessageType.SEQFILE_APPEND.code);
    WritableUtils.writeVInt(this.outStream, result ? 1 : 0);
    binProtocol.flush();
    LOG.debug("Responded MessageType.SEQFILE_APPEND - Result: " + result);
}

From source file:org.apache.hama.pipes.protocol.UplinkReader.java

License:Apache License

public void seqFileClose() throws IOException {
    int fileID = WritableUtils.readVInt(this.inStream);
    LOG.debug("GOT MessageType.SEQFILE_CLOSE - FileID: " + fileID);

    boolean result = false;

    if (this.sequenceFileReaders.containsKey(fileID)) {
        this.sequenceFileReaders.get(fileID).getKey().close();
        this.sequenceFileReaders.remove(fileID);
        result = true;
    } else if (this.sequenceFileWriters.containsKey(fileID)) {
        this.sequenceFileWriters.get(fileID).getKey().close();
        this.sequenceFileWriters.remove(fileID);
        result = true;
    } else { // no fileID stored
        LOG.error("MessageType.SEQFILE_CLOSE: FileID " + fileID + " not found!");
    }

    // RESPOND
    WritableUtils.writeVInt(this.outStream, MessageType.SEQFILE_CLOSE.code);
    WritableUtils.writeVInt(this.outStream, result ? 1 : 0);
    binProtocol.flush();
    LOG.debug("Responded MessageType.SEQFILE_CLOSE - Result: " + result);
}

From source file:org.apache.hama.pipes.protocol.UplinkReader.java

License:Apache License

public void partitionResponse() throws IOException {
    int partResponse = WritableUtils.readVInt(this.inStream);
    synchronized (binProtocol.resultLock) {
        binProtocol.setResult(partResponse);
        LOG.debug("Received MessageType.PARTITION_RESPONSE - Result: " + partResponse);
        binProtocol.resultLock.notify();
    }
}

From source file:org.apache.hama.pipes.protocol.UplinkReader.java

License:Apache License

/**
 * Reads the given object from the stream. If it is a Text, BytesWritable,
 * IntWritable or LongWritable, it is read directly using the variable-length
 * encoding; otherwise the object's own readFields method is used.
 * 
 * @param obj the object to read into
 * @throws IOException if the object cannot be read from the stream
 */
protected void readObject(Writable obj) throws IOException {
    byte[] buffer;
    // For BytesWritable and Text, use the specified length to set the length
    // this causes the "obvious" translations to work. So that if you emit
    // a string "abc" from C++, it shows up as "abc".
    if (obj instanceof Text) {
        int numBytes = WritableUtils.readVInt(this.inStream);
        buffer = new byte[numBytes];
        this.inStream.readFully(buffer);
        ((Text) obj).set(buffer);

    } else if (obj instanceof BytesWritable) {
        int numBytes = WritableUtils.readVInt(this.inStream);
        buffer = new byte[numBytes];
        this.inStream.readFully(buffer);
        ((BytesWritable) obj).set(buffer, 0, numBytes);

    } else if (obj instanceof IntWritable) {
        ((IntWritable) obj).set(WritableUtils.readVInt(this.inStream));

    } else if (obj instanceof LongWritable) {
        ((LongWritable) obj).set(WritableUtils.readVLong(this.inStream));

    } else {
        try {
            LOG.debug("reading type: " + obj.getClass().getName());

            // try reading object
            obj.readFields(this.inStream);

        } catch (IOException e) {
            throw new IOException("Hama Pipes is not able to read " + obj.getClass().getName(), e);
        }
    }
}

From source file:org.apache.jena.grande.mapreduce.io.NodeWritable.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    length = WritableUtils.readVInt(in);
    bytes = new byte[length];
    in.readFully(bytes, 0, length);
    node = NodeEncoder.asNode(new String(bytes));
}

From source file:org.apache.jena.grande.mapreduce.io.QuadWritable.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    length = WritableUtils.readVInt(in);
    bytes = new byte[length];
    in.readFully(bytes, 0, length);
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(new String(bytes));
    LangNQuads parser = new LangNQuads(tokenizer, RiotLib.profile(Lang.NQUADS, null), null);
    quad = parser.next();
}

From source file:org.apache.jena.grande.mapreduce.io.TripleWritable.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    length = WritableUtils.readVInt(in);
    bytes = new byte[length];
    in.readFully(bytes, 0, length);
    Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(new String(bytes));
    LangNTriples parser = new LangNTriples(tokenizer, RiotLib.profile(Lang.NTRIPLES, null), null);
    triple = parser.next();
}

From source file:org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable.java

License:Apache License

@Override
public void readFields(DataInput input) throws IOException {
    // Determine how many nodes
    int size = WritableUtils.readVInt(input);
    Node[] ns = new Node[size];

    NodeWritable nw = new NodeWritable();
    for (int i = 0; i < ns.length; i++) {
        nw.readFields(input);
        ns[i] = nw.get();
    }

    // Load the tuple
    this.tuple = this.createTuple(ns);
}

From source file:org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable.java

License:Apache License

@Override
public void readFields(DataInput input) throws IOException {
    // Read size, then count, then characteristics
    int size = WritableUtils.readVInt(input);
    this.count.readFields(input);
    this.characteristics.clear();
    for (int i = 0; i < size; i++) {
        CharacteristicWritable cw = CharacteristicWritable.read(input);
        this.characteristics.put(cw.getNode(), cw);
    }
}

From source file:org.apache.mrql.Bag.java

License:Apache License

/** the input serializer for Bag */
final public static Bag read(DataInput in) throws IOException {
    int n = WritableUtils.readVInt(in);
    Bag bag = new Bag(n);
    for (int i = 0; i < n; i++)
        bag.add(MRContainer.read(in));
    return bag;
}