Example usage for java.io DataInput readFully

List of usage examples for java.io DataInput readFully

Introduction

On this page you can find example usages for java.io DataInput readFully.

Prototype

void readFully(byte b[]) throws IOException;

Document

Reads some bytes from an input stream and stores them into the buffer array b. The number of bytes read is equal to b.length; the call blocks until the buffer is filled and throws an EOFException if the stream ends first.
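
A minimal, self-contained sketch of the call is shown below; the file name data.bin and the helper readLengthPrefixedRecord are assumptions for illustration only and do not come from the projects listed under Usage. It follows the same pattern as several of those examples: read a 4-byte length prefix with readFully(byte[]), then fill the payload using the three-argument overload readFully(byte[] b, int off, int len).

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class ReadFullyExample {

    // Reads a 4-byte length prefix and then exactly that many payload bytes from any DataInput.
    static byte[] readLengthPrefixedRecord(DataInput in) throws IOException {
        byte[] header = new byte[4];
        in.readFully(header);                    // blocks until all 4 bytes arrive, or throws EOFException
        int payloadLength = ByteBuffer.wrap(header).getInt();
        byte[] payload = new byte[payloadLength];
        in.readFully(payload, 0, payloadLength); // three-argument overload: fill payload[0..payloadLength)
        return payload;
    }

    public static void main(String[] args) throws IOException {
        // "data.bin" is a hypothetical input file used only for this sketch.
        try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
            byte[] payload = readLengthPrefixedRecord(in);
            System.out.println("Read " + payload.length + " payload bytes");
        }
    }
}

Unlike InputStream.read, readFully never returns a short count, which is why the GZIP examples below need an explicit loop around gzis.read while the DataInput reads do not.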

Usage

From source file:edu.umn.cs.spatialHadoop.visualization.FrequencyMap.java

@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    int length = in.readInt();
    byte[] serializedData = new byte[length];
    in.readFully(serializedData);
    ByteArrayInputStream bais = new ByteArrayInputStream(serializedData);
    GZIPInputStream gzis = new GZIPInputStream(bais);

    byte[] buffer = new byte[8];
    gzis.read(buffer);
    ByteBuffer bbuffer = ByteBuffer.wrap(buffer);
    int width = bbuffer.getInt();
    int height = bbuffer.getInt();
    // Reallocate memory only if needed
    if (width != this.getWidth() || height != this.getHeight())
        frequencies = new float[width][height];
    buffer = new byte[getHeight() * 4];
    for (int x = 0; x < getWidth(); x++) {
        int size = 0;
        while (size < buffer.length) {
            size += gzis.read(buffer, size, buffer.length - size);
        }
        bbuffer = ByteBuffer.wrap(buffer);
        for (int y = 0; y < getHeight(); y++) {
            frequencies[x][y] = bbuffer.getFloat();
        }
    }
}

From source file:com.zjy.mongo.input.MongoInputSplit.java

public void readFields(final DataInput in) throws IOException {
    BSONCallback cb = new BasicBSONCallback();
    BSONObject spec;
    byte[] l = new byte[4];
    in.readFully(l);
    int dataLen = org.bson.io.Bits.readInt(l);
    byte[] data = new byte[dataLen + 4];
    System.arraycopy(l, 0, data, 0, 4);
    in.readFully(data, 4, dataLen - 4);
    _bsonDecoder.decode(data, cb);
    spec = (BSONObject) cb.get();
    setInputURI(new MongoClientURI((String) spec.get("inputURI")));

    if (spec.get("authURI") != null) {
        setAuthURI(new MongoClientURI((String) spec.get("authURI")));
    } else {
        setAuthURI((MongoClientURI) null);
    }

    setKeyField((String) spec.get("keyField"));
    BSONObject temp = (BSONObject) spec.get("fields");
    setFields(temp != null ? new BasicDBObject(temp.toMap()) : null);

    temp = (BSONObject) spec.get("query");
    setQuery(temp != null ? new BasicDBObject(temp.toMap()) : null);

    temp = (BSONObject) spec.get("sort");
    setSort(temp != null ? new BasicDBObject(temp.toMap()) : null);

    temp = (BSONObject) spec.get("min");
    setMin(temp != null ? new BasicDBObject(temp.toMap()) : null);

    temp = (BSONObject) spec.get("max");
    setMax(temp != null ? new BasicDBObject(temp.toMap()) : null);

    setNoTimeout((Boolean) spec.get("notimeout"));
}

From source file:com.mongodb.hadoop.io.BSONWritable.java

/**
 * {@inheritDoc}
 *
 * @see Writable#readFields(DataInput)
 */
public void readFields(DataInput in) throws IOException {
    BSONDecoder dec = new BasicBSONDecoder();
    BSONCallback cb = new BasicBSONCallback();
    // Read the BSON length from the start of the record
    byte[] l = new byte[4];
    try {
        in.readFully(l);
        int dataLen = Bits.readInt(l);
        if (log.isDebugEnabled())
            log.debug("*** Expected DataLen: " + dataLen);
        byte[] data = new byte[dataLen + 4];
        System.arraycopy(l, 0, data, 0, 4);
        in.readFully(data, 4, dataLen - 4);
        dec.decode(data, cb);
        _doc = (BSONObject) cb.get();
        if (log.isTraceEnabled())
            log.trace("Decoded a BSON Object: " + _doc);
    } catch (Exception e) {
        /* If we can't read another length it's not an error, just return quietly. */
        // TODO - Figure out how to gracefully mark this as an empty
        log.info("No Length Header available." + e);
        _doc = new BasicDBObject();
    }

}

From source file:edu.umn.cs.spatialHadoop.core.ZCurvePartitioner.java

@Override
public void readFields(DataInput in) throws IOException {
    mbr.readFields(in);
    int partitionCount = in.readInt();
    zSplits = new long[partitionCount];
    int bufferLength = 8 * partitionCount;
    byte[] buffer = new byte[bufferLength];
    in.readFully(buffer);
    ByteBuffer bbuffer = ByteBuffer.wrap(buffer);
    for (int i = 0; i < partitionCount; i++) {
        zSplits[i] = bbuffer.getLong();
    }
    if (bbuffer.hasRemaining())
        throw new RuntimeException("Error reading STR partitioner");
}

From source file:edu.umn.cs.spatialHadoop.indexing.BTRPartitioner.java

@Override
public void readFields(DataInput in) throws IOException {
    mbr.readFields(in);
    columns = in.readInt();
    rows = in.readInt();
    xSplits = new double[columns];
    ySplits = new double[columns * rows];

    int bufferLength = (xSplits.length + ySplits.length) * 8;
    byte[] buffer = new byte[bufferLength];
    in.readFully(buffer);
    ByteBuffer bbuffer = ByteBuffer.wrap(buffer);
    for (int i = 0; i < xSplits.length; i++)
        xSplits[i] = bbuffer.getDouble();
    for (int i = 0; i < ySplits.length; i++)
        ySplits[i] = bbuffer.getDouble();
    if (bbuffer.hasRemaining())
        throw new RuntimeException("Error reading STR partitioner");
}

From source file:com.zjy.mongo.io.BSONWritable.java

/**
 * {@inheritDoc}
 *
 * @see Writable#readFields(DataInput)
 */
public void readFields(final DataInput in) throws IOException {
    BSONDecoder dec = new BasicBSONDecoder();
    BSONCallback cb = new BasicBSONCallback();
    // Read the BSON length from the start of the record
    byte[] l = new byte[4];
    try {
        in.readFully(l);
        int dataLen = Bits.readInt(l);
        if (LOG.isDebugEnabled()) {
            LOG.debug("*** Expected DataLen: " + dataLen);
        }
        byte[] data = new byte[dataLen + 4];
        System.arraycopy(l, 0, data, 0, 4);
        in.readFully(data, 4, dataLen - 4);
        dec.decode(data, cb);
        doc = (BSONObject) cb.get();
        if (LOG.isTraceEnabled()) {
            LOG.trace("Decoded a BSON Object: " + doc);
        }
    } catch (Exception e) {
        /* If we can't read another length it's not an error, just return quietly. */
        // TODO - Figure out how to gracefully mark this as an empty
        LOG.info("No Length Header available." + e);
        doc = new BasicDBObject();
    }

}

From source file:com.mongodb.hadoop.input.MongoInputSplit.java

public void readFields(DataInput in) throws IOException {
    BSONDecoder dec = getBSONDecoder();
    BSONCallback cb = new BasicBSONCallback();
    BSONObject spec;
    // Read the BSON length from the start of the record
    byte[] l = new byte[4];
    try {
        in.readFully(l);
        int dataLen = org.bson.io.Bits.readInt(l);
        if (log.isDebugEnabled())
            log.debug("*** Expected DataLen: " + dataLen);
        byte[] data = new byte[dataLen + 4];
        System.arraycopy(l, 0, data, 0, 4);
        in.readFully(data, 4, dataLen - 4);
        dec.decode(data, cb);
        spec = (BSONObject) cb.get();
        if (log.isTraceEnabled())
            log.trace("Decoded a BSON Object: " + spec);
    } catch (Exception e) {
        /* If we can't read another length it's not an error, just return quietly. */
        // TODO - Figure out how to gracefully mark this as an empty
        log.info("No Length Header available." + e);
        spec = new BasicDBObject();
    }

    _mongoURI = new MongoURI((String) spec.get("uri"));
    _keyField = (String) spec.get("key");
    _querySpec = new BasicDBObject(((BSONObject) spec.get("query")).toMap());
    _fieldSpec = new BasicDBObject(((BSONObject) spec.get("field")).toMap());
    _sortSpec = new BasicDBObject(((BSONObject) spec.get("sort")).toMap());
    _specialMin = spec.get("specialMin");
    _specialMax = spec.get("specialMax");
    _limit = (Integer) spec.get("limit");
    _skip = (Integer) spec.get("skip");
    _notimeout = (Boolean) spec.get("notimeout");
    getCursor();
    log.info("Deserialized MongoInputSplit ... { length = " + getLength() + ", locations = "
            + Arrays.toString(getLocations()) + ", keyField = " + _keyField + ", query = " + _querySpec
            + ", fields = " + _fieldSpec + ", sort = " + _sortSpec + ", limit = " + _limit + ", skip = " + _skip
            + ", noTimeout = " + _notimeout + ", specialMin = " + _specialMin + ", specialMax = " + _specialMax
            + "}");
}

From source file:com.nearinfinity.blur.mapreduce.BlurTask.java

@Override
public void readFields(DataInput input) throws IOException {
    _maxRecordCount = input.readLong();
    _ramBufferSizeMB = input.readInt();
    _optimize = input.readBoolean();
    _indexingType = INDEXING_TYPE.valueOf(readString(input));
    byte[] data = new byte[input.readInt()];
    input.readFully(data);
    ByteArrayInputStream is = new ByteArrayInputStream(data);
    TIOStreamTransport trans = new TIOStreamTransport(is);
    TBinaryProtocol protocol = new TBinaryProtocol(trans);
    _tableDescriptor = new TableDescriptor();
    try {
        _tableDescriptor.read(protocol);
    } catch (TException e) {
        throw new IOException(e);
    }
}

From source file:com.buaa.cfs.security.token.Token.java

@Override
public void readFields(DataInput in) throws IOException {
    int len = WritableUtils.readVInt(in);
    if (identifier == null || identifier.length != len) {
        identifier = new byte[len];
    }
    in.readFully(identifier);
    len = WritableUtils.readVInt(in);
    if (password == null || password.length != len) {
        password = new byte[len];
    }
    in.readFully(password);
    kind.readFields(in);
    service.readFields(in);
}

From source file:edu.umn.cs.spatialHadoop.nasa.HDFRasterLayer.java

@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    this.timestamp = in.readLong();
    int length = in.readInt();
    byte[] serializedData = new byte[length];
    in.readFully(serializedData);
    ByteArrayInputStream bais = new ByteArrayInputStream(serializedData);
    GZIPInputStream gzis = new GZIPInputStream(bais);

    byte[] buffer = new byte[8];
    gzis.read(buffer);
    ByteBuffer bbuffer = ByteBuffer.wrap(buffer);
    int width = bbuffer.getInt();
    int height = bbuffer.getInt();
    // Reallocate memory only if needed
    if (width != this.getWidth() || height != this.getHeight()) {
        sum = new long[width][height];
        count = new long[width][height];
    }
    buffer = new byte[getHeight() * 2 * 8];
    for (int x = 0; x < getWidth(); x++) {
        int size = 0;
        while (size < buffer.length) {
            size += gzis.read(buffer, size, buffer.length - size);
        }
        bbuffer = ByteBuffer.wrap(buffer);
        for (int y = 0; y < getHeight(); y++) {
            sum[x][y] = bbuffer.getLong();
            count[x][y] = bbuffer.getLong();
        }
    }
}