Example usage for java.io DataInput readFully

Introduction

This page collects example usages of java.io.DataInput.readFully from real-world projects.

Prototype

void readFully(byte b[]) throws IOException;

Document

Reads b.length bytes from an input stream and stores them into the buffer array b, blocking until all requested bytes are available and throwing an EOFException if the end of the stream is reached first.
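
A minimal, self-contained sketch of that contract (plain JDK; class and variable names here are illustrative): readFully fills the whole buffer or fails, unlike read, which may return fewer bytes.

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;

public class ReadFullyDemo {
    public static void main(String[] args) throws IOException {
        byte[] payload = { 1, 2, 3, 4, 5 };
        DataInput in = new DataInputStream(new ByteArrayInputStream(payload));

        byte[] buf = new byte[5];
        in.readFully(buf); // fills all 5 bytes or throws

        try {
            in.readFully(new byte[1]); // stream is exhausted
        } catch (EOFException expected) {
            System.out.println("EOFException: fewer bytes left than requested");
        }
    }
}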

Usage

From source file:org.apache.geode.pdx.internal.PdxInstanceImpl.java

private static PdxInputStream createDis(DataInput in, int len) {
    PdxInputStream dis;
    if (in instanceof PdxInputStream) {
        // The input is already buffer-backed: wrap a view of the next len
        // bytes, then advance the original input past them.
        dis = new PdxInputStream((ByteBufferInputStream) in, len);
        try {
            // skipBytes may skip fewer bytes than requested, so drain the
            // remainder one byte at a time.
            int bytesSkipped = in.skipBytes(len);
            int bytesRemaining = len - bytesSkipped;
            while (bytesRemaining > 0) {
                in.readByte();
                bytesRemaining--;
            }
        } catch (IOException ex) {
            throw new PdxSerializationException("Could not deserialize PDX", ex);
        }
    } else {
        byte[] bytes = new byte[len];
        try {
            in.readFully(bytes);
        } catch (IOException ex) {
            throw new PdxSerializationException("Could not deserialize PDX", ex);
        }
        dis = new PdxInputStream(bytes);
    }
    return dis;
}

From source file:org.apache.hadoop.hbase.filter.RowListFilter.java

@Override
public void readFields(DataInput din) throws IOException {
    int pos = din.readInt();
    int sz = din.readInt();
    this.bytesSet = new ArrayList<byte[]>(sz);
    for (int i = 0; i < sz; ++i) {
        short bsz = din.readShort();
        byte[] b = new byte[bsz];
        din.readFully(b);
        bytesSet.add(b);
    }
    log.debug("Size of bytesSet is: " + bytesSet.size());
    this.bytesSetIterator = bytesSet.listIterator(pos);
    if (bytesSetIterator.hasNext()) {
        this.rowComparator = new BinaryComparator(bytesSetIterator.next());
    } else {
        this.hasMoreRows = false;
    }
}
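
For context, a plausible write-side counterpart (hypothetical; the actual RowListFilter may serialize differently) shows the layout readFields expects: the iterator position, the list size, then each entry as a short length prefix followed by the raw bytes that readFully fills back in.

@Override
public void write(DataOutput dout) throws IOException {
    dout.writeInt(bytesSetIterator.nextIndex()); // read back as 'pos'
    dout.writeInt(bytesSet.size());              // read back as 'sz'
    for (byte[] b : bytesSet) {
        dout.writeShort(b.length); // mirrored by readShort()
        dout.write(b);             // mirrored by readFully(b)
    }
}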

From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in the input to read from
 * @param objectWritable if non-null, receives the declared class and instance
 * @param conf the configuration used to resolve class names
 * @return the object read
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, HbaseObjectWritable objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else if (declaredClass.equals(Result[].class)) {
            instance = Result.readArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null)
                    bis.close();
                if (ois != null)
                    ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}

From source file:org.apache.hadoop.hbase.io.HbaseObjectWritable.java

/**
 * Try to instantiate a protocol buffer of the given message class
 * from the given input stream.
 *
 * @param protoClass the class of the generated protocol buffer
 * @param dataIn the input stream to read from
 * @return the instantiated Message instance
 * @throws IOException if an IO problem occurs
 */
private static Message tryInstantiateProtobuf(Class<?> protoClass, DataInput dataIn) throws IOException {

    try {
        if (dataIn instanceof InputStream) {
            // We can use the built-in parseDelimitedFrom and not have to re-copy
            // the data
            Method parseMethod = getStaticProtobufMethod(protoClass, "parseDelimitedFrom", InputStream.class);
            return (Message) parseMethod.invoke(null, (InputStream) dataIn);
        } else {
            // Have to read it into a buffer first, since protobuf doesn't deal
            // with the DataInput interface directly.

            // Read the size delimiter that writeDelimitedTo writes
            int size = ProtoUtil.readRawVarint32(dataIn);
            if (size < 0) {
                throw new IOException("Invalid size: " + size);
            }

            byte[] data = new byte[size];
            dataIn.readFully(data);
            Method parseMethod = getStaticProtobufMethod(protoClass, "parseFrom", byte[].class);
            return (Message) parseMethod.invoke(null, data);
        }
    } catch (InvocationTargetException e) {

        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        } else {
            throw new IOException(e.getCause());
        }
    } catch (IllegalAccessException iae) {
        throw new AssertionError("Could not access parse method in " + protoClass);
    }
}
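
The size delimiter read above is a protobuf varint. A minimal sketch of decoding one directly from a DataInput, equivalent in spirit to ProtoUtil.readRawVarint32 (assuming a well-formed 32-bit value; the method name here is illustrative):

static int readRawVarint32(DataInput in) throws IOException {
    int result = 0;
    for (int shift = 0; shift < 32; shift += 7) {
        byte b = in.readByte();        // low 7 bits carry payload
        result |= (b & 0x7f) << shift;
        if ((b & 0x80) == 0) {         // high bit clear: last byte
            return result;
        }
    }
    throw new IOException("Malformed varint");
}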

From source file:org.apache.hadoop.hbase.KeyValue.java

/**
 * Create a KeyValue reading <code>length</code> bytes from <code>in</code>.
 * @param length number of bytes to read
 * @param in the input to read from
 * @return the created KeyValue, or null if <code>length</code> is zero, which
 * can be useful for marking a stream as done
 * @throws IOException
 */
public static KeyValue create(int length, final DataInput in) throws IOException {

    if (length <= 0) {
        if (length == 0)
            return null;
        throw new IOException("Failed read " + length + " bytes, stream corrupt?");
    }

    // This is how the old Writables.readFrom used to deserialize.  Didn't even vint.
    byte[] bytes = new byte[length];
    in.readFully(bytes);
    return new KeyValue(bytes, 0, length);
}
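
The matching write side is just a length prefix followed by the KeyValue's backing bytes. A hedged sketch (the accessor names follow the HBase KeyValue API, but treat this as illustrative rather than the project's actual serializer):

static void write(KeyValue kv, DataOutput out) throws IOException {
    out.writeInt(kv.getLength());                              // length prefix consumed by create()
    out.write(kv.getBuffer(), kv.getOffset(), kv.getLength()); // raw KeyValue bytes
}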

From source file:org.apache.hadoop.hbase.KeyValueUtil.java

/**
 * Create a KeyValue reading <code>length</code> bytes from <code>in</code>.
 *
 * @param length number of bytes to read
 * @param in the input to read from
 * @return the created KeyValue, or null if <code>length</code> is zero,
 *         which can be useful for marking a stream as done
 * @throws IOException
 */
public static KeyValue create(int length, final DataInput in) throws IOException {

    if (length <= 0) {
        if (length == 0)
            return null;
        throw new IOException("Failed read " + length + " bytes, stream corrupt?");
    }

    // This is how the old Writables.readFrom used to deserialize. Didn't even
    // vint.
    byte[] bytes = new byte[length];
    in.readFully(bytes);
    return new KeyValue(bytes, 0, length);
}

From source file:org.apache.hadoop.hbase.mapreduce.TableSplit.java

/**
 * Reads the values of each field.
 *
 * @param in  The input to read from.
 * @throws IOException When reading the input fails.
 */
@Override
public void readFields(DataInput in) throws IOException {
    Version version = Version.UNVERSIONED;
    // TableSplit was not versioned in the beginning.
    // In order to introduce it now, we make use of the fact
    // that tableName was written with Bytes.writeByteArray,
    // which encodes the array length as a vint which is >= 0.
    // Hence if the vint is >= 0 we have an old version and the vint
    // encodes the length of tableName.
    // If < 0 we just read the version and the next vint is the length.
    // @see Bytes#readByteArray(DataInput)
    int len = WritableUtils.readVInt(in);
    if (len < 0) {
        // what we just read was the version
        version = Version.fromCode(len);
        len = WritableUtils.readVInt(in);
    }
    byte[] tableNameBytes = new byte[len];
    in.readFully(tableNameBytes);
    tableName = TableName.valueOf(tableNameBytes);
    startRow = Bytes.readByteArray(in);
    endRow = Bytes.readByteArray(in);
    regionLocation = Bytes.toString(Bytes.readByteArray(in));
    if (version.atLeast(Version.INITIAL)) {
        scan = Bytes.toString(Bytes.readByteArray(in));
    }
    length = WritableUtils.readVLong(in);
}
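
The versioning trick works because the write side emits the (negative) version code before anything else. A sketch of the corresponding write method, mirroring readFields above (VERSION_CODE and the field names are assumptions for illustration):

public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, VERSION_CODE);     // negative vint: version marker
    Bytes.writeByteArray(out, tableName.getName()); // vint length (>= 0), then bytes
    Bytes.writeByteArray(out, startRow);
    Bytes.writeByteArray(out, endRow);
    Bytes.writeByteArray(out, Bytes.toBytes(regionLocation));
    Bytes.writeByteArray(out, Bytes.toBytes(scan));
    WritableUtils.writeVLong(out, length);
}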

From source file:org.apache.hadoop.hbase.regionserver.wal.HLogKey.java

@Override
public void readFields(DataInput in) throws IOException {
    Version version = Version.UNVERSIONED;
    // HLogKey was not versioned in the beginning.
    // In order to introduce it now, we make use of the fact
    // that encodedRegionName was written with Bytes.writeByteArray,
    // which encodes the array length as a vint which is >= 0.
    // Hence if the vint is >= 0 we have an old version and the vint
    // encodes the length of encodedRegionName.
    // If < 0 we just read the version and the next vint is the length.
    // @see Bytes#readByteArray(DataInput)
    this.scopes = null; // writable HLogKey does not contain scopes
    int len = WritableUtils.readVInt(in);
    byte[] tablenameBytes = null;
    if (len < 0) {
        // what we just read was the version
        version = Version.fromCode(len);
        // We only compress V2 of HLogKey.
        // If compression is on, the length is handled by the dictionary
        if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
            len = WritableUtils.readVInt(in);
        }
    }
    if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
        this.encodedRegionName = new byte[len];
        in.readFully(this.encodedRegionName);
        tablenameBytes = Bytes.readByteArray(in);
    } else {
        this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
        tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
    }

    this.logSeqNum = in.readLong();
    this.writeTime = in.readLong();

    this.clusterIds.clear();
    if (version.atLeast(Version.INITIAL)) {
        if (in.readBoolean()) {
            // read the older log
            // Definitely is the originating cluster
            clusterIds.add(new UUID(in.readLong(), in.readLong()));
        }
    } else {
        try {
            // dummy read (former byte cluster id)
            in.readByte();
        } catch (EOFException e) {
            // Means it's a very old key, just continue
        }
    }
    try {
        this.tablename = TableName.valueOf(tablenameBytes);
    } catch (IllegalArgumentException iae) {
        if (Bytes.toString(tablenameBytes).equals(TableName.OLD_META_STR)) {
            // It is a pre-namespace meta table edit, continue with new format.
            LOG.info("Got an old .META. edit, continuing with new format ");
            this.tablename = TableName.META_TABLE_NAME;
            this.encodedRegionName = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
        } else if (Bytes.toString(tablenameBytes).equals(TableName.OLD_ROOT_STR)) {
            this.tablename = TableName.OLD_ROOT_TABLE_NAME;
            throw iae;
        } else
            throw iae;
    }
    // Do not need to read the clusters information as we are using protobufs from 0.95
}

From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java

/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in the input to read from
 * @param objectWritable if non-null, receives the declared class and instance
 * @param conf the configuration used to resolve class names
 * @return the object read
 * @throws IOException
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf)
        throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) { // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) { // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) { // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else if (Scan.class.isAssignableFrom(declaredClass)) {
        int length = in.readInt();
        byte[] scanBytes = new byte[length];
        in.readFully(scanBytes);
        ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
        instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
    } else { // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) { // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null)
                    bis.close();
                if (ois != null)
                    ois.close();
            }
        }
    }
    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}

From source file:org.apache.hadoop.hbase.security.access.HbaseObjectWritableFor96Migration.java

/**
 * Try to instantiate a protocol buffer of the given message class
 * from the given input stream.
 *
 * @param protoClass the class of the generated protocol buffer
 * @param dataIn the input stream to read from
 * @return the instantiated Message instance
 * @throws IOException if an IO problem occurs
 */
static Message tryInstantiateProtobuf(Class<?> protoClass, DataInput dataIn) throws IOException {

    try {
        if (dataIn instanceof InputStream) {
            // We can use the built-in parseDelimitedFrom and not have to re-copy
            // the data
            Method parseMethod = getStaticProtobufMethod(protoClass, "parseDelimitedFrom", InputStream.class);
            return (Message) parseMethod.invoke(null, (InputStream) dataIn);
        } else {
            // Have to read it into a buffer first, since protobuf doesn't deal
            // with the DataInput interface directly.

            // Read the size delimiter that writeDelimitedTo writes
            int size = ProtoUtil.readRawVarint32(dataIn);
            if (size < 0) {
                throw new IOException("Invalid size: " + size);
            }

            byte[] data = new byte[size];
            dataIn.readFully(data);
            Method parseMethod = getStaticProtobufMethod(protoClass, "parseFrom", byte[].class);
            return (Message) parseMethod.invoke(null, data);
        }
    } catch (InvocationTargetException e) {

        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        } else {
            throw new IOException(e.getCause());
        }
    } catch (IllegalAccessException iae) {
        throw new AssertionError("Could not access parse method in " + protoClass);
    }
}