Example usage for java.io.DataInputStream.readInt()

List of usage examples for java.io.DataInputStream.readInt()

Introduction

On this page you can find usage examples for java.io.DataInputStream.readInt().

Prototype

public final int readInt() throws IOException 

Document

See the general contract of the readInt method of DataInput.
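
Per that contract, readInt consumes exactly four input bytes and assembles them into an int, high byte first. A minimal round-trip sketch (class name and sample value are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadIntRoundTrip {
    public static void main(String[] args) throws IOException {
        // writeInt stores the value as four bytes, high byte first.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            out.writeInt(0x504b0304);
        }
        // readInt reads those four bytes back and reassembles the same int.
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            System.out.printf("0x%08x%n", in.readInt()); // prints 0x504b0304
        }
    }
}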

Usage

From source file: org.jboss.dashboard.ui.panel.export.ExportDriver.java

/**
 * <p>Determine whether a file is a ZIP file by reading the magic bytes of the ZIP format, which must be <code>0x504b0304</code>.</p>
 */
protected boolean isZipFile(File file) throws IOException {
    if (file.isDirectory()) {
        return false;
    }
    if (!file.canRead()) {
        throw new IOException("Cannot read file " + file.getAbsolutePath());
    }
    if (file.length() < 4) {
        return false;
    }
    DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
    int test = in.readInt();
    in.close();
    return test == 0x504b0304;
}
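
The stream above is not closed if readInt throws; a try-with-resources variant of the same check (a sketch, not the original driver code) avoids that leak:

protected boolean isZipFile(File file) throws IOException {
    if (file.isDirectory()) {
        return false;
    }
    if (!file.canRead()) {
        throw new IOException("Cannot read file " + file.getAbsolutePath());
    }
    if (file.length() < 4) {
        return false;
    }
    // try-with-resources closes the stream even if readInt throws.
    try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(file)))) {
        return in.readInt() == 0x504b0304; // ZIP local file header magic, "PK\3\4"
    }
}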

From source file: edu.cornell.med.icb.goby.compression.HybridChunkCodec2.java

@Override
public Message decode(final byte[] bytes) throws IOException {
    final DataInputStream completeChunkData = new DataInputStream(new ByteArrayInputStream(bytes));
    final int compressedSize = completeChunkData.readInt();
    final int storedChecksum = completeChunkData.readInt();

    final byte[] compressedBytes = new byte[compressedSize];
    final int read = completeChunkData.read(compressedBytes, 0, compressedSize);
    assert read == compressedSize : "read size must match recorded size.";
    crc32.reset();

    crc32.update(compressedBytes);
    final int computedChecksum = (int) crc32.getValue();
    if (computedChecksum != storedChecksum) {
        throw new InvalidChecksumException();
    }
    final int bytesLeft = bytes.length - 4 - compressedSize - 4;
    final byte[] leftOver = new byte[bytesLeft];
    // 8 is the number of bytes to encode the length of the compressed chunk, plus
    // the number of bytes to encode the checksum.
    System.arraycopy(bytes, 8 + compressedSize, leftOver, 0, bytesLeft);
    final Message reducedProtoBuff = bzip2Codec.decode(leftOver);
    if (reducedProtoBuff == null) {
        return null;
    }
    return handler.decompressCollection(reducedProtoBuff, compressedBytes);
}
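
The layout this decoder assumes is a 4-byte compressed size, a 4-byte CRC32 checksum, the compressed payload, and then the left-over bytes for the inner codec. A sketch of a writer that produces that layout, under those assumptions (not the actual Goby encoder; encodeChunk is an illustrative name):

static byte[] encodeChunk(byte[] compressedBytes, byte[] leftOver) throws IOException {
    java.util.zip.CRC32 crc32 = new java.util.zip.CRC32();
    crc32.update(compressedBytes);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(buffer)) {
        out.writeInt(compressedBytes.length);  // 4 bytes: compressed size
        out.writeInt((int) crc32.getValue());  // 4 bytes: checksum the decoder verifies
        out.write(compressedBytes);            // compressed payload
        out.write(leftOver);                   // trailing bytes handed to the inner codec
    }
    return buffer.toByteArray();
}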

From source file: org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer.java

/**
 * Check an fsimage DataInputStream's version number.
 *
 * The stream is returned at the same position it was passed in at;
 * this method has no effect on the stream's read pointer.
 *
 * @param in DataInputStream of the fsimage
 * @return Filesystem layout version of fsimage represented by stream
 * @throws IOException If problem reading from in
 */
private int findImageVersion(DataInputStream in) throws IOException {
    in.mark(42); // arbitrary amount, resetting immediately

    int version = in.readInt();
    in.reset();

    return version;
}
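
mark and reset only work when the wrapped stream supports marking, so the caller is expected to pass in something like a BufferedInputStream. A sketch of how such a peek might be set up (loadImage and the file handling are illustrative, not part of OfflineImageViewer):

private void loadImage(File fsimage) throws IOException {
    // BufferedInputStream supports mark/reset; a bare FileInputStream would make in.reset() fail.
    try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(fsimage)))) {
        int layoutVersion = findImageVersion(in); // peek: the read pointer is back at the start afterwards
        System.out.println("fsimage layout version: " + layoutVersion);
        // ... continue parsing the image from the beginning of the stream ...
    }
}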

From source file: edu.cornell.med.icb.goby.compression.HybridChunkCodec1.java

@Override
public Message decode(final byte[] bytes) throws IOException {
    final DataInputStream completeChunkData = new DataInputStream(new FastByteArrayInputStream(bytes));
    final int compressedSize = completeChunkData.readInt();
    final int storedChecksum = completeChunkData.readInt();

    final byte[] compressedBytes = new byte[compressedSize];
    final int read = completeChunkData.read(compressedBytes, 0, compressedSize);
    assert read == compressedSize : "read size must match recorded size.";
    crc32.reset();

    crc32.update(compressedBytes);
    final int computedChecksum = (int) crc32.getValue();
    if (computedChecksum != storedChecksum) {
        throw new InvalidChecksumException();
    }
    final int bytesLeft = bytes.length - 4 - compressedSize - 4;
    final byte[] leftOver = new byte[bytesLeft];
    // 8 is the number of bytes to encode the length of the compressed chunk, plus
    // the number of bytes to encode the checksum.
    System.arraycopy(bytes, 8 + compressedSize, leftOver, 0, bytesLeft);
    final Message reducedProtoBuff = gzipCodec.decode(leftOver);
    if (reducedProtoBuff == null) {
        return null;
    }
    return handler.decompressCollection(reducedProtoBuff, compressedBytes);
}

From source file: org.apache.hadoop.hbase.io.hfile.TestHFileEncryption.java

private long readAndVerifyBlock(long pos, HFileBlock.FSReaderV2 hbr, int size) throws IOException {
    HFileBlock b = hbr.readBlockData(pos, -1, -1, false);
    assertEquals(0, HFile.getChecksumFailuresCount());
    b.sanityCheck();
    LOG.info("Read a block at " + pos + " with" + " onDiskSizeWithHeader=" + b.getOnDiskSizeWithHeader()
            + " onDiskSizeWithoutHeader=" + b.getOnDiskSizeWithoutHeader()
            + " uncompressedSizeWithoutHeader=" + b.getUncompressedSizeWithoutHeader());
    DataInputStream dis = b.getByteStream();
    for (int i = 0; i < size; i++) {
        int read = dis.readInt();
        if (read != i) {
            fail("Block data corrupt at element " + i);
        }
    }
    return b.getOnDiskSizeWithHeader();
}
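
The loop above expects the block payload to be the integers 0 through size-1 written back to back; a generic sketch of producing such a payload with DataOutputStream (not the HBase block writer):

static byte[] ascendingIntPayload(int size) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(buffer)) {
        for (int i = 0; i < size; i++) {
            out.writeInt(i); // each value is read back with dis.readInt() in the test
        }
    }
    return buffer.toByteArray();
}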

From source file: es.logongas.util.seguridad.CodigoVerificacionSeguro.java

public int getKey() {
    try {
        Base32 base32 = new Base32();
        byte datos[] = base32.decode(valor);
        DataInputStream dataInputStream = new DataInputStream(new ByteArrayInputStream(datos));
        int key = dataInputStream.readInt();

        return key;
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
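
The inverse operation, producing the Base32 string that getKey() decodes, would pack the int with DataOutputStream before encoding. A sketch under that assumption (encodeKey is an illustrative name, not part of the original class):

static String encodeKey(int key) {
    try {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream dataOutputStream = new DataOutputStream(buffer);
        dataOutputStream.writeInt(key); // four bytes, big-endian, exactly what readInt expects
        dataOutputStream.flush();
        return new Base32().encodeAsString(buffer.toByteArray());
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}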

From source file: org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager.java

/**
 * Private helper method to load delegation keys from fsimage.
 * @param in fsimage stream to load the delegation keys from
 * @throws IOException
 */
private synchronized void loadAllKeys(DataInputStream in) throws IOException {
    int numberOfKeys = in.readInt();
    for (int i = 0; i < numberOfKeys; i++) {
        DelegationKey value = new DelegationKey();
        value.readFields(in);
        addKey(value);
    }
}
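
The save path that produces this section of the fsimage writes the count first and then each key; a sketch of that symmetric writer, assuming DelegationKey's Writable serialization (illustrative, not the actual FSImage code):

private synchronized void saveAllKeys(DataOutputStream out, Collection<DelegationKey> allKeys)
        throws IOException {
    out.writeInt(allKeys.size()); // count first, so loadAllKeys knows how many records follow
    for (DelegationKey key : allKeys) {
        key.write(out);           // each DelegationKey serializes its own fields
    }
}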

From source file: org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager.java

/**
 * Private helper method to load delegation tokens from fsimage.
 */
private synchronized void loadCurrentTokens(DataInputStream in) throws IOException {
    int numberOfTokens = in.readInt();
    for (int i = 0; i < numberOfTokens; i++) {
        DelegationTokenIdentifier id = new DelegationTokenIdentifier();
        id.readFields(in);
        long expiryTime = in.readLong();
        addPersistedDelegationToken(id, expiryTime);
    }
}

From source file: RetrieveAllMIDlet.java

public void changeFromByteArray(byte[] data) {
    try {
        ByteArrayInputStream bais = new ByteArrayInputStream(data);
        DataInputStream dis = new DataInputStream(bais);

        name = dis.readUTF();
        chineseScore = dis.readInt();
        englishScore = dis.readInt();
        mathScore = dis.readInt();

        bais.close();
        dis.close();
    } catch (Exception e) {
    }
}
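
The matching serializer must write the same fields in the same order so that changeFromByteArray can read them back. A sketch of that writer (the method name mirrors the example and is illustrative):

public byte[] changeToByteArray() {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeUTF(name);          // field order must match the reads above
        dos.writeInt(chineseScore);
        dos.writeInt(englishScore);
        dos.writeInt(mathScore);
        dos.close();
        return baos.toByteArray();
    } catch (Exception e) {
        return new byte[0];
    }
}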

From source file: com.serenegiant.media.TLMediaEncoder.java

static TLMediaFrameHeader readHeader(final DataInputStream in, final TLMediaFrameHeader header)
        throws IOException {
    header.size = 0;
    header.sequence = in.readInt();
    header.frameNumber = in.readInt(); // frame number
    header.presentationTimeUs = in.readLong();
    header.size = in.readInt();
    header.flags = in.readInt();
    in.skipBytes(40); // skip the remaining 40 bytes of the header (5 longs)
    return header;
}
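
The header consumed above is two ints, a long, two more ints, and 40 trailing bytes. A sketch of a writer matching that layout (illustrative, not the actual TLMediaEncoder code):

static void writeHeader(final DataOutputStream out, final TLMediaFrameHeader header)
        throws IOException {
    out.writeInt(header.sequence);
    out.writeInt(header.frameNumber);
    out.writeLong(header.presentationTimeUs);
    out.writeInt(header.size);
    out.writeInt(header.flags);
    out.write(new byte[40]); // reserved space, skipped by readHeader
}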