Example usage for java.nio ByteBuffer getInt

Introduction

On this page you can find example usage for java.nio.ByteBuffer.getInt.

Prototype

public abstract int getInt(int index);

Document

Returns the int at the specified index.
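
The absolute variant reads four bytes at the given index and leaves the buffer's position unchanged (the relative variant, getInt(), reads at the current position and advances it by four). A minimal, self-contained sketch:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class GetIntExample {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(12).order(ByteOrder.BIG_ENDIAN);
        buf.putInt(0, 7);
        buf.putInt(4, 1_000_000);
        buf.putInt(8, -1);

        // Absolute read: the index is explicit and the position stays at 0.
        System.out.println(buf.getInt(4));  // 1000000
        System.out.println(buf.position()); // 0
    }
}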

Usage

From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

/**
 * Decompress a set of columns from a ByteBuffer and update the position of the buffer.
 *
 * @param input A ByteBuffer with `position` indicating the starting point of the compressed chunk.
 * @param chunkSize The length of the compressed chunk to be decompressed from the input buffer.
 *
 * @return The set of columns.
 */
@Override
public ColumnBuffer[] decompress(ByteBuffer input, int chunkSize) {
    int startPos = input.position();
    try {
        // Read the footer.
        int footerSize = input.getInt(startPos + chunkSize - 4);
        Iterator<Integer> compressedSize = Arrays
                .asList(ArrayUtils.toObject(Snappy.uncompressIntArray(input.array(),
                        input.arrayOffset() + startPos + chunkSize - Integer.SIZE / Byte.SIZE - footerSize,
                        footerSize)))
                .iterator();

        // Read the header.
        int[] dataType = readIntegers(compressedSize.next(), input);
        int numOfCols = dataType.length;

        // Read the columns.
        ColumnBuffer[] outputCols = new ColumnBuffer[numOfCols];
        for (int colNum = 0; colNum < numOfCols; colNum++) {
            byte[] nulls = readBytes(compressedSize.next(), input);

            switch (TTypeId.findByValue(dataType[colNum])) {
            case BOOLEAN_TYPE: {
                int numRows = input.getInt();
                byte[] vals = readBytes(compressedSize.next(), input);
                BitSet bsBools = BitSet.valueOf(vals);

                boolean[] bools = new boolean[numRows];
                for (int rowNum = 0; rowNum < numRows; rowNum++) {
                    bools[rowNum] = bsBools.get(rowNum);
                }

                TBoolColumn column = new TBoolColumn(Arrays.asList(ArrayUtils.toObject(bools)),
                        ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.boolVal(column));
                break;
            }
            case TINYINT_TYPE: {
                byte[] vals = readBytes(compressedSize.next(), input);
                TByteColumn column = new TByteColumn(Arrays.asList(ArrayUtils.toObject(vals)),
                        ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.byteVal(column));
                break;
            }
            case SMALLINT_TYPE: {
                short[] vals = readShorts(compressedSize.next(), input);
                TI16Column column = new TI16Column(Arrays.asList(ArrayUtils.toObject(vals)),
                        ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.i16Val(column));
                break;
            }
            case INT_TYPE: {
                int[] vals = readIntegers(compressedSize.next(), input);
                TI32Column column = new TI32Column(Arrays.asList(ArrayUtils.toObject(vals)),
                        ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.i32Val(column));
                break;
            }
            case BIGINT_TYPE: {
                long[] vals = readLongs(compressedSize.next(), input);
                TI64Column column = new TI64Column(Arrays.asList(ArrayUtils.toObject(vals)),
                        ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.i64Val(column));
                break;
            }
            case DOUBLE_TYPE: {
                double[] vals = readDoubles(compressedSize.next(), input);
                TDoubleColumn column = new TDoubleColumn(Arrays.asList(ArrayUtils.toObject(vals)),
                        ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.doubleVal(column));
                break;
            }
            case BINARY_TYPE: {
                int[] rowSize = readIntegers(compressedSize.next(), input);

                ByteBuffer flattenedData = ByteBuffer.wrap(readBytes(compressedSize.next(), input));
                ByteBuffer[] vals = new ByteBuffer[rowSize.length];

                for (int rowNum = 0; rowNum < rowSize.length; rowNum++) {
                    vals[rowNum] = ByteBuffer.wrap(flattenedData.array(), flattenedData.position(),
                            rowSize[rowNum]);
                    flattenedData.position(flattenedData.position() + rowSize[rowNum]);
                }

                TBinaryColumn column = new TBinaryColumn(Arrays.asList(vals), ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.binaryVal(column));
                break;
            }
            case STRING_TYPE: {
                int[] rowSize = readIntegers(compressedSize.next(), input);

                ByteBuffer flattenedData = ByteBuffer.wrap(readBytes(compressedSize.next(), input));

                String[] vals = new String[rowSize.length];

                for (int rowNum = 0; rowNum < rowSize.length; rowNum++) {
                    vals[rowNum] = new String(flattenedData.array(), flattenedData.position(), rowSize[rowNum],
                            StandardCharsets.UTF_8);
                    flattenedData.position(flattenedData.position() + rowSize[rowNum]);
                }

                TStringColumn column = new TStringColumn(Arrays.asList(vals), ByteBuffer.wrap(nulls));
                outputCols[colNum] = new ColumnBuffer(TColumn.stringVal(column));
                break;
            }
            default:
                throw new IllegalStateException(
                        "Unrecognized column type: " + TTypeId.findByValue(dataType[colNum]));
            }
        }
        input.position(startPos + chunkSize);
        return outputCols;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
}

From source file:ar.com.qbe.siniestros.model.utils.MimeMagic.MagicMatcher.java

/**
 * convert a byte array to a long
 *
 * @param data buffer of byte data
 *
 * @return byte arrays (high and low bytes) converted to a long value
 */
private long byteArrayToLong(ByteBuffer data) {
    return (long) data.getInt(0);
}
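
Note that despite its name, this helper reads only the four bytes of an int at index 0 and widens the result, so values with the high bit set come back negative. A small sketch of the distinction:

import java.nio.ByteBuffer;

public class IntWideningDemo {
    public static void main(String[] args) {
        ByteBuffer data = ByteBuffer.allocate(8);
        data.putInt(0, 0xFFFFFFFF);

        long signed = (long) data.getInt(0);          // -1: sign-extended
        long unsigned = data.getInt(0) & 0xFFFFFFFFL; // 4294967295: masked
        long full = data.getLong(0);                  // reads all 8 bytes

        System.out.println(signed + " " + unsigned + " " + full);
    }
}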

From source file:com.openteach.diamond.network.waverider.network.Packet.java

/**
 * Parse one Packet from the network input buffer, blocking until a full packet has arrived.
 *
 * @param inputBuffer queue of buffers received from the network
 * @return the parsed Packet
 * @throws IOException if the socket is closed by another thread
 * @throws InterruptedException if interrupted while waiting for data
 */
public static Packet parse(BlockingQueue<ByteBuffer> inputBuffer, NetWorkEndPoint endPoint,
        SocketChannel channel) throws IOException, InterruptedException {
    // Buffer for packet header
    byte[] tmpBuf = new byte[NetWorkConstants.DEFAULT_NETWORK_BUFFER_SIZE];
    ByteBuffer header = ByteBuffer.allocate(Packet.getHeaderSize());
    ByteBuffer currentBuffer = null;
    int rest = 0;
    boolean isRemove = false;

    // Read the packet header.
    while (true) {
        while ((currentBuffer = inputBuffer.peek()) == null) {
            if (!endPoint.notifyRead(channel)) {
                throw new IOException("Socket closed by other thread");
            }
            // Wait briefly for more data.
            //endPoint.waitMoreData(5);
            // FIXME 2ms
            //Thread.sleep(1);
            Thread.yield();
        }
        isRemove = false;
        rest = header.capacity() - header.position();
        if (currentBuffer.remaining() >= rest) {
            if (currentBuffer.remaining() == rest) {
                isRemove = true;
            }
            currentBuffer.get(tmpBuf, 0, rest);
            header.put(tmpBuf, 0, rest);
            if (isRemove) {
                inputBuffer.remove();
            }
            break;
        } else {
            header.put(currentBuffer);
            inputBuffer.remove();
        }
    }

    header.flip();

    // The header is complete; read the total packet length from it.
    Integer size = header.getInt(Packet.getLengthPosition());
    // For test
    /*if(size < 0 || size > 100000) {
       logger.info("Error");
    }*/
    //logger.debug(new StringBuilder("Try to allocate ").append(size).append(" bytes memory"));
    ByteBuffer buffer = ByteBuffer.allocate(size);
    buffer.put(header);
    header.clear();

    // Read the packet body.
    while (true) {
        while ((currentBuffer = inputBuffer.peek()) == null) {
            endPoint.notifyRead(channel);
            Thread.sleep(1000);
        }
        isRemove = false;
        rest = buffer.capacity() - buffer.position();
        if (currentBuffer.remaining() >= rest) {
            if (currentBuffer.remaining() == rest) {
                isRemove = true;
            }
            currentBuffer.get(tmpBuf, 0, rest);
            buffer.put(tmpBuf, 0, rest);
            if (isRemove) {
                inputBuffer.remove();
            }
            break;
        } else {
            buffer.put(currentBuffer);
            inputBuffer.remove();
        }
    }
    //buffer.position(0);
    buffer.flip();
    Packet packet = Packet.unmarshall(buffer);
    //logger.info("Parse one packet from network");
    //packet.dump();
    return packet;
}

From source file:io.fabric8.maven.docker.access.log.LogRequestor.java

private boolean readStreamFrame(InputStream is) throws IOException, LogCallback.DoneException {
    // Read the header, which is composed of eight bytes. The first byte is an integer
    // indicating the stream type (0 = stdin, 1 = stdout, 2 = stderr), the next three are thrown
    // out, and the final four are the size of the remaining stream as an integer.
    ByteBuffer headerBuffer = ByteBuffer.allocate(8);
    headerBuffer.order(ByteOrder.BIG_ENDIAN);
    try {
        this.readFully(is, headerBuffer.array());
    } catch (NoBytesReadException e) {
        // No bytes read from the stream. Return false to stop consuming it.
        return false;
    } catch (EOFException e) {
        throw new IOException("Failed to read log header. Could not read all 8 bytes. " + e.getMessage(), e);
    }

    // Grab the stream type (stdout, stderr, stdin) from first byte and throw away other 3 bytes.
    int type = headerBuffer.get();

    // Skip three bytes, then read size from remaining four bytes.
    int size = headerBuffer.getInt(4);

    // Ignore empty messages and keep reading.
    if (size <= 0) {
        return true;
    }

    // Read the actual message
    ByteBuffer payload = ByteBuffer.allocate(size);
    try {
        ByteStreams.readFully(is, payload.array());
    } catch (EOFException e) {
        throw new IOException("Failed to read log message. Could not read all " + size + " bytes. "
                + e.getMessage() + " [ Header: " + Hex.encodeHexString(headerBuffer.array()) + "]", e);
    }

    String message = Charsets.UTF_8.newDecoder().decode(payload).toString();
    callLogCallback(type, message);
    return true;
}
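
The header layout described in the comment above can be exercised on its own. A minimal sketch that builds such an 8-byte frame header and reads it back the same way the method does:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class StreamFrameHeaderDemo {
    public static void main(String[] args) {
        // byte 0 = stream type, bytes 1-3 unused, bytes 4-7 = payload size.
        ByteBuffer header = ByteBuffer.allocate(8).order(ByteOrder.BIG_ENDIAN);
        header.put((byte) 1); // 1 = stdout
        header.putInt(4, 42); // payload is 42 bytes long

        header.rewind();
        int type = header.get();     // relative read: position moves to 1
        int size = header.getInt(4); // absolute read: position stays at 1
        System.out.println("type=" + type + " size=" + size);
    }
}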

From source file:org.apache.hadoop.hbase.io.hfile.HFileBlock.java

/**
 * Parse total on-disk size including header and checksum.
 * @param headerBuf Header ByteBuffer. Presumed exact size of header.
 * @return Size of the block with header included.
 */
private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf) {
    // Always pass true for the hbase checksum flag when calling headerSize.
    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) + headerSize(true);
}

From source file:com.koda.integ.hbase.blockcache.OffHeapBlockCacheOld.java

/**
 * Read external.
 *
 * @param blockName the block name
 * @return the cacheable
 * @throws IOException Signals that an I/O exception has occurred.
 */
@SuppressWarnings("unused")
private Cacheable readExternal(String blockName) throws IOException {
    if (!overflowExtEnabled)
        return null;
    // Check if we already have this block in the external storage cache.
    try {
        StorageHandle handle = (StorageHandle) extStorageCache.get(blockName);
        if (handle == null)
            return null;
        ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer();

        buffer.clear();

        StorageHandle newHandle = storage.getData(handle, buffer);
        int size = buffer.getInt(0);
        if (size == 0)
            return null;
        boolean inMemory = buffer.get(4) == (byte) 1;
        buffer.position(5);
        buffer.limit(size + 4);
        if (deserializer.get() == null)
            return null;
        CacheableDeserializer<Cacheable> deserializer = this.deserializer.get();
        Cacheable obj = deserializer.deserialize(buffer);
        if (inMemory) {
            permGenCache.put(blockName, obj);
        } else {
            tenGenCache.put(blockName, obj);
        }

        if (!newHandle.equals(handle)) {
            extStorageCache.put(blockName, newHandle);
        }

        return obj;

    } catch (NativeMemoryException e) {
        throw new IOException(e);
    }

}

From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffReader.java

private long readHeader() throws IOException {
    ByteBuffer tiffHeader = ByteBuffer.allocate(8);
    fileChannel_.read(tiffHeader, 0);
    char zeroOne = tiffHeader.getChar(0);
    if (zeroOne == 0x4949) {
        byteOrder_ = ByteOrder.LITTLE_ENDIAN;
    } else if (zeroOne == 0x4d4d) {
        byteOrder_ = ByteOrder.BIG_ENDIAN;
    } else {
        throw new IOException("Error reading Tiff header");
    }
    tiffHeader.order(byteOrder_);
    short twoThree = tiffHeader.getShort(2);
    if (twoThree != 42) {
        throw new IOException("Tiff identifier code incorrect");
    }
    return unsignInt(tiffHeader.getInt(4));
}

From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffReader.java

private IFDData readIFD(long byteOffset) throws IOException {
    ByteBuffer buff = readIntoBuffer(byteOffset, 2);
    int numEntries = buff.getChar(0);

    ByteBuffer entries = readIntoBuffer(byteOffset + 2, numEntries * 12 + 4).order(byteOrder_);
    IFDData data = new IFDData();
    for (int i = 0; i < numEntries; i++) {
        IFDEntry entry = readDirectoryEntry(i * 12, entries);
        if (entry.tag == MM_METADATA) {
            data.mdOffset = entry.value;
            data.mdLength = entry.count;
        } else if (entry.tag == STRIP_OFFSETS) {
            data.pixelOffset = entry.value;
        } else if (entry.tag == STRIP_BYTE_COUNTS) {
            data.bytesPerImage = entry.value;
        }
    }
    data.nextIFD = unsignInt(entries.getInt(numEntries * 12));
    data.nextIFDOffsetLocation = byteOffset + 2 + numEntries * 12;
    return data;
}

From source file:com.healthmarketscience.jackcess.IndexData.java

/**
 * Creates an IndexData appropriate for the given table, using information
 * from the given table definition buffer.
 */
public static IndexData create(Table table, ByteBuffer tableBuffer, int number, JetFormat format)
        throws IOException {
    int uniqueEntryCountOffset = (format.OFFSET_INDEX_DEF_BLOCK + (number * format.SIZE_INDEX_DEFINITION) + 4);
    int uniqueEntryCount = tableBuffer.getInt(uniqueEntryCountOffset);

    return (table.doUseBigIndex() ? new BigIndexData(table, number, uniqueEntryCount, uniqueEntryCountOffset)
            : new SimpleIndexData(table, number, uniqueEntryCount, uniqueEntryCountOffset));
}

From source file:com.koda.integ.hbase.storage.FileExtStorage.java

/**
 * Stores multiple objects in one transaction
 * Format of a buffer:
 * 0..3 - total size of a batch
 * 4.. - batch of blocks
 *
 * @param buf the buf
 * @return the list
 */
public List<StorageHandle> storeDataBatch(ByteBuffer buf) {
    List<StorageHandle> handles = storeDataNoReleaseLock(buf);
    if (handles == null) {

        handles = new ArrayList<StorageHandle>();

        int size = buf.getInt(0);
        buf.position(4);

        while (buf.position() < size + 4) {
            buf.limit(buf.capacity());
            StorageHandle fsh = storeData(buf);
            handles.add(fsh);
        }
    }
    return handles;

}
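
For reference, a hedged sketch of a caller laying out a batch buffer in the format the javadoc describes; the per-block payloads here are placeholders, since this excerpt does not show FileExtStorage's actual block encoding:

import java.nio.ByteBuffer;

public class BatchBufferDemo {
    public static void main(String[] args) {
        byte[] blockA = {1, 2, 3};
        byte[] blockB = {4, 5};

        // Bytes 0..3 hold the total batch size; blocks follow from offset 4.
        int batchSize = blockA.length + blockB.length;
        ByteBuffer buf = ByteBuffer.allocate(4 + batchSize);
        buf.putInt(batchSize);
        buf.put(blockA);
        buf.put(blockB);

        System.out.println(buf.getInt(0)); // 5: total size read back
    }
}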