Example usage for java.nio ByteBuffer getInt

A list of usage examples for java.nio.ByteBuffer.getInt

Introduction

On this page you can find examples showing how java.nio.ByteBuffer.getInt() is used in real-world projects.

Prototype

public abstract int getInt();

Document

Returns the int at the current position and increases the position by 4.
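
As a quick illustration of that contract, here is a minimal self-contained sketch (not taken from any of the projects below): each getInt() call decodes four bytes and moves the position forward by four.

import java.nio.ByteBuffer;

public class GetIntDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(42).putInt(7); // write two ints
        buf.flip();               // switch the buffer from writing to reading

        System.out.println(buf.position()); // 0
        System.out.println(buf.getInt());   // 42
        System.out.println(buf.position()); // 4 (advanced by four bytes)
        System.out.println(buf.getInt());   // 7
    }
}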

Usage

From source file:org.apache.carbondata.core.util.CarbonUtil.java

public static int[] getUnCompressColumnIndex(int totalLength, ByteBuffer buffer, int offset) {
    buffer.position(offset);
    int indexDataLength = buffer.getInt();
    int indexMapLength = totalLength - indexDataLength - CarbonCommonConstants.INT_SIZE_IN_BYTE;
    int[] indexData = getIntArray(buffer, buffer.position(), indexDataLength);
    int[] indexMap = getIntArray(buffer, buffer.position(), indexMapLength);
    return UnBlockIndexer.uncompressIndex(indexData, indexMap);
}
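
getIntArray is a CarbonData helper that is not shown on this page. As a rough sketch of the same idea, and assuming the payload is simply a run of big-endian 4-byte ints (the real helper may decode a different or compressed layout), it could be approximated with repeated getInt() calls:

// Hypothetical stand-in for CarbonData's getIntArray: decodes `length`
// bytes starting at `offset` as consecutive big-endian ints.
static int[] readIntArray(ByteBuffer buffer, int offset, int length) {
    buffer.position(offset);
    int[] out = new int[length / 4];
    for (int i = 0; i < out.length; i++) {
        out[i] = buffer.getInt(); // each call advances the position by 4
    }
    return out;
}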

From source file:org.obm.push.protocol.data.TZDecoder.java

public TimeZone decode(String b64) {
    // Doc : [MS-ASDTYPE] 2.7 TimeZone
    // Doc about types :
    // http://msdn.microsoft.com/fr-fr/library/bb469811.aspx
    // 1 LONG = 4 bytes
    // 1 WCHAR = 2 bytes
    // 1 SYSTEMTIME = 8 SHORT = 8 X 2 bytes
    // TOTAL TIMEZONE STRUCT must be 172 bytes

    byte[] tzstruct = Base64.decodeBase64(b64);

    ByteBuffer bfBias = ByteBuffer.wrap(tzstruct, 0, 4);
    // NOT YET USED
    // ByteBuffer bfStandardName = ByteBuffer.wrap(tzstruct, 4, 64);
    // ByteBuffer bfStandardDate = ByteBuffer.wrap(tzstruct, 68, 16);
    // ByteBuffer bfStandardBias = ByteBuffer.wrap(tzstruct, 84, 4);
    // ByteBuffer bfDaylightName = ByteBuffer.wrap(tzstruct, 88, 64);
    // ByteBuffer bfDaylightDate = ByteBuffer.wrap(tzstruct, 152, 16);
    // ByteBuffer bfDaylightBias = ByteBuffer.wrap(tzstruct, 168, 4);

    bfBias.order(ByteOrder.LITTLE_ENDIAN);
    int bias = bfBias.getInt(); // a Java int is 4 bytes

    // NOT YET USED
    //
    // bfStandardBias.order(ByteOrder.LITTLE_ENDIAN);
    // int standardBias = bfStandardBias.getInt();
    //      
    // bfDaylightBias.order(ByteOrder.LITTLE_ENDIAN);
    // int daylightBias = bfDaylightBias.getInt();

    TimeZone timezone = TimeZone.getDefault();
    timezone.setRawOffset(bias * 60 * 1000);

    String[] timezones = TimeZone.getAvailableIDs(bias * 60 * 1000);
    if (timezones.length > 0) {
        timezone = TimeZone.getTimeZone(timezones[0]);
    }

    // USEFUL DEBUG LINES
    //
    // StringBuffer sb = new StringBuffer();
    // for (int i = 0; i < 172; i+=1) {
    // sb.append(Byte.valueOf(tzstruct[i]).intValue());
    // }
    //      
    // logger.info("b64: " + b64);
    // logger.info("tzstruct: "+ sb.toString());
    // logger.info("bias: " + bias);
    // logger.info("standardbias: " + standardBias);
    // logger.info("standardname: " +
    // bfStandardName.asCharBuffer().toString());
    // logger.info("daylightBias: " + daylightBias);

    return timezone;
}
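
The order(ByteOrder.LITTLE_ENDIAN) call above is essential: getInt() assembles its four bytes according to the buffer's current byte order, which defaults to big-endian. A minimal sketch of the difference:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class ByteOrderDemo {
    public static void main(String[] args) {
        byte[] raw = { 0x01, 0x00, 0x00, 0x00 };
        // default (big-endian): 0x01000000
        System.out.println(ByteBuffer.wrap(raw).getInt()); // 16777216
        // little-endian: 0x00000001
        System.out.println(ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN).getInt()); // 1
    }
}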

From source file:alignment.BinaryToCSV.java

/**
 * Reads the binary slave blocks from the input stream, decoding the
 * sign/magnitude offsets and the 16-bit timestamps embedded in each block.
 *
 * @return the total number of bytes read
 */
public int readSlaveBlocks() {
    byte[] buffer = new byte[Bp];
    byte[] tempBytes = new byte[4];
    int total = 0;
    int nRead = 0;

    int sign = 0;
    double magnitude = 0;
    counter = 0;
    int samplesBetween = 0;
    boolean firstTS = true;

    try {
        while ((nRead = inputStream.read(buffer)) != -1) {

            sign = buffer[0];
            tempBytes = new byte[4];
            //byteToBits(buffer[1], 1);
            tempBytes[3] = buffer[1];
            tempBytes[2] = buffer[2];
            tempBytes[1] = buffer[3];
            tempBytes[0] = buffer[4];

            // unsigned int 0xFFFFFFFF is equal to -1
            ByteBuffer bb = ByteBuffer.wrap(tempBytes);
            magnitude = (double) bb.getInt();

            tempBytes = new byte[4];
            tempBytes[3] = buffer[5];
            tempBytes[2] = buffer[6];
            bb = ByteBuffer.wrap(tempBytes);
            double tsI = bb.getInt();

            if (firstTS) {
                initialTS16 = tsI;
                firstTS = false;
                System.out.println("Intial Timestamp 16b: " + initialTS16);
            }
            addTS(tsI); // end of the list

            if (magnitude != -1.0) {
                //System.out.println("Timestamp: " + tsI);
                // offset = (1 - 2*offsetSign) * offsetMagnitude
                //System.out.println("Sign: " + sign + " Magnitude: " + magnitude);
                double offset = (1 - 2 * sign) * magnitude;
                offsets.put(prev, offset);

                intervalSizes.add((double) samplesBetween);
                samplesBetween = 0;
            }

            boolean first = true;
            for (int i = 5; i < nRead; i += (Bs + 2)) {
                samplesBetween++;
                if (first) {
                    first = false;
                    continue;
                }
                tempBytes = new byte[4];
                tempBytes[3] = buffer[i];
                tempBytes[2] = buffer[i + 1];
                bb = ByteBuffer.wrap(tempBytes);
                tsI = bb.getInt();
                addTS(tsI);

                //System.out.println("Timestamp: " + tsI);
            }
            total += nRead;
        }
    } catch (IOException e) {
        e.printStackTrace();
    }

    intervalSizes.add((double) samplesBetween);

    if (master == 0) {
        printOffsetMap();
        computeLinearRegression();
        alignTimestamps();
        calibrateTimestamps();
    } else {
        calibrateMasterTimestamps();
    }

    return total;
}
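
The manual reversal into tempBytes above is, in effect, a little-endian decode: the bytes arrive least-significant first and are copied so that a big-endian getInt() sees them in the right order. Assuming the same layout (four magnitude bytes starting at buffer[1]), a little-endian view reads the same value without any copying; a sketch:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LittleEndianDemo {
    public static void main(String[] args) {
        // sign byte, then four magnitude bytes stored least-significant first
        byte[] buffer = { 0, 1, 0, 0, 0 };
        int magnitude = ByteBuffer.wrap(buffer, 1, 4)
                .order(ByteOrder.LITTLE_ENDIAN)
                .getInt();
        System.out.println(magnitude); // 1
    }
}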

From source file:org.apache.carbondata.core.util.CarbonUtil.java

public static int[] getUnCompressColumnIndex(int totalLength, byte[] columnIndexData,
        NumberCompressor numberCompressor, int offset) {
    ByteBuffer buffer = ByteBuffer.wrap(columnIndexData, offset, totalLength);
    int indexDataLength = buffer.getInt();
    byte[] indexData = new byte[indexDataLength];
    byte[] indexMap = new byte[totalLength - indexDataLength - CarbonCommonConstants.INT_SIZE_IN_BYTE];
    buffer.get(indexData);
    buffer.get(indexMap);
    return UnBlockIndexer.uncompressIndex(numberCompressor.unCompress(indexData, 0, indexData.length),
            numberCompressor.unCompress(indexMap, 0, indexMap.length));
}
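
Note that ByteBuffer.wrap(columnIndexData, offset, totalLength) leaves the new buffer's position at offset and its limit at offset + totalLength, so the first getInt() above reads the length prefix directly from offset. A minimal sketch of that behavior:

import java.nio.ByteBuffer;

public class WrapOffsetDemo {
    public static void main(String[] args) {
        byte[] data = new byte[12];
        ByteBuffer.wrap(data).putInt(4, 99);           // absolute write at index 4

        ByteBuffer view = ByteBuffer.wrap(data, 4, 8); // position = 4, limit = 12
        System.out.println(view.getInt());             // reads bytes 4..7 -> 99
    }
}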

From source file:org.zuinnote.hadoop.bitcoin.format.BitcoinBlockReader.java

/**
 * Reads an identifying key from a raw Bitcoin block. Note: this does not change the ByteBuffer position.
 *
 * @param rawByteBuffer ByteBuffer as read by readRawBlock
 * @return byte array containing hashMerkleRoot and hashPrevBlock
 */
public byte[] getKeyFromRawBlock(ByteBuffer rawByteBuffer) {
    rawByteBuffer.mark();
    byte[] magicNo = new byte[4];
    byte[] hashMerkleRoot = new byte[32];
    byte[] hashPrevBlock = new byte[32];
    // magic no (skip)
    rawByteBuffer.get(magicNo, 0, 4);
    // blocksize (skip)
    int currentBlockSize = rawByteBuffer.getInt();
    // version (skip)
    int currentVersion = rawByteBuffer.getInt();
    // hashPrevBlock
    rawByteBuffer.get(hashPrevBlock, 0, 32);
    // hashMerkleRoot
    rawByteBuffer.get(hashMerkleRoot, 0, 32);
    byte[] result = new byte[hashMerkleRoot.length + hashPrevBlock.length];
    System.arraycopy(hashMerkleRoot, 0, result, 0, hashMerkleRoot.length);
    System.arraycopy(hashPrevBlock, 0, result, hashMerkleRoot.length, hashPrevBlock.length);
    rawByteBuffer.reset();
    return result;
}
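
The mark()/reset() pair is what lets the method promise not to change the buffer position even though every get and getInt in between is a relative, position-advancing read. A minimal sketch of the pattern:

import java.nio.ByteBuffer;

public class MarkResetDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putInt(1).putInt(2).putInt(3).putInt(4);
        buf.flip();

        buf.mark();           // remember position 0
        int a = buf.getInt(); // position -> 4
        int b = buf.getInt(); // position -> 8
        buf.reset();          // position restored to 0
        System.out.println(a + " " + b + " " + buf.position()); // 1 2 0
    }
}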

From source file:org.apache.hadoop.hbase.io.hfile.TestHFileBlockIndex.java

/**
 * Testing block index through the HFile writer/reader APIs. Allows testing
 * of the index block size configuration, intermediate-level index blocks,
 * and caching index blocks on write.
 *
 * @throws IOException
 */
//@Test
public void testHFileWriterAndReader() throws IOException {
    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_block_index");
    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache blockCache = cacheConf.getBlockCache();

    for (int testI = 0; testI < INDEX_CHUNK_SIZES.length; ++testI) {
        int indexBlockSize = INDEX_CHUNK_SIZES[testI];
        int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];
        LOG.info("Index block size: " + indexBlockSize + ", compression: " + compr);
        // Evict all blocks that were cached-on-write by the previous invocation.
        blockCache.evictBlocksByHfileName(hfilePath.getName());

        conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);
        Set<String> keyStrSet = new HashSet<String>();
        byte[][] keys = new byte[NUM_KV][];
        byte[][] values = new byte[NUM_KV][];

        // Write the HFile
        {
            HFileContext meta = new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE).withCompression(compr)
                    .build();
            HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath)
                    .withFileContext(meta).create();
            Random rand = new Random(19231737);

            for (int i = 0; i < NUM_KV; ++i) {
                byte[] row = TestHFileWriterV2.randomOrderedKey(rand, i);

                // Key will be interpreted by KeyValue.KEY_COMPARATOR
                byte[] k = KeyValueUtil.createFirstOnRow(row, 0, row.length, row, 0, 0, row, 0, 0).getKey();

                byte[] v = TestHFileWriterV2.randomValue(rand);
                writer.append(k, v, HConstants.EMPTY_BYTE_ARRAY);
                keys[i] = k;
                values[i] = v;
                keyStrSet.add(Bytes.toStringBinary(k));

                if (i > 0) {
                    assertTrue(KeyValue.COMPARATOR.compareFlatKey(keys[i - 1], keys[i]) < 0);
                }
            }

            writer.close();
        }

        // Read the HFile
        HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, conf);
        assertEquals(expectedNumLevels, reader.getTrailer().getNumDataIndexLevels());

        assertTrue(Bytes.equals(keys[0], reader.getFirstKey()));
        assertTrue(Bytes.equals(keys[NUM_KV - 1], reader.getLastKey()));
        LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));

        for (boolean pread : new boolean[] { false, true }) {
            HFileScanner scanner = reader.getScanner(true, pread);
            for (int i = 0; i < NUM_KV; ++i) {
                checkSeekTo(keys, scanner, i);
                checkKeyValue("i=" + i, keys[i], values[i], scanner.getKey(), scanner.getValue());
            }
            assertTrue(scanner.seekTo());
            for (int i = NUM_KV - 1; i >= 0; --i) {
                checkSeekTo(keys, scanner, i);
                checkKeyValue("i=" + i, keys[i], values[i], scanner.getKey(), scanner.getValue());
            }
        }

        // Manually compute the mid-key and validate it.
        HFileReaderV2 reader2 = (HFileReaderV2) reader;
        HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader();

        HFileBlock.BlockIterator iter = fsReader.blockRange(0, reader.getTrailer().getLoadOnOpenDataOffset());
        HFileBlock block;
        List<byte[]> blockKeys = new ArrayList<byte[]>();
        while ((block = iter.nextBlock()) != null) {
            if (block.getBlockType() != BlockType.LEAF_INDEX)
                return;
            ByteBuffer b = block.getBufferReadOnly();
            int n = b.getInt();
            // One int for the number of items, and n + 1 for the secondary index.
            int entriesOffset = Bytes.SIZEOF_INT * (n + 2);

            // Get all the keys from the leaf index block.
            for (int i = 0; i < n; ++i) {
                int keyRelOffset = b.getInt(Bytes.SIZEOF_INT * (i + 1));
                int nextKeyRelOffset = b.getInt(Bytes.SIZEOF_INT * (i + 2));
                int keyLen = nextKeyRelOffset - keyRelOffset;
                int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset
                        + HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;
                byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset + keyLen);
                String blockKeyStr = Bytes.toString(blockKey);
                blockKeys.add(blockKey);

                // If the first key of the block is not among the keys written, we
                // are not parsing the non-root index block format correctly.
                assertTrue("Invalid block key from leaf-level block: " + blockKeyStr,
                        keyStrSet.contains(blockKeyStr));
            }
        }

        // Validate the mid-key.
        assertEquals(Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),
                Bytes.toStringBinary(reader.midkey()));

        assertEquals(UNCOMPRESSED_INDEX_SIZES[testI], reader.getTrailer().getUncompressedDataIndexSize());

        reader.close();
        reader2.close();
    }
}
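
The test above mixes the two forms of the method: the relative getInt() reads the entry count n at the current position and advances past it, while the absolute getInt(int index) overload (used for keyRelOffset and nextKeyRelOffset) reads at a fixed index and leaves the position untouched. A minimal sketch of the difference:

import java.nio.ByteBuffer;

public class AbsoluteGetDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putInt(10).putInt(20);
        buf.flip();

        int relative = buf.getInt();  // 10; position advances 0 -> 4
        int absolute = buf.getInt(0); // 10 again; position stays at 4
        int next     = buf.getInt();  // 20
        System.out.println(relative + " " + absolute + " " + next);
    }
}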

From source file:com.github.ambry.utils.UtilsTest.java

@Test
public void testSerializeNullableString() {
    String randomString = getRandomString(10);
    ByteBuffer outputBuffer = ByteBuffer.allocate(4 + randomString.getBytes().length);
    Utils.serializeNullableString(outputBuffer, randomString);
    outputBuffer.flip();
    int length = outputBuffer.getInt();
    assertEquals("Input and output string lengths don't match ", randomString.getBytes().length, length);
    byte[] output = new byte[length];
    outputBuffer.get(output);
    assertFalse("Output buffer shouldn't have any remaining, but has " + outputBuffer.remaining() + " bytes",
            outputBuffer.hasRemaining());
    String outputString = new String(output);
    assertEquals("Input and output strings don't match", randomString, outputString);

    randomString = null;
    outputBuffer = ByteBuffer.allocate(4);
    Utils.serializeNullableString(outputBuffer, randomString);
    outputBuffer.flip();
    length = outputBuffer.getInt();
    assertEquals("Input and output string lengths don't match", 0, length);
    output = new byte[length];
    outputBuffer.get(output);
    assertFalse("Output buffer shouldn't have any remaining, but has " + outputBuffer.remaining() + " bytes",
            outputBuffer.hasRemaining());
    outputString = new String(output);
    assertEquals("Output string \"" + outputString + "\" expected to be empty", outputString, "");
}
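
Both halves of the test exercise the common length-prefix pattern: write a 4-byte length with putInt, write the payload, flip(), then read the length back with getInt and slice out exactly that many bytes. A stripped-down sketch of the round trip without the Ambry helper:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class LengthPrefixDemo {
    public static void main(String[] args) {
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        ByteBuffer buf = ByteBuffer.allocate(4 + payload.length);
        buf.putInt(payload.length); // length prefix
        buf.put(payload);
        buf.flip();                 // switch from writing to reading

        int len = buf.getInt();     // 5
        byte[] out = new byte[len];
        buf.get(out);
        System.out.println(new String(out, StandardCharsets.UTF_8)); // hello
    }
}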

From source file:voldemort.store.cachestore.impl.ChannelStore.java

private boolean checkSignature(FileChannel channel) throws IOException {
    ByteBuffer intBytes = ByteBuffer.allocate(OFFSET);
    if (channel.size() == 0) {
        intBytes.putInt(MAGIC);
        intBytes.flip();
        channel.write(intBytes);
    } else {
        channel.read(intBytes);
        intBytes.rewind();
        int s = intBytes.getInt();
        if (s != MAGIC)
            throw new StoreException(
                    "Header mismatch expect " + Integer.toHexString(MAGIC) + " read " + Integer.toHexString(s));
    }
    return true;
}
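
One general caveat when reading a header this way: getInt() throws BufferUnderflowException if fewer than four bytes remain between the position and the limit, so for input of unknown size it can be worth checking remaining() first. A minimal sketch:

import java.nio.ByteBuffer;

public class UnderflowDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap(new byte[] { 0x00, 0x01 }); // only 2 bytes
        if (buf.remaining() >= 4) {
            System.out.println(buf.getInt());
        } else {
            // buf.getInt() would throw java.nio.BufferUnderflowException here
            System.out.println("too short to hold an int header");
        }
    }
}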

From source file:hivemall.recommend.SlimUDTF.java

private void replayTrain(@Nonnull final ByteBuffer buf) {
    final int itemI = buf.getInt();
    final int knnSize = buf.getInt();

    final Int2ObjectMap<Int2FloatMap> knnItems = new Int2ObjectOpenHashMap<>(1024);
    final IntSet pairItems = new IntOpenHashSet();
    for (int i = 0; i < knnSize; i++) {
        int user = buf.getInt();
        int ruSize = buf.getInt();
        Int2FloatMap ru = new Int2FloatOpenHashMap(ruSize);
        ru.defaultReturnValue(0.f);

        for (int j = 0; j < ruSize; j++) {
            int itemK = buf.getInt();
            pairItems.add(itemK);
            float ruk = buf.getFloat();
            ru.put(itemK, ruk);
        }
        knnItems.put(user, ru);
    }

    for (int itemJ : pairItems) {
        train(itemI, knnItems, itemJ);
    }
}
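
replayTrain can only decode the buffer if it was written with putInt/putFloat calls in exactly the order read above. The write side is not shown in this snippet; a hypothetical writer matching that layout (the name and the plain java.util.Map types are invented for illustration, the real SlimUDTF code uses fastutil primitive collections) might look like:

import java.nio.ByteBuffer;
import java.util.Map;

// Hypothetical writer matching replayTrain's read order.
static void writeTrainRecord(ByteBuffer buf, int itemI,
        Map<Integer, Map<Integer, Float>> knnItems) {
    buf.putInt(itemI);
    buf.putInt(knnItems.size());                 // knnSize
    for (Map.Entry<Integer, Map<Integer, Float>> e : knnItems.entrySet()) {
        buf.putInt(e.getKey());                  // user
        buf.putInt(e.getValue().size());         // ruSize
        for (Map.Entry<Integer, Float> r : e.getValue().entrySet()) {
            buf.putInt(r.getKey());              // itemK
            buf.putFloat(r.getValue());          // ruk
        }
    }
}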

From source file:org.apache.htrace.impl.PackedBufferManager.java

private void readAndValidateResponseFrame(SelectionKey sockKey, ByteBuffer buf, long expectedSeq,
        int expectedMethodId) throws IOException {
    buf.clear();
    buf.limit(PackedBuffer.HRPC_RESP_FRAME_LENGTH);
    doRecv(sockKey, buf);
    buf.flip();
    buf.order(ByteOrder.LITTLE_ENDIAN);
    long seq = buf.getLong();
    if (seq != expectedSeq) {
        throw new IOException("Expected sequence number " + expectedSeq + ", but got sequence number " + seq);
    }
    int methodId = buf.getInt();
    if (expectedMethodId != methodId) {
        throw new IOException("Expected method id " + expectedMethodId + ", but got " + methodId);
    }
    int errorLength = buf.getInt();
    buf.getInt(); // read past one more header field (value unused here)
    if ((errorLength < 0) || (errorLength > PackedBuffer.MAX_HRPC_ERROR_LENGTH)) {
        throw new IOException("Got server error with invalid length " + errorLength);
    } else if (errorLength > 0) {
        buf.clear();
        buf.limit(errorLength);
        doRecv(sockKey, buf);
        buf.flip();
        CharBuffer charBuf = StandardCharsets.UTF_8.decode(buf);
        String serverErrorStr = charBuf.toString();
        throw new IOException("Got server error " + serverErrorStr);
    }
}
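
Because the buffer is switched to ByteOrder.LITTLE_ENDIAN before any field is read, the frame decodes as an 8-byte sequence number followed by three 4-byte ints: the method id, the error length, and one further header field that this method reads and discards. A hedged sketch of building a matching frame by hand, for example in a test (the 20-byte size matches the four fields read above; the real frame length constant lives in PackedBuffer):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class FrameBuilderDemo {
    public static void main(String[] args) {
        ByteBuffer frame = ByteBuffer.allocate(20).order(ByteOrder.LITTLE_ENDIAN);
        frame.putLong(42L); // sequence number
        frame.putInt(7);    // method id
        frame.putInt(0);    // error length (0 = no error payload follows)
        frame.putInt(0);    // trailing header field (ignored by the reader)
        frame.flip();       // ready to be parsed like the frame above
    }
}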