Example usage for java.nio ByteBuffer hasRemaining

Introduction

On this page you can find example usages for java.nio.ByteBuffer.hasRemaining().

Prototype

public final boolean hasRemaining() 

Document

Indicates whether there are elements remaining in this buffer, that is, whether position < limit.
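
For a minimal, self-contained illustration of that contract (the class name and byte values below are purely illustrative), the following sketch drains a wrapped buffer until position reaches limit:

import java.nio.ByteBuffer;

// Illustrative demo class, not from any of the projects below.
public class HasRemainingDemo {
    public static void main(String[] args) {
        // wrap() leaves position = 0 and limit = capacity = 4
        ByteBuffer buf = ByteBuffer.wrap(new byte[] { 0x0a, 0x0b, 0x0c, 0x0d });

        // hasRemaining() is true exactly while position < limit;
        // each relative get() advances the position by one
        while (buf.hasRemaining()) {
            System.out.printf("%02x ", buf.get());
        }

        System.out.println();
        System.out.println(buf.hasRemaining()); // false: position == limit
    }
}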

Usage

From source file: org.apache.qpid.amqp_1_0.client.Filereceiver.java
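
Resuming unsettled AMQP file transfers: hasRemaining() drives the loop that renders each delivery tag's ByteBuffer as a hex string, which becomes the prefix of the temporary file name.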

@Override
protected void run() {
    final String queue = getArgs()[0];
    final String directoryName = getArgs()[1];

    try {
        Connection conn = newConnection();

        Session session = conn.createSession();

        final File directory = new File(directoryName);
        if (directory.isDirectory() && directory.canWrite()) {
            File tmpDirectory = new File(directoryName, ".tmp");
            if (!tmpDirectory.exists()) {
                tmpDirectory.mkdir();
            }

            String[] unsettledFiles = tmpDirectory.list();

            Map<Binary, Outcome> unsettled = new HashMap<Binary, Outcome>();
            final Map<Binary, String> unsettledFileNames = new HashMap<Binary, String>();

            Accepted accepted = new Accepted();

            for (String fileName : unsettledFiles) {
                File theFile = new File(tmpDirectory, fileName);
                if (theFile.isFile()) {
                    if (fileName.startsWith("~") && fileName.endsWith("~")) {
                        theFile.delete();
                    } else {
                        int splitPoint = fileName.indexOf(".");
                        String deliveryTagStr = fileName.substring(0, splitPoint);
                        String actualFileName = fileName.substring(splitPoint + 1);

                        byte[] bytes = new byte[deliveryTagStr.length() / 2];

                        for (int i = 0; i < bytes.length; i++) {
                            char c = deliveryTagStr.charAt(2 * i);
                            char d = deliveryTagStr.charAt(1 + (2 * i));

                            bytes[i] = (byte) (((c <= '9' ? c - '0' : c - 'W') << 4)
                                    | (d <= '9' ? d - '0' : d - 'W'));

                        }
                        Binary deliveryTag = new Binary(bytes);
                        unsettled.put(deliveryTag, accepted);
                        unsettledFileNames.put(deliveryTag, fileName);
                    }
                }

            }

            Receiver r = session.createReceiver(queue, AcknowledgeMode.EO, getLinkName(), isDurableLink(),
                    unsettled);

            Map<Binary, Outcome> remoteUnsettled = r.getRemoteUnsettled();

            for (Map.Entry<Binary, String> entry : unsettledFileNames.entrySet()) {
                if (remoteUnsettled == null || !remoteUnsettled.containsKey(entry.getKey())) {

                    File tmpFile = new File(tmpDirectory, entry.getValue());
                    final File dest = new File(directory,
                            entry.getValue().substring(entry.getValue().indexOf(".") + 1));
                    if (dest.exists()) {
                        System.err.println("Duplicate detected - filename " + dest.getName());
                    }

                    tmpFile.renameTo(dest);
                }
            }

            int credit = 10;

            r.setCredit(UnsignedInteger.valueOf(credit), true);

            int received = 0;
            Message m = null;
            do {
                m = isBlock() && received == 0 ? r.receive() : r.receive(10000);
                if (m != null) {
                    if (m.isResume() && unsettled.containsKey(m.getDeliveryTag())) {
                        final String tmpFileName = unsettledFileNames.get(m.getDeliveryTag());
                        final File unsettledFile = new File(tmpDirectory, tmpFileName);
                        r.acknowledge(m, new Receiver.SettledAction() {
                            public void onSettled(final Binary deliveryTag) {
                                int splitPoint = tmpFileName.indexOf(".");

                                String fileName = tmpFileName.substring(splitPoint + 1);

                                final File dest = new File(directory, fileName);
                                if (dest.exists()) {
                                    System.err.println("Duplicate detected - filename " + dest.getName());
                                }
                                unsettledFile.renameTo(dest);
                                unsettledFileNames.remove(deliveryTag);
                            }
                        });
                    } else {
                        received++;
                        List<Section> sections = m.getPayload();
                        Binary deliveryTag = m.getDeliveryTag();
                        StringBuilder tagNameBuilder = new StringBuilder();

                        ByteBuffer dtbuf = deliveryTag.asByteBuffer();
                        while (dtbuf.hasRemaining()) {
                            tagNameBuilder.append(String.format("%02x", dtbuf.get()));
                        }

                        ApplicationProperties properties = null;
                        List<Binary> data = new ArrayList<Binary>();
                        int totalSize = 0;
                        for (Section section : sections) {
                            if (section instanceof ApplicationProperties) {
                                properties = (ApplicationProperties) section;
                            } else if (section instanceof AmqpValue) {
                                AmqpValue value = (AmqpValue) section;
                                if (value.getValue() instanceof Binary) {
                                    Binary binary = (Binary) value.getValue();
                                    data.add(binary);
                                    totalSize += binary.getLength();

                                } else {
                                    // TODO exception
                                }
                            } else if (section instanceof Data) {
                                Data value = (Data) section;
                                Binary binary = value.getValue();
                                data.add(binary);
                                totalSize += binary.getLength();

                            }
                        }
                        if (properties != null) {
                            final String fileName = (String) properties.getValue().get("filename");
                            byte[] fileData = new byte[totalSize];
                            ByteBuffer buf = ByteBuffer.wrap(fileData);
                            for (Binary bin : data) {
                                buf.put(bin.asByteBuffer());
                            }
                            File outputFile = new File(tmpDirectory, "~" + fileName + "~");
                            if (outputFile.exists()) {
                                outputFile.delete();
                            }
                            FileOutputStream fos = new FileOutputStream(outputFile);
                            fos.write(fileData);
                            fos.flush();
                            fos.close();

                            final File unsettledFile = new File(tmpDirectory,
                                    tagNameBuilder.toString() + "." + fileName);
                            outputFile.renameTo(unsettledFile);
                            r.acknowledge(m, new Receiver.SettledAction() {
                                public void onSettled(final Binary deliveryTag) {
                                    final File dest = new File(directory, fileName);
                                    if (dest.exists()) {
                                        System.err.println("Duplicate detected - filename " + dest.getName());
                                    }
                                    unsettledFile.renameTo(dest);

                                }
                            });

                        }
                    }
                }
            } while (m != null);

            r.close();
        } else {
            System.err.println("No such directory: " + directoryName);
        }
        session.close();
        conn.close();
    } catch (Connection.ConnectionException e) {
        e.printStackTrace();
    } catch (FileNotFoundException e) {
        e.printStackTrace(); //TODO.
    } catch (IOException e) {
        e.printStackTrace(); //TODO.
    } catch (AmqpErrorException e) {
        e.printStackTrace(); //TODO.
    }

}

From source file: com.netflix.astyanax.thrift.AbstractThriftMutationBatchImpl.java
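
Here hasRemaining() serves as an emptiness check: a row key that serializes to a ByteBuffer with no remaining bytes is rejected before any mutation is recorded.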

@Override
public <K, C> ColumnListMutation<C> withRow(ColumnFamily<K, C> columnFamily, K rowKey) {
    Preconditions.checkNotNull(columnFamily, "columnFamily cannot be null");
    Preconditions.checkNotNull(rowKey, "Row key cannot be null");

    // Upon adding the first row into the mutation get the latest time from the clock
    if (timestamp == UNSET_TIMESTAMP)
        timestamp = clock.getCurrentTime();

    ByteBuffer bbKey = columnFamily.getKeySerializer().toByteBuffer(rowKey);
    if (!bbKey.hasRemaining()) {
        throw new RuntimeException("Row key cannot be empty");
    }

    KeyAndColumnFamily kacf = new KeyAndColumnFamily(columnFamily.getName(), bbKey);
    ColumnListMutation<C> clm = (ColumnListMutation<C>) rowLookup.get(kacf);
    if (clm == null) {
        Map<String, List<Mutation>> innerMutationMap = mutationMap.get(bbKey);
        if (innerMutationMap == null) {
            innerMutationMap = Maps.newHashMap();
            mutationMap.put(bbKey, innerMutationMap);
        }

        List<Mutation> innerMutationList = innerMutationMap.get(columnFamily.getName());
        if (innerMutationList == null) {
            innerMutationList = Lists.newArrayList();
            innerMutationMap.put(columnFamily.getName(), innerMutationList);
        }

        clm = new ThriftColumnFamilyMutationImpl<C>(timestamp, innerMutationList,
                columnFamily.getColumnSerializer());
        rowLookup.put(kacf, clm);
    }
    return clm;
}

From source file: io.github.dsheirer.record.wave.WaveWriter.java
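
A single FileChannel.write() call may consume only part of a buffer, so the writer loops on hasRemaining() until the entire wave header has been written.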

/**
 * Opens the file and writes a wave header.
 */
private void open() throws IOException {
    int version = 2;

    while (Files.exists(mFile)) {
        mFile = Paths.get(mFile.toFile().getAbsolutePath().replace(".wav", "_" + version + ".wav"));
        version++;
    }

    mFileChannel = FileChannel.open(mFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);

    ByteBuffer header = getWaveHeader(mAudioFormat);

    while (header.hasRemaining()) {
        mFileChannel.write(header);
    }
}

From source file: org.apache.htrace.impl.PackedBuffer.java
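
hasRemaining() bounds the loop that formats each byte of a duplicated, flipped buffer as a space-separated pair of hex digits.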

public String toHexString() {
    String prefix = "";
    StringBuilder bld = new StringBuilder();
    ByteBuffer b = bb.duplicate();
    b.flip(); // limit = current position, position = 0: ready the duplicate for reading
    while (b.hasRemaining()) {
        bld.append(String.format("%s%02x", prefix, b.get()));
        prefix = " ";
    }
    return bld.toString();
}

From source file: org.jumpmind.vaadin.ui.common.ReadOnlyTextAreaDialog.java
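
In the decimal display branch, hasRemaining() walks the decoded bytes so each can be appended as an unsigned value (buffer.get() & 0xff).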

protected void updateTextField(String display, String value) {
    if (display.equals("Hex")) {
        textField.setValue(value);
    } else if (display.equals("Text")) {
        try {
            byte[] bytes = Hex.decodeHex(value.toCharArray());
            textField.setValue(new String(bytes));
        } catch (Exception e) {
            log.warn("Failed to decode hex string for display", e);
        }
    } else if (display.equals("Decimal")) {
        try {
            byte[] bytes = Hex.decodeHex(value.toCharArray());
            String newValue = "";
            ByteBuffer buffer = ByteBuffer.wrap(bytes);
            while (buffer.hasRemaining()) {
                if (!newValue.equals("")) {
                    newValue += " ";
                }
                newValue += buffer.get() & 0xff;
            }
            textField.setValue(newValue);
        } catch (Exception e) {
            log.warn("Failed to decode hex string for display", e);
        }
    }
}

From source file: org.apache.hadoop.hbase.io.hfile.TestHFileWriterV2.java
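
In this test, hasRemaining() controls the scan of each data block's buffer: key and value lengths are read first, then the payloads, until the block is exhausted.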

private void writeDataAndReadFromHFile(Path hfilePath, Algorithm compressAlgo, int entryCount,
        boolean findMidKey) throws IOException {

    HFileContext context = new HFileContextBuilder().withBlockSize(4096).withCompression(compressAlgo).build();
    HFileWriterV2 writer = (HFileWriterV2) new HFileWriterV2.WriterFactoryV2(conf, new CacheConfig(conf))
            .withPath(fs, hfilePath).withFileContext(context).create();

    Random rand = new Random(9713312); // Just a fixed seed.
    List<KeyValue> keyValues = new ArrayList<KeyValue>(entryCount);

    for (int i = 0; i < entryCount; ++i) {
        byte[] keyBytes = randomOrderedKey(rand, i);

        // A random-length random value.
        byte[] valueBytes = randomValue(rand);
        KeyValue keyValue = new KeyValue(keyBytes, null, null, valueBytes);
        writer.append(keyValue);
        keyValues.add(keyValue);
    }

    // Add in an arbitrary order. They will be sorted lexicographically by
    // the key.
    writer.appendMetaBlock("CAPITAL_OF_USA", new Text("Washington, D.C."));
    writer.appendMetaBlock("CAPITAL_OF_RUSSIA", new Text("Moscow"));
    writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));

    writer.close();

    FSDataInputStream fsdis = fs.open(hfilePath);

    // A "manual" version of a new-format HFile reader. This unit test was
    // written before the V2 reader was fully implemented.

    long fileSize = fs.getFileStatus(hfilePath).getLen();
    FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize);

    assertEquals(2, trailer.getMajorVersion());
    assertEquals(entryCount, trailer.getEntryCount());

    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(true).withIncludesMvcc(false)
            .withIncludesTags(false).withCompression(compressAlgo).build();

    HFileBlock.FSReader blockReader = new HFileBlock.FSReaderV2(fsdis, fileSize, meta);
    // Comparator class name is stored in the trailer in version 2.
    KVComparator comparator = trailer.createComparator();
    HFileBlockIndex.BlockIndexReader dataBlockIndexReader = new HFileBlockIndex.BlockIndexReader(comparator,
            trailer.getNumDataIndexLevels());
    HFileBlockIndex.BlockIndexReader metaBlockIndexReader = new HFileBlockIndex.BlockIndexReader(
            KeyValue.RAW_COMPARATOR, 1);

    HFileBlock.BlockIterator blockIter = blockReader.blockRange(trailer.getLoadOnOpenDataOffset(),
            fileSize - trailer.getTrailerSize());
    // Data index. We also read statistics about the block index written after
    // the root level.
    dataBlockIndexReader.readMultiLevelIndexRoot(blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX),
            trailer.getDataIndexCount());

    if (findMidKey) {
        byte[] midkey = dataBlockIndexReader.midkey();
        assertNotNull("Midkey should not be null", midkey);
    }

    // Meta index.
    metaBlockIndexReader.readRootIndex(blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX).getByteStream(),
            trailer.getMetaIndexCount());
    // File info
    FileInfo fileInfo = new FileInfo();
    fileInfo.read(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream());
    byte[] keyValueFormatVersion = fileInfo.get(HFileWriterV2.KEY_VALUE_VERSION);
    boolean includeMemstoreTS = keyValueFormatVersion != null && Bytes.toInt(keyValueFormatVersion) > 0;

    // Counters for the number of key/value pairs and the number of blocks
    int entriesRead = 0;
    int blocksRead = 0;
    long memstoreTS = 0;

    // Scan blocks the way the reader would scan them
    fsdis.seek(0);
    long curBlockPos = 0;
    while (curBlockPos <= trailer.getLastDataBlockOffset()) {
        HFileBlock block = blockReader.readBlockData(curBlockPos, -1, -1, false);
        assertEquals(BlockType.DATA, block.getBlockType());
        ByteBuffer buf = block.getBufferWithoutHeader();
        while (buf.hasRemaining()) {
            int keyLen = buf.getInt();
            int valueLen = buf.getInt();

            byte[] key = new byte[keyLen];
            buf.get(key);

            byte[] value = new byte[valueLen];
            buf.get(value);

            if (includeMemstoreTS) {
                ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(),
                        buf.arrayOffset() + buf.position(), buf.remaining());
                DataInputStream data_input = new DataInputStream(byte_input);

                memstoreTS = WritableUtils.readVLong(data_input);
                buf.position(buf.position() + WritableUtils.getVIntSize(memstoreTS));
            }

            // A brute-force check to see that all keys and values are correct.
            assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0);
            assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0);

            ++entriesRead;
        }
        ++blocksRead;
        curBlockPos += block.getOnDiskSizeWithHeader();
    }
    LOG.info("Finished reading: entries=" + entriesRead + ", blocksRead=" + blocksRead);
    assertEquals(entryCount, entriesRead);

    // Meta blocks. We can scan until the load-on-open data offset (which is
    // the root block index offset in version 2) because we are not testing
    // intermediate-level index blocks here.

    int metaCounter = 0;
    while (fsdis.getPos() < trailer.getLoadOnOpenDataOffset()) {
        LOG.info("Current offset: " + fsdis.getPos() + ", scanning until " + trailer.getLoadOnOpenDataOffset());
        HFileBlock block = blockReader.readBlockData(curBlockPos, -1, -1, false);
        assertEquals(BlockType.META, block.getBlockType());
        Text t = new Text();
        ByteBuffer buf = block.getBufferWithoutHeader();
        if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) {
            throw new IOException(
                    "Failed to deserialize block " + this + " into a " + t.getClass().getSimpleName());
        }
        Text expectedText = (metaCounter == 0 ? new Text("Paris")
                : metaCounter == 1 ? new Text("Moscow") : new Text("Washington, D.C."));
        assertEquals(expectedText, t);
        LOG.info("Read meta block data: " + t);
        ++metaCounter;
        curBlockPos += block.getOnDiskSizeWithHeader();
    }

    fsdis.close();
}

From source file: org.apache.hadoop.hbase.io.encoding.EncodedDataBlock.java
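
hasRemaining() drives the re-encoding loop, parsing one KeyValue per iteration from the uncompressed buffer until no bytes remain.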

/**
 * Do the encoding, but do not cache the encoded data.
 * @return encoded data block with header and checksum
 */
public byte[] encodeData() {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);
        DataOutputStream out = new DataOutputStream(baos);
        this.dataBlockEncoder.startBlockEncoding(encodingCtx, out);
        ByteBuffer in = getUncompressedBuffer();
        in.rewind();
        int klength, vlength;
        short tagsLength = 0;
        long memstoreTS = 0L;
        KeyValue kv = null;
        while (in.hasRemaining()) {
            int kvOffset = in.position();
            klength = in.getInt();
            vlength = in.getInt();
            ByteBufferUtils.skip(in, klength + vlength);
            if (this.meta.isIncludesTags()) {
                tagsLength = in.getShort();
                ByteBufferUtils.skip(in, tagsLength);
            }
            if (this.meta.isIncludesMvcc()) {
                memstoreTS = ByteBufferUtils.readVLong(in);
            }
            kv = new KeyValue(in.array(), kvOffset,
                    (int) KeyValue.getKeyValueDataStructureSize(klength, vlength, tagsLength));
            kv.setMvccVersion(memstoreTS);
            this.dataBlockEncoder.encode(kv, encodingCtx, out);
        }
        BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
        baos.writeTo(stream);
        this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf);
    } catch (IOException e) {
        throw new RuntimeException(String.format("Bug in encoding part of algorithm %s. "
                + "Probably it requested more bytes than are available.", toString()), e);
    }
    return baos.toByteArray();
}

From source file: record.wave.WaveWriter.java
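
This writer relies on hasRemaining() write loops twice: once when the whole buffer fits in the current file, and once for each half of a buffer split across a file rollover.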

/**
* Writes the buffer contents to the file.  Assumes that the buffer is full 
* and the first byte of data is at position 0.
*/
public void write(ByteBuffer buffer) throws IOException {
    buffer.position(0);

    /* Write the full buffer if there is room, respecting the max file size */
    if (mFileChannel.size() + buffer.capacity() < mMaxSize) {
        while (buffer.hasRemaining()) {
            mFileChannel.write(buffer);
        }

        updateWaveFileSize();
    } else {
        /* Split the buffer to finish filling the current file and then put
         * the leftover into a new file */
        int remaining = (int) (mMaxSize - mFileChannel.size());

        /* Ensure we write full frames to fill up the remaining size */
        remaining -= (int) (remaining % mAudioFormat.getFrameSize());

        byte[] bytes = buffer.array();

        ByteBuffer current = ByteBuffer.wrap(Arrays.copyOf(bytes, remaining));

        ByteBuffer next = ByteBuffer.wrap(Arrays.copyOfRange(bytes, remaining, bytes.length));

        while (current.hasRemaining()) {
            mFileChannel.write(current);
        }

        updateWaveFileSize();

        rollover();

        while (next.hasRemaining()) {
            mFileChannel.write(next);
        }

        updateWaveFileSize();
    }
}

From source file: org.apache.hadoop.hbase.io.hfile.TestHFileWriterV3.java
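
The V3 counterpart of the previous test adds optional tags; hasRemaining() again bounds the per-block key/value scan.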

private void writeDataAndReadFromHFile(Path hfilePath, Algorithm compressAlgo, int entryCount,
        boolean findMidKey, boolean useTags) throws IOException {
    HFileContext context = new HFileContextBuilder().withBlockSize(4096).withIncludesTags(useTags)
            .withCompression(compressAlgo).build();
    HFileWriterV3 writer = (HFileWriterV3) new HFileWriterV3.WriterFactoryV3(conf, new CacheConfig(conf))
            .withPath(fs, hfilePath).withFileContext(context).withComparator(KeyValue.COMPARATOR).create();

    Random rand = new Random(9713312); // Just a fixed seed.
    List<KeyValue> keyValues = new ArrayList<KeyValue>(entryCount);

    for (int i = 0; i < entryCount; ++i) {
        byte[] keyBytes = TestHFileWriterV2.randomOrderedKey(rand, i);

        // A random-length random value.
        byte[] valueBytes = TestHFileWriterV2.randomValue(rand);
        KeyValue keyValue = null;
        if (useTags) {
            ArrayList<Tag> tags = new ArrayList<Tag>();
            for (int j = 0; j < 1 + rand.nextInt(4); j++) {
                byte[] tagBytes = new byte[16];
                rand.nextBytes(tagBytes);
                tags.add(new Tag((byte) 1, tagBytes));
            }
            keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP, valueBytes, tags);
        } else {
            keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP, valueBytes);
        }
        writer.append(keyValue);
        keyValues.add(keyValue);
    }

    // Add in an arbitrary order. They will be sorted lexicographically by
    // the key.
    writer.appendMetaBlock("CAPITAL_OF_USA", new Text("Washington, D.C."));
    writer.appendMetaBlock("CAPITAL_OF_RUSSIA", new Text("Moscow"));
    writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));

    writer.close();

    FSDataInputStream fsdis = fs.open(hfilePath);

    long fileSize = fs.getFileStatus(hfilePath).getLen();
    FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize);

    assertEquals(3, trailer.getMajorVersion());
    assertEquals(entryCount, trailer.getEntryCount());
    HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo).withIncludesMvcc(false)
            .withIncludesTags(useTags).withHBaseCheckSum(true).build();
    HFileBlock.FSReader blockReader = new HFileBlock.FSReaderV2(fsdis, fileSize, meta);
    // Comparator class name is stored in the trailer in version 2.
    KVComparator comparator = trailer.createComparator();
    HFileBlockIndex.BlockIndexReader dataBlockIndexReader = new HFileBlockIndex.BlockIndexReader(comparator,
            trailer.getNumDataIndexLevels());
    HFileBlockIndex.BlockIndexReader metaBlockIndexReader = new HFileBlockIndex.BlockIndexReader(
            KeyValue.RAW_COMPARATOR, 1);

    HFileBlock.BlockIterator blockIter = blockReader.blockRange(trailer.getLoadOnOpenDataOffset(),
            fileSize - trailer.getTrailerSize());
    // Data index. We also read statistics about the block index written after
    // the root level.
    dataBlockIndexReader.readMultiLevelIndexRoot(blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX),
            trailer.getDataIndexCount());

    if (findMidKey) {
        byte[] midkey = dataBlockIndexReader.midkey();
        assertNotNull("Midkey should not be null", midkey);
    }

    // Meta index.
    metaBlockIndexReader.readRootIndex(blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX).getByteStream(),
            trailer.getMetaIndexCount());
    // File info
    FileInfo fileInfo = new FileInfo();
    fileInfo.read(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream());
    byte[] keyValueFormatVersion = fileInfo.get(HFileWriterV3.KEY_VALUE_VERSION);
    boolean includeMemstoreTS = keyValueFormatVersion != null && Bytes.toInt(keyValueFormatVersion) > 0;

    // Counters for the number of key/value pairs and the number of blocks
    int entriesRead = 0;
    int blocksRead = 0;
    long memstoreTS = 0;

    // Scan blocks the way the reader would scan them
    fsdis.seek(0);
    long curBlockPos = 0;
    while (curBlockPos <= trailer.getLastDataBlockOffset()) {
        HFileBlock block = blockReader.readBlockData(curBlockPos, -1, -1, false);
        assertEquals(BlockType.DATA, block.getBlockType());
        ByteBuffer buf = block.getBufferWithoutHeader();
        int keyLen = -1;
        while (buf.hasRemaining()) {

            keyLen = buf.getInt();

            int valueLen = buf.getInt();

            byte[] key = new byte[keyLen];
            buf.get(key);

            byte[] value = new byte[valueLen];
            buf.get(value);
            byte[] tagValue = null;
            if (useTags) {
                int tagLen = buf.getShort();
                tagValue = new byte[tagLen];
                buf.get(tagValue);
            }

            if (includeMemstoreTS) {
                ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(),
                        buf.arrayOffset() + buf.position(), buf.remaining());
                DataInputStream data_input = new DataInputStream(byte_input);

                memstoreTS = WritableUtils.readVLong(data_input);
                buf.position(buf.position() + WritableUtils.getVIntSize(memstoreTS));
            }

            // A brute-force check to see that all keys and values are correct.
            assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0);
            assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0);
            if (useTags) {
                assertNotNull(tagValue);
                KeyValue tkv = keyValues.get(entriesRead);
                assertEquals(tagValue.length, tkv.getTagsLength());
                assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(),
                        tkv.getTagsOffset(), tkv.getTagsLength()) == 0);
            }
            ++entriesRead;
        }
        ++blocksRead;
        curBlockPos += block.getOnDiskSizeWithHeader();
    }
    LOG.info("Finished reading: entries=" + entriesRead + ", blocksRead=" + blocksRead);
    assertEquals(entryCount, entriesRead);

    // Meta blocks. We can scan until the load-on-open data offset (which is
    // the root block index offset in version 2) because we are not testing
    // intermediate-level index blocks here.

    int metaCounter = 0;
    while (fsdis.getPos() < trailer.getLoadOnOpenDataOffset()) {
        LOG.info("Current offset: " + fsdis.getPos() + ", scanning until " + trailer.getLoadOnOpenDataOffset());
        HFileBlock block = blockReader.readBlockData(curBlockPos, -1, -1, false);
        assertEquals(BlockType.META, block.getBlockType());
        Text t = new Text();
        ByteBuffer buf = block.getBufferWithoutHeader();
        if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) {
            throw new IOException(
                    "Failed to deserialize block " + this + " into a " + t.getClass().getSimpleName());
        }
        Text expectedText = (metaCounter == 0 ? new Text("Paris")
                : metaCounter == 1 ? new Text("Moscow") : new Text("Washington, D.C."));
        assertEquals(expectedText, t);
        LOG.info("Read meta block data: " + t);
        ++metaCounter;
        curBlockPos += block.getOnDiskSizeWithHeader();
    }

    fsdis.close();
}

From source file: org.springframework.messaging.simp.stomp.StompDecoder.java
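
hasRemaining() gates the frame-decoding loop: messages are extracted until the buffer is exhausted or only a partial frame remains.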

/**
 * Decodes one or more STOMP frames from the given {@code buffer} and returns
 * a list of {@link Message}s.
 * <p>If the given ByteBuffer contains only partial STOMP frame content and no
 * complete STOMP frames, an empty list is returned, and the buffer is reset
 * to where it was.
 * <p>If the buffer contains one or more STOMP frames, those are returned and
 * the buffer reset to point to the beginning of the unused partial content.
 * <p>The output partialMessageHeaders map is used to store successfully parsed
 * headers in case of partial content. The caller can then check if a
 * "content-length" header was read, which helps to determine how much more
 * content is needed before the next attempt to decode.
 * @param byteBuffer the buffer to decode the STOMP frame from
 * @param partialMessageHeaders an empty output map that will store the last
 * successfully parsed partialMessageHeaders in case of partial message content
 * in cases where the partial buffer ended with a partial STOMP frame
 * @return the decoded messages, or an empty list if none
 * @throws StompConversionException raised in case of decoding issues
 */
public List<Message<byte[]>> decode(ByteBuffer byteBuffer,
        @Nullable MultiValueMap<String, String> partialMessageHeaders) {

    List<Message<byte[]>> messages = new ArrayList<>();
    while (byteBuffer.hasRemaining()) {
        Message<byte[]> message = decodeMessage(byteBuffer, partialMessageHeaders);
        if (message != null) {
            messages.add(message);
        } else {
            break;
        }
    }
    return messages;
}