Example usage for java.nio.ByteBuffer.slice()

Introduction

This page collects example usages of java.nio.ByteBuffer.slice() from open-source projects.

Prototype

public abstract ByteBuffer slice();

Document

Creates a new byte buffer whose content is a shared subsequence of this buffer's content, starting at this buffer's current position. Changes to one buffer's content are visible in the other, but the two buffers' position, limit, and mark values are independent.
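
A minimal, self-contained sketch of these semantics (the buffer contents are illustrative only): the slice begins at the parent's current position, is zero-based, and shares the backing storage with the parent.

import java.nio.ByteBuffer;

public class SliceDemo {
    public static void main(String[] args) {
        ByteBuffer parent = ByteBuffer.wrap(new byte[] { 10, 20, 30, 40, 50 });
        parent.position(2);

        ByteBuffer slice = parent.slice();     // covers parent[2..5), rebased to index 0
        System.out.println(slice.position());  // 0
        System.out.println(slice.remaining()); // 3
        System.out.println(slice.get(0));      // 30

        slice.put(0, (byte) 99);               // the content is shared...
        System.out.println(parent.get(2));     // ...so the parent sees 99
    }
}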

Usage

From source file:org.apache.arrow.vector.util.Text.java

/**
 * Returns the Unicode Scalar Value (32-bit integer value) for the character at <code>position</code>. Note that this
 * method avoids using the converter or doing String instantiation
 *
 * @return the Unicode scalar value at position or -1 if the position is invalid or points to a trailing byte
 */
public int charAt(int position) {
    if (position > this.length) {
        return -1; // too long
    }
    if (position < 0) {
        return -1; // duh.
    }

    ByteBuffer bb = (ByteBuffer) ByteBuffer.wrap(bytes).position(position);
    return bytesToCodePoint(bb.slice());
}
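
The (ByteBuffer) cast is needed on pre-Java 9 sources because Buffer.position(int) returns Buffer there; slice() then rebases the buffer so that the byte at position becomes index 0, letting bytesToCodePoint decode without any offset arithmetic. On newer JDKs the same rebasing can be written in one step (a sketch, assuming a JDK 13+ runtime where the absolute slice(index, length) overload is available):

    ByteBuffer bb = ByteBuffer.wrap(bytes).slice(position, bytes.length - position);
    return bytesToCodePoint(bb);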

From source file:org.wso2.andes.amqp.QpidAMQPBridge.java

/**
 * Handles a message content chunk received by the server.
 *
 * @param messageID       id of the message to which the content belongs
 * @param offsetInMessage chunk offset
 * @param src             ByteBuffer with content bytes
 */
public AndesMessagePart messageContentChunkReceived(long messageID, int offsetInMessage, ByteBuffer src) {

    if (log.isDebugEnabled()) {
        log.debug("Content Part Received id " + messageID + ", offset " + offsetInMessage);
    }
    AndesMessagePart part = new AndesMessagePart();
    src = src.slice();
    final byte[] chunkData = new byte[src.limit()];
    src.duplicate().get(chunkData);

    part.setData(chunkData);
    part.setMessageID(messageID);
    part.setOffSet(offsetInMessage);
    part.setDataLength(chunkData.length);

    return part;
}
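
Here slice() first normalizes the view so that limit() equals the number of readable bytes, and duplicate() supplies a throwaway cursor so the bulk get(...) does not move src's position. The three copy lines could be condensed (a sketch, not the project's code):

    // Copy the remaining bytes without disturbing src's position or limit.
    final byte[] chunkData = new byte[src.remaining()];
    src.slice().get(chunkData);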

From source file:org.apache.tajo.storage.orc.OrcScanner.java

private static FileMetaInfo extractMetaInfoFromFooter(FileSystem fs, Path path, long maxFileLength)
        throws IOException {
    FSDataInputStream file = fs.open(path);

    // figure out the size of the file using the option or filesystem
    long size;
    if (maxFileLength == Long.MAX_VALUE) {
        size = fs.getFileStatus(path).getLen();
    } else {
        size = maxFileLength;
    }

    //read last bytes into buffer to get PostScript
    int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS);
    ByteBuffer buffer = ByteBuffer.allocate(readSize);
    assert buffer.position() == 0;
    file.readFully((size - readSize), buffer.array(), buffer.arrayOffset(), readSize);
    buffer.position(0);

    //read the PostScript
    //get length of PostScript
    int psLen = buffer.get(readSize - 1) & 0xff;
    ensureOrcFooter(file, path, psLen, buffer);
    int psOffset = readSize - 1 - psLen;
    OrcProto.PostScript ps = extractPostScript(buffer, path, psLen, psOffset);

    int footerSize = (int) ps.getFooterLength();
    int metadataSize = (int) ps.getMetadataLength();

    //check if extra bytes need to be read
    ByteBuffer fullFooterBuffer = null;
    int extra = Math.max(0, psLen + 1 + footerSize + metadataSize - readSize);
    if (extra > 0) {
        //more bytes need to be read, seek back to the right place and read extra bytes
        ByteBuffer extraBuf = ByteBuffer.allocate(extra + readSize);
        file.readFully((size - readSize - extra), extraBuf.array(),
                extraBuf.arrayOffset() + extraBuf.position(), extra);
        extraBuf.position(extra);
        //append with already read bytes
        extraBuf.put(buffer);
        buffer = extraBuf;
        buffer.position(0);
        fullFooterBuffer = buffer.slice();
        buffer.limit(footerSize + metadataSize);
    } else {
        //footer is already in the bytes in buffer, just adjust position, length
        buffer.position(psOffset - footerSize - metadataSize);
        fullFooterBuffer = buffer.slice();
        buffer.limit(psOffset);
    }

    // remember position for later
    buffer.mark();

    file.close();

    return new FileMetaInfo(ps.getCompression().toString(), (int) ps.getCompressionBlockSize(),
            (int) ps.getMetadataLength(), buffer, ps.getVersionList(),
            org.apache.orc.OrcFile.WriterVersion.FUTURE, fullFooterBuffer);
}
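
In both branches the slice is taken before buffer's limit is narrowed, so fullFooterBuffer keeps the wider window (metadata, footer, and the trailing postscript) while buffer itself is trimmed; because slices share content, no bytes are copied. The pattern in isolation (a sketch with illustrative names):

import java.nio.ByteBuffer;

// One allocation, two windows that share the same bytes.
static ByteBuffer wideWindow(ByteBuffer buffer, int start, int narrowEnd) {
    buffer.position(start);
    ByteBuffer wide = buffer.slice(); // covers [start, buffer.limit()), no copy
    buffer.limit(narrowEnd);          // the original now ends at narrowEnd
    return wide;
}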

From source file:org.eclipse.packagedrone.utils.rpm.build.PayloadRecorder.java

public Result addFile(final String targetPath, final ByteBuffer data,
        final Consumer<CpioArchiveEntry> customizer) throws IOException {
    final long size = data.remaining();

    final CpioArchiveEntry entry = new CpioArchiveEntry(targetPath);
    entry.setSize(size);

    if (customizer != null) {
        customizer.accept(entry);
    }

    this.archiveStream.putArchiveEntry(entry);

    // record digest

    MessageDigest digest;
    try {
        digest = createDigest();
        digest.update(data.slice());
    } catch (final NoSuchAlgorithmException e) {
        throw new IOException(e);
    }

    // write data

    final WritableByteChannel channel = Channels.newChannel(this.archiveStream);
    while (data.hasRemaining()) {
        channel.write(data);
    }

    // close archive entry

    this.archiveStream.closeArchiveEntry();

    return new Result(size, digest.digest());
}
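
MessageDigest.update(ByteBuffer) advances the buffer it is given all the way to its limit, so passing data.slice() lets the digest consume a disposable view while data itself keeps every byte for the write loop that follows. The idiom in isolation (a minimal sketch, using SHA-256 as a stand-in for whatever createDigest() returns):

import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

static byte[] digestWithoutConsuming(ByteBuffer data) throws NoSuchAlgorithmException {
    MessageDigest digest = MessageDigest.getInstance("SHA-256");
    digest.update(data.slice()); // the slice's position advances to its limit
    return digest.digest();      // data's own position and limit are untouched
}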

From source file:org.commoncrawl.util.TextBytes.java

/**
 * Returns the Unicode Scalar Value (32-bit integer value) for the character
 * at <code>position</code>. Note that this method avoids using the converter
 * or doing String instantiation
 * 
 * @return the Unicode scalar value at position or -1 if the position is
 *         invalid or points to a trailing byte
 */
public int charAt(int position) {
    if (position > bytes.getCount())
        return -1; // too long
    if (position < 0)
        return -1; // duh.

    ByteBuffer bb = (ByteBuffer) ByteBuffer.wrap(bytes.get(), bytes.getOffset(), bytes.getCount())
            .position(position);
    return bytesToCodePoint(bb.slice());
}

From source file:org.ojai.json.impl.JsonDocumentBuilder.java

@Override
public JsonDocumentBuilder put(String field, ByteBuffer value) {
    byte[] bytes = new byte[value.remaining()];
    value.slice().get(bytes);
    return put(field, bytes);
}

From source file:org.ojai.json.impl.JsonDocumentBuilder.java

@Override
public JsonDocumentBuilder add(ByteBuffer value) {
    byte[] bytes = new byte[value.remaining()];
    value.slice().get(bytes);
    return add(bytes);
}
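
Both overloads use the same copy-without-consuming idiom as the AMQP example above: slice() hands get(...) a disposable zero-based cursor, so value's position and limit survive the bulk read and the caller can keep using the buffer afterwards.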

From source file:com.linkedin.databus.core.DbusEventV1.java

@Override
public ByteBuffer value() {
    ByteBuffer value = _buf.asReadOnlyBuffer().order(_buf.order());
    value.position(_position + LongKeyOffset + keyLength());
    value = value.slice().order(_buf.order());
    int valueSize = valueLength();
    value.limit(valueSize);
    value.rewind();
    return value;
}
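
Two details are easy to miss here: asReadOnlyBuffer() shields the backing event buffer from the caller, and order(...) must be reapplied after both the duplicate and the slice, because asReadOnlyBuffer() and slice() reset the byte order to BIG_ENDIAN. The window pattern in general form (a sketch with illustrative names):

import java.nio.ByteBuffer;

// Expose bytes [offset, offset + length) of buf as an independent,
// zero-based, read-only view without copying.
static ByteBuffer readOnlyWindow(ByteBuffer buf, int offset, int length) {
    ByteBuffer view = buf.asReadOnlyBuffer().order(buf.order());
    view.position(offset);
    view = view.slice().order(buf.order()); // slice() resets the byte order again
    view.limit(length);
    return view;
}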

From source file:org.apache.hadoop.hbase.io.hfile.TestHFileBlockCompatibility.java

/**
 * Test encoding/decoding data blocks.
 * @throws IOException a bug or a problem with temporary files.
 */
@Test
public void testDataBlockEncoding() throws IOException {
    if (includesTag) {
        TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
    }
    final int numBlocks = 5;
    for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
        for (boolean pread : new boolean[] { false, true }) {
            for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
                LOG.info("testDataBlockEncoding algo " + algo + " pread = " + pread + " encoding " + encoding);
                Path path = new Path(TEST_UTIL.getDataTestDir(),
                        "blocks_v2_" + algo + "_" + encoding.toString());
                FSDataOutputStream os = fs.create(path);
                HFileDataBlockEncoder dataBlockEncoder = (encoding != DataBlockEncoding.NONE)
                        ? new HFileDataBlockEncoderImpl(encoding)
                        : NoOpDataBlockEncoder.INSTANCE;
                TestHFileBlockCompatibility.Writer hbw = new TestHFileBlockCompatibility.Writer(algo,
                        dataBlockEncoder, includesMemstoreTS, includesTag);
                long totalSize = 0;
                final List<Integer> encodedSizes = new ArrayList<Integer>();
                final List<ByteBuffer> encodedBlocks = new ArrayList<ByteBuffer>();
                for (int blockId = 0; blockId < numBlocks; ++blockId) {
                    hbw.startWriting(BlockType.DATA);
                    TestHFileBlock.writeTestKeyValues(hbw, blockId, pread, includesTag);
                    hbw.writeHeaderAndData(os);
                    int headerLen = HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
                    byte[] encodedResultWithHeader = hbw.getUncompressedDataWithHeader();
                    final int encodedSize = encodedResultWithHeader.length - headerLen;
                    if (encoding != DataBlockEncoding.NONE) {
                        // We need to account for the two-byte encoding algorithm ID that
                        // comes after the 24-byte block header but before encoded KVs.
                        headerLen += DataBlockEncoding.ID_SIZE;
                    }
                    byte[] encodedDataSection = new byte[encodedResultWithHeader.length - headerLen];
                    System.arraycopy(encodedResultWithHeader, headerLen, encodedDataSection, 0,
                            encodedDataSection.length);
                    final ByteBuffer encodedBuf = ByteBuffer.wrap(encodedDataSection);
                    encodedSizes.add(encodedSize);
                    encodedBlocks.add(encodedBuf);
                    totalSize += hbw.getOnDiskSizeWithHeader();
                }
                os.close();

                FSDataInputStream is = fs.open(path);
                HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
                        .withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTag)
                        .withCompression(algo).build();
                HFileBlock.FSReaderV2 hbr = new HFileBlock.FSReaderV2(new FSDataInputStreamWrapper(is),
                        totalSize, fs, path, meta);
                hbr.setDataBlockEncoder(dataBlockEncoder);
                hbr.setIncludesMemstoreTS(includesMemstoreTS);

                HFileBlock b;
                int pos = 0;
                for (int blockId = 0; blockId < numBlocks; ++blockId) {
                    b = hbr.readBlockData(pos, -1, -1, pread);
                    b.sanityCheck();
                    pos += b.getOnDiskSizeWithHeader();

                    assertEquals((int) encodedSizes.get(blockId), b.getUncompressedSizeWithoutHeader());
                    ByteBuffer actualBuffer = b.getBufferWithoutHeader();
                    if (encoding != DataBlockEncoding.NONE) {
                        // We expect a two-byte big-endian encoding id.
                        assertEquals(0, actualBuffer.get(0));
                        assertEquals(encoding.getId(), actualBuffer.get(1));
                        actualBuffer.position(2);
                        actualBuffer = actualBuffer.slice();
                    }

                    ByteBuffer expectedBuffer = encodedBlocks.get(blockId);
                    expectedBuffer.rewind();

                    // test if content matches, produce nice message
                    TestHFileBlock.assertBuffersEqual(expectedBuffer, actualBuffer, algo, encoding, pread);
                }
                is.close();
            }
        }
    }
}
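
The position(2) / slice() pair near the end strips the two-byte encoding id from the decoded block, so assertBuffersEqual compares the encoded payloads from index 0 on both sides; the same idiom reappears in the next example.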

From source file:org.apache.hadoop.hbase.io.hfile.TestHFileBlock.java

private void testInternals() throws IOException {
    final int numBlocks = 5;
    if (includesTag) {
        TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
    }
    for (Compression.Algorithm algo : COMPRESSION_ALGORITHMS) {
        for (boolean pread : new boolean[] { false, true }) {
            for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
                Path path = new Path(TEST_UTIL.getDataTestDir(),
                        "blocks_v2_" + algo + "_" + encoding.toString());
                FSDataOutputStream os = fs.create(path);
                HFileDataBlockEncoder dataBlockEncoder = (encoding != DataBlockEncoding.NONE)
                        ? new HFileDataBlockEncoderImpl(encoding)
                        : NoOpDataBlockEncoder.INSTANCE;
                HFileContext meta = new HFileContextBuilder().withCompression(algo)
                        .withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTag)
                        .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
                        .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE).build();
                HFileBlock.Writer hbw = new HFileBlock.Writer(dataBlockEncoder, meta);
                long totalSize = 0;
                final List<Integer> encodedSizes = new ArrayList<Integer>();
                final List<ByteBuffer> encodedBlocks = new ArrayList<ByteBuffer>();
                for (int blockId = 0; blockId < numBlocks; ++blockId) {
                    hbw.startWriting(BlockType.DATA);
                    writeTestKeyValues(hbw, blockId, includesMemstoreTS, includesTag);
                    hbw.writeHeaderAndData(os);
                    int headerLen = HConstants.HFILEBLOCK_HEADER_SIZE;
                    byte[] encodedResultWithHeader = hbw.getUncompressedBufferWithHeader().array();
                    final int encodedSize = encodedResultWithHeader.length - headerLen;
                    if (encoding != DataBlockEncoding.NONE) {
                        // We need to account for the two-byte encoding algorithm ID that
                        // comes after the 24-byte block header but before encoded KVs.
                        headerLen += DataBlockEncoding.ID_SIZE;
                    }
                    byte[] encodedDataSection = new byte[encodedResultWithHeader.length - headerLen];
                    System.arraycopy(encodedResultWithHeader, headerLen, encodedDataSection, 0,
                            encodedDataSection.length);
                    final ByteBuffer encodedBuf = ByteBuffer.wrap(encodedDataSection);
                    encodedSizes.add(encodedSize);
                    encodedBlocks.add(encodedBuf);
                    totalSize += hbw.getOnDiskSizeWithHeader();
                }
                os.close();

                FSDataInputStream is = fs.open(path);
                meta = new HFileContextBuilder().withHBaseCheckSum(true).withCompression(algo)
                        .withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTag).build();
                HFileBlock.FSReaderV2 hbr = new HFileBlock.FSReaderV2(is, totalSize, meta);
                hbr.setDataBlockEncoder(dataBlockEncoder);
                hbr.setIncludesMemstoreTS(includesMemstoreTS);
                HFileBlock b;
                int pos = 0;
                for (int blockId = 0; blockId < numBlocks; ++blockId) {
                    b = hbr.readBlockData(pos, -1, -1, pread);
                    assertEquals(0, HFile.getChecksumFailuresCount());
                    b.sanityCheck();
                    pos += b.getOnDiskSizeWithHeader();
                    assertEquals((int) encodedSizes.get(blockId), b.getUncompressedSizeWithoutHeader());
                    ByteBuffer actualBuffer = b.getBufferWithoutHeader();
                    if (encoding != DataBlockEncoding.NONE) {
                        // We expect a two-byte big-endian encoding id.
                        assertEquals(0, actualBuffer.get(0));
                        assertEquals(encoding.getId(), actualBuffer.get(1));
                        actualBuffer.position(2);
                        actualBuffer = actualBuffer.slice();
                    }

                    ByteBuffer expectedBuffer = encodedBlocks.get(blockId);
                    expectedBuffer.rewind();

                    // test if content matches, produce nice message
                    assertBuffersEqual(expectedBuffer, actualBuffer, algo, encoding, pread);
                }
                is.close();
            }
        }
    }
}