Example usage for java.nio ByteBuffer allocateDirect

List of usage examples for java.nio ByteBuffer allocateDirect

Introduction

On this page you can find example usages for java.nio ByteBuffer allocateDirect.

Prototype

public static ByteBuffer allocateDirect(int capacity) 

Source Link

Document

Creates a direct byte buffer based on a newly allocated memory block.

Usage

From source file:org.apache.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testEstimationReadOnlyByteBuffers() {
    Random random = new Random(0L);

    // Cardinalities at which the estimate is checked, in ascending order.
    final int[] checkpoints = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000 };
    // Golden estimates for each checkpoint, pinned to the fixed seed above.
    final double[] expectedEstimates = { 11.029647221949576, 21.108407720752034, 51.64575281885815,
            100.42231726408892, 981.8579991802412, 1943.1337257462792, 4946.192042635218, 9935.088157579434,
            20366.1486889433, 49433.56029693898, 100615.26273314281, 980831.624899156000, 1982408.2608981386 };

    int checkpointIndex = 0;
    HyperLogLogCollector collector = HyperLogLogCollector
            .makeCollector(ByteBuffer.allocateDirect(HyperLogLogCollector.getLatestNumBytesForDenseStorage()));
    for (int added = 0; added < checkpoints[checkpoints.length - 1]; ++added) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
        if (added == checkpoints[checkpointIndex]) {
            Assert.assertEquals(expectedEstimates[checkpointIndex], collector.estimateCardinality(), 0.0d);
            ++checkpointIndex;
        }
    }
    // The loop stops just short of the final checkpoint; verify it separately.
    Assert.assertEquals(expectedEstimates.length, checkpointIndex + 1);
    Assert.assertEquals(expectedEstimates[checkpointIndex], collector.estimateCardinality(), 0.0d);
}

From source file:io.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testEstimationReadOnlyByteBuffers() throws Exception {
    Random random = new Random(0L);

    // Element counts at which cardinality estimates are verified.
    final int[] limits = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000 };
    // Expected HLL estimates for the seeded random stream, one per limit.
    final double[] goldens = { 11.029647221949576, 21.108407720752034, 51.64575281885815,
            100.42231726408892, 981.8579991802412, 1943.1337257462792, 4946.192042635218, 9935.088157579434,
            20366.1486889433, 49433.56029693898, 100615.26273314281, 980831.624899156000, 1982408.2608981386 };

    int limitIndex = 0;
    HyperLogLogCollector collector = HyperLogLogCollector
            .makeCollector(ByteBuffer.allocateDirect(HyperLogLogCollector.getLatestNumBytesForDenseStorage()));
    for (int count = 0; count < limits[limits.length - 1]; ++count) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
        if (count == limits[limitIndex]) {
            Assert.assertEquals(goldens[limitIndex], collector.estimateCardinality(), 0.0d);
            ++limitIndex;
        }
    }
    // The last limit is never reached inside the loop; check it after the loop ends.
    Assert.assertEquals(goldens.length, limitIndex + 1);
    Assert.assertEquals(goldens[limitIndex], collector.estimateCardinality(), 0.0d);
}

From source file:de.digitalcollections.streaming.euphoria.controller.StreamingController.java

/**
 * Stream the given input to the given output via NIO {@link Channels} and a directly allocated NIO
 * {@link ByteBuffer}. Both the input and output streams will implicitly be closed after streaming,
 * regardless of whether an exception is been thrown or not.
 *
 * @param input The input stream.
 * @param output The output stream.
 * @return The length of the written bytes.
 * @throws IOException When an I/O error occurs.
 */
private long stream(InputStream input, OutputStream output) throws IOException {
    try (ReadableByteChannel inputChannel = Channels.newChannel(input);
            WritableByteChannel outputChannel = Channels.newChannel(output)) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(DEFAULT_STREAM_BUFFER_SIZE);
        long size = 0;

        while (inputChannel.read(buffer) != -1) {
            buffer.flip();
            size += outputChannel.write(buffer);
            // compact() keeps any bytes a partial write left behind; the previous
            // clear() here would silently discard them on a short write.
            buffer.compact();
        }

        // Drain whatever a final partial write may have left in the buffer.
        buffer.flip();
        while (buffer.hasRemaining()) {
            size += outputChannel.write(buffer);
        }

        return size;
    }
}

From source file:org.alfresco.repo.content.AbstractWritableContentStoreTest.java

/**
 * Tests random access writing.
 * <p>
 * Only executes if the writer implements {@link RandomAccessContent}.
 */
@Test
public void testRandomAccessWrite() throws Exception {
    ContentWriter writer = getWriter();

    // obtain the underlying FileChannel for random-access writes
    // (see the truncation checks below for the meaning of the boolean argument)
    FileChannel fileChannel = writer.getFileChannel(true);
    assertNotNull("No channel given", fileChannel);

    // check that no other content access is allowed while the file channel is open
    try {
        writer.getWritableChannel();
        fail("Second channel access allowed");
    } catch (RuntimeException e) {
        // expected: the writer must reject a second concurrent channel
    }

    // write some content in a random fashion (reverse order)
    byte[] content = new byte[] { 1, 2, 3 };
    for (int i = content.length - 1; i >= 0; i--) {
        // write exactly one byte at absolute position i
        ByteBuffer buffer = ByteBuffer.wrap(content, i, 1);
        fileChannel.write(buffer, i);
    }

    // close the channel; this should also mark the writer as closed
    fileChannel.close();
    assertTrue("Writer not closed", writer.isClosed());

    // check the content written above round-trips through a reader
    ContentReader reader = writer.getReader();
    ReadableByteChannel channelReader = reader.getReadableChannel();
    ByteBuffer buffer = ByteBuffer.allocateDirect(3);
    // NOTE(review): assumes a single read() returns all 3 bytes — confirm for all channel impls
    int count = channelReader.read(buffer);
    assertEquals("Incorrect number of bytes read", 3, count);
    for (int i = 0; i < content.length; i++) {
        // absolute get(i) does not require flipping the buffer first
        assertEquals("Content doesn't match", content[i], buffer.get(i));
    }

    // get a new writer from the store, using the existing content and perform a truncation check
    ContentContext writerTruncateCtx = new ContentContext(writer.getReader(), null);
    ContentWriter writerTruncate = getStore().getWriter(writerTruncateCtx);
    assertEquals("Content size incorrect", 0, writerTruncate.getSize());
    // get the channel with truncation
    FileChannel fcTruncate = writerTruncate.getFileChannel(true);
    fcTruncate.close();
    assertEquals("Content not truncated", 0, writerTruncate.getSize());

    // get a new writer from the store, using the existing content and perform a non-truncation check
    ContentContext writerNoTruncateCtx = new ContentContext(writer.getReader(), null);
    ContentWriter writerNoTruncate = getStore().getWriter(writerNoTruncateCtx);
    assertEquals("Content size incorrect", 0, writerNoTruncate.getSize());
    // get the channel without truncation
    FileChannel fcNoTruncate = writerNoTruncate.getFileChannel(false);
    fcNoTruncate.close();
    // without truncation, the pre-existing content size must be preserved
    assertEquals("Content was truncated", writer.getSize(), writerNoTruncate.getSize());
}

From source file:com.emc.ecs.smart.SmartUploader.java

/**
 * Hands out a direct {@link ByteBuffer} of {@code segmentSize} bytes, reusing a
 * pooled buffer from {@code buffers} when one is available.
 */
private synchronized ByteBuffer getBuffer() {
    return buffers.isEmpty() ? ByteBuffer.allocateDirect(segmentSize) : buffers.remove();
}

From source file:org.alfresco.repo.transfer.HttpClientTransmitterImpl.java

/**
 * Copies every remaining byte from {@code src} to {@code dest} through a small
 * direct buffer. Neither channel is closed by this method.
 *
 * @param src channel read until end-of-stream
 * @param dest channel written to; partial writes are tolerated
 * @throws IOException if reading or writing fails
 */
private static void channelCopy(final ReadableByteChannel src, final WritableByteChannel dest)
        throws IOException {
    final ByteBuffer buffer = ByteBuffer.allocateDirect(2 * 1024);
    // Fill/drain cycle: compact() carries forward any bytes a partial write left,
    // and leaves the buffer ready for the next read (same as clear() when empty).
    for (int bytesRead = src.read(buffer); bytesRead != -1; bytesRead = src.read(buffer)) {
        buffer.flip();
        dest.write(buffer);
        buffer.compact();
    }
    // End-of-stream leaves the buffer in fill state; flush what is still pending.
    buffer.flip();
    while (buffer.hasRemaining()) {
        dest.write(buffer);
    }
}

From source file:org.gephi.desktop.importer.DesktopImportControllerUI.java

/**
 * Uncompress a Bzip2 file.
 *
 * @param in the Bzip2-compressed source file
 * @param out the destination file receiving the uncompressed bytes
 * @param isTar if true, the compressed stream is treated as a Tar archive: a Tar header is
 *              skipped first and copying stops after the remaining byte count it yields
 * @return the {@code out} file handle that was passed in
 * @throws IOException declared, but copy-phase I/O errors are caught and only printed;
 *         {@code close()} failures in the finally block can still propagate
 */
private static File getBzipFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    BZip2CompressorInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        FileInputStream is = new FileInputStream(in.getPath());
        inputStream = new BZip2CompressorInputStream(is);
        outStream = new FileOutputStream(out.getAbsolutePath());

        if (isTar) {
            // Read Tar header; presumably returns the entry's payload size — TODO confirm readTarHeader
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                // Copies the whole cache array; position/limit below restrict the
                // readable window to the nRead bytes that are actually valid.
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                // Drain the window to the output, never exceeding remainingBytes.
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            // Plain Bzip2 payload: straight stream-to-stream copy
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        // NOTE(review): the error is swallowed and a possibly partial file is still returned
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}

From source file:com.aimfire.gallery.cardboard.PhotoActivity.java

/**
 * Draws a frame for an eye.
 *
 * @param eye The eye to render. Includes all required transformations.
 */
@Override
public void onDrawEye(Eye eye) {
    if (mAssetInd == -1) {
        // we are still showing instruction, return without doing anything
        return;
    }

    if (!mAssetChangedLeft && !mAssetChangedRight) {
        // nothing changed, do nothing and return
        return;
    }

    // mark this eye's pending asset change as consumed
    if (eye.getType() == Eye.Type.LEFT)
        mAssetChangedLeft = false;
    else if (eye.getType() == Eye.Type.RIGHT)
        mAssetChangedRight = false;

    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    checkGLError("mColorParam");

    GLES20.glUseProgram(mPicProgram);

    // pass the current dim ratio to the shader
    GLES20.glUniform1f(mDimRatioParam, mDimRatio);

    // bind the texture belonging to this eye (index 0 = left, 1 = right)
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    if (eye.getType() == Eye.Type.LEFT) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureCurr[0]);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureCurr[1]);
    }

    // set the zoom level
    GLES20.glUniform1f(mZoomParam, sZoom[mImgZoomInd]);

    /*
     * if user prefers negative parallax, shift window on left frame leftward and right frame
     * rightward. if user prefers positive parallax, do the opposite
     */
    if (eye.getType() == Eye.Type.LEFT) {
        GLES20.glUniform1f(mParallaxParam, mImgParallaxAdj / 2.0f);
    } else {
        GLES20.glUniform1f(mParallaxParam, -mImgParallaxAdj / 2.0f);
    }

    // Set the position of the picture
    //float zoomCoords[] = new float[picCoords.length];
    //for(int i=0; i<picCoords.length; i++)
    //zoomCoords[i] = picCoords[i] * zoom[zoomInd];

    // NOTE(review): a direct buffer is allocated on every invocation; if this runs
    // per-frame, consider hoisting the buffer to a field — confirm call frequency.
    //ByteBuffer bblVertices = ByteBuffer.allocateDirect(zoomCoords.length * 4);
    ByteBuffer bblVertices = ByteBuffer.allocateDirect(picCoords.length * 4);
    bblVertices.order(ByteOrder.nativeOrder());
    mPicVertices = bblVertices.asFloatBuffer();
    //mPicVertices.put(zoomCoords);
    mPicVertices.put(picCoords);
    mPicVertices.position(0);

    GLES20.glVertexAttribPointer(mPicPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride,
            mPicVertices);

    // draw the quad as two triangles (6 indices)
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, /* mode */
            6, /* count */
            GLES20.GL_UNSIGNED_SHORT, /* type */
            mPicElements /* element array buffer offset */
    );
}

From source file:org.gephi.desktop.importer.DesktopImportControllerUI.java

/**
 * Uncompress a GZIP file.
 *
 * @param in the GZIP-compressed source file
 * @param out the destination file receiving the uncompressed bytes
 * @param isTar if true, the compressed stream is treated as a Tar archive: a Tar header is
 *              skipped first and copying stops after the remaining byte count it yields
 * @return the {@code out} file handle that was passed in
 * @throws IOException declared, but copy-phase I/O errors are caught and only printed;
 *         {@code close()} failures in the finally block can still propagate
 */
private static File getGzFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    GZIPInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        inputStream = new GZIPInputStream(new FileInputStream(in.getPath()));
        outStream = new FileOutputStream(out);

        if (isTar) {
            // Read Tar header; presumably returns the entry's payload size — TODO confirm readTarHeader
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                // Copies the whole cache array; position/limit below restrict the
                // readable window to the nRead bytes that are actually valid.
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                // Drain the window to the output, never exceeding remainingBytes.
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            // Plain GZIP payload: straight stream-to-stream copy
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        // NOTE(review): the error is swallowed and a possibly partial file is still returned
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}

From source file:com.ibm.crail.tools.CrailBenchmark.java

/**
 * Creates {@code filename} via the "early" path and writes the same 32-byte direct
 * buffer to its buffered output stream twice, then purges and closes the stream,
 * printing progress and final statistics along the way.
 *
 * @param filename path of the file to create
 * @throws Exception if creating, writing, or closing fails
 */
void early(String filename) throws Exception {
    ByteBuffer payload = ByteBuffer.allocateDirect(32);
    CrailFile file = fs
            .create(filename, CrailNodeType.DATAFILE, CrailStorageClass.DEFAULT, CrailLocationClass.DEFAULT)
            .early().asFile();
    CrailBufferedOutputStream outputStream = file.getBufferedOutputStream(0);
    System.out.println("buffered stream initialized");

    // First write after a short pause.
    Thread.sleep(1000);
    outputStream.write(payload);
    System.out.println("buffered stream written");

    // Second write of the same buffer after another pause.
    Thread.sleep(1000);
    outputStream.write(payload);
    System.out.println("buffered stream written");

    outputStream.purge();
    outputStream.close();

    System.out.println("buffered stream closed");

    fs.getStatistics().print("close");
}