Example usage for java.nio ByteBuffer clear

Introduction

On this page you can find example usage of java.nio.ByteBuffer.clear().

Prototype

public final Buffer clear() 

Document

Clears this buffer: the position is set to zero, the limit is set to the capacity, and the mark is discarded. The data in the buffer is not erased.
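
Because clear() only resets the buffer's bookkeeping (position, limit, mark) and does not zero the underlying bytes, previously written data can still be read by absolute index afterwards. Below is a minimal sketch of that behavior (the class name ClearDemo is illustrative, not taken from the usage examples that follow):

import java.nio.ByteBuffer;

public class ClearDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.put((byte) 1).put((byte) 2).put((byte) 3);

        System.out.println(buf.position()); // 3
        System.out.println(buf.limit());    // 16

        buf.clear(); // position = 0, limit = capacity (16), mark discarded

        System.out.println(buf.position()); // 0
        System.out.println(buf.limit());    // 16

        // The contents are untouched: the byte written at index 0 is still there
        System.out.println(buf.get(0));     // 1
    }
}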

Usage

From source file:org.gephi.io.importer.api.ImportUtils.java

/**
 * Uncompress a Bzip2 file.
 */
public static File getBzipFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    BZip2CompressorInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        FileInputStream is = new FileInputStream(in.getPath());
        inputStream = new BZip2CompressorInputStream(is);
        outStream = new FileOutputStream(out.getAbsolutePath());

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}

From source file:org.siddhiesb.transport.passthru.util.BufferFactory.java

public void release(ByteBuffer buffer) {
    lock.lock();
    try {
        if (marker < buffers.length - 1) {
            buffer.clear();
            buffers[++marker] = buffer;
        }
    } finally {
        lock.unlock();
    }
}

From source file:org.apache.nifi.io.nio.consumer.AbstractStreamConsumer.java

@Override
public final void addFilledBuffer(final ByteBuffer buffer) {
    if (isConsumerFinished()) {
        buffer.clear();
        bufferPool.returnBuffer(buffer, buffer.remaining());
    } else {
        filledBuffers.add(buffer);
    }
}

From source file:me.carpela.network.pt.cracker.tools.ttorrent.Torrent.java

private static String hashFiles(List<File> files, int pieceLenght)
        throws InterruptedException, IOException, NoSuchAlgorithmException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ByteBuffer buffer = ByteBuffer.allocate(pieceLenght);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10;

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                if (channel.position() / (double) channel.size() * 100f > step) {
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / pieceLenght));
    return hashes.toString();
}

From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter.java

private void runTest(HTable hTable, int cqStart, int expectedSize) throws IOException {
    // [0, 2, ?, ?, ?, ?, 0, 0, 0, 1]
    byte[] fuzzyKey = new byte[10];
    ByteBuffer buf = ByteBuffer.wrap(fuzzyKey);
    buf.clear();
    buf.putShort((short) 2);
    for (int i = 0; i < 4; i++)
        buf.put((byte) 63);
    buf.putInt((short) 1);

    byte[] mask = new byte[] { 0, 0, 1, 1, 1, 1, 0, 0, 0, 0 };

    Pair<byte[], byte[]> pair = new Pair<byte[], byte[]>(fuzzyKey, mask);
    FuzzyRowFilter fuzzyRowFilter = new FuzzyRowFilter(Lists.newArrayList(pair));
    ColumnRangeFilter columnRangeFilter = new ColumnRangeFilter(Bytes.toBytes(cqStart), true, Bytes.toBytes(4),
            true);
    //regular test
    runScanner(hTable, expectedSize, fuzzyRowFilter, columnRangeFilter);
    //reverse filter order test
    runScanner(hTable, expectedSize, columnRangeFilter, fuzzyRowFilter);
}

From source file:com.turn.ttorrent.client.TorrentByteStorage.java

public ByteBuffer read(int offset, int length) throws IOException {
    ByteBuffer data = ByteBuffer.allocate(length);
    int bytes = this.channel.read(data, offset);
    data.clear();
    data.limit(bytes >= 0 ? bytes : 0);
    return data;
}

From source file:eu.scape_project.arc2warc.PayloadContent.java

public InputStream getPayloadContentAsInputStream() throws IOException {
    if (length >= buffer.length) {
        File tempDir = org.apache.commons.io.FileUtils.getTempDirectory();
        final File tmp = File.createTempFile(RandomStringUtils.randomAlphabetic(10), "tmp", tempDir);
        tmp.deleteOnExit();
        FileOutputStream outputStream = null;
        try {
            outputStream = new FileOutputStream(tmp);
            copyAndCheck(outputStream);
        } finally {
            IOUtils.closeQuietly(outputStream);
        }
        return new FileInputStream(tmp);
    } else {
        final ByteBuffer wrap = ByteBuffer.wrap(buffer);
        wrap.clear();
        OutputStream outStream = StreamUtils.newOutputStream(wrap);
        copyAndCheck(outStream);
        wrap.flip();
        return StreamUtils.newInputStream(wrap);
    }
}

From source file:org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter.java

@Test
public void Test() throws Exception {
    String cf = "f";
    String table = "TestFuzzyAndColumnRangeFilterClient";
    HTable ht = TEST_UTIL.createTable(Bytes.toBytes(table), Bytes.toBytes(cf), Integer.MAX_VALUE);

    // 10 byte row key - (2 bytes 4 bytes 4 bytes)
    // 4 byte qualifier
    // 4 byte value

    for (int i1 = 0; i1 < 2; i1++) {
        for (int i2 = 0; i2 < 5; i2++) {
            byte[] rk = new byte[10];

            ByteBuffer buf = ByteBuffer.wrap(rk);
            buf.clear();
            buf.putShort((short) 2);
            buf.putInt(i1);
            buf.putInt(i2);

            for (int c = 0; c < 5; c++) {
                byte[] cq = new byte[4];
                Bytes.putBytes(cq, 0, Bytes.toBytes(c), 0, 4);

                Put p = new Put(rk);
                p.setDurability(Durability.SKIP_WAL);
                p.add(cf.getBytes(), cq, Bytes.toBytes(c));
                ht.put(p);
                LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: " + Bytes.toStringBinary(cq));
            }
        }
    }

    TEST_UTIL.flush();

    // test passes
    runTest(ht, 0, 10);

    // test fails
    runTest(ht, 1, 8);
}

From source file:org.apache.nifi.processors.standard.util.BaseStrictSyslog5424ParserTest.java

@Test
public void testTrailingNewLine() {
    final String message = "<34>1 2003-10-11T22:14:15.003Z mymachine.example.com su - "
            + "ID47 - BOM'su root' failed for lonvick on /dev/pts/8\n";

    final byte[] bytes = message.getBytes(CHARSET);
    final ByteBuffer buffer = ByteBuffer.allocate(bytes.length);
    buffer.clear();
    buffer.put(bytes);

    final Syslog5424Event event = parser.parseEvent(buffer);
    Assert.assertNotNull(event);
    Assert.assertTrue(event.isValid());
}

From source file:org.apache.nifi.processors.standard.util.BaseStrictSyslog5424ParserTest.java

@Test
public void testInvalidPriority() {
    final String message = "10 Oct 13 14:14:43 localhost some body of the message";

    final byte[] bytes = message.getBytes(CHARSET);
    final ByteBuffer buffer = ByteBuffer.allocate(bytes.length);
    buffer.clear();
    buffer.put(bytes);

    final Syslog5424Event event = parser.parseEvent(buffer);
    Assert.assertNotNull(event);
    Assert.assertFalse(event.isValid());
    Assert.assertEquals(message, event.getFullMessage());
}