Example usage for java.nio ByteBuffer rewind

Introduction

This page collects example usages of java.nio.ByteBuffer.rewind() from real projects.

Prototype

public final Buffer rewind() 

Document

Rewinds this buffer: the position is set back to zero and the mark is discarded; the limit and the contents are unchanged.
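
To make the documented behavior concrete, here is a minimal, self-contained sketch (illustrative only, not taken from the sources below). Because rewind() leaves the limit alone, the same contents can be read again:

import java.nio.ByteBuffer;

public class RewindDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(Long.BYTES);
        buf.putLong(42L);                  // position is now 8
        buf.rewind();                      // position back to 0; limit unchanged
        System.out.println(buf.getLong()); // 42
        buf.rewind();                      // rewind again to re-read the same bytes
        System.out.println(buf.getLong()); // 42 again
    }
}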

Usage

From source file:org.apache.hadoop.hbase.client.coprocessor.AggregationClient.java

/**
 * It computes a global standard deviation for a given column and its value.
 * Standard deviation is square root of (average of squares -
 * average*average). From individual regions, it obtains sum, square sum and
 * number of rows. With these, the above values are computed to get the global
 * std.
 * @param table
 * @param scan
 * @return standard deviations
 * @throws Throwable
 */
private <R, S, P extends Message, Q extends Message, T extends Message> Pair<List<S>, Long> getStdArgs(
        final HTable table, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
    final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
    class StdCallback implements Batch.Callback<Pair<List<S>, Long>> {
        long rowCountVal = 0L;
        S sumVal = null, sumSqVal = null;

        public synchronized Pair<List<S>, Long> getStdParams() {
            List<S> l = new ArrayList<S>();
            l.add(sumVal);
            l.add(sumSqVal);
            Pair<List<S>, Long> p = new Pair<List<S>, Long>(l, rowCountVal);
            return p;
        }

        @Override
        public synchronized void update(byte[] region, byte[] row, Pair<List<S>, Long> result) {
            if (result.getFirst().size() > 0) {
                sumVal = ci.add(sumVal, result.getFirst().get(0));
                sumSqVal = ci.add(sumSqVal, result.getFirst().get(1));
                rowCountVal += result.getSecond();
            }
        }
    }
    StdCallback stdCallback = new StdCallback();
    table.coprocessorService(AggregateService.class, scan.getStartRow(), scan.getStopRow(),
            new Batch.Call<AggregateService, Pair<List<S>, Long>>() {
                @Override
                public Pair<List<S>, Long> call(AggregateService instance) throws IOException {
                    ServerRpcController controller = new ServerRpcController();
                    BlockingRpcCallback<AggregateResponse> rpcCallback = new BlockingRpcCallback<AggregateResponse>();
                    instance.getStd(controller, requestArg, rpcCallback);
                    AggregateResponse response = rpcCallback.get();
                    if (controller.failedOnException()) {
                        throw controller.getFailedOn();
                    }
                    Pair<List<S>, Long> pair = new Pair<List<S>, Long>(new ArrayList<S>(), 0L);
                    if (response.getFirstPartCount() == 0) {
                        return pair;
                    }
                    List<S> list = new ArrayList<S>();
                    for (int i = 0; i < response.getFirstPartCount(); i++) {
                        ByteString b = response.getFirstPart(i);
                        T t = ProtobufUtil.getParsedGenericInstance(ci.getClass(), 4, b);
                        S s = ci.getPromotedValueFromProto(t);
                        list.add(s);
                    }
                    pair.setFirst(list);
                    ByteBuffer bb = ByteBuffer.allocate(8).put(getBytesFromResponse(response.getSecondPart()));
                    bb.rewind();
                    pair.setSecond(bb.getLong());
                    return pair;
                }
            }, stdCallback);
    return stdCallback.getStdParams();
}
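
The rewind() above is the classic fill-then-read idiom: put(...) leaves the buffer's position at the end of what was written, so it must be rewound before getLong() can read those bytes back. A condensed, self-contained sketch of just that step (the literal byte array is a stand-in for getBytesFromResponse(response.getSecondPart())):

import java.nio.ByteBuffer;

public class FillThenReadSketch {
    public static void main(String[] args) {
        byte[] raw = {0, 0, 0, 0, 0, 0, 0, 5};          // stand-in for the 8-byte row count
        ByteBuffer bb = ByteBuffer.allocate(8).put(raw); // put(...) leaves the position at 8
        bb.rewind();                                     // back to 0 so getLong() reads from the start
        System.out.println(bb.getLong());                // 5
    }
}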

From source file:org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.java

private void addTimelineDelegationToken(ContainerLaunchContext clc) throws YarnException, IOException {
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    ByteBuffer tokens = clc.getTokens();
    if (tokens != null) {
        dibb.reset(tokens);
        credentials.readTokenStorageStream(dibb);
        tokens.rewind();
    }
    // If the timeline delegation token is already in the CLC, no need to add
    // one more
    for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
        if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
            return;
        }
    }
    org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier> timelineDelegationToken = getTimelineDelegationToken();
    if (timelineDelegationToken == null) {
        return;
    }
    credentials.addToken(timelineService, timelineDelegationToken);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Add timline delegation token into credentials: " + timelineDelegationToken);
    }
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob);
    tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    clc.setTokens(tokens);
}
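
Here rewind() plays a different role: readTokenStorageStream consumes the ByteBuffer through the DataInputByteBuffer wrapper and advances its position, and the buffer still belongs to the ContainerLaunchContext, so it is rewound to leave it readable for the next consumer. A minimal sketch of that restore pattern (the drain loop stands in for the credentials stream):

import java.nio.ByteBuffer;

public class RestoreAfterReadSketch {
    public static void main(String[] args) {
        ByteBuffer tokens = ByteBuffer.wrap(new byte[] {1, 2, 3, 4});
        while (tokens.hasRemaining()) { // a reader drains the buffer...
            tokens.get();
        }
        tokens.rewind();                // ...so rewind it for the next consumer
        System.out.println(tokens.remaining()); // 4 again
    }
}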

From source file:org.usergrid.persistence.Schema.java

public static ByteBuffer encrypt(ByteBuffer clear) {
    if (clear == null || !clear.hasRemaining())
        return clear;
    try {
        SecretKeySpec sKeySpec = new SecretKeySpec(getRawKey(encryptionSeed), "AES");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, sKeySpec);
        ByteBuffer encrypted = ByteBuffer.allocate(cipher.getOutputSize(clear.remaining()));
        cipher.doFinal(clear, encrypted);
        encrypted.rewind();
        return encrypted;
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
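
Cipher.doFinal(ByteBuffer, ByteBuffer) writes the ciphertext into encrypted and leaves that buffer's position at the end of what it wrote; the rewind() makes the returned buffer readable from the start. A self-contained sketch of that step, using a throwaway generated key in place of getRawKey(encryptionSeed):

import java.nio.ByteBuffer;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;

public class CipherRewindSketch {
    public static void main(String[] args) throws Exception {
        SecretKey key = KeyGenerator.getInstance("AES").generateKey(); // demo key only
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, key);
        ByteBuffer clear = ByteBuffer.wrap("hello".getBytes());
        ByteBuffer encrypted = ByteBuffer.allocate(cipher.getOutputSize(clear.remaining()));
        cipher.doFinal(clear, encrypted); // leaves encrypted's position at the end of the ciphertext
        encrypted.rewind();               // so callers can read the ciphertext from position 0
        System.out.println(encrypted.remaining() + " ciphertext bytes");
    }
}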

From source file:org.apache.hadoop.crypto.CryptoStreamsTestBase.java

@Test(timeout = 120000)
public void testCombinedOp() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    final int len1 = dataLen / 8;
    final int len2 = dataLen / 10;

    InputStream in = getInputStream(defaultBufferSize);
    // Read len1 data.
    byte[] readData = new byte[len1];
    readAll(in, readData, 0, len1);
    byte[] expectedData = new byte[len1];
    System.arraycopy(data, 0, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    long pos = ((Seekable) in).getPos();
    Assert.assertEquals(len1, pos);

    // Seek forward len2
    ((Seekable) in).seek(pos + len2);
    // Skip forward len2
    long n = in.skip(len2);
    Assert.assertEquals(len2, n);

    // Pos: 1/4 dataLen
    positionedReadCheck(in, dataLen / 4);

    // Pos should be len1 + len2 + len2
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(len1 + len2 + len2, pos);

    // Read forward len1
    ByteBuffer buf = ByteBuffer.allocate(len1);
    int nRead = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(nRead, buf.position());
    readData = new byte[nRead];
    buf.rewind();
    buf.get(readData);
    expectedData = new byte[nRead];
    System.arraycopy(data, (int) pos, expectedData, 0, nRead);
    Assert.assertArrayEquals(readData, expectedData);

    long lastPos = pos;
    // Pos should be lastPos + nRead
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(lastPos + nRead, pos);

    // Pos: 1/3 dataLen
    positionedReadCheck(in, dataLen / 3);

    // Read forward len1
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, (int) pos, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    lastPos = pos;
    // Pos should be lastPos + len1
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(lastPos + len1, pos);

    // Read forward len1
    buf = ByteBuffer.allocate(len1);
    nRead = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(nRead, buf.position());
    readData = new byte[nRead];
    buf.rewind();
    buf.get(readData);
    expectedData = new byte[nRead];
    System.arraycopy(data, (int) pos, expectedData, 0, nRead);
    Assert.assertArrayEquals(readData, expectedData);

    lastPos = pos;
    // Pos should be lastPos + nRead
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(lastPos + nRead, pos);

    // ByteBuffer read after EOF
    ((Seekable) in).seek(dataLen);
    buf.clear();
    n = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(n, -1);

    in.close();
}
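
Both rewind() calls in this test follow the same read-then-copy-out pattern: read(buf) leaves the position at the end of the bytes just read, the code sizes readData from that position, then rewinds so get(readData) copies from the first byte. A minimal sketch (the put(...) stands in for the channel read):

import java.nio.ByteBuffer;
import java.util.Arrays;

public class CopyOutAfterReadSketch {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.put(new byte[] {10, 20, 30});   // pretend read(buf) deposited 3 bytes
        int nRead = buf.position();         // size the copy before rewinding
        byte[] readData = new byte[nRead];
        buf.rewind();                       // back to 0 so get(...) starts at the first byte
        buf.get(readData);
        System.out.println(Arrays.toString(readData)); // [10, 20, 30]
    }
}

flip() would work here too and would also clamp the limit to nRead; rewind() is safe only because readData is sized from position() before the rewind.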

From source file:com.emc.ecs.smart.SmartUploader.java

private String computeFileMD5() throws IOException {
    l4j.debug("Computing File MD5 with NIO");
    fileChannel.position(0);
    MessageDigest md5;
    try {
        md5 = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        // Should never happen
        throw new RuntimeException("Could not load MD5", e);
    }

    long start = System.currentTimeMillis();
    // Force a 2MB buffer for better performance.
    ByteBuffer buf = ByteBuffer.allocateDirect(SMALL_SEGMENT);
    int c;
    long position = 0;
    buf.clear();
    while ((c = fileChannel.read(buf)) != -1) {
        buf.rewind();
        buf.limit(c);
        md5.update(buf);
        buf.clear();
        position += c;
        System.out.printf("\rLocal MD5 computation: %d / %d (%d %%)", position, fileSize,
                position * 100L / fileSize);
    }
    long duration = System.currentTimeMillis() - start;
    printRate(duration);

    return MD5Utils.toHexString(md5.digest());
}
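
In the digest loop, rewind() plus limit(c) prepares the buffer for reading after each channel read: rewind() moves the position back to zero, limit(c) clamps the readable region to the c bytes actually read, so md5.update(buf) digests exactly that many bytes, and clear() then readies the buffer for the next read. The rewind()/limit(c) pair is equivalent to a single flip() here. A self-contained sketch of one iteration:

import java.nio.ByteBuffer;
import java.security.MessageDigest;

public class DigestLoopSketch {
    public static void main(String[] args) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.put(new byte[] {1, 2, 3}); // pretend fileChannel.read(buf) returned c = 3
        int c = 3;
        buf.rewind();                  // position back to 0
        buf.limit(c);                  // digest only the bytes actually read
        md5.update(buf);               // consumes position..limit
        buf.clear();                   // position 0, limit = capacity, ready for the next read
        System.out.println(md5.digest().length + "-byte digest");
    }
}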

From source file:org.usergrid.persistence.Schema.java

public static ByteBuffer decrypt(ByteBuffer encrypted) {
    if (encrypted == null || !encrypted.hasRemaining())
        return encrypted;
    try {
        SecretKeySpec sKeySpec = new SecretKeySpec(getRawKey(encryptionSeed), "AES");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.DECRYPT_MODE, sKeySpec);
        ByteBuffer decrypted = ByteBuffer.allocate(cipher.getOutputSize(encrypted.remaining()));
        cipher.doFinal(encrypted, decrypted);
        decrypted.rewind();
        return decrypted;
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
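
decrypt() mirrors encrypt(): doFinal leaves the output buffer's position at the end of the plaintext it wrote, so the buffer is rewound before being returned. The sketch after encrypt() above applies here unchanged, with DECRYPT_MODE in place of ENCRYPT_MODE.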

From source file:voldemort.store.cachestore.impl.ChannelStore.java

private void init(boolean reset) throws IOException {
    if (reset) {
        indexChannel.truncate(OFFSET);
        dataChannel.truncate(OFFSET);
        keyChannel.truncate(OFFSET);
        totalRecord = 0;
    } else {
        long length = indexChannel.size() - OFFSET;
        totalRecord = (int) (length / RECORD_SIZE);
        ByteBuffer buf = ByteBuffer.allocate(RECORD_SIZE);
        logger.info("Building key map and read index file for " + filename + " total record " + totalRecord);
        long per = 0;
        int j = 0;
        if (totalRecord >= 1000000)
            per = totalRecord / 10;

        for (int i = 0; i < totalRecord; i++) {
            indexChannel.read(buf);
            assert (buf.capacity() == RECORD_SIZE);
            buf.rewind();
            byte status = buf.get();
            if (isDeleted(status))
                this.deleted++;
            else {
                long key = buf.getLong();
                byte[] keys;
                try {
                    keys = readChannel(key, keyChannel);
                    long data = buf.getLong();
                    long block2version = buf.getLong();
                    CacheBlock block = new CacheBlock(i, data, block2version, status);
                    map.put(toKey(keys), block);
                } catch (Exception ex) {
                    logger.warn("Not able to read record no " + i + " , skip reason " + ex.getMessage());
                    buf.clear();
                    error++;
                    continue;
                }
            }
            buf.clear();
            if (per > 0 && (i + 1) % per == 0) {
                logger.info((++j * 10) + "% complete");
            }
        }
    }
    dataOffset = dataChannel.size();
    keyOffset = keyChannel.size();
    //logOffset = logChannel.size();
    logger.info("Total record " + totalRecord + " deleted " + deleted + " error " + error + " active "
            + (totalRecord - deleted - error));
}
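
This loop reads fixed-size index records into a reused buffer: after each indexChannel.read(buf) the position sits at the end of the record, rewind() moves it back so the get/getLong calls parse fields from the start, and clear() resets position and limit for the next read. A condensed sketch of one iteration (the put calls stand in for the channel read, and the record layout is simplified):

import java.nio.ByteBuffer;

public class RecordLoopSketch {
    public static void main(String[] args) {
        final int RECORD_SIZE = Byte.BYTES + Long.BYTES; // simplified: status byte + one long
        ByteBuffer buf = ByteBuffer.allocate(RECORD_SIZE);
        buf.put((byte) 0).putLong(99L); // pretend indexChannel.read(buf) filled one record
        buf.rewind();                   // parse from the start of the record
        byte status = buf.get();
        long key = buf.getLong();
        buf.clear();                    // ready for the next record
        System.out.println("status=" + status + " key=" + key);
    }
}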

From source file:com.github.ambry.utils.UtilsTest.java

@Test
public void testReadStrings() throws IOException {
    // good case
    ByteBuffer buffer = ByteBuffer.allocate(10);
    buffer.putShort((short) 8);
    String s = getRandomString(8);
    buffer.put(s.getBytes());
    buffer.flip();
    String outputString = Utils.readShortString(new DataInputStream(new ByteBufferInputStream(buffer)));
    Assert.assertEquals(s, outputString);
    // 0-length
    buffer.rewind();
    buffer.putShort(0, (short) 0);
    outputString = Utils.readShortString(new DataInputStream(new ByteBufferInputStream(buffer)));
    Assert.assertTrue(outputString.isEmpty());

    // failed case
    buffer.rewind();
    buffer.putShort((short) 10);
    buffer.put(s.getBytes());
    buffer.flip();
    try {
        outputString = Utils.readShortString(new DataInputStream(new ByteBufferInputStream(buffer)));
        Assert.assertTrue(false);
    } catch (IllegalArgumentException e) {
        Assert.assertTrue(true);
    }
    buffer.rewind();
    buffer.putShort(0, (short) -1);
    try {
        outputString = Utils.readShortString(new DataInputStream(new ByteBufferInputStream(buffer)));
        Assert.fail("Should have encountered exception with negative length.");
    } catch (IllegalArgumentException e) {
    }

    // good case
    buffer = ByteBuffer.allocate(40004);
    buffer.putInt(40000);
    s = getRandomString(40000);
    buffer.put(s.getBytes());
    buffer.flip();
    outputString = Utils.readIntString(new DataInputStream(new ByteBufferInputStream(buffer)),
            StandardCharsets.UTF_8);
    Assert.assertEquals(s, outputString);
    // 0-length
    buffer.rewind();
    buffer.putInt(0, 0);
    outputString = Utils.readShortString(new DataInputStream(new ByteBufferInputStream(buffer)));
    Assert.assertTrue(outputString.isEmpty());

    // failed case
    buffer.rewind();
    buffer.putInt(50000);
    buffer.put(s.getBytes());
    buffer.flip();
    try {
        outputString = Utils.readIntString(new DataInputStream(new ByteBufferInputStream(buffer)),
                StandardCharsets.UTF_8);
        fail("Should have failed");
    } catch (IllegalArgumentException e) {
        // expected.
    }

    buffer.rewind();
    buffer.putInt(0, -1);
    try {
        Utils.readIntString(new DataInputStream(new ByteBufferInputStream(buffer)), StandardCharsets.UTF_8);
        Assert.fail("Should have encountered exception with negative length.");
    } catch (IllegalArgumentException e) {
        // expected.
    }
}
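
The rewind() calls in this test reset the buffer so each case can re-read it from the top, while the absolute puts (putShort(0, ...), putInt(0, ...)) patch the length header in place without moving the position. A minimal sketch of that patch-and-reread pattern:

import java.nio.ByteBuffer;

public class PatchAndRereadSketch {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(10);
        buffer.putShort((short) 8);
        buffer.put("abcdefgh".getBytes());
        buffer.flip();                         // first pass: position 0, limit 10
        buffer.getShort();                     // a consumer advances the position...
        buffer.rewind();                       // ...rewind to re-read from the top
        buffer.putShort(0, (short) 0);         // absolute put: patch the header in place
        System.out.println(buffer.getShort()); // 0, the patched length
    }
}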

From source file:org.voltdb.HsqlBackend.java

public VoltTable runDML(String dml) {
    dml = dml.trim();
    String indicator = dml.substring(0, 1).toLowerCase();
    if (indicator.equals("s") || // "s" is for "select ..."
            indicator.equals("(")) { // "(" is for "(select ... UNION ...)" et. al.
        try {
            Statement stmt = dbconn.createStatement();
            sqlLog.l7dlog(Level.DEBUG, LogKeys.sql_Backend_ExecutingDML.name(), new Object[] { dml }, null);
            sqlLog.debug("Executing " + dml);
            ResultSet rs = stmt.executeQuery(dml);
            ResultSetMetaData rsmd = rs.getMetaData();

            // note the index values here carefully
            VoltTable.ColumnInfo[] columns = new VoltTable.ColumnInfo[rsmd.getColumnCount()];
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                String colname = rsmd.getColumnLabel(i);
                String type = rsmd.getColumnTypeName(i);
                //LOG.fine("Column type: " + type);
                if (type.equals("VARCHAR"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.STRING);
                else if (type.equals("TINYINT"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.TINYINT);
                else if (type.equals("SMALLINT"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.SMALLINT);
                else if (type.equals("INTEGER"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.INTEGER);
                else if (type.equals("BIGINT"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.BIGINT);
                else if (type.equals("DECIMAL"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.DECIMAL);
                else if (type.equals("FLOAT"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.FLOAT);
                else if (type.equals("TIMESTAMP"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.TIMESTAMP);
                else if (type.equals("VARBINARY"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.VARBINARY);
                else if (type.equals("CHARACTER"))
                    columns[i - 1] = new VoltTable.ColumnInfo(colname, VoltType.STRING);
                else
                    throw new ExpectedProcedureException(
                            "Trying to create a column in Backend with a (currently) unsupported type: "
                                    + type);
            }
            VoltTable table = new VoltTable(columns);
            while (rs.next()) {
                Object[] row = new Object[table.getColumnCount()];
                for (int i = 0; i < table.getColumnCount(); i++) {
                    if (table.getColumnType(i) == VoltType.STRING)
                        row[i] = rs.getString(i + 1);
                    else if (table.getColumnType(i) == VoltType.TINYINT)
                        row[i] = rs.getByte(i + 1);
                    else if (table.getColumnType(i) == VoltType.SMALLINT)
                        row[i] = rs.getShort(i + 1);
                    else if (table.getColumnType(i) == VoltType.INTEGER)
                        row[i] = rs.getInt(i + 1);
                    else if (table.getColumnType(i) == VoltType.BIGINT)
                        row[i] = rs.getLong(i + 1);
                    else if (table.getColumnType(i) == VoltType.DECIMAL)
                        row[i] = rs.getBigDecimal(i + 1);
                    else if (table.getColumnType(i) == VoltType.FLOAT)
                        row[i] = rs.getDouble(i + 1);
                    else if (table.getColumnType(i) == VoltType.VARBINARY)
                        row[i] = rs.getBytes(i + 1);
                    else if (table.getColumnType(i) == VoltType.TIMESTAMP) {
                        Timestamp t = rs.getTimestamp(i + 1);
                        if (t == null) {
                            row[i] = null;
                        } else {
                            // convert from millisecond to microsecond granularity
                            row[i] = new org.voltdb.types.TimestampType(t.getTime() * 1000);
                        }
                    } else {
                        throw new ExpectedProcedureException(
                                "Trying to read a (currently) unsupported type from a JDBC resultset.");
                    }
                    if (rs.wasNull()) {
                        // JDBC returns 0/0.0 instead of null. Put null into the row.
                        row[i] = null;
                    }
                }

                table.addRow(row);
            }
            stmt.close();
            rs.close();
            return table;
        } catch (Exception e) {
            if (e instanceof ExpectedProcedureException) {
                throw (ExpectedProcedureException) e;
            }
            sqlLog.l7dlog(Level.TRACE, LogKeys.sql_Backend_DmlError.name(), e);
            throw new ExpectedProcedureException("HSQLDB Backend DML Error ", e);
        }
    } else {
        try {
            Statement stmt = dbconn.createStatement();
            sqlLog.debug("Executing: " + dml);
            long ucount = stmt.executeUpdate(dml);
            sqlLog.debug("  result: " + String.valueOf(ucount));
            VoltTable table = new VoltTable(new VoltTable.ColumnInfo("", VoltType.BIGINT));
            table.addRow(ucount);
            return table;
        } catch (SQLException e) {
            // glorious hack to determine if the error is a constraint failure
            if (e.getMessage().contains("constraint")) {
                sqlLog.l7dlog(Level.TRACE, LogKeys.sql_Backend_ConvertingHSQLExtoCFEx.name(), e);
                final byte messageBytes[] = e.getMessage().getBytes();
                ByteBuffer b = ByteBuffer.allocate(25 + messageBytes.length);
                b.putInt(messageBytes.length);
                b.put(messageBytes);
                b.put(e.getSQLState().getBytes());
                b.putInt(0); // ConstraintFailure.type
                try {
                    FastSerializer.writeString("HSQL", b);
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
                b.putInt(0);//Table size is 0
                b.rewind();
                throw new ConstraintFailureException(b);
            } else {
                sqlLog.l7dlog(Level.TRACE, LogKeys.sql_Backend_DmlError.name(), e);
                throw new ExpectedProcedureException("HSQLDB Backend DML Error ", e);
            }

        } catch (Exception e) {
            // rethrow an expected exception
            sqlLog.l7dlog(Level.TRACE, LogKeys.sql_Backend_DmlError.name(), e);
            throw new ExpectedProcedureException("HSQLDB Backend DML Error ", e);
        }
    }
}
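
The final rewind() is the hand-off variant of the fill-then-read idiom: the buffer is populated with the serialized failure message and rewound so ConstraintFailureException can parse it from position 0. A minimal sketch of that shape (the framing here is simplified to a length-prefixed message):

import java.nio.ByteBuffer;

public class SerializeThenHandOffSketch {
    public static void main(String[] args) {
        byte[] message = "constraint violated".getBytes(); // hypothetical error text
        ByteBuffer b = ByteBuffer.allocate(Integer.BYTES + message.length);
        b.putInt(message.length);
        b.put(message);
        b.rewind();                   // the consumer reads the framing from position 0
        System.out.println(b.getInt() + " message bytes follow");
    }
}

rewind() behaves like flip() here only because the buffer is filled exactly to capacity; when it is not, flip() is the safer choice before handing the buffer off.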