Example usage for java.nio ByteBuffer wrap

List of usage examples for java.nio ByteBuffer wrap

Introduction

On this page you can find example usages of java.nio ByteBuffer.wrap.

Prototype

public static ByteBuffer wrap(byte[] array) 

Source Link

Document

Creates a new byte buffer by wrapping the given byte array.

Usage

From source file:com.simpsonwil.strongquests.util.UUIDFetcher.java

/**
 * Reconstructs a {@link UUID} from its 16-byte binary form.
 *
 * @param array exactly 16 bytes: the most-significant long followed by the
 *              least-significant long, big-endian
 * @return the decoded UUID
 * @throws IllegalArgumentException if the array is not exactly 16 bytes
 */
public static UUID fromBytes(byte[] array) {
    // A UUID is exactly two longs — 16 bytes total.
    if (array.length != 16) {
        throw new IllegalArgumentException("Illegal byte array length: " + array.length);
    }
    ByteBuffer source = ByteBuffer.wrap(array);
    // ByteBuffer is big-endian by default; argument evaluation is left-to-right,
    // so the first getLong() yields the most-significant half.
    return new UUID(source.getLong(), source.getLong());
}

From source file:ezbake.frack.submitter.SubmitterClient.java

/**
 * Executes the single client action selected on the command line: submit a
 * pipeline package, shut a pipeline down, or ping the service.
 *
 * @param parser the command-line parser, used to build contextual errors
 * @throws CmdLineException if no action flag was given, or a required
 *         pipeline id is missing
 * @throws TException on Thrift transport/protocol failures
 * @throws IOException if the package file cannot be read
 * @throws EzConfigurationLoaderException if configuration loading fails
 */
private void run(CmdLineParser parser)
        throws TException, IOException, CmdLineException, EzConfigurationLoaderException {
    // Exactly one of the action flags must have been set by option parsing.
    if (!submit && !shutdown && !ping) {
        throw new CmdLineException(parser, "Must provide either -u or -d option to client");
    }
    Properties props = new EzConfiguration().getProperties();
    props.setProperty(EzBakePropertyConstants.EZBAKE_SECURITY_ID, securityId);
    ThriftClientPool pool = new ThriftClientPool(props);
    Submitter.Client client = pool.getClient(submitterConstants.SERVICE_NAME, Submitter.Client.class);
    try {
        if (submit) {
            if (Strings.isNullOrEmpty(pipelineId)) {
                throw new CmdLineException(parser, "Pipeline ID required for submission");
            }
            // Ship the whole package as one Thrift binary payload.
            byte[] packageBytes = FileUtils.readFileToByteArray(new File(pathToTarGz));
            SubmitResult result = client.submit(ByteBuffer.wrap(packageBytes), pipelineId);
            System.out.println(result.getMessage());
        } else if (shutdown) {
            if (Strings.isNullOrEmpty(pipelineId)) {
                throw new CmdLineException(parser, "Pipeline ID required for shutdown");
            }
            client.shutdown(pipelineId);
        } else {
            boolean healthy = client.ping();
            System.out.println(healthy ? "The service is healthy!" : "The service is unhealthy!");
        }
    } finally {
        // Return the client before closing the pool so it is not leaked.
        if (client != null) {
            pool.returnToPool(client);
            pool.close();
        }
    }
}

From source file:com.cnaude.mutemanager.UUIDFetcher.java

/**
 * Decodes a UUID from its canonical 16-byte big-endian representation.
 *
 * @param array the raw bytes; must be exactly 16 long
 * @return the UUID built from the two encoded longs
 * @throws IllegalArgumentException if the length is not 16
 */
public static UUID fromBytes(byte[] array) {
    if (array.length != 16) {
        throw new IllegalArgumentException("Illegal byte array length: " + array.length);
    }
    ByteBuffer reader = ByteBuffer.wrap(array);
    // First 8 bytes → high half, next 8 bytes → low half (big-endian default).
    long hi = reader.getLong();
    long lo = reader.getLong();
    return new UUID(hi, lo);
}

From source file:org.mcplissken.repository.models.content.Content.java

/**
 * Loads the given file's entire contents into the {@code data} buffer field.
 *
 * NOTE(review): despite the name, this method READS the file into memory; it
 * does not write anything out — confirm the intended contract with callers.
 *
 * @param result the file whose bytes are loaded
 * @throws IOException if the file cannot be read
 */
public void writeAsFile(File result) throws IOException {
    // File.toPath() resolves on the default filesystem, same as
    // Paths.get(result.getPath()).
    data = ByteBuffer.wrap(Files.readAllBytes(result.toPath()));
}

From source file:com.linkedin.pinot.core.startree.StarTreeDataTable.java

/**
 * Sorts the fixed-width records in the half-open range
 * [startRecordId, endRecordId) in place within the backing file, ordered by
 * the dimension columns listed in {@code sortOrder}.
 *
 * The comparator reads only the first {@code dimensionSizeInBytes} bytes of
 * each record and interprets them as big-endian ints (via asIntBuffer), so
 * dimensions are assumed to be 4-byte values at the start of each record —
 * TODO confirm against the record layout.
 *
 * NOTE(review): {@code startOffsetInRecord} and {@code endOffsetInRecord} are
 * never used in this method body.
 *
 * @param startRecordId first record id (inclusive) of the range to sort
 * @param endRecordId   last record id (exclusive) of the range to sort
 */
public void sort(int startRecordId, int endRecordId, final int startOffsetInRecord,
        final int endOffsetInRecord) {
    final MMapBuffer mappedByteBuffer;
    try {
        int length = endRecordId - startRecordId;
        // Map only the slice of the file covering the requested record range;
        // all offsets below are relative to this slice.
        final long startOffset = startRecordId * (long) totalSizeInBytes;
        mappedByteBuffer = new MMapBuffer(file, startOffset, length * (long) totalSizeInBytes,
                MMapMode.READ_WRITE);

        // Sort slice-relative record indices first; the physical byte swap of
        // whole records happens in the second phase below.
        List<Integer> idList = new ArrayList<Integer>();
        for (int i = startRecordId; i < endRecordId; i++) {
            idList.add(i - startRecordId);
        }
        Comparator<Integer> comparator = new Comparator<Integer>() {
            // Reusable scratch buffers holding just the dimension prefix of a record.
            byte[] buf1 = new byte[dimensionSizeInBytes];
            byte[] buf2 = new byte[dimensionSizeInBytes];

            @Override
            public int compare(Integer o1, Integer o2) {
                long pos1 = (o1) * (long) totalSizeInBytes;
                long pos2 = (o2) * (long) totalSizeInBytes;

                // Copy each record's dimension prefix out of the mapped region.
                mappedByteBuffer.toDirectByteBuffer(pos1, dimensionSizeInBytes).get(buf1);
                mappedByteBuffer.toDirectByteBuffer(pos2, dimensionSizeInBytes).get(buf2);
                IntBuffer bb1 = ByteBuffer.wrap(buf1).asIntBuffer();
                IntBuffer bb2 = ByteBuffer.wrap(buf2).asIntBuffer();
                for (int dimIndex : sortOrder) {
                    int v1 = bb1.get(dimIndex);
                    int v2 = bb2.get(dimIndex);
                    if (v1 != v2) {
                        // NOTE(review): subtraction can overflow when v1 and v2
                        // have opposite signs; Integer.compare(v1, v2) is safer.
                        return v1 - v2;
                    }
                }
                return 0;
            }
        };
        Collections.sort(idList, comparator);
        // Phase 2: apply the sorted permutation by swapping whole records
        // pairwise, tracking where each record currently lives.
        int[] currentPositions = new int[length];
        int[] indexToRecordIdMapping = new int[length];
        byte[] buf1 = new byte[totalSizeInBytes];
        byte[] buf2 = new byte[totalSizeInBytes];

        for (int i = 0; i < length; i++) {
            currentPositions[i] = i;
            indexToRecordIdMapping[i] = i;
        }
        for (int i = 0; i < length; i++) {
            int thisRecordId = indexToRecordIdMapping[i];
            int thisRecordIdPos = currentPositions[thisRecordId];

            int thatRecordId = idList.get(i);
            int thatRecordIdPos = currentPositions[thatRecordId];

            // Swap the two records' bytes via the scratch buffers.
            mappedByteBuffer.toDirectByteBuffer(thisRecordIdPos * (long) totalSizeInBytes, totalSizeInBytes)
                    .get(buf1);
            mappedByteBuffer.toDirectByteBuffer(thatRecordIdPos * (long) totalSizeInBytes, totalSizeInBytes)
                    .get(buf2);
            mappedByteBuffer.readFrom(buf2, 0, thisRecordIdPos * (long) totalSizeInBytes, totalSizeInBytes);
            mappedByteBuffer.readFrom(buf1, 0, thatRecordIdPos * (long) totalSizeInBytes, totalSizeInBytes);
            // Keep the id <-> position bookkeeping consistent with the swap.
            indexToRecordIdMapping[i] = thatRecordId;
            indexToRecordIdMapping[thatRecordIdPos] = thisRecordId;

            currentPositions[thatRecordId] = i;
            currentPositions[thisRecordId] = thatRecordIdPos;
        }
        if (mappedByteBuffer != null) {
            mappedByteBuffer.flush();
            mappedByteBuffer.close();
        }
    } catch (IOException e) {
        // NOTE(review): the exception is swallowed after printing, and the
        // mapped buffer is NOT closed on this path — consider rethrowing and
        // moving flush/close into the finally block.
        e.printStackTrace();
    } finally {
        // Intentionally empty; cleanup currently happens at the end of try.
    }
}

From source file:com.chiorichan.util.StringUtil.java

/**
 * Decodes a byte array as a string of UTF-16 (big-endian) code units using
 * NIO buffers.
 *
 * @param bytes raw bytes; every 2 bytes form one char (a trailing odd byte is
 *              ignored by {@code asCharBuffer})
 * @return the decoded string, or {@code null} when the input is {@code null}
 */
public static String bytesToStringUTFNIO(byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    // ByteBuffer defaults to big-endian, so pairs of bytes map directly to chars.
    return ByteBuffer.wrap(bytes).asCharBuffer().toString();
}

From source file:com.sm.store.Utils.java

/**
 * Adds the int payload of {@code block} into {@code value}, replacing the
 * latter's data with the sum and bumping its version by one.
 *
 * Both values' data arrays are assumed to begin with a big-endian int —
 * TODO confirm against the Value serialization format.
 *
 * @param value the accumulator; mutated in place (data and version)
 * @param block the increment to add
 */
public static void addBlockInt(Value value, Value block) {
    int current = ByteBuffer.wrap((byte[]) value.getData()).getInt();
    int increment = ByteBuffer.wrap((byte[]) block.getData()).getInt();
    value.setData(putInt(current + increment));
    value.setVersion(value.getVersion() + 1);
}

From source file:de.micromata.genome.logging.spi.ifiles.IndexHeader.java

/**
 * Writes the index file header: file type, version, creation timestamp,
 * index-directory id, the offset where index data begins, and the ordered
 * table of (padded header name, value) entries.
 *
 * Also registers the index file with the directory, storing the resulting id
 * in {@code indexDirectoryIdx}. All multi-byte fields are big-endian
 * (ByteBuffer default).
 *
 * @param os             destination stream; flushed but not closed
 * @param indexFile      the index file being registered
 * @param indexDirectory directory that assigns the new log index id
 * @throws IOException if writing to the stream fails
 */
public void writeFileHeader(OutputStream os, File indexFile, IndexDirectory indexDirectory) throws IOException {
    indexDirectoryIdx = indexDirectory.createNewLogIdxFile(indexFile);
    // Scratch buffers for encoding fixed-width big-endian fields.
    ByteBuffer lbb = ByteBuffer.allocate(Long.BYTES);
    ByteBuffer ibb = ByteBuffer.allocate(Integer.BYTES);
    os.write(INDEX_FILE_TYPE);
    os.write(INDEX_FILE_VERSION);
    lbb.putLong(0, System.currentTimeMillis());
    os.write(lbb.array());
    ibb.putInt(0, indexDirectoryIdx);
    os.write(ibb.array());
    // Build the header table in memory first so its total length is known
    // before the idxOffset field (which depends on it) is written.
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    for (Pair<String, Integer> headerp : headerOrder) {
        String hn = StringUtils.rightPad(headerp.getFirst(), HEADER_NAME_LENGTH);
        // Fix: encode with an explicit charset instead of the platform default,
        // so the on-disk format does not vary with the JVM's file.encoding.
        bout.write(hn.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        ibb.putInt(0, headerp.getSecond());
        bout.write(ibb.array());
    }
    byte[] headerar = bout.toByteArray();
    // Offset of the first index entry = everything written before it.
    int idxOffset = FILE_TYPE_LENGTH + FILE_VERSION_LENGTH + Long.BYTES /* timestamp */
            + Integer.BYTES /* indexDirectory */
            + Integer.BYTES /* idxOffset field itself */
            + headerar.length;
    ibb.putInt(0, idxOffset);
    os.write(ibb.array());
    os.write(headerar);
    os.flush();
}

From source file:com.thinkbiganalytics.util.JdbcCommon.java

/**
 * Converts a JDBC result set to an Avro stream.
 *
 * Rows are written one at a time into an Avro data file on {@code outStream}
 * using a schema derived from the result set metadata via
 * {@code createSchema(rs)}. A single {@code GenericRecord} is reused for
 * every row.
 *
 * @param rs        The result set of the JDBC query
 * @param outStream The output stream to for the Avro formatted records
 * @return the number of rows converted to Avro format
 * @throws SQLException if errors occur while reading data from the database
 * @throws IOException  if unable to convert to Avro format
 */
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream)
        throws SQLException, IOException {
    final Schema schema = createSchema(rs);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            // JDBC columns are 1-based; Avro record fields are 0-based.
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);
                final Object value = rs.getObject(i);

                if (value == null) {
                    rec.put(i - 1, null);

                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY || javaSqlType == BLOB || javaSqlType == CLOB) {
                    // Binary-ish types are re-read as raw bytes and wrapped in a
                    // ByteBuffer for Avro's "bytes" type.
                    // NOTE(review): CLOB is character data; fetching it via
                    // getBytes relies on driver behavior — confirm per driver.
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);

                } else if (value instanceof Byte) {
                    // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT
                    // But value is returned by JDBC as java.lang.Byte
                    // (at least H2 JDBC works this way)
                    // direct put to avro record results:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());

                } else if (value instanceof BigDecimal || value instanceof BigInteger) {
                    // Avro can't handle BigDecimal and BigInteger as numbers - it will throw an
                    // AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());

                } else if (value instanceof Number || value instanceof Boolean) {
                    rec.put(i - 1, value);

                } else {
                    // The different types that we support are numbers (int, long, double, float),
                    // as well as boolean values and Strings. Since Avro doesn't provide
                    // timestamp types, we want to convert those to Strings. So we will cast anything other
                    // than numbers or booleans to strings by using the toString() method.
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;
        }

        return nrOfRows;
    }
}

From source file:gridool.communication.transport.tcp.GridSocketPoolingClient.java

/**
 * Serializes a message into a ByteBuffer whose first 4 bytes hold the size of
 * the serialized payload that follows (length-prefixed framing).
 *
 * @param msg the message to serialize
 * @return a buffer wrapping [4-byte payload size][serialized message]
 * @throws IllegalStateException if serialization fails
 */
private static ByteBuffer toBuffer(final GridCommunicationMessage msg) {
    final long startTime = System.currentTimeMillis();
    final FastByteArrayOutputStream out = new FastByteArrayOutputStream();
    try {
        // Reserve the 4-byte length prefix, then serialize the message after it.
        IOUtils.writeInt(0, out);
        ObjectUtils.toStreamVerbose(msg, out);
    } catch (IOException e) {
        LOG.error(e.getMessage(), e);
        throw new IllegalStateException(e);
    }
    final byte[] serialized = out.toByteArray();
    // Back-patch the prefix with the payload size (total minus the prefix itself).
    final int objsize = serialized.length - 4;
    Primitives.putInt(serialized, 0, objsize);

    if (LOG.isDebugEnabled()) {
        final long elapsedTime = System.currentTimeMillis() - startTime;
        LOG.debug("Elapsed time for serializing (including lazy evaluation) a GridCommunicationMessage ["
                + msg.getMessageId() + "] of " + serialized.length + " bytes: "
                + DateTimeFormatter.formatTime(elapsedTime));
    }

    return ByteBuffer.wrap(serialized);
}