Example usage for java.nio ByteBuffer array

List of usage examples for java.nio ByteBuffer array

Introduction

On this page you can find example usage of java.nio.ByteBuffer.array().

Prototype

public final byte[] array() 

Document

Returns the byte array which this buffer is based on, if there is one.
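
Before the project examples, here is a minimal, self-contained sketch of the typical pattern around array(); the buffer size and contents are arbitrary:

import java.nio.ByteBuffer;

public class ArrayExample {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(16); // heap buffer, so it has a backing array
        buffer.put((byte) 42);

        // array() throws on buffers without an accessible backing array:
        // UnsupportedOperationException for direct buffers,
        // ReadOnlyBufferException for read-only ones. Guard with hasArray().
        if (buffer.hasArray()) {
            byte[] backing = buffer.array();
            // The backing array spans the whole capacity, not just the bytes
            // written so far; use arrayOffset() and position() to locate data.
            System.out.println(backing.length);                // 16
            System.out.println(backing[buffer.arrayOffset()]); // 42
        }
    }
}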

Usage

From source file:com.l2jfree.gameserver.network.L2Client.java

@Override
public boolean encrypt(final ByteBuffer buf, final int size) {
    getCrypt().encrypt(buf.array(), buf.position(), size);
    buf.position(buf.position() + size);
    return true;
}

From source file:com.linkedin.pinot.queries.PercentileTDigestQueriesTest.java

/**
 * Helper method to build a segment containing a raw (no-dictionary) TDigest byte column
 * and a string column to group by.
 *
 * @return RecordReader over the generated rows.
 * @throws Exception
 */

private RecordReader buildIndex(Schema schema) throws Exception {
    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);

    config.setOutDir(SEGMENT_DIR_NAME);
    config.setSegmentName(SEGMENT_NAME);
    config.setTableName(TABLE_NAME);
    config.setRawIndexCreationColumns(Arrays.asList(TDIGEST_COLUMN));

    List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
    _expectedTDigest = new TDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);

    _expectedGroupBy = new HashMap<>();
    _expectedSelectionTDigests = new ArrayList<>(NUM_ROWS);

    for (int i = 0; i < NUM_ROWS; i++) {
        HashMap<String, Object> map = new HashMap<>();

        // Generate a random value and fold it into the overall expected TDigest.
        double value = _random.nextDouble();
        TDigest tDigest = new TDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
        tDigest.add(value);
        _expectedTDigest.add(tDigest);

        ByteBuffer byteBuffer = ByteBuffer.allocate(tDigest.byteSize());
        tDigest.asBytes(byteBuffer);
        _expectedSelectionTDigests.add(ByteArray.toHexString(byteBuffer.array()));

        // Add the TDigest column.
        byte[] bytes = byteBuffer.array();
        map.put(TDIGEST_COLUMN, bytes);

        // Add the GroupBy column.
        String group = groups[_random.nextInt(groups.length)];
        map.put(GROUPBY_COLUMN, group);

        // Generate the groups.
        TDigest groupTDigest = _expectedGroupBy.get(group);
        if (groupTDigest == null) {
            _expectedGroupBy.put(group, tDigest);
        } else {
            groupTDigest.add(tDigest);
        }

        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        rows.add(genericRow);
    }

    RecordReader recordReader = new GenericRowRecordReader(rows, schema);
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(config, recordReader);
    driver.build();

    SegmentDirectory.createFromLocalFS(driver.getOutputDirectory(), ReadMode.mmap);
    recordReader.rewind();

    TDigest actual = new TDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
    GenericRow reuse = new GenericRow();
    while (recordReader.hasNext()) {
        reuse = recordReader.next(reuse);
        actual.add(TDigest.fromBytes(ByteBuffer.wrap((byte[]) reuse.getValue(TDIGEST_COLUMN))));
    }
    return recordReader;
}
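
Here byteBuffer comes from ByteBuffer.allocate(), which always yields a heap buffer with a backing array and an array offset of zero, so calling array() without a hasArray() check is safe.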

From source file:edu.umn.cs.spatialHadoop.indexing.BTRPartitioner.java

@Override
public void write(DataOutput out) throws IOException {
    mbr.write(out);
    out.writeInt(columns);
    out.writeInt(rows);
    ByteBuffer bbuffer = ByteBuffer.allocate((xSplits.length + ySplits.length) * 8);
    for (double xSplit : xSplits)
        bbuffer.putDouble(xSplit);
    for (double ySplit : ySplits)
        bbuffer.putDouble(ySplit);
    if (bbuffer.hasRemaining())
        throw new RuntimeException("Did not calculate buffer size correctly");
    out.write(bbuffer.array(), bbuffer.arrayOffset(), bbuffer.position());
}
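
This example shows the careful way to flush a buffer's contents: passing arrayOffset() and position() to write() emits only the bytes actually put into the buffer, since array() exposes the entire backing array.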

From source file:com.amazonaws.services.dynamodbv2.replication.impl.ShardSubscriberImpl.java

/**
 * Converts Kinesis record to DynamoDB Streams record.
 *
 * @param record
 *            The Kinesis record
 * @return The DynamoDB Streams record
 * @throws IOException
 *             Exception in conversion
 */
protected com.amazonaws.services.dynamodbv2.model.Record convertToStreamRecord(final Record record)
        throws IOException {
    final ByteBuffer data = record.getData();
    LOGGER.debug("deserializing record data: " + (new String(data.array())));
    return MAPPER.readValue(data.array(), com.amazonaws.services.dynamodbv2.model.Record.class);
}
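
Calling data.array() here assumes the Kinesis payload buffer is array-backed with a zero offset and that the array holds exactly the payload. A more defensive variant (a sketch, not from the source) copies the remaining bytes first:

final ByteBuffer data = record.getData();
final byte[] payload = new byte[data.remaining()];
data.duplicate().get(payload); // copy without moving the original buffer's position
return MAPPER.readValue(payload, com.amazonaws.services.dynamodbv2.model.Record.class);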

From source file:com.freshplanet.ane.GoogleCloudStorageUpload.tasks.UploadToGoogleCloudStorageAsyncTask.java

private byte[] createHeaderByteArrayImage(byte[] prefix, byte[] suffix, byte[] image) {
    Log.d(TAG, "[UploadToGoogleCloudStorageAsyncTask] Entering createHeaderByteArrayImage()");

    int size = image.length + prefix.length + suffix.length;
    ByteBuffer bytes = ByteBuffer.allocate(size);
    bytes.put(prefix);
    bytes.put(image);
    bytes.put(suffix);

    Log.d(TAG, "[UploadToGoogleCloudStorageAsyncTask] Exiting createHeaderByteArrayImage()");
    return bytes.array();
}

From source file:org.jmangos.sniffer.handler.PKTLogHandler.java

/**
 * (non-Javadoc)
 *
 * @see org.jmangos.sniffer.handler.PacketLogHandler#onDecodePacket(
 *      org.jmangos.sniffer.network.model.NetworkChannel,
 *      org.jmangos.sniffer.enums.Direction, java.lang.Integer,
 *      java.lang.Integer, byte[], int)
 */
@Override
public void onDecodePacket(final NetworkChannel channel, final Direction direction, final Integer size,
        final Integer opcode, final byte[] data, final int frame) {
    if (!isInit()) {
        init();
    }
    try {
        final ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + 4 + 4 + 4 + data.length + 4);
        buffer.order(ByteOrder.LITTLE_ENDIAN);
        buffer.put(direction.getValue());
        buffer.putInt(channel.hashCode());
        buffer.putInt(frame);
        buffer.putInt(0);
        buffer.putInt(data.length + 4);
        buffer.putInt(opcode);
        buffer.put(data);
        this.fous.write(buffer.array());
    } catch (final IOException e) {
        e.printStackTrace();
    }
}
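
Note that the allocation reserves four bytes for the one-byte direction flag, so capacity exceeds position by three bytes and write(buffer.array()) emits three trailing zero bytes per packet; writing buffer.array(), 0, buffer.position() would emit exactly the bytes put, unless the PKT log format expects the padding.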

From source file:fr.amap.viewer3d.loading.shader.Shader.java

private void extractActiveUniforms() {

    IntBuffer buf = IntBuffer.allocate(1);
    gl.glGetProgramiv(programId, GL3.GL_ACTIVE_UNIFORMS, buf);

    IntBuffer size = IntBuffer.allocate(40);
    IntBuffer length = IntBuffer.allocate(40);
    ByteBuffer nm = ByteBuffer.allocate(256);
    IntBuffer type = IntBuffer.allocate(1);

    for (int i = 0; i < buf.get(0); i++) {
        gl.glGetActiveUniform(programId, i, 40, length, size, type, nm);
        String uniformName = new String(ArrayUtils.subarray(nm.array(), 0, length.get(0)));

        uniformMap.put(uniformName, gl.glGetUniformLocation(programId, uniformName));
    }
}
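
Since nm was created with ByteBuffer.allocate(), nm.array() is guaranteed to exist; ArrayUtils.subarray then trims the 256-byte backing array down to the name length reported by glGetActiveUniform.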

From source file:fr.amap.viewer3d.loading.shader.Shader.java

private void extractActiveAttributes() {

    IntBuffer buf = IntBuffer.allocate(1);
    gl.glGetProgramiv(programId, GL3.GL_ACTIVE_ATTRIBUTES, buf);

    IntBuffer size = IntBuffer.allocate(40);
    IntBuffer length = IntBuffer.allocate(40);
    ByteBuffer nm = ByteBuffer.allocate(256);
    IntBuffer type = IntBuffer.allocate(1);

    for (int i = 0; i < buf.get(0); i++) {
        gl.glGetActiveAttrib(programId, i, 40, length, size, type, nm);
        String attributeName = new String(ArrayUtils.subarray(nm.array(), 0, length.get(0)));

        attributeMap.put(attributeName, gl.glGetAttribLocation(programId, attributeName));
    }
}

From source file:io.klerch.alexa.state.handler.AWSIotStateHandler.java

private String getState(final AlexaScope scope) throws AlexaStateException {
    final String thingName = getThingName(scope);

    createThingIfNotExisting(scope);

    final GetThingShadowRequest awsRequest = new GetThingShadowRequest().withThingName(thingName);
    try {
        final GetThingShadowResult response = awsDataClient.getThingShadow(awsRequest);
        final ByteBuffer buffer = response.getPayload();

        try {
            return (buffer != null && buffer.hasArray()) ? new String(buffer.array(), "UTF-8") : "{}";
        } catch (UnsupportedEncodingException e) {
            final String error = format("Could not handle received contents of thing-shadow '%1$s'", thingName);
            log.error(error, e);
            throw AlexaStateException.create(error).withCause(e).withHandler(this).build();
        }
    }
    // if a thing does not have a shadow this is a usual exception
    catch (com.amazonaws.services.iotdata.model.ResourceNotFoundException e) {
        log.info(e);
        // we are fine with a thing having no shadow what just means there's nothing to read out for the model
        // return an empty JSON to indicate nothing is in the thing shadow
        return "{}";
    }
}
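
Note the hasArray() guard: unlike the buffers created with ByteBuffer.allocate() in the earlier examples, a payload buffer received from an API may not be array-backed, and array() throws on such buffers.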

From source file:com.github.cambierr.lorawanpacket.semtech.Rxpk.java

public JSONObject toJson() throws MalformedPacketException {
    JSONObject output = new JSONObject();

    output.put("time", time);
    output.put("tmst", tmst);
    output.put("freq", freq);
    output.put("chan", chan);
    output.put("rfch", rfch);
    output.put("stat", stat);
    output.put("modu", modu.name());

    if (modu.equals(Modulation.LORA)) {
        output.put("codr", codr);
        output.put("lsnr", lsnr);
    }

    output.put("datr", datr);
    output.put("rssi", rssi);
    output.put("size", size);

    ByteBuffer bb = ByteBuffer.allocate(384);
    data.toRaw(bb);
    output.put("data", Base64.getEncoder()
            .encodeToString(Arrays.copyOfRange(bb.array(), 0, bb.capacity() - bb.remaining())));

    return output;
}
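
Here bb.capacity() - bb.remaining() equals bb.position(), so the copyOfRange call keeps exactly the bytes that data.toRaw(bb) wrote into the 384-byte buffer before Base64-encoding them.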