Example usage for java.nio ByteBuffer wrap

Introduction

This page shows example usages of java.nio.ByteBuffer.wrap(byte[]).

Prototype

public static ByteBuffer wrap(byte[] array) 

Document

Creates a new byte buffer by wrapping the given byte array.
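
The returned buffer is backed by the supplied array: its capacity and limit are array.length, its position is zero, and modifications made through the buffer are visible in the array and vice versa. A minimal sketch of that behavior, using an illustrative array:

import java.nio.ByteBuffer;

public class WrapDemo {
    public static void main(String[] args) {
        byte[] data = { 1, 2, 3, 4 };
        ByteBuffer buf = ByteBuffer.wrap(data);

        // Wrapping sets position to 0 and limit/capacity to the array length.
        System.out.println(buf.position() + " / " + buf.limit()); // 0 / 4

        // The buffer shares the backing array, so writes are visible both ways.
        buf.put(0, (byte) 9);
        System.out.println(data[0]); // 9
    }
}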

Usage

From source file:com.willetinc.hadoop.mapreduce.dynamodb.BinarySplitterTest.java

@Test
public void testSplitInteger() {

    final int NUM_RANGE_SPLITS = 2;
    final String VALUE = "007";
    final Types hashKeyType = Types.NUMBER;
    final AttributeValue hashKeyValue = new AttributeValue().withN(VALUE);
    final Types rangeKeyType = Types.STRING;
    final AttributeValue minRangeKeyValue = new AttributeValue()
            .withB(ByteBuffer.wrap(new byte[] { 0x0, 0x0 }));
    final AttributeValue maxRangeKeyValue = new AttributeValue()
            .withB(ByteBuffer.wrap(new byte[] { 0x0, 0xF }));

    Configuration conf = createMock(Configuration.class);
    BinarySplitter splitter = new BinarySplitter();

    List<InputSplit> inputSplits = new ArrayList<InputSplit>();

    splitter.generateRangeKeySplits(conf, inputSplits, hashKeyType, hashKeyValue, rangeKeyType,
            minRangeKeyValue, maxRangeKeyValue, NUM_RANGE_SPLITS);

    assertEquals(2, inputSplits.size());

    for (InputSplit inputSplit : inputSplits) {
        DynamoDBQueryInputFormat.DynamoDBQueryInputSplit split = (DynamoDBQueryInputFormat.DynamoDBQueryInputSplit) inputSplit;
        Iterator<AttributeValue> itr = split.getRangeKeyValues().iterator();

        System.out.print(split.getRangeKeyOperator() + " ");
        System.out.print(Base64.encodeBase64String(itr.next().getB().array()) + " AND ");
        System.out.println(Base64.encodeBase64String(itr.next().getB().array()));
    }

}

From source file:ch.ledcom.jpreseed.UsbCreatorTest.java

@Test
public void createUsb() throws IOException {
    Date startTime = new Date();
    ByteBuffer sysLinuxCfg = ByteBuffer.wrap("sysLinuxCfg".getBytes());
    Set<Path> preseeds = Collections.singleton(HELLO_WORLD_TXT);

    try (InputStream srcBootImg = Files.newInputStream(VFAT_IMG_GZ);
            GZIPOutputStream newBootImg = new GZIPOutputStream(Files.newOutputStream(NEW_IMAGE))) {
        new UsbCreator().create(srcBootImg, newBootImg, sysLinuxCfg, preseeds);
    }

    assertThat(NEW_IMAGE).exists();

    FileSystem fileSystem = FileSystemFactory.create(RamDisk.readGzipped(NEW_IMAGE.toFile()), true);

    FsDirectoryEntry newInitRdGzEntry = fileSystem.getRoot().getEntry(INITRD_GZ);
    assertThat(newInitRdGzEntry).hasBeenModifiedAt(startTime, 2000);
    assertThat(fileSystem.getRoot().getEntry(SYSLINUX_CFG)).hasBeenModifiedAt(startTime, 2000);

    CpioArchiveInputStream initRdCpio = getCpioArchiveInputStream(newInitRdGzEntry);
    assertThat(initRdCpio).hasSingleEntry(HELLO_WORLD_TXT.getFileName().toString());
}

From source file:com.ning.http.client.providers.apache.ApacheResponseBodyPart.java

@Override
public ByteBuffer getBodyByteBuffer() {
    return ByteBuffer.wrap(chunk);
}

From source file:com.amazonaws.services.kinesis.model.transform.PutRecordsRequestMarshallerTest.java

@Test
public void test() throws Exception {
    PutRecordsRequest putRecordsRequest = new PutRecordsRequest();
    putRecordsRequest.setStreamName("stream name");
    List<PutRecordsRequestEntry> records = new ArrayList<PutRecordsRequestEntry>();
    for (int i = 0; i < 10; i++) {
        PutRecordsRequestEntry record = new PutRecordsRequestEntry();
        String randomStr = RandomStringUtils.random(8 * 1024);
        record.setData(ByteBuffer.wrap(randomStr.getBytes(StringUtils.UTF8)));
        record.setPartitionKey("partition key");
        records.add(record);
    }
    putRecordsRequest.setRecords(records);
    PutRecordsRequestMarshaller marshaller = new PutRecordsRequestMarshaller();
    Request<PutRecordsRequest> request = marshaller.marshall(putRecordsRequest);

    assertEquals("content encoding", "gzip", request.getHeaders().get("Content-Encoding"));
    byte[] content = IOUtils.toByteArray(request.getContent());
    assertEquals("content length", request.getHeaders().get("Content-Length"), String.valueOf(content.length));
    GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(content));
    String str = IOUtils.toString(gis);
    assertTrue("content is compressed", content.length < str.length());
    Map<String, String> map = JsonUtils.jsonToMap(str);
    assertEquals("StreamName", "stream name", map.get("StreamName"));
}

From source file:StreamUtility.java

/**
 * Converts little-endian bytes to an int
 * @param value byte array to read
 * @return integer value of the byte array
 */
public static int littleEndianToInt(byte[] value) {
    return ByteBuffer.wrap(value).order(ByteOrder.LITTLE_ENDIAN).getInt();
}
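
Note that getInt() reads exactly four bytes from the wrapped array, so passing a shorter array throws BufferUnderflowException. A quick illustrative check of the little-endian ordering (values chosen arbitrarily):

byte[] le = { 0x78, 0x56, 0x34, 0x12 };
int value = littleEndianToInt(le); // 0x12345678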

From source file:com.microsoft.applicationinsights.extensibility.initializer.SequencePropertyInitializer.java

private static String uuidToBase64() {
    Base64 base64 = new Base64();
    UUID uuid = UUID.randomUUID();
    ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
    bb.putLong(uuid.getMostSignificantBits());
    bb.putLong(uuid.getLeastSignificantBits());
    return base64.encodeBase64URLSafeString(bb.array());
}
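
The same wrap-and-read pattern works in the opposite direction. A hypothetical helper (not part of the original class, assuming the same Apache Commons Codec Base64 type) could rebuild the UUID from the URL-safe string:

private static UUID base64ToUuid(String encoded) {
    // decodeBase64 accepts the URL-safe alphabet produced above.
    ByteBuffer bb = ByteBuffer.wrap(Base64.decodeBase64(encoded));
    return new UUID(bb.getLong(), bb.getLong());
}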

From source file:com.amazonaws.services.logs.connectors.s3.S3TransformerTest.java

@Test
public void transformAccessLog() throws IOException {
    S3Transformer classUnderTest = new S3Transformer();

    // load the example events
    byte[] data = TestUtils.getCompressedTestFile("/access-log-example.json");

    // execute
    List<CloudWatchLogsEvent> logEvents = new ArrayList<>(
            classUnderTest.toClass(new Record().withData(ByteBuffer.wrap(data))));

    List<byte[]> s3Entries = new ArrayList<>();

    for (CloudWatchLogsEvent logEvent : logEvents) {
        s3Entries.add(classUnderTest.fromClass(logEvent));
    }

    // verify
    JsonNode sourceNode;

    // event 1
    sourceNode = JSON_OBJECT_MAPPER.readTree(s3Entries.get(0));

    assertEquals("49545295115971876468408574808414755329919666212443258898", sourceNode.get("id").asText());
    assertEquals(1421116133213L, sourceNode.get("timestamp").asLong());
    assertEquals("123456789012", sourceNode.get("owner").asText());
    assertEquals("Apache/access.log", sourceNode.get("logGroup").asText());
    assertEquals("i-c3f9bec9", sourceNode.get("logStream").asText());
    assertEquals("127.0.0.1 frank GET 200 4535", sourceNode.get("message").asText());

    assertEquals(200, sourceNode.get("extractedFields").get("status_code").asLong());
    assertEquals("GET", sourceNode.get("extractedFields").get("verb").asText());
    assertEquals(4535, sourceNode.get("extractedFields").get("response_size").asLong());
    assertEquals("frank", sourceNode.get("extractedFields").get("user").asText());
    assertEquals("127.0.0.1", sourceNode.get("extractedFields").get("ip").asText());

    // event 2
    sourceNode = JSON_OBJECT_MAPPER.readTree(s3Entries.get(1));

    assertEquals("49545295115971876468408574808465530214343480843939348498", sourceNode.get("id").asText());
    assertEquals(1421116143214L, sourceNode.get("timestamp").asLong());
    assertEquals("123456789012", sourceNode.get("owner").asText());
    assertEquals("Apache/access.log", sourceNode.get("logGroup").asText());
    assertEquals("i-c3f9bec9", sourceNode.get("logStream").asText());
    assertEquals("127.0.0.1 alice POST 404 34", sourceNode.get("message").asText());

    assertEquals(404, sourceNode.get("extractedFields").get("status_code").asLong());
    assertEquals("POST", sourceNode.get("extractedFields").get("verb").asText());
    assertEquals(34, sourceNode.get("extractedFields").get("response_size").asLong());
    assertEquals("alice", sourceNode.get("extractedFields").get("user").asText());
    assertEquals("127.0.0.1", sourceNode.get("extractedFields").get("ip").asText());
}

From source file:com.nearinfinity.honeycomb.hbase.bulkload.FieldParser.java

/**
 * Try to parse a string into a byte buffer based on a column type.
 *
 * @param val    String value
 * @param schema Column schema to base value parsing on.
 * @return Byte buffer
 * @throws ParseException The string value could not be parsed into the column type.
 */
public static ByteBuffer parse(String val, ColumnSchema schema) throws ParseException {
    checkNotNull(val, "Should not be parsing null. Something went terribly wrong.");
    checkNotNull(schema, "Column metadata is null.");

    ColumnType type = schema.getType();

    if (val.length() == 0 && type != ColumnType.STRING && type != ColumnType.BINARY) {
        if (schema.getIsNullable()) {
            return null;
        }

        throw new IllegalArgumentException(
                "Expected a value for a" + " non-null SQL column, but no value was given.");
    }

    switch (type) {
    case LONG:
        return ByteBuffer.wrap(Longs.toByteArray(Long.parseLong(val)));
    case ULONG:
        BigInteger n = new BigInteger(val);
        if (n.compareTo(BigInteger.ZERO) == -1) {
            throw new IllegalArgumentException("negative value provided for unsigned column. value: " + val);
        }
        return ByteBuffer.wrap(Longs.toByteArray(n.longValue()));
    case DOUBLE:
        return ByteBuffer.wrap(Bytes.toBytes(Double.parseDouble(val)));
    case DATE:
        return extractDate(val, "yyyy-MM-dd", "yyyy-MM-dd", "yyyy/MM/dd", "yyyy.MM.dd", "yyyyMMdd");
    case TIME:
        return extractDate(val, "HH:mm:ss", "HH:mm:ss", "HHmmss");
    case DATETIME:
        return extractDate(val, "yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "yyyy/MM/dd HH:mm:ss",
                "yyyy.MM.dd HH:mm:ss", "yyyyMMdd HHmmss");
    case DECIMAL:
        return extractDecimal(val, schema.getPrecision(), schema.getScale());
    case STRING:
    case BINARY:
    default:
        return ByteBuffer.wrap(val.getBytes(Charset.forName("UTF-8")));
    }
}
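
A brief illustrative call against the LONG branch (the column schema here is hypothetical, constructed however the surrounding project builds one):

ByteBuffer buf = FieldParser.parse("42", longColumnSchema);
// buf wraps the 8-byte big-endian encoding produced by Longs.toByteArray(42L)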

From source file:com.knewton.mapreduce.util.RandomStudentEventGenerator.java

/**
 * Helper method for wrapping an id in a <code>ByteBuffer</code>.
 *
 * @return A byte buffer with a random ID
 */
public static ByteBuffer getRandomIdBuffer() {
    long id = getRandomId();
    ByteBuffer bb = ByteBuffer.wrap(new byte[8]);
    bb.putLong(id);
    bb.rewind();
    return bb;
}
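
The rewind() call is what makes the returned buffer immediately readable: putLong(id) advances the position to 8, and rewinding resets it to 0 so a caller can read the id straight back, for example:

long id = getRandomIdBuffer().getLong(); // illustrative round-trip read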

From source file:com.rackspacecloud.blueflood.io.serializers.astyanax.CounterRollupSerializationTest.java

@Test
public void testCounterV1RoundTrip() throws IOException {
    BluefloodCounterRollup c0 = new BluefloodCounterRollup().withCount(7442245).withSampleCount(1);
    BluefloodCounterRollup c1 = new BluefloodCounterRollup().withCount(34454722343L).withSampleCount(10);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    baos.write(Base64.encodeBase64(Serializers.counterRollupInstance.toByteBuffer(c0).array()));
    baos.write("\n".getBytes());
    baos.write(Base64.encodeBase64(Serializers.counterRollupInstance.toByteBuffer(c1).array()));
    baos.write("\n".getBytes());

    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    BufferedReader reader = new BufferedReader(new InputStreamReader(bais));

    ByteBuffer bb = ByteBuffer.wrap(Base64.decodeBase64(reader.readLine().getBytes()));
    BluefloodCounterRollup cc0 = Serializers.serializerFor(BluefloodCounterRollup.class).fromByteBuffer(bb);
    Assert.assertEquals(c0, cc0);

    bb = ByteBuffer.wrap(Base64.decodeBase64(reader.readLine().getBytes()));
    BluefloodCounterRollup cc1 = Serializers.serializerFor(BluefloodCounterRollup.class).fromByteBuffer(bb);

    Assert.assertEquals(c1, cc1);
    Assert.assertFalse(cc0.equals(cc1));
}