Example usage for java.nio ByteBuffer capacity

List of usage examples for java.nio ByteBuffer capacity

Introduction

On this page you can find example usages for java.nio ByteBuffer capacity.

Prototype

public final int capacity() 

Source Link

Document

Returns the capacity of this buffer.

Usage

From source file:com.ardikars.opennetcut.app.NetworkScanner.java

/**
 * Sends a single ARP request for {@code scanIP} and records any ARP reply
 * into a temporary pcap dump file, reporting progress through the logger.
 *
 * Fixes over the previous revision:
 * - returns early when the pcap dumper cannot be opened (the old code logged
 *   the error but then dereferenced the null dumper, causing an NPE),
 * - closes the dumper even when no logger is installed (the old code only
 *   closed it inside the {@code LOGGER != null} branch, leaking the dump
 *   file otherwise), and
 * - guards every LOGGER access with a null check, consistent with the rest
 *   of the method.
 */
private void scanByIp() {
    StaticField.RANDOM_STRING = Utils.getPcapTmpFileName();
    PcapDumper dumper = Jxnet.PcapDumpOpen(StaticField.PCAP, StaticField.RANDOM_STRING);
    if (dumper == null) {
        if (StaticField.LOGGER != null) {
            StaticField.LOGGER.log(LoggerStatus.COMMON,
                    "[ " + WARNING + " ] :: " + Jxnet.PcapGetErr(StaticField.PCAP));
        }
        return; // cannot dump packets without a dumper
    }
    PcapPktHdr pktHdr = new PcapPktHdr();
    int no = 1;
    arp.setTargetProtocolAddress(scanIP);
    ethernet.setPacket(arp);
    ByteBuffer buffer = FormatUtils.toDirectBuffer(ethernet.toBytes());
    if (Jxnet.PcapSendPacket(StaticField.PCAP, buffer, buffer.capacity()) != 0) {
        if (StaticField.LOGGER != null) {
            StaticField.LOGGER.log(LoggerStatus.COMMON, "[ " + WARNING + " ] :: " + FAILED_TO_SEND_PACKET);
        }
        if (!dumper.isClosed()) {
            Jxnet.PcapDumpClose(dumper); // don't leak the dump file on send failure
        }
        return;
    } else {
        Map<Class, Packet> packets = PacketHelper.next(StaticField.PCAP, pktHdr);
        if (packets != null) {
            if (stop) {
                if (StaticField.LOGGER != null) {
                    StaticField.LOGGER.log(LoggerStatus.PROGRESS, Integer.toString(100));
                }
                if (!dumper.isClosed()) {
                    Jxnet.PcapDumpClose(dumper);
                    if (StaticField.LOGGER != null) {
                        StaticField.LOGGER.log(LoggerStatus.COMMON,
                                "[ " + INFORMATION + " ] :: " + SCAN_FINISHED);
                    }
                }
                return;
            }
            ARP capArp = (ARP) packets.get(ARP.class);
            // Only ARP replies are dumped and forwarded to the handler.
            if (capArp != null && capArp.getOperationCode() == ARPOperationCode.ARP_REPLY) {
                Jxnet.PcapDump(dumper, pktHdr, FormatUtils.toDirectBuffer(capArp.toBytes()));
                handler.nextPacket(no, pktHdr, packets);
                no++;
            }
        }
    }
    if (StaticField.LOGGER != null) {
        StaticField.LOGGER.log(LoggerStatus.PROGRESS, Integer.toString(100));
    }
    if (!dumper.isClosed()) {
        Jxnet.PcapDumpClose(dumper);
        if (StaticField.LOGGER != null) {
            StaticField.LOGGER.log(LoggerStatus.COMMON, "[ " + INFORMATION + " ] :: " + SCAN_FINISHED);
        }
    }
}

From source file:org.usergrid.mongo.protocol.OpQuery.java

/**
 * Serializes this OP_QUERY message into the given channel buffer.
 *
 * The total message length is computed first (fixed header plus the three
 * variable-length sections), stored in {@code messageLength} so the parent
 * header encoder can emit it, and then every section is written in wire order.
 *
 * @param buffer destination buffer (the parent encoder may replace it)
 * @return the buffer containing the encoded message
 */
@Override
public ChannelBuffer encode(ChannelBuffer buffer) {
    // Encode the variable-length sections up front so their sizes are known.
    ByteBuffer collectionBytes = getCString(fullCollectionName);
    ByteBuffer queryDoc = encodeDocument(query);
    ByteBuffer fieldSelectorDoc = encodeDocument(returnFieldSelector);

    // 28 = 7 int32 fields (header + flags + numberToSkip + numberToReturn).
    messageLength = 28
            + collectionBytes.capacity()
            + queryDoc.capacity()
            + fieldSelectorDoc.capacity();

    ChannelBuffer out = super.encode(buffer);

    out.writeInt(flags);
    out.writeBytes(collectionBytes);
    out.writeInt(numberToSkip);
    out.writeInt(numberToReturn);
    out.writeBytes(queryDoc);
    out.writeBytes(fieldSelectorDoc);

    return out;
}

From source file:org.apache.hadoop.hive.serde2.avro.AvroDeserializer.java

/**
 * Deserializes an Avro value that Hive sees as a list.
 *
 * Handles three cases: FIXED (backing bytes exposed as a list), BYTES (a
 * ByteBuffer exposed as a list), and a real Avro array whose elements are
 * deserialized recursively via {@code worker}.
 *
 * Fix over the previous revision: the BYTES branch presized the list with
 * {@code capacity()} but copied the entire backing array via {@code array()},
 * which (a) ignores the buffer's position/limit and (b) throws for direct or
 * read-only buffers. It now copies exactly the readable bytes with absolute
 * gets, leaving the buffer untouched.
 *
 * @throws AvroSerdeException propagated from element deserialization
 */
private Object deserializeList(Object datum, Schema fileSchema, Schema recordSchema, ListTypeInfo columnType)
        throws AvroSerdeException {
    // Need to check the original schema to see if this is actually a Fixed.
    if (recordSchema.getType().equals(Schema.Type.FIXED)) {
        // We're faking out Hive to work through a type system impedance mismatch.
        // Pull out the backing array and convert to a list.
        GenericData.Fixed fixed = (GenericData.Fixed) datum;
        List<Byte> asList = new ArrayList<Byte>(fixed.bytes().length);
        for (int j = 0; j < fixed.bytes().length; j++) {
            asList.add(fixed.bytes()[j]);
        }
        return asList;
    } else if (recordSchema.getType().equals(Schema.Type.BYTES)) {
        // This is going to be slow... hold on.
        ByteBuffer bb = (ByteBuffer) datum;
        int n = bb.remaining();
        List<Byte> asList = new ArrayList<Byte>(n);
        // Absolute get() does not consume the buffer and works for direct
        // and read-only buffers, unlike array().
        for (int j = 0; j < n; j++) {
            asList.add(bb.get(bb.position() + j));
        }
        return asList;
    } else { // An actual list, deser its values
        List listData = (List) datum;
        Schema listSchema = recordSchema.getElementType();
        List<Object> listContents = new ArrayList<Object>(listData.size());
        for (Object obj : listData) {
            listContents.add(worker(obj, fileSchema == null ? null : fileSchema.getElementType(), listSchema,
                    columnType.getListElementTypeInfo()));
        }
        return listContents;
    }
}

From source file:org.openteufel.file.mpq.MPQFileSector.java

/**
 * Decompresses this sector's data into {@code out} and returns the number of
 * bytes written.
 *
 * If the file is encrypted, each sector (after compression/implosion, if
 * applicable) is encrypted with the file's key + the 0-based sector index;
 * the compression-type byte (if present) is encrypted as well.
 *
 * Fixes over the previous revision: the ZLib branch now releases the
 * Inflater's native memory in a finally block, and the BZip2 branch closes
 * its streams via try-with-resources, so neither leaks when decompression
 * throws.
 *
 * @throws DataFormatException  if zlib inflation fails
 * @throws IOException          if bzip2 decompression fails
 * @throws IllegalStateException if the decompressed size does not match the
 *                               expected {@code sizeUncompressed}, or the
 *                               compression type is unknown
 */
public int getDecompressed(ByteBuffer out) throws DataFormatException, IOException {
    ByteBuffer dataDecrypted;
    if (this.encryptionSeed != null)
        dataDecrypted = MPQEncryptionUtils.decrypt(dataRaw, encryptionSeed);
    else
        dataDecrypted = dataRaw;
    dataDecrypted.rewind();

    switch (compression) {
    case Uncompressed: {
        out.put(dataDecrypted);
        return dataDecrypted.capacity();
    }
    case Imploded: {
        byte[] buf = new byte[sizeUncompressed];
        int numDecompressed = Exploder.pkexplode(dataDecrypted.array(), buf);
        if (numDecompressed != this.sizeUncompressed)
            throw new IllegalStateException();
        out.put(buf, 0, sizeUncompressed);
        return sizeUncompressed;
    }
    case ZLib: {
        int numDecompressed = 0;
        byte[] buf = new byte[1024];
        Inflater inflater = new Inflater();
        try {
            inflater.setInput(dataDecrypted.array());
            while (!inflater.finished()) {
                int decompressedBytes = inflater.inflate(buf);
                numDecompressed += decompressedBytes;
                out.put(buf, 0, decompressedBytes);
            }
        } finally {
            inflater.end(); // free native zlib memory even if inflate() throws
        }
        if (numDecompressed != this.sizeUncompressed)
            throw new IllegalStateException();
        return numDecompressed;
    }
    case BZip2: {
        int numDecompressed = 0;
        byte[] buf = new byte[1024];
        try (InputStream inputStream = new ByteArrayInputStream(dataDecrypted.array());
                BZip2CompressorInputStream uncompressStream = new BZip2CompressorInputStream(inputStream)) {
            while (true) {
                int decompressedBytes = uncompressStream.read(buf);
                if (decompressedBytes < 0)
                    break;
                numDecompressed += decompressedBytes;
                out.put(buf, 0, decompressedBytes);
            }
        }
        if (numDecompressed != sizeUncompressed)
            throw new IllegalStateException();
        return numDecompressed;
    }
    default:
        throw new IllegalStateException("Unknown Compression");
    }
}

From source file:org.bimserver.webservices.authorization.Authorization.java

/**
 * Builds the AES-encrypted, hex-encoded token for this authorization.
 *
 * Plaintext buffer layout (positions set explicitly below):
 *   bytes 0..15   MD5 digest of everything that follows,
 *   byte  16      type id ({@code getId()}),
 *   bytes 17..24  expiry time in millis,
 *   bytes 25..32  uoid,
 *   remainder     subclass payload written by {@code getBytes(buffer)}.
 *
 * NOTE(review): MD5 serves here as an integrity checksum embedded in the
 * encrypted payload, not as a password hash.
 *
 * @param key AES key used to encrypt the token
 * @return hex-encoded ciphertext, or {@code null} if anything fails
 *         (the failure is logged)
 */
public String asHexToken(Key key) {
    try {
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, key);

        ByteBuffer plain = ByteBuffer.allocate(16 + 1 + 8 + 8 + getBufferSize());
        plain.position(16); // reserve the first 16 bytes for the digest
        plain.put(getId());
        plain.putLong(getExpires().getTimeInMillis());
        plain.putLong(getUoid());
        getBytes(plain);
        if (plain.position() != plain.capacity()) {
            throw new RuntimeException(
                    "Buffer's position should be at the end " + plain.position() + "/" + plain.capacity());
        }

        // Digest everything after the reserved prefix, then write the digest
        // into that prefix.
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        plain.position(16);
        md5.update(plain);
        plain.position(0);
        plain.put(md5.digest());

        byte[] cipherText = cipher.doFinal(plain.array());
        return new String(Hex.encodeHex(cipherText));
    } catch (Exception e) {
        LOGGER.error("", e);
        return null;
    }
}

From source file:edu.uci.ics.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppenderTest.java

@Test
public void testReOrganizeBuffer() throws Exception {
    // Start from the post-delete state, then compact the appender's frame.
    final int tupleCount = 10;
    testDelete();
    appender.reOrganizeBuffer();

    // Re-append the same tuples from a freshly built reference frame...
    ByteBuffer referenceFrame = makeAFrame(cap, tupleCount, 0);
    DeletableFrameTupleAppender reader = new DeletableFrameTupleAppender(recordDescriptor);
    reader.reset(referenceFrame);
    for (int t = 0; t < reader.getTupleCount(); t++) {
        appender.append(reader, t);
    }

    // ...and verify the appender's buffer is byte-identical to the reference.
    for (int pos = 0; pos < referenceFrame.capacity(); pos++) {
        assertEquals(referenceFrame.get(pos), appender.getBuffer().get(pos));
    }
}

From source file:org.neo4j.io.pagecache.impl.SingleFilePageSwapperTest.java

/**
 * Copies the full contents of {@code target} into a new byte array.
 *
 * The buffer is cleared first, so the copy always covers [0, capacity)
 * regardless of the buffer's previous position/limit; the buffer's position
 * is left at its limit afterwards, exactly as before.
 *
 * Improvement: the one-byte-at-a-time loop is replaced with a single bulk
 * {@code get}, which has identical semantics and avoids per-byte overhead.
 */
private byte[] array(ByteBuffer target) {
    target.clear();
    byte[] array = new byte[target.capacity()];
    target.get(array);
    return array;
}

From source file:com.ardikars.opennetcut.app.NetworkScanner.java

/**
 * Sends one ARP request per target address in {@code ips}, dumping ARP
 * replies to a temporary pcap file and reporting percentage progress.
 *
 * Fixes over the previous revision:
 * - returns early when the pcap dumper cannot be opened (the old code logged
 *   the error but went on to dereference the null dumper),
 * - null-checks the captured ARP packet before reading its opcode
 *   ({@code scanByIp} already did this; a non-ARP capture caused an NPE), and
 * - guards LOGGER accesses in the stop branch with a null check, consistent
 *   with the rest of the method.
 */
private void scanAll() {
    StaticField.RANDOM_STRING = Utils.getPcapTmpFileName();
    PcapDumper dumper = Jxnet.PcapDumpOpen(StaticField.PCAP, StaticField.RANDOM_STRING);
    if (dumper == null) {
        if (StaticField.LOGGER != null) {
            StaticField.LOGGER.log(LoggerStatus.COMMON,
                    "[ " + WARNING + " ] :: " + Jxnet.PcapGetErr(StaticField.PCAP));
        }
        return; // cannot dump packets without a dumper
    }
    PcapPktHdr pktHdr = new PcapPktHdr();
    ByteBuffer buffer;

    int ipsSize = ips.size();
    for (int i = 0; i < ipsSize; i++) {
        arp.setTargetProtocolAddress(ips.get(i));
        ethernet.setPacket(arp);
        buffer = FormatUtils.toDirectBuffer(ethernet.toBytes());
        if (PcapSendPacket(StaticField.PCAP, buffer, buffer.capacity()) != 0) {
            if (StaticField.LOGGER != null) {
                StaticField.LOGGER.log(LoggerStatus.COMMON, "[ " + WARNING + " ] :: " + FAILED_TO_SEND_PACKET);
            }
            break;
        } else {
            Map<Class, Packet> packets = PacketHelper.next(StaticField.PCAP, pktHdr);
            if (packets != null) {
                ARP capArp = (ARP) packets.get(ARP.class);
                // Only ARP replies are dumped; the full Ethernet frame is written.
                if (capArp != null && capArp.getOperationCode() == ARPOperationCode.ARP_REPLY) {
                    Jxnet.PcapDump(dumper, pktHdr,
                            FormatUtils.toDirectBuffer(packets.get(Ethernet.class).toBytes()));
                    this.handler.nextPacket(null, pktHdr, packets);
                }
            }
        }
        if (stop) {
            if (StaticField.LOGGER != null) {
                StaticField.LOGGER.log(LoggerStatus.PROGRESS, Integer.toString(100));
            }
            if (!dumper.isClosed()) {
                Jxnet.PcapDumpClose(dumper);
                if (StaticField.LOGGER != null) {
                    StaticField.LOGGER.log(LoggerStatus.COMMON, "[ " + INFORMATION + " ] :: " + SCAN_FINISHED);
                }
            }
            return;
        }
        if (StaticField.LOGGER != null)
            StaticField.LOGGER.log(LoggerStatus.PROGRESS, Integer.toString((i * 100) / ipsSize));
    }
    if (StaticField.LOGGER != null) {
        StaticField.LOGGER.log(LoggerStatus.PROGRESS, Integer.toString(100));
    }
    if (!dumper.isClosed()) {
        Jxnet.PcapDumpClose(dumper);
        if (StaticField.LOGGER != null) {
            StaticField.LOGGER.log(LoggerStatus.COMMON, "[ " + INFORMATION + " ] :: " + SCAN_FINISHED);
        }
    }
}

From source file:org.stem.db.FatFile.java

/**
 * Appends a blob (header followed by payload) at the current write pointer,
 * registers it in the in-memory index, and returns its on-disk location.
 *
 * @param key  the blob's key, stored in the header
 * @param blob the payload; NOTE(review): this method uses {@code capacity()}
 *             for the length and {@code array()} for the bytes, so it assumes
 *             a heap buffer whose backing array is exactly the payload
 *             (position/limit are ignored, and a direct buffer would throw).
 *             Confirm all callers pass such buffers; otherwise
 *             {@code remaining()} plus a bulk get would be safer.
 * @return a descriptor with this fat file's id, the header offset, and the
 *         payload (body) offset
 * @throws IOException if writing to the underlying file fails
 */
public synchronized BlobDescriptor writeBlob(byte[] key, ByteBuffer blob) throws IOException {
    //int offset = (int) writer.getFilePointer(); // TODO: This is WRONG, don't know why
    int offset = pointer; // header goes at the current logical write pointer
    int length = blob.capacity();
    Blob.Header header = Blob.Header.create(key, length, FatFileIndex.Entry.FLAG_LIVE);

    writer.seek(pointer);
    writer.write(header);
    writer.write(blob.array()); // payload

    // Advance the pointer past header + payload for the next write.
    int bodyOffset = offset + Blob.Header.SIZE;
    pointer = bodyOffset + length;

    FatFileIndex.Entry indexEntry = FatFileIndex.create(header, offset, FatFileIndex.Entry.FLAG_LIVE);
    index.add(indexEntry);

    tracker.count(header);

    return new BlobDescriptor(this.id, offset, bodyOffset);
}

From source file:com.github.cambierr.lorawanpacket.semtech.Rxpk.java

/**
 * Serializes this RXPK record to its Semtech-protocol JSON representation.
 *
 * LoRa-only fields (coding rate, SNR) are included only when the modulation
 * is LORA. The payload is rendered into a scratch buffer and base64-encoded.
 *
 * @return the JSON object describing this received packet
 * @throws MalformedPacketException if the payload cannot be rendered
 */
public JSONObject toJson() throws MalformedPacketException {
    JSONObject json = new JSONObject();

    json.put("time", time);
    json.put("tmst", tmst);
    json.put("freq", freq);
    json.put("chan", chan);
    json.put("rfch", rfch);
    json.put("stat", stat);
    json.put("modu", modu.name());

    // Coding rate and SNR only exist for LoRa modulation.
    if (modu.equals(Modulation.LORA)) {
        json.put("codr", codr);
        json.put("lsnr", lsnr);
    }

    json.put("datr", datr);
    json.put("rssi", rssi);
    json.put("size", size);

    // Render the payload, then base64 exactly the bytes that were produced
    // (capacity() - remaining() of the scratch buffer).
    ByteBuffer scratch = ByteBuffer.allocate(384);
    data.toRaw(scratch);
    byte[] rawPayload = Arrays.copyOfRange(scratch.array(), 0, scratch.capacity() - scratch.remaining());
    json.put("data", Base64.getEncoder().encodeToString(rawPayload));

    return json;
}