Example usage for java.nio ByteBuffer clear

List of usage examples for java.nio ByteBuffer clear

Introduction

This page lists example usages of java.nio.ByteBuffer.clear() collected from open source projects.

Prototype

public final Buffer clear() 

Document

Clears this buffer: the position is set to zero, the limit is set to the capacity, and the mark is discarded. Note that clear() does not erase the data in the buffer; it only resets the buffer's indices so the buffer is ready for a new sequence of channel-read or put operations.
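
Before the project examples, here is a minimal, self-contained sketch of the fill/drain cycle in which clear() is typically used: the channel fills the buffer, flip() switches it to draining mode, and clear() resets position and limit so the next read starts from the beginning. The file name data.bin and the class name ClearExample are placeholders for illustration, not taken from any of the projects below.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ClearExample {
    public static void main(String[] args) throws IOException {
        // "data.bin" is a hypothetical input file used only for illustration.
        try (FileChannel channel = FileChannel.open(Paths.get("data.bin"), StandardOpenOption.READ)) {
            ByteBuffer buffer = ByteBuffer.allocate(1024);
            long total = 0;
            while (channel.read(buffer) != -1) {
                buffer.flip();                // switch from filling to draining
                total += buffer.remaining();  // ... consume the buffer contents here ...
                buffer.clear();               // reset position/limit so the next read() refills from 0
            }
            System.out.println("Read " + total + " bytes");
        }
    }
}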

Usage

From source file:org.apache.jxtadoop.hdfs.server.datanode.BlockSender.java

/**
 * Sends up to maxChunks chunks of data.
 * 
 * When blockInPosition is >= 0, assumes 'out' is a 
 * {@link SocketOutputStream} and tries 
 * {@link SocketOutputStream#transferToFully(FileChannel, long, int)} to
 * send data (and updates blockInPosition).
 */
private int sendChunks(ByteBuffer pkt, int maxChunks, OutputStream out) throws IOException {
    // Sends multiple chunks in one packet with a single write().

    int len = Math.min((int) (endOffset - offset), bytesPerChecksum * maxChunks);
    if (len == 0) {
        return 0;
    }

    int numChunks = (len + bytesPerChecksum - 1) / bytesPerChecksum;
    int packetLen = len + numChunks * checksumSize + 4;
    pkt.clear();

    // write packet header
    pkt.putInt(packetLen);
    pkt.putLong(offset);
    pkt.putLong(seqno);
    pkt.put((byte) ((offset + len >= endOffset) ? 1 : 0));
    //why no ByteBuf.putBoolean()?
    pkt.putInt(len);

    int checksumOff = pkt.position();
    int checksumLen = numChunks * checksumSize;
    byte[] buf = pkt.array();

    if (checksumSize > 0 && checksumIn != null) {
        try {
            checksumIn.readFully(buf, checksumOff, checksumLen);
        } catch (IOException e) {
            LOG.warn(" Could not read or failed to veirfy checksum for data" + " at offset " + offset
                    + " for block " + block + " got : " + StringUtils.stringifyException(e));
            IOUtils.closeStream(checksumIn);
            checksumIn = null;
            if (corruptChecksumOk) {
                if (checksumOff < checksumLen) {
                    // Just fill the array with zeros.
                    Arrays.fill(buf, checksumOff, checksumLen, (byte) 0);
                }
            } else {
                throw e;
            }
        }
    }

    int dataOff = checksumOff + checksumLen;

    if (blockInPosition < 0) {
        //normal transfer
        IOUtils.readFully(blockIn, buf, dataOff, len);

        if (verifyChecksum) {
            int dOff = dataOff;
            int cOff = checksumOff;
            int dLeft = len;

            for (int i = 0; i < numChunks; i++) {
                checksum.reset();
                int dLen = Math.min(dLeft, bytesPerChecksum);
                checksum.update(buf, dOff, dLen);
                if (!checksum.compare(buf, cOff)) {
                    throw new ChecksumException("Checksum failed at " + (offset + len - dLeft), len);
                }
                dLeft -= dLen;
                dOff += dLen;
                cOff += checksumSize;
            }
        }
        //writing is done below (mainly to handle IOException)
    }

    try {
        if (blockInPosition >= 0) {
            //use transferTo(). Checks on out and blockIn are already done. 

            SocketOutputStream sockOut = (SocketOutputStream) out;
            //first write the packet
            sockOut.write(buf, 0, dataOff);
            // no need to flush. since we know out is not a buffered stream. 

            sockOut.transferToFully(((FileInputStream) blockIn).getChannel(), blockInPosition, len);

            blockInPosition += len;
        } else {
            // normal transfer
            out.write(buf, 0, dataOff + len);
        }

    } catch (IOException e) {
        /* exception while writing to the client (well, with transferTo(),
         * it could also be while reading from the local file).
         */
        throw ioeToSocketException(e);
    }

    if (throttler != null) { // rebalancing so throttle
        throttler.throttle(packetLen);
    }

    return len;
}

From source file:cn.ac.ncic.mastiff.io.coding.RLEDecoder.java

@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    FlexibleEncoding.ORC.InStream instream = FlexibleEncoding.ORC.InStream.create("test", byteBuf, null,
            dynamicBuffer.size());
    RunLengthIntegerReader rlein = new RunLengthIntegerReader(instream, true);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 1; i < numPairs; i++) {
        int tmp = (int) rlein.next();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:org.openhealthtools.openatna.syslog.protocol.ProtocolMessageFactory.java

/**
 * This reads up to 256 characters to read headers (excluding SDs). This limit is arbitrary.
 * It is imposed to reduce the risk
 * of badly formed or malicious messages from using too many resources.
 *
 * @param in
 * @return
 * @throws SyslogException
 */
public SyslogMessage read(InputStream in) throws SyslogException {
    try {
        PushbackInputStream pin = new PushbackInputStream(in, 5);
        int priority = readPriority(pin);
        int facility;
        int severity;
        byte c;
        int spaces = 5;
        int count = 0;
        ByteBuffer buff = ByteBuffer.wrap(new byte[256]);

        String timestamp = null;
        String host = "-";
        String app = "-";
        String proc = "-";
        String mid = "-";
        int max = 256;
        int curr = 0;

        while (count < spaces && curr < max) {
            c = (byte) pin.read();
            curr++;
            if (c == ' ') {
                count++;
                String currHeader = new String(buff.array(), 0, buff.position(), Constants.ENC_UTF8);
                buff.clear();
                switch (count) {
                case 1:
                    timestamp = currHeader;
                    break;
                case 2:
                    host = currHeader;
                    break;
                case 3:
                    app = currHeader;
                    break;
                case 4:
                    proc = currHeader;
                    break;
                case 5:
                    mid = currHeader;
                    break;
                }
            } else {
                buff.put(c);
            }
        }
        if (timestamp == null) {
            throw new SyslogException("no timestamp defined");
        }

        c = (byte) pin.read();
        List<StructuredElement> els = new ArrayList<StructuredElement>();
        if (c == '-') {
            c = (byte) pin.read();
            if (c != ' ') {
                throw new SyslogException("not a space");
            }
        } else if (c == '[') {
            pin.unread(c);
            els = StructuredElement.parse(pin);
        } else {
            throw new SyslogException("Illegal Structured data");
        }
        LogMessage logMessage = getLogMessage(mid);
        String encoding = readBom(pin, logMessage.getExpectedEncoding());
        logMessage.read(pin, encoding);
        facility = priority / 8;
        severity = priority % 8;
        ProtocolMessage sm = new ProtocolMessage(facility, severity, timestamp, host, logMessage, app, mid,
                proc);
        for (StructuredElement el : els) {
            sm.addStructuredElement(el);
        }
        return sm;
    } catch (IOException e) {
        e.printStackTrace();
        throw new SyslogException(e);
    }
}

From source file:com.google.cloud.hadoop.gcsio.GoogleCloudStorageIntegrationHelper.java

/**
 * Helper which reads the entire file as a String.
 */
protected String readTextFile(String bucketName, String objectName) throws IOException {
    SeekableReadableByteChannel readChannel = null;
    ByteBuffer readBuffer = ByteBuffer.allocate(1024);
    StringBuffer returnBuffer = new StringBuffer();

    try {
        readChannel = open(bucketName, objectName);
        int numBytesRead = readChannel.read(readBuffer);
        while (numBytesRead > 0) {
            readBuffer.flip();
            returnBuffer.append(StandardCharsets.UTF_8.decode(readBuffer));
            readBuffer.clear();
            numBytesRead = readChannel.read(readBuffer);
        }
    } finally {
        if (readChannel != null) {
            readChannel.close();
        }
    }
    return returnBuffer.toString();
}

From source file:de.digitalcollections.streaming.euphoria.controller.StreamingController.java

/**
 * Stream the given input to the given output via NIO {@link Channels} and a directly allocated NIO
 * {@link ByteBuffer}. Both the input and output streams will implicitly be closed after streaming, regardless of
 * whether an exception is been thrown or not.
 *
 * @param input The input stream.
 * @param output The output stream.
 * @return The length of the written bytes.
 * @throws IOException When an I/O error occurs.
 */
private long stream(InputStream input, OutputStream output) throws IOException {
    try (ReadableByteChannel inputChannel = Channels.newChannel(input);
            WritableByteChannel outputChannel = Channels.newChannel(output)) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(DEFAULT_STREAM_BUFFER_SIZE);
        long size = 0;

        while (inputChannel.read(buffer) != -1) {
            buffer.flip();
            size += outputChannel.write(buffer);
            buffer.clear();
        }

        return size;
    }
}

From source file:cn.ac.ncic.mastiff.io.coding.RunLengthEncodingIntReader.java

@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    FlexibleEncoding.ORC.InStream instream = FlexibleEncoding.ORC.InStream.create("test", byteBuf, null,
            dynamicBuffer.size());
    RunLengthIntegerReader rlein = new RunLengthIntegerReader(instream, true);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        int tmp = (int) rlein.next();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:net.jenet.Host.java

/**
 * @param buffer
 * @return
 */
int receive(ByteBuffer buffer) {
    try {
        buffer.clear();
        receivedAddress = (InetSocketAddress) communicationChannel.receive(buffer);
        buffer.flip();
        if (receivedAddress != null)
            LOG.debug("Host.receive:" + address + ". Received " + buffer.limit() + " bytes  from "
                    + receivedAddress);
        return buffer.limit();
    } catch (Exception e) {
        LOG.error("Host.receive: Error reading buffers.", e);
        return -1;
    }
}

From source file:cn.ac.ncic.mastiff.io.coding.RunLengthEncodingByteReader.java

public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    FlexibleEncoding.ORC.InStream instream = FlexibleEncoding.ORC.InStream.create("test", byteBuf, null,
            dynamicBuffer.size());
    RunLengthByteReader rlein = new RunLengthByteReader(instream);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = rlein.next();
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:cn.ac.ncic.mastiff.io.coding.RunLengthEncodingByteReader.java

@Override
public byte[] ensureDecompressed() throws IOException {
    //  byte[]  bytes=inBuf.getData() ;
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    FlexibleEncoding.ORC.InStream instream = FlexibleEncoding.ORC.InStream.create("test", byteBuf, null,
            dynamicBuffer.size());
    RunLengthByteReader rlein = new RunLengthByteReader(instream);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = rlein.next();
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:org.apache.hadoop.hdfs.server.datanode.RaidBlockSender.java

/**
 * Sends up to maxChunks chunks of data.
 * 
 * When blockInPosition is >= 0, assumes 'out' is a 
 * {@link SocketOutputStream} and tries 
 * {@link SocketOutputStream#transferToFully(FileChannel, long, int)} to
 * send data (and updates blockInPosition).
 */
private int sendChunks(ByteBuffer pkt, int maxChunks, OutputStream out) throws IOException {
    // Sends multiple chunks in one packet with a single write().

    int len = (int) Math.min(endOffset - offset, (((long) bytesPerChecksum) * ((long) maxChunks)));
    int numChunks = (len + bytesPerChecksum - 1) / bytesPerChecksum;
    int packetLen = len + numChunks * checksumSize + 4;
    boolean lastDataPacket = offset + len == endOffset && len > 0;
    pkt.clear();

    PacketHeader header = new PacketHeader(packetLen, offset, seqno, (len == 0), len);
    header.putInBuffer(pkt);

    int checksumOff = pkt.position();
    int checksumLen = numChunks * checksumSize;
    byte[] buf = pkt.array();

    if (checksumSize > 0 && checksumIn != null) {
        try {
            checksumIn.readFully(buf, checksumOff, checksumLen);
        } catch (IOException e) {
            LOG.warn(" Could not read or failed to veirfy checksum for data" + " at offset " + offset
                    + " for block " + block + " got : " + StringUtils.stringifyException(e));
            IOUtils.closeStream(checksumIn);
            checksumIn = null;
            if (corruptChecksumOk) {
                if (checksumOff < checksumLen) {
                    // Just fill the array with zeros.
                    Arrays.fill(buf, checksumOff, checksumLen, (byte) 0);
                }
            } else {
                throw e;
            }
        }

        // write in progress that we need to use to get last checksum
        if (lastDataPacket && lastChunkChecksum != null) {
            int start = checksumOff + checksumLen - checksumSize;
            byte[] updatedChecksum = lastChunkChecksum.getChecksum();

            if (updatedChecksum != null) {
                System.arraycopy(updatedChecksum, 0, buf, start, checksumSize);
            }
        }
    }

    int dataOff = checksumOff + checksumLen;

    if (blockInPosition < 0) {
        //normal transfer
        IOUtils.readFully(blockIn, buf, dataOff, len);

        if (verifyChecksum) {
            int dOff = dataOff;
            int cOff = checksumOff;
            int dLeft = len;

            for (int i = 0; i < numChunks; i++) {
                checksum.reset();
                int dLen = Math.min(dLeft, bytesPerChecksum);
                checksum.update(buf, dOff, dLen);
                if (!checksum.compare(buf, cOff)) {
                    long failedPos = offset + len - dLeft;
                    throw new ChecksumException("Checksum failed at " + failedPos, failedPos);
                }
                dLeft -= dLen;
                dOff += dLen;
                cOff += checksumSize;
            }
        }
        //writing is done below (mainly to handle IOException)
    }

    try {
        if (blockInPosition >= 0) {
            //use transferTo(). Checks on out and blockIn are already done. 

            SocketOutputStream sockOut = (SocketOutputStream) out;
            //first write the packet
            sockOut.write(buf, 0, dataOff);
            // no need to flush. since we know out is not a buffered stream. 

            sockOut.transferToFully(((FileInputStream) blockIn).getChannel(), blockInPosition, len);

            blockInPosition += len;
        } else {
            // normal transfer
            out.write(buf, 0, dataOff + len);
        }

    } catch (IOException e) {
        /* exception while writing to the client (well, with transferTo(),
         * it could also be while reading from the local file).
         */
        throw ioeToSocketException(e);
    }

    return len;
}