Example usage for java.nio ByteBuffer position

List of usage examples for java.nio ByteBuffer position

Introduction

This page lists example usages of the java.nio ByteBuffer position() method, collected from open-source projects.

Prototype

public final int position() 

Document

Returns the position of this buffer.
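Before the project examples below, here is a minimal, self-contained sketch (not drawn from any of the listed projects) showing how relative operations advance the position and how flip() resets it for reading:

import java.nio.ByteBuffer;

public class PositionDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        System.out.println(buf.position()); // 0: a fresh buffer starts at position 0
        buf.putInt(42);                     // a relative put advances the position
        System.out.println(buf.position()); // 4
        buf.flip();                         // limit = 4, position = 0: ready to read
        System.out.println(buf.getInt());   // 42: a relative get advances it again
        System.out.println(buf.position()); // 4
    }
}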

Usage

From source file:io.warp10.continuum.gts.GTSDecoder.java

/**
 * @param baseTimestamp Base timestamp for computing deltas.
 * @param key AES Wrapping key to use for unwrapping encrypted data
 * @param bb ByteBuffer containing the encoded GTS. Only remaining data will be read.
 *                      Encrypted data that cannot be decrypted will be silently ignored.
 *                      If the buffer contains encrypted data which could be decrypted,
 *                      reallocation will take place therefore 'bb' and the internal buffer
 *                      used by this instance of GTSDecoder will
 *                      differ after the first encrypted chunk is encountered.
 */
public GTSDecoder(long baseTimestamp, byte[] key, ByteBuffer bb) {
    this.baseTimestamp = baseTimestamp;
    this.buffer = bb;
    if (null != key) {
        this.wrappingKey = Arrays.copyOfRange(key, 0, key.length);
    } else {
        this.wrappingKey = null;
    }
    this.position = bb.position();
}
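The constructor above records bb.position() so the decoder remembers where in the buffer decoding started. A minimal sketch of the same idea; the field and method names here are hypothetical, and reset() returns to the recorded offset rather than to 0:

import java.nio.ByteBuffer;

class Decoder {
    private final ByteBuffer buffer;
    private final int startPosition; // hypothetical field: where decoding began

    Decoder(ByteBuffer buffer) {
        this.buffer = buffer;
        this.startPosition = buffer.position(); // remember the initial read offset
    }

    void reset() {
        buffer.position(startPosition); // rewind to the recorded start, not to 0
    }
}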

From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.MatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int user = inputBuf.getInt();
                    int item = inputBuf.getInt();
                    double rating = inputBuf.getDouble();
                    // invoke train
                    count++;
                    train(user, item, rating);
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int user = inputBuf.getInt();
                        int item = inputBuf.getInt();
                        double rating = inputBuf.getDouble();
                        // invoke train
                        count++;
                        train(user, item, rating);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
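The method above uses position() twice: inputBuf.position() == 0 detects an empty write-mode buffer (no training examples were appended), and inputBuf.position() > 0 detects unflushed bytes that still need writing to the temporary file. The surrounding flip()/compact() cycle is the standard fill-drain idiom for fixed-size records; a stripped-down sketch, assuming 16-byte records (two ints and a double) as in the code above:

import java.nio.ByteBuffer;

class RecordDrain {
    static final int RECORD_BYTES = 4 + 4 + 8; // int user, int item, double rating

    static void drain(ByteBuffer buf) {
        buf.flip(); // switch from writing to reading
        int remain = buf.remaining();
        for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
            int user = buf.getInt();
            int item = buf.getInt();
            double rating = buf.getDouble();
            // ... invoke training on (user, item, rating) ...
        }
        buf.compact(); // carry any trailing partial record over to the next fill
    }
}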

From source file:edu.umass.cs.gigapaxos.paxospackets.PaxosPacket.java

protected ByteBuffer toBytes(ByteBuffer bbuf) throws UnsupportedEncodingException {
    // paxospacket stuff
    bbuf.putInt(PaxosPacket.PaxosPacketType.PAXOS_PACKET.getInt()); // type
    bbuf.putInt(this.packetType.getInt()); // paxos type
    bbuf.putInt(this.version);
    // paxosID length followed by paxosID
    byte[] paxosIDBytes = this.paxosID != null ? this.paxosID.getBytes(CHARSET) : new byte[0];
    bbuf.put((byte) paxosIDBytes.length);
    bbuf.put(paxosIDBytes);
    assert (bbuf.position() == SIZEOF_PAXOSPACKET_FIXED + paxosIDBytes.length) : bbuf.position() + " != "
            + SIZEOF_PAXOSPACKET_FIXED + paxosIDBytes.length;
    return bbuf;
}
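The assertion works because each relative put advances position() by exactly the number of bytes written, so the final position equals the fixed header size plus the variable paxosID length. The same position-delta check can wrap any serialization step; a minimal sketch with a hypothetical three-int header:

import java.nio.ByteBuffer;

class SizeCheck {
    static final int SIZEOF_HEADER = 12; // hypothetical fixed header size: three ints

    static void writeHeader(ByteBuffer buf, int type, int subtype, int version) {
        int before = buf.position();
        buf.putInt(type);
        buf.putInt(subtype);
        buf.putInt(version);
        assert buf.position() - before == SIZEOF_HEADER
                : (buf.position() - before) + " != " + SIZEOF_HEADER;
    }
}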

From source file:com.github.hrpc.rpc.Server.java

/**
 * Helper for {@link #channelRead(ReadableByteChannel, ByteBuffer)}
 * and {@link #channelWrite(WritableByteChannel, ByteBuffer)}. Only
 * one of readCh or writeCh should be non-null.
 *
 * @see #channelRead(ReadableByteChannel, ByteBuffer)
 * @see #channelWrite(WritableByteChannel, ByteBuffer)
 */
private static int channelIO(ReadableByteChannel readCh, WritableByteChannel writeCh, ByteBuffer buf)
        throws IOException {

    int originalLimit = buf.limit();
    int initialRemaining = buf.remaining();
    int ret = 0;

    while (buf.remaining() > 0) {
        try {
            int ioSize = Math.min(buf.remaining(), NIO_BUFFER_LIMIT);
            buf.limit(buf.position() + ioSize);

            ret = (readCh == null) ? writeCh.write(buf) : readCh.read(buf);

            if (ret < ioSize) {
                break;
            }

        } finally {
            buf.limit(originalLimit);
        }
    }

    int nBytes = initialRemaining - buf.remaining();
    return (nBytes > 0) ? nBytes : ret;
}
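The key move here is buf.limit(buf.position() + ioSize): temporarily pulling the limit down caps how many bytes a single read or write call may touch, and the finally block restores the real limit. A reduced sketch of the same technique for the write direction only (CHUNK is a hypothetical cap standing in for NIO_BUFFER_LIMIT):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

class ChunkedWrite {
    static final int CHUNK = 64 * 1024; // hypothetical per-call cap

    static int writeFully(WritableByteChannel ch, ByteBuffer buf) throws IOException {
        int originalLimit = buf.limit();
        int initialRemaining = buf.remaining();
        while (buf.remaining() > 0) {
            try {
                int ioSize = Math.min(buf.remaining(), CHUNK);
                buf.limit(buf.position() + ioSize); // expose at most one chunk
                if (ch.write(buf) < ioSize) {
                    break; // short write: let the caller retry later
                }
            } finally {
                buf.limit(originalLimit); // always restore the real limit
            }
        }
        return initialRemaining - buf.remaining(); // bytes actually written
    }
}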

From source file:com.flexive.core.stream.BinaryUploadProtocol.java

/**
 * {@inheritDoc}
 */
@Override
public synchronized boolean receiveStream(ByteBuffer buffer) throws IOException {
    if (!buffer.hasRemaining()) {
        //this can only happen on remote clients
        if (LOG.isDebugEnabled())
            LOG.debug("aborting (empty)");
        return false;
    }
    if (!rcvStarted) {
        rcvStarted = true;
        if (LOG.isDebugEnabled())
            LOG.debug("(internal serverside) receive start");
        try {
            pout = getContentStorage().receiveTransitBinary(division, handle, mimeType, expectedLength,
                    timeToLive);
        } catch (SQLException e) {
            LOG.error("SQL Error trying to receive binary stream: " + e.getMessage(), e);
        } catch (FxNotFoundException e) {
            LOG.error("Failed to lookup content storage for division #" + division + ": "
                    + e.getLocalizedMessage());
        }
    }
    if (LOG.isDebugEnabled() && count + buffer.remaining() > expectedLength) {
        LOG.debug("poss. overflow: pos=" + buffer.position() + " lim=" + buffer.limit() + " cap="
                + buffer.capacity());
        LOG.debug("Curr count: " + count + " count+rem="
                + (count + buffer.remaining() + " delta:" + ((count + buffer.remaining()) - expectedLength)));
    }
    count += buffer.remaining();
    pout.write(buffer.array(), buffer.position(), buffer.remaining());
    buffer.clear();
    if (expectedLength > 0 && count >= expectedLength) {
        if (LOG.isDebugEnabled())
            LOG.debug("aborting");
        return false;
    }
    return true;
}
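Note that pout.write(buffer.array(), buffer.position(), buffer.remaining()) assumes an array-backed buffer whose arrayOffset() is 0; for sliced or direct buffers that assumption breaks. A hedged sketch of a safer variant that guards with hasArray() and accounts for arrayOffset():

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

class DrainToStream {
    static void drain(ByteBuffer buf, OutputStream out) throws IOException {
        if (buf.hasArray()) {
            // arrayOffset() matters for sliced or duplicated buffers; the snippet
            // above omits it, which is safe only when the offset is 0.
            out.write(buf.array(), buf.arrayOffset() + buf.position(), buf.remaining());
            buf.position(buf.limit()); // mark the bytes as consumed
        } else {
            byte[] tmp = new byte[buf.remaining()];
            buf.get(tmp); // direct buffers have no backing array; copy out instead
            out.write(tmp);
        }
    }
}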

From source file:hivemall.mf.BPRMatrixFactorizationUDTF.java

private final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.BPRMatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int u = inputBuf.getInt();
                    int i = inputBuf.getInt();
                    int j = inputBuf.getInt();
                    // invoke train
                    count++;
                    train(u, i, j);
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int u = inputBuf.getInt();
                        int i = inputBuf.getInt();
                        int j = inputBuf.getInt();
                        // invoke train
                        count++;
                        train(u, i, j);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}

From source file:com.koda.integ.hbase.blockcache.OffHeapBlockCacheOld.java

/**
 * Read external with codec.
 *
 * @param blockName the block name
 * @return the cacheable
 * @throws IOException Signals that an I/O exception has occurred.
 */
private Cacheable readExternalWithCodec(String blockName) throws IOException {
    if (overflowExtEnabled == false)
        return null;
    // Check if we have  already this block in external storage cache
    try {
        // We use 16 - byte hash for external storage cache  
        byte[] hashed = Utils.hash128(blockName);
        StorageHandle handle = (StorageHandle) extStorageCache.get(hashed);
        if (handle == null)
            return null;
        ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer();
        SerDe serde = extStorageCache.getSerDe();
        @SuppressWarnings("unused")
        Codec codec = extStorageCache.getCompressionCodec();

        buffer.clear();

        StorageHandle newHandle = storage.getData(handle, buffer);
        if (buffer.position() > 0)
            buffer.flip();
        int size = buffer.getInt();
        if (size == 0)
            return null;
        // Skip key
        int keySize = buffer.getInt();
        buffer.position(8 + keySize);
        boolean inMemory = buffer.get() == (byte) 1;

        //buffer.position(5);
        buffer.limit(size + 4);
        Cacheable obj = (Cacheable) serde.readCompressed(buffer/*, codec*/);
        if (inMemory) {
            permGenCache.put(blockName, obj);
        } else {
            tenGenCache.put(blockName, obj);
        }
        if (newHandle.equals(handle) == false) {
            extStorageCache.put(hashed, newHandle);
        }

        return obj;

    } catch (NativeMemoryException e) {
        throw new IOException(e);
    }

}
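Two position() idioms appear above: the guard if (buffer.position() > 0) buffer.flip() switches to read mode only when getData() actually produced bytes, and the absolute buffer.position(8 + keySize) jumps past the two int prefixes and the key bytes in one move. A small sketch of the skip-by-absolute-position part, mirroring the record layout the code assumes:

import java.nio.ByteBuffer;

class SkipFields {
    // Assumed layout: [int totalSize][int keySize][key bytes][byte inMemory flag]...
    static boolean readInMemoryFlag(ByteBuffer buf) {
        int size = buf.getInt();      // total record size (read but not needed here)
        int keySize = buf.getInt();   // key length prefix
        buf.position(8 + keySize);    // jump past both ints and the key bytes
        return buf.get() == (byte) 1; // the flag byte right after the key
    }
}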

From source file:com.rapleaf.hank.storage.curly.CurlyReader.java

@Override
// Note: the buffer in result must be at least readBufferSize long
public void readRecord(ByteBuffer location, ReaderResult result) throws IOException {
    // Attempt to load value from the cache
    if (cache != null && loadValueFromCache(location, result)) {
        return;
    }
    // Deep copy the location if caching is activated
    ByteBuffer locationDeepCopy = cache != null ? Bytes.byteBufferDeepCopy(location) : null;
    if (blockCompressionCodec == null) {
        // When not using block compression, location just contains an offset. Decode it.
        long recordFileOffset = EncodingHelper.decodeLittleEndianFixedWidthLong(location);
        // Directly read record into result
        readRecordAtOffset(recordFileOffset, result);
    } else {
        // When using block compression, location contains the block's offset and an offset in the block. Decode them.
        long recordFileOffset = EncodingHelper.decodeLittleEndianFixedWidthLong(location.array(),
                location.arrayOffset() + location.position(), offsetNumBytes);
        long offsetInBlock = EncodingHelper.decodeLittleEndianFixedWidthLong(location.array(),
                location.arrayOffset() + location.position() + offsetNumBytes, offsetInBlockNumBytes);
        // Read in the compressed block into the result
        readRecordAtOffset(recordFileOffset, result);

        // Decompress the block
        InputStream blockInputStream = new ByteArrayInputStream(result.getBuffer().array(),
                result.getBuffer().arrayOffset() + result.getBuffer().position(),
                result.getBuffer().remaining());
        // Build an InputStream corresponding to the compression codec
        InputStream decompressedBlockInputStream;
        switch (blockCompressionCodec) {
        case GZIP:
            decompressedBlockInputStream = new GZIPInputStream(blockInputStream);
            break;
        case SLOW_IDENTITY:
            decompressedBlockInputStream = new BufferedInputStream(blockInputStream);
            break;
        default:
            throw new RuntimeException("Unknown block compression codec: " + blockCompressionCodec);
        }
        // Decompress into the specialized result buffer
        IOUtils.copy(decompressedBlockInputStream, result.getDecompressionOutputStream());
        ByteBuffer decompressedBlockByteBuffer = result.getDecompressionOutputStream().getByteBuffer();

        // Position ourselves at the beginning of the actual value
        decompressedBlockByteBuffer.position((int) offsetInBlock);
        // Determine result value size
        int valueSize = EncodingHelper.decodeLittleEndianVarInt(decompressedBlockByteBuffer);

        // We can exactly wrap our value
        ByteBuffer value = ByteBuffer.wrap(decompressedBlockByteBuffer.array(),
                decompressedBlockByteBuffer.arrayOffset() + decompressedBlockByteBuffer.position(), valueSize);

        // Copy decompressed result into final result buffer
        result.requiresBufferSize(valueSize);
        result.getBuffer().clear();
        result.getBuffer().put(value);
        result.getBuffer().flip();
    }
    // Store result in cache if needed
    if (cache != null) {
        addValueToCache(locationDeepCopy, result.getBuffer());
    }
}
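ByteBuffer.wrap(array, arrayOffset() + position(), length), as used for the value above, is the zero-copy way to expose a sub-range of one buffer as another; adding arrayOffset() to position() yields the correct absolute index into the backing array. A minimal helper in the same spirit (valid only for array-backed buffers):

import java.nio.ByteBuffer;

class WrapRemaining {
    // Wraps the unread portion of an array-backed buffer as a zero-copy view.
    static ByteBuffer remainingView(ByteBuffer buf) {
        return ByteBuffer.wrap(buf.array(),
                buf.arrayOffset() + buf.position(), // absolute index into the backing array
                buf.remaining());
    }
}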

From source file:edu.umass.cs.gigapaxos.paxospackets.PaxosPacket.java

protected PaxosPacket(ByteBuffer bbuf) throws UnsupportedEncodingException, UnknownHostException {
    super(PaxosPacketType.PAXOS_PACKET);

    bbuf.getInt(); // packet type
    this.packetType = PaxosPacketType.getPaxosPacketType(bbuf.getInt());
    this.version = bbuf.getInt();
    byte paxosIDLength = bbuf.get();
    byte[] paxosIDBytes = new byte[paxosIDLength];
    bbuf.get(paxosIDBytes);
    this.paxosID = paxosIDBytes.length > 0 ? new String(paxosIDBytes, CHARSET) : null;
    int exactLength = (4 + 4 + 4 + 1 + paxosIDBytes.length);
    assert (bbuf.position() == exactLength);
}

From source file:edu.hawaii.soest.kilonalu.flntu.FLNTUSource.java

/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initializing occurs.  This method contains the detailed code for 
 * streaming the data and interpreting the stream.
 */
protected boolean execute() {
    logger.debug("FLNTUSource.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean failed = false;

    SocketChannel socket = getSocketConnection();

    // while data are being sent, read them into the buffer
    try {
        // create four byte placeholders used to evaluate up to a four-byte 
        // window.  The FIFO layout looks like:
        //           -------------------------
        //   in ---> | One | Two |Three|Four |  ---> out
        //           -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;

        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());

        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());

        // add a channel of data that will be pushed to the server.  
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        ChannelMap rbnbChannelMap = new ChannelMap();

        // while there are bytes to read from the socket ...
        while (socket.read(buffer) != -1 || buffer.position() > 0) {

            // prepare the buffer for reading
            buffer.flip();

            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();
                logger.debug("char: " + (char) byteOne + "\t" + "b1: "
                        + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: "
                        + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: "
                        + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: "
                        + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: "
                        + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t"
                        + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t"
                        + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state);

                // Use a State Machine to process the byte stream.
                switch (state) {

                case 0:

                    // sample sets begin with 'mvs 1\r\n' and end with 'mvs 0\r\n'.  Find the 
                    // beginning of the sample set using the 4-byte window (s 1\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x31 && byteFour == 0x20) {
                        // we've found the beginning of a sample set, move on
                        state = 1;
                        break;

                    } else {
                        break;
                    }

                case 1: // read the rest of the bytes to the next EOL characters

                    // sample line is terminated by record delimiter byte (\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x30 && byteFour == 0x20) {

                        // we've found the sample set ending, clear buffers and return
                        // to state 0 to wait for the next set
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        state = 0;

                        // if we're not at the sample set end, look for individual samples    
                    } else if (byteOne == 0x0A && byteTwo == 0x0D) {

                        // found the sample ending delimiter
                        // add in the sample delimiter to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;

                        }

                        // extract just the length of the sample bytes out of the
                        // sample buffer, and place it in the channel map as a 
                        // byte array.  Then, send it to the data turbine.
                        byte[] sampleArray = new byte[sampleByteCount];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);

                        // send the sample to the data turbine
                        rbnbChannelMap.PutTimeAuto("server");
                        String sampleString = new String(sampleArray, "US-ASCII");
                        int channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                        rbnbChannelMap.PutMime(channelIndex, "text/plain");
                        rbnbChannelMap.PutDataAsString(channelIndex, sampleString);
                        getSource().Flush(rbnbChannelMap);
                        logger.info("Sample: " + sampleString.substring(0, sampleString.length() - 2)
                                + " sent data to the DataTurbine. ");
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        break;

                    } else { // not 0x0

                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found

                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);

                        }

                        break;
                    } // end if for 0x0D20 EOL

                } // end switch statement

                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;

            } //end while (more unread bytes)

            // prepare the buffer to read in more bytes from the stream
            buffer.compact();

        } // end while (more socket bytes to read)
        socket.close();

    } catch (IOException e) {
        // handle exceptions
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication  exception, log the exception, 
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    }

    return !failed;
}
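The loop condition socket.read(buffer) != -1 || buffer.position() > 0 keeps the loop alive even at end-of-stream while compact() has carried unconsumed bytes forward, since in write mode a non-zero position means leftover data. A condensed sketch of that read/flip/drain/compact cycle, with the per-byte processing elided:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

class ReadLoop {
    static void consume(ReadableByteChannel ch, ByteBuffer buf) throws IOException {
        // Keep going past EOF while compact() has left bytes in the buffer.
        while (ch.read(buf) != -1 || buf.position() > 0) {
            buf.flip(); // switch to read mode
            while (buf.hasRemaining()) {
                byte b = buf.get();
                // ... feed b into the state machine ...
            }
            buf.compact(); // back to write mode, preserving any partial frame
        }
    }
}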