Example usage for java.nio ByteBuffer flip

List of usage examples for java.nio ByteBuffer flip

Introduction

On this page you can find example usages of java.nio ByteBuffer flip drawn from open-source projects.

Prototype

public final Buffer flip() 

Document

Flips this buffer. The limit is set to the current position and the position is then set to zero. If the mark is defined, it is discarded.
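
flip() is the switch between filling a buffer and draining it. The minimal sketch below is not taken from any of the projects listed under Usage (the class and variable names are illustrative); it only shows the put/flip/get/clear cycle that the examples on this page build on.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class FlipDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(64);

        // Filling phase: position advances, limit stays at capacity.
        buf.put("hello".getBytes(StandardCharsets.UTF_8));

        // flip(): limit = current position, position = 0 -> ready for draining.
        buf.flip();

        byte[] out = new byte[buf.remaining()];
        buf.get(out);
        System.out.println(new String(out, StandardCharsets.UTF_8)); // prints "hello"

        // clear() resets position and limit so the buffer can be filled again.
        buf.clear();
    }
}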

Usage

From source file:com.healthmarketscience.jackcess.Database.java

/**
 * Copies the given InputStream to the given channel using the most
 * efficient means possible.
 */
private static void transferFrom(FileChannel channel, InputStream in) throws IOException {
    ReadableByteChannel readChannel = Channels.newChannel(in);
    if (!BROKEN_NIO) {
        // sane implementation
        channel.transferFrom(readChannel, 0, MAX_EMPTYDB_SIZE);
    } else {
        // do things the hard way for broken vms
        ByteBuffer bb = ByteBuffer.allocate(8096);
        while (readChannel.read(bb) >= 0) {
            bb.flip();
            channel.write(bb);
            bb.clear();
        }
    }
}
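
In the fallback path, each read fills the buffer, flip() switches it from filling to draining so channel.write consumes exactly the bytes just read, and clear() resets it for the next read.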

From source file:org.apache.kylin.engine.mr.steps.FactDistinctColumnsReducer.java

private void outputStatistics(List<Long> allCuboids) throws IOException, InterruptedException {
    // output written to baseDir/statistics/statistics-r-00000 (etc)
    String statisticsFileName = BatchConstants.CFG_OUTPUT_STATISTICS + "/"
            + BatchConstants.CFG_OUTPUT_STATISTICS;

    ByteBuffer valueBuf = ByteBuffer.allocate(BufferedMeasureCodec.DEFAULT_BUFFER_SIZE);

    // mapper overlap ratio at key -1
    long grandTotal = 0;
    for (HLLCounter hll : cuboidHLLMap.values()) {
        grandTotal += hll.getCountEstimate();
    }
    double mapperOverlapRatio = grandTotal == 0 ? 0 : (double) totalRowsBeforeMerge / grandTotal;
    mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1),
            new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);

    // mapper number at key -2
    mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2),
            new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);

    // sampling percentage at key 0
    mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L),
            new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);

    for (long i : allCuboids) {
        valueBuf.clear();
        cuboidHLLMap.get(i).writeRegisters(valueBuf);
        valueBuf.flip();
        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i),
                new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
    }
}
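
For every cuboid the buffer is cleared, the HLL registers are serialized into it, and flip() sets the limit to the number of bytes written, so valueBuf.array() together with valueBuf.limit() describes exactly the serialized payload wrapped into the BytesWritable.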

From source file:org.alfresco.contentstore.CassandraContentStore.java

private ByteBuffer getNodeBlock(Node node, long rangeId, int size) {
    ByteBuffer bb = null;

    String nodeId = node.getNodeId();
    long nodeVersion = node.getNodeVersion();
    MimeType mimeType = node.getMimeType();

    ResultSet rs = cassandraSession.getCassandraSession()
            .execute(getNodeBlockStatement.bind(nodeId, nodeVersion, mimeType.getMimetype(), rangeId));
    Row row = rs.one();
    if (row != null) {
        bb = row.getBytes("data");
        bb.compact();
        bb.flip();
        if (bb.limit() > size) {
            bb.limit(size);
        }
    }

    return bb;
}
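
The buffer returned by the driver is compacted and then flipped, which moves any unread bytes to the front and bounds the readable region; the limit is then reduced if more than the requested number of bytes is available, so the caller sees at most size readable bytes.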

From source file:com.yobidrive.diskmap.needles.Needle.java

public void putNeedleInBuffer(ByteBuffer result) throws Exception {
    int startPosition = result.position();
    result.limit(result.capacity());
    result.putInt(MAGICSTART);
    result.putLong(needleNumber);
    result.put(flags);
    result.putInt(keyBytes.length);
    result.put(keyBytes);
    result.putInt(version == null ? 0 : version.toBytes().length);
    if (version != null)
        result.put(version.toBytes());
    result.putInt(previousNeedle == null ? -1 : previousNeedle.getNeedleFileNumber()); // Chaining
    result.putLong(previousNeedle == null ? -1L : previousNeedle.getNeedleOffset()); // Chaining
    result.putInt(originalFileNumber); // Original needle location (for cleaning)
    result.putInt(originalSize); // Original needle size (for cleaning)
    result.putInt(data == null ? 0 : data.length);
    if (data != null)
        result.put(data);
    result.putInt(MAGICEND);
    result.put(hashMD5());
    while (((result.position() - startPosition) % 256) > 0) {
        result.put(PADDING);
    }
    result.flip();
}
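
Once the needle record (header, key, version, chaining info, data, MD5 hash and padding) has been assembled, flip() bounds the buffer to exactly the bytes produced so the caller can write the record out in one pass.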

From source file:hivemall.fm.FactorizationMachineUDTF.java

protected void runTrainingIteration(int iterations) throws HiveException {
    final ByteBuffer inputBuf = this._inputBuf;
    final NioStatefullSegment fileIO = this._fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = _t;
    final boolean adaregr = _va_rand != null;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.fm.FactorizationMachines$Counter", "iteration");

    try {
        if (fileIO.getPosition() == 0L) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int bytes = inputBuf.getInt();
                    assert (bytes > 0) : bytes;
                    int xLength = inputBuf.getInt();
                    final Feature[] x = new Feature[xLength];
                    for (int j = 0; j < xLength; j++) {
                        x[j] = instantiateFeature(inputBuf);
                    }
                    double y = inputBuf.getDouble();
                    // invoke train
                    ++_t;
                    train(x, y, adaregr);
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_t) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.remaining() > 0) {
                writeBuffer(inputBuf, fileIO);
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                fileIO.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    if (remain < INT_BYTES) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= INT_BYTES) {
                        int pos = inputBuf.position();
                        int recordBytes = inputBuf.getInt();
                        remain -= INT_BYTES;
                        if (remain < recordBytes) {
                            inputBuf.position(pos);
                            break;
                        }

                        final int xLength = inputBuf.getInt();
                        final Feature[] x = new Feature[xLength];
                        for (int j = 0; j < xLength; j++) {
                            x[j] = instantiateFeature(inputBuf);
                        }
                        double y = inputBuf.getDouble();

                        // invoke training
                        ++_t;
                        train(x, y, adaregr);

                        remain -= recordBytes;
                    }
                    inputBuf.compact();
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus " + NumberUtils.formatNumber(_t)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}
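
flip() appears twice here: once to switch the in-memory buffer from accumulating training examples to replaying them (with rewind() restarting each iteration), and once after every read from the temporary file to expose the newly read bytes; compact() then keeps any partially read record at the front of the buffer for the next read.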

From source file:hivemall.GeneralLearnerBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.GeneralLearnerBase$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int featureVectorLength = buf.getInt();
                    final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                    for (int j = 0; j < featureVectorLength; j++) {
                        featureVector[j] = readFeatureValue(buf, featureType);
                    }
                    float target = buf.getFloat();
                    train(featureVector, target);
                }
                buf.rewind();

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // writes training examples to a buffer in the temporary file
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;

                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int featureVectorLength = buf.getInt();
                        final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                        for (int j = 0; j < featureVectorLength; j++) {
                            featureVector[j] = readFeatureValue(buf, featureType);
                        }
                        float target = buf.getFloat();
                        train(featureVector, target);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
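
This follows the same pattern as the previous example: flip() and rewind() drive the in-memory replay, while flip() and compact() drive the record-by-record replay from the temporary file.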

From source file:com.diozero.sandpit.imu.invensense.MPU9150DriverMqttPublisher.java

private void mqttPublish(ImuData imu_data, double[] ypr) throws MqttException {
    if (mqttClient != null) {
        MqttMessage message = new MqttMessage();
        message.setQos(MQTT_QOS_AT_MOST_ONCE);

        // 4 sets of 3 doubles, 1 set of 4 doubles
        byte[] bytes = new byte[4 * 3 * 8 + 1 * 4 * 8];
        ByteBuffer buffer = ByteBuffer.wrap(bytes);

        buffer.putDouble(imu_data.getCompass().getX());
        buffer.putDouble(imu_data.getCompass().getY());
        buffer.putDouble(imu_data.getCompass().getZ());

        buffer.putDouble(imu_data.getAccel().getX());
        buffer.putDouble(imu_data.getAccel().getY());
        buffer.putDouble(imu_data.getAccel().getZ());

        buffer.putDouble(imu_data.getGyro().getX());
        buffer.putDouble(imu_data.getGyro().getY());
        buffer.putDouble(imu_data.getGyro().getZ());

        buffer.putDouble(imu_data.getQuaternion().getQ0());
        buffer.putDouble(imu_data.getQuaternion().getQ1());
        buffer.putDouble(imu_data.getQuaternion().getQ2());
        buffer.putDouble(imu_data.getQuaternion().getQ3());

        buffer.putDouble(ypr[0]);
        buffer.putDouble(ypr[1]);
        buffer.putDouble(ypr[2]);

        buffer.flip();
        message.setPayload(bytes);
        mqttClient.publish(MQTT_TOPIC_IMU, message);
    }
}
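
Because the buffer wraps the bytes array, the encoded doubles are already in the array once the puts complete; the flip() marks the end of the writing phase before the array is handed to setPayload.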

From source file:com.l2jfree.network.mmocore.ReadWriteThread.java

private void readPacket(SelectionKey key) {
    @SuppressWarnings("unchecked")
    T con = (T) key.attachment();

    ByteBuffer buf = con.getReadBuffer();

    if (buf == null) {
        buf = getReadBuffer();
        buf.clear();
    }

    int readPackets = 0;
    int readBytes = 0;

    for (;;) {
        final int remainingFreeSpace = buf.remaining();
        int result = -2;

        try {
            result = con.getReadableByteChannel().read(buf);
        } catch (IOException e) {
            // error handling goes below
        }

        switch (result) {
        case -2: // IOException
        {
            closeConnectionImpl(con, true);
            return;
        }
        case -1: // EOS
        {
            closeConnectionImpl(con, false);
            return;
        }
        default: {
            buf.flip();
            // try to read as many packets as possible
            for (;;) {
                final int startPos = buf.position();

                if (readPackets >= getMaxIncomingPacketsPerPass() || readBytes >= getMaxIncomingBytesPerPass())
                    break;

                if (!tryReadPacket2(con, buf))
                    break;

                readPackets++;
                readBytes += (buf.position() - startPos);
            }
            break;
        }
        }

        // stop reading, if we have reached a config limit
        if (readPackets >= getMaxIncomingPacketsPerPass() || readBytes >= getMaxIncomingBytesPerPass())
            break;

        // if the buffer wasn't filled completely, we should stop trying as the input channel is empty
        if (remainingFreeSpace > result)
            break;

        // compact the buffer for reusing the remaining bytes
        if (buf.hasRemaining())
            buf.compact();
        else
            buf.clear();
    }

    // check if there are more bytes in the buffer and allocate/compact to prevent content loss.
    if (buf.hasRemaining()) {
        if (buf == getReadBuffer()) {
            con.setReadBuffer(getPooledBuffer().put(getReadBuffer()));
        } else {
            buf.compact();
        }
    } else {
        if (buf == getReadBuffer()) {
            // no additional buffers used
        } else {
            con.setReadBuffer(null);
            recycleBuffer(buf);
        }
    }
}
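
After each successful channel read, flip() exposes the received bytes so that complete packets can be parsed; any unread remainder is then compacted (or moved to a pooled buffer once the loop exits) so partial packets survive until the next read.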

From source file:byps.http.HHttpServlet.java

@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    if (log.isDebugEnabled())
        log.debug("doGet(");

    // Test adapter function?
    final String testAdapterStr = request.getParameter(HTestAdapter.KEY_PARAM);
    if (testAdapterStr != null && testAdapterStr.length() != 0) {
        doTestAdapter(request, response);
        if (log.isDebugEnabled())
            log.debug(")doGet");
        return;
    }

    // Negotiate?
    final String negoStr = request.getParameter("negotiate");
    if (log.isDebugEnabled())
        log.debug("negotiate=" + negoStr);
    if (negoStr != null && negoStr.length() != 0) {
        ByteBuffer ibuf = ByteBuffer.allocate(negoStr.length() * 3);
        ibuf.put(negoStr.getBytes("UTF-8"));
        ibuf.flip();
        doNegotiate(request, response, ibuf);
        return;
    }

    // Get stream or old utility request

    // Parameter messageid
    final String messageIdStr = request.getParameter("messageid");
    if (log.isDebugEnabled())
        log.debug("messageId=" + messageIdStr);
    if (messageIdStr == null || messageIdStr.length() == 0) {
        // response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
        response.getWriter().println("HHttpServlet running.");
        return;
    }

    // Parameter streamid is set if a stream is to be read
    final String streamIdStr = request.getParameter("streamid");
    if (log.isDebugEnabled())
        log.debug("streamId=" + streamIdStr);

    // Parameter cancel is set, if the message given by messageid must be
    // canceled. For newer clients, this functionality is replaced by the
    // UtilityRequest interface.
    // To support older clients, it is still handled here.
    final String cancelStr = request.getParameter("cancel");
    if (log.isDebugEnabled())
        log.debug("cancel=" + cancelStr);
    if (cancelStr != null && cancelStr.length() != 0) {

        final HSession sess = getSessionFromMessageHeaderOrHttpRequest(null, request);
        if (sess != null) {

            long messageId = BBufferJson.parseLong(messageIdStr);

            if (messageId == HWireClient.MESSAGEID_CANCEL_ALL_REQUESTS) {
                if (log.isDebugEnabled())
                    log.debug("activeMessages.cleanup");
                sess.wireServer.cancelAllMessages();
            } else if (messageId == HWireClient.MESSAGEID_DISCONNECT) {
                if (log.isDebugEnabled())
                    log.debug("sess.done");
                sess.done();
            } else {
                if (log.isDebugEnabled())
                    log.debug("activeMessages.cancelMessage");
                sess.wireServer.cancelMessage(messageId);
            }

        }

        response.setStatus(HttpServletResponse.SC_OK);
        response.getOutputStream().close();

    }

    // Read a stream
    else if (streamIdStr != null && streamIdStr.length() != 0) {
        if (log.isDebugEnabled())
            log.debug("sendOutgoingStream");

        final String serverIdStr = request.getParameter("serverid");

        // Byte-Range request?
        // http://stackoverflow.com/questions/8293687/sample-http-range-request-session
        // Range: bytes=0-
        // Range: bytes=64312833-64657026

        final BContentStream stream = doGetStream(serverIdStr, messageIdStr, streamIdStr);

        HRangeRequest rangeRequest = new HRangeRequest(request);

        sendOutgoingStream(stream, response, rangeRequest);
    }

    // Bad request
    else {
        response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
        response.getOutputStream().close();
    }

    if (log.isDebugEnabled())
        log.debug(")doGet");
}
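
For the negotiate case, the parameter string is encoded as UTF-8 into a buffer (presumably sized at three bytes per character to cover the worst-case UTF-8 expansion), and flip() makes the encoded bytes readable before the buffer is passed to doNegotiate.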

From source file:org.carbondata.core.util.CarbonUtil.java

public static void writeLevelCardinalityFile(String loadFolderLoc, String tableName, int[] dimCardinality)
        throws KettleException {
    String levelCardinalityFilePath = loadFolderLoc + File.separator + CarbonCommonConstants.LEVEL_METADATA_FILE
            + tableName + CarbonCommonConstants.CARBON_METADATA_EXTENSION;
    FileOutputStream fileOutputStream = null;
    FileChannel channel = null;
    try {
        int dimCardinalityArrLength = dimCardinality.length;

        // first four bytes for writing the length of array, remaining for array data
        ByteBuffer buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE
                + dimCardinalityArrLength * CarbonCommonConstants.INT_SIZE_IN_BYTE);

        fileOutputStream = new FileOutputStream(levelCardinalityFilePath);
        channel = fileOutputStream.getChannel();
        buffer.putInt(dimCardinalityArrLength);

        for (int i = 0; i < dimCardinalityArrLength; i++) {
            buffer.putInt(dimCardinality[i]);
        }

        buffer.flip();
        channel.write(buffer);
        buffer.clear();

        LOGGER.info("Level cardinality file written to : " + levelCardinalityFilePath);
    } catch (IOException e) {
        LOGGER.error(
                "Error while writing level cardinality file : " + levelCardinalityFilePath + e.getMessage());
        throw new KettleException("Not able to write level cardinality file", e);
    } finally {
        closeStreams(channel, fileOutputStream);
    }
}
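
The buffer is sized for the length prefix plus the cardinality array, filled with putInt calls, and flipped so that channel.write persists exactly those bytes; the trailing clear() simply resets the buffer state before the streams are closed.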