Example usage for java.nio ByteBuffer rewind

Introduction

On this page you can find example usages of the java.nio.ByteBuffer.rewind() method.

Prototype

public final Buffer rewind() 

Document

Rewinds this buffer: the position is set to zero and the mark is discarded.
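The limit is left unchanged, so a drained buffer can be read again from the start. A minimal, self-contained demonstration:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class RewindDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.put("hello".getBytes(StandardCharsets.US_ASCII));
        buf.flip(); // position = 0, limit = 5: ready for reading

        byte[] first = new byte[buf.remaining()];
        buf.get(first); // drains the buffer: position == limit

        buf.rewind(); // position back to 0, limit untouched, mark discarded

        byte[] second = new byte[buf.remaining()];
        buf.get(second); // reads the same five bytes again

        System.out.println(new String(second, StandardCharsets.US_ASCII)); // hello
    }
}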

Usage

From source file: org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl.java

protected Credentials parseCredentials() throws IOException {
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
    if (tokens != null) {
        dibb.reset(tokens);
        credentials.readTokenStorageStream(dibb);
        tokens.rewind();
    }
    return credentials;
}
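The rewind() here matters because readTokenStorageStream(dibb) consumes the shared tokens buffer as it parses the credentials; rewinding afterwards leaves the ByteBuffer readable from the start for any later caller of getAMContainerSpec().getTokens().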

From source file: hivemall.topicmodel.ProbabilisticTopicModelBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = model.getDocCount();

    long numTrain = numTrainingExamples / miniBatchSize;
    if (numTrainingExamples % miniBatchSize != 0L) {
        numTrain++;
    }

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.topicmodel.ProbabilisticTopicModel$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int wcLength = buf.getInt();
                    final String[] wordCounts = new String[wcLength];
                    for (int j = 0; j < wcLength; j++) {
                        wordCounts[j] = NIOUtils.getString(buf);
                    }
                    update(wordCounts);
                }
                buf.rewind();

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // reads the next chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int wcLength = buf.getInt();
                        final String[] wordCounts = new String[wcLength];
                        for (int j = 0; j < wcLength; j++) {
                            wordCounts[j] = NIOUtils.getString(buf);
                        }
                        update(wordCounts);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
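The in-memory branch above (and the same branch in the two trainers that follow) reduces to one idiom: flip() the buffer once after the writing phase, then alternate a draining pass with rewind() for each extra epoch. A stripped-down sketch of just that loop; the length-prefixed record layout and the train() stub are placeholders, not Hivemall APIs:

import java.nio.ByteBuffer;

public class InMemoryEpochs {
    // Hypothetical record layout: [int length][length bytes], standing in
    // for the UDTF's [recordBytes][wcLength][strings...] framing.
    static void iterate(ByteBuffer buf, int epochs) {
        buf.flip(); // writing is done: limit = end of data, position = 0
        for (int epoch = 1; epoch <= epochs; epoch++) {
            while (buf.remaining() > 0) {
                int recordBytes = buf.getInt();
                byte[] record = new byte[recordBytes];
                buf.get(record);
                train(record); // per-record model update (stub)
            }
            buf.rewind(); // same data, next epoch: position back to 0, limit kept
        }
    }

    static void train(byte[] record) { /* stub */ }
}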

From source file: hivemall.GeneralLearnerBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.GeneralLearnerBase$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int featureVectorLength = buf.getInt();
                    final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                    for (int j = 0; j < featureVectorLength; j++) {
                        featureVector[j] = readFeatureValue(buf, featureType);
                    }
                    float target = buf.getFloat();
                    train(featureVector, target);
                }
                buf.rewind();

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // reads the next chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;

                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int featureVectorLength = buf.getInt();
                        final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                        for (int j = 0; j < featureVectorLength; j++) {
                            featureVector[j] = readFeatureValue(buf, featureType);
                        }
                        float target = buf.getFloat();
                        train(featureVector, target);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
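When the training data spills to a temporary file, these trainers cycle the buffer through clear() → read() → flip() → compact(), resetting the position back to a record boundary whenever a record is split across two reads. A sketch of that cycle against a plain FileChannel (NioStatefulSegment is Hivemall-specific, so the channel stands in for it, and the length-prefixed layout is again a placeholder):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

public class SpillReplay {
    static void replay(FileChannel ch, ByteBuffer buf) throws IOException {
        ch.position(0L);
        buf.clear(); // position = 0, limit = capacity: ready to be filled
        while (ch.read(buf) > 0) {
            buf.flip(); // expose the bytes just read
            while (buf.remaining() >= Integer.BYTES) {
                int pos = buf.position();
                int recordBytes = buf.getInt();
                if (buf.remaining() < recordBytes) {
                    buf.position(pos); // record split across reads: back to its header
                    break;
                }
                byte[] record = new byte[recordBytes];
                buf.get(record);
                process(record); // stub for train()/update()
            }
            buf.compact(); // keep the partial record, resume filling after it
        }
    }

    static void process(byte[] record) { /* stub */ }
}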

From source file: org.shaman.terrain.polygonal.GraphToHeightmap.java

/**
 * Renders the given scene in a top-down manner in the given matrix
 *
 * @param matrix
 * @param scene
 */
private void render(float[][] matrix, final Spatial scene, final ColorRGBA background, float min, float max) {
    final ByteBuffer data = BufferUtils.createByteBuffer(size * size * 4 * 4);
    try {
        app.enqueue(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                //init
                Camera cam = new Camera(size, size);
                cam.setParallelProjection(true);
                final ViewPort view = new ViewPort("Off", cam);
                view.setBackgroundColor(background);
                view.setClearFlags(true, true, true);
                final FrameBuffer buffer = new FrameBuffer(size, size, 1);
                buffer.setDepthBuffer(Image.Format.Depth);
                buffer.setColorBuffer(Image.Format.RGBA32F);
                view.setOutputFrameBuffer(buffer);
                view.attachScene(scene);
                //render
                scene.setCullHint(Spatial.CullHint.Never);
                scene.updateGeometricState();
                view.setEnabled(true);
                app.getRenderManager().renderViewPort(view, 0);
                app.getRenderer().readFrameBufferWithFormat(buffer, data, Image.Format.RGBA32F);
                return new Object();
            }
        }).get();
    } catch (InterruptedException | ExecutionException ex) {
        Logger.getLogger(GraphToHeightmap.class.getName()).log(Level.SEVERE, "unable to render", ex);
        return;
    }
    data.rewind();
    for (int y = 0; y < size; ++y) {
        for (int x = 0; x < size; ++x) {
            float v = data.getFloat();
            v *= (max - min);
            v += min;
            matrix[x][y] = v;
            data.getFloat();
            data.getFloat();
            data.getFloat();
        }
    }
}
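readFrameBufferWithFormat(...) may leave the buffer's position at the end of the written data, so data.rewind() guarantees the getFloat() loop starts at the first pixel. Each RGBA32F pixel is four floats, and render() keeps only the first channel, discarding the other three with bare getFloat() calls. The same stride handling, sketched with an explicit skip; the four-float pixel layout is the only assumption:

import java.nio.ByteBuffer;

public class ChannelExtract {
    // Pulls the first channel out of a rewound RGBA32F readback buffer.
    static float[][] firstChannel(ByteBuffer data, int size) {
        data.rewind(); // read from the first float, wherever the writer left off
        float[][] out = new float[size][size];
        for (int y = 0; y < size; y++) {
            for (int x = 0; x < size; x++) {
                out[x][y] = data.getFloat();                       // R
                data.position(data.position() + 3 * Float.BYTES); // skip G, B, A
            }
        }
        return out;
    }
}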

From source file: org.apache.bookkeeper.bookie.BookieShell.java

/**
 * Scan over an entry log file for a particular entry
 *
 * @param logId
 *          Entry Log File id.
 * @param lId
 *          id of the ledger
 * @param eId
 *          entryId of the ledger we are looking for (-1 for all of the entries of the ledger)
 * @param printMsg
 *          Whether printing the entry data.
 * @throws Exception
 */
protected void scanEntryLogForSpecificEntry(long logId, final long lId, final long eId, final boolean printMsg)
        throws Exception {
    System.out.println("Scan entry log " + logId + " (" + Long.toHexString(logId) + ".log)" + " for LedgerId "
            + lId + ((eId == -1) ? "" : " for EntryId " + eId));
    final MutableBoolean entryFound = new MutableBoolean(false);
    scanEntryLog(logId, new EntryLogScanner() {
        @Override
        public boolean accept(long ledgerId) {
            return ((lId == ledgerId) && ((!entryFound.booleanValue()) || (eId == -1)));
        }

        @Override
        public void process(long ledgerId, long startPos, ByteBuffer entry) {
            long entrysLedgerId = entry.getLong();
            long entrysEntryId = entry.getLong();
            entry.rewind();
            if ((ledgerId == entrysLedgerId) && (ledgerId == lId) && ((entrysEntryId == eId)) || (eId == -1)) {
                entryFound.setValue(true);
                formatEntry(startPos, entry, printMsg);
            }
        }
    });
    if (!entryFound.booleanValue()) {
        System.out.println("LedgerId " + lId + ((eId == -1) ? "" : " EntryId " + eId)
                + " is not available in the entry log " + logId + " (" + Long.toHexString(logId) + ".log)");
    }
}
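process() peeks at the two header longs and then rewind()s so that formatEntry() receives the entry from byte zero. The peek-then-rewind idiom in isolation (note it assumes the record starts at position zero; mark()/reset() is the alternative when it does not):

import java.nio.ByteBuffer;

public class HeaderPeek {
    static boolean headerMatches(ByteBuffer entry, long wantLedger, long wantEntry) {
        long ledgerId = entry.getLong(); // position advances by 8
        long entryId = entry.getLong();  // and by 8 more
        entry.rewind();                  // hand the buffer back unread, position = 0
        return ledgerId == wantLedger && entryId == wantEntry;
    }
}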

From source file: hivemall.mf.BPRMatrixFactorizationUDTF.java

private final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer inputBuf = this.inputBuf;
    final NioFixedSegment fileIO = this.fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.mf.BPRMatrixFactorization$Counter", "iteration");

    try {
        if (lastWritePos == 0) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int u = inputBuf.getInt();
                    int i = inputBuf.getInt();
                    int j = inputBuf.getInt();
                    // invoke train
                    count++;
                    train(u, i, j);
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(count) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.position() > 0) {
                writeBuffer(inputBuf, fileIO, lastWritePos);
            } else if (lastWritePos == 0) {
                return; // no training example
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                long seekPos = 0L;
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // reads the next chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(seekPos, inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;
                    seekPos += bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    assert (remain > 0) : remain;
                    for (; remain >= RECORD_BYTES; remain -= RECORD_BYTES) {
                        int u = inputBuf.getInt();
                        int i = inputBuf.getInt();
                        int j = inputBuf.getInt();
                        // invoke train
                        count++;
                        train(u, i, j);
                    }
                    inputBuf.compact();
                }
                cvState.multiplyLoss(0.5d);
                cvState.logState(iter, eta());
                if (cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                if (cvState.isLossIncreased()) {
                    etaEstimator.update(1.1f);
                } else {
                    etaEstimator.update(0.5f);
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples using a secondary storage (thus " + NumberUtils.formatNumber(count)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}

From source file: org.shaman.terrain.polygonal.GraphToHeightmap.java

/**
 * Renders the given scene in a top-down manner in the given matrix
 *
 * @param matrix
 * @param scene
 */
private void renderColor(float[][][] matrix, final Spatial scene, final ColorRGBA background, float min,
        float max) {
    final ByteBuffer data = BufferUtils.createByteBuffer(size * size * 4 * 4);
    try {
        app.enqueue(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                //init
                Camera cam = new Camera(size, size);
                cam.setParallelProjection(true);
                final ViewPort view = new ViewPort("Off", cam);
                view.setBackgroundColor(background);
                view.setClearFlags(true, true, true);
                final FrameBuffer buffer = new FrameBuffer(size, size, 1);
                buffer.setDepthBuffer(Image.Format.Depth);
                buffer.setColorBuffer(Image.Format.RGBA32F);
                view.setOutputFrameBuffer(buffer);
                view.attachScene(scene);
                //render
                scene.setCullHint(Spatial.CullHint.Never);
                scene.updateGeometricState();
                view.setEnabled(true);
                app.getRenderManager().renderViewPort(view, 0);
                app.getRenderer().readFrameBufferWithFormat(buffer, data, Image.Format.RGBA32F);
                return new Object();
            }
        }).get();
    } catch (InterruptedException | ExecutionException ex) {
        Logger.getLogger(GraphToHeightmap.class.getName()).log(Level.SEVERE, "unable to render", ex);
        return;
    }
    data.rewind();
    for (int y = 0; y < size; ++y) {
        for (int x = 0; x < size; ++x) {
            float v;
            v = data.getFloat();
            v *= (max - min);
            v += min;
            matrix[x][y][0] = v;

            v = data.getFloat();
            v *= (max - min);
            v += min;
            matrix[x][y][1] = v;

            v = data.getFloat();
            v *= (max - min);
            v += min;
            matrix[x][y][2] = v;

            v = data.getFloat();
            v *= (max - min);
            v += min;
            matrix[x][y][3] = v;
        }
    }
}

From source file: org.apache.hadoop.raid.IAEncoder.java

protected void encodeStripe(InputStream[] blocks, long stripeStartOffset, long blockSize, byte[][] bufs,
        Progressable reporter) throws IOException {

    try {
        //trigger, pass file info
        for (int i = 0; i < threadNum; i++)
            fq[i].put(bufs);
    } catch (InterruptedException e) {
    }

    //seq number
    int s = 0;

    //number of bytes read so far
    int read = 0;

    //useless
    int cap = 1 + 11 * threadNum;

    //ByteBuffer[] buf = new ByteBuffer[cap];
    //use buffer to pass data, can be replaced by Byte[]
    ByteBuffer buf;

    while (read < blockSize) {
        //LOG.info("start read round: "+read);
        //indicates whether this is one of the last threadNum packets
        boolean important = false;

        //useless
        int idx = s % cap;
        //if(buf[idx] == null) buf[idx] = ByteBuffer.allocate(bufSize*stripeSize+5);

        //allocate a fresh buffer
        //LOG.info("allocating buffer");
        buf = ByteBuffer.allocate(bufSize * stripeSize + 64);
        //LOG.info("allocated buffer");
        //buf[idx].putInt(0, s);

        //seq number
        buf.putInt(stripeSize * bufSize, s);

        //check whether the last threadNum# packet
        if ((blockSize - read + bufSize - 1) / bufSize <= threadNum) {
            important = true;
            //buf[idx].put(4, (byte)1);
            buf.put(4 + stripeSize * bufSize, (byte) 1);
        } else {
            //buf[idx].put(4, (byte)0);
            buf.put(4 + stripeSize * bufSize, (byte) 0);
        }

        byte[] bufarr = buf.array();
        LOG.info("anchor Encode_stripe " + s + " Data_start_reading " + System.nanoTime());
        for (int i = 0; i < stripeSize; i++) {
            try {
                //RaidUtils.readTillEnd(blocks[i], buf[idx].array(), true, 5+i*bufSize, bufSize);
                //read data
                RaidUtils.readTillEnd(blocks[i], bufarr, true, i * bufSize, bufSize);
            } catch (IOException e) {
            }
        }
        //LOG.info(s+" read: "+bufarr[5]+" "+bufarr[5+bufSize]+" "+bufarr[5+bufSize*2]);
        LOG.info("anchor Encode_stripe " + s + " Data_read " + System.nanoTime());
        //buf[idx].rewind();

        // all writes above were absolute or through the backing array, so the
        // position never moved; rewind() just makes the hand-off state explicit
        buf.rewind();

        int remain = -1;
        int chosen = -1;
        //check the most free ring buffer
        for (int i = 0; i < threadNum; i++) {
            int rc = q[i].remainingCapacity();
            if (remain < rc) {
                remain = rc;
                chosen = i;
            }
        }

        //decide to put the data to which ring buffer
        if (important) {
            chosen = (((int) blockSize - read + bufSize - 1) / bufSize - 1) % threadNum;
            //LOG.info("Important choose "+chosen);
        }

        //LOG.info("chosen number: "+chosen+" with seq: "+s);
        try {
            //out[chosen].put(buf[idx]);
            q[chosen].put(buf);
        } catch (InterruptedException e) {
        }
        LOG.info("anchor Encode_stripe " + s + " Data_pushed " + System.nanoTime());

        //update status
        s++;
        read += bufSize;
        //LOG.info("read: "+read);
    }
}

From source file: hivemall.fm.FactorizationMachineUDTF.java

protected void runTrainingIteration(int iterations) throws HiveException {
    final ByteBuffer inputBuf = this._inputBuf;
    final NioStatefullSegment fileIO = this._fileIO;
    assert (inputBuf != null);
    assert (fileIO != null);
    final long numTrainingExamples = _t;
    final boolean adaregr = _va_rand != null;

    final Reporter reporter = getReporter();
    final Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.fm.FactorizationMachines$Counter", "iteration");

    try {
        if (fileIO.getPosition() == 0L) {// run iterations w/o temporary file
            if (inputBuf.position() == 0) {
                return; // no training example
            }
            inputBuf.flip();

            int iter = 2;
            for (; iter <= iterations; iter++) {
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (inputBuf.remaining() > 0) {
                    int bytes = inputBuf.getInt();
                    assert (bytes > 0) : bytes;
                    int xLength = inputBuf.getInt();
                    final Feature[] x = new Feature[xLength];
                    for (int j = 0; j < xLength; j++) {
                        x[j] = instantiateFeature(inputBuf);
                    }
                    double y = inputBuf.getDouble();
                    // invoke train
                    ++_t;
                    train(x, y, adaregr);
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
                inputBuf.rewind();
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(_t) + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example

            // write training examples in buffer to a temporary file
            if (inputBuf.remaining() > 0) {
                writeBuffer(inputBuf, fileIO);
            }
            try {
                fileIO.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + fileIO.getFile().getAbsolutePath(), e);
            }
            if (LOG.isInfoEnabled()) {
                File tmpFile = fileIO.getFile();
                LOG.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            for (; iter <= iterations; iter++) {
                setCounterValue(iterCounter, iter);

                inputBuf.clear();
                fileIO.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // reads the next chunk of training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = fileIO.read(inputBuf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + fileIO.getFile().getAbsolutePath(),
                                e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    inputBuf.flip();
                    int remain = inputBuf.remaining();
                    if (remain < INT_BYTES) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= INT_BYTES) {
                        int pos = inputBuf.position();
                        int recordBytes = inputBuf.getInt();
                        remain -= INT_BYTES;
                        if (remain < recordBytes) {
                            inputBuf.position(pos);
                            break;
                        }

                        final int xLength = inputBuf.getInt();
                        final Feature[] x = new Feature[xLength];
                        for (int j = 0; j < xLength; j++) {
                            x[j] = instantiateFeature(inputBuf);
                        }
                        double y = inputBuf.getDouble();

                        // invoke training
                        ++_t;
                        train(x, y, adaregr);

                        remain -= recordBytes;
                    }
                    inputBuf.compact();
                }
                if (_cvState.isConverged(iter, numTrainingExamples)) {
                    break;
                }
            }
            LOG.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus " + NumberUtils.formatNumber(_t)
                    + " training updates in total)");
        }
    } finally {
        // delete the temporary file and release resources
        try {
            fileIO.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + fileIO.getFile().getAbsolutePath(), e);
        }
        this._inputBuf = null;
        this._fileIO = null;
    }
}

From source file: com.healthmarketscience.jackcess.Table.java

/**
 * Read the table definition
 */
private void readTableDefinition(ByteBuffer tableBuffer) throws IOException {
    if (LOG.isDebugEnabled()) {
        tableBuffer.rewind();
        LOG.debug("Table def block:\n" + ByteUtil.toHexString(tableBuffer, getFormat().SIZE_TDEF_HEADER));
    }
    _rowCount = tableBuffer.getInt(getFormat().OFFSET_NUM_ROWS);
    _lastLongAutoNumber = tableBuffer.getInt(getFormat().OFFSET_NEXT_AUTO_NUMBER);
    if (getFormat().OFFSET_NEXT_COMPLEX_AUTO_NUMBER >= 0) {
        _lastComplexTypeAutoNumber = tableBuffer.getInt(getFormat().OFFSET_NEXT_COMPLEX_AUTO_NUMBER);
    }
    _tableType = tableBuffer.get(getFormat().OFFSET_TABLE_TYPE);
    _maxColumnCount = tableBuffer.getShort(getFormat().OFFSET_MAX_COLS);
    _maxVarColumnCount = tableBuffer.getShort(getFormat().OFFSET_NUM_VAR_COLS);
    short columnCount = tableBuffer.getShort(getFormat().OFFSET_NUM_COLS);
    _logicalIndexCount = tableBuffer.getInt(getFormat().OFFSET_NUM_INDEX_SLOTS);
    _indexCount = tableBuffer.getInt(getFormat().OFFSET_NUM_INDEXES);

    int rowNum = ByteUtil.getUnsignedByte(tableBuffer, getFormat().OFFSET_OWNED_PAGES);
    int pageNum = ByteUtil.get3ByteInt(tableBuffer, getFormat().OFFSET_OWNED_PAGES + 1);
    _ownedPages = UsageMap.read(getDatabase(), pageNum, rowNum, false);
    rowNum = ByteUtil.getUnsignedByte(tableBuffer, getFormat().OFFSET_FREE_SPACE_PAGES);
    pageNum = ByteUtil.get3ByteInt(tableBuffer, getFormat().OFFSET_FREE_SPACE_PAGES + 1);
    _freeSpacePages = UsageMap.read(getDatabase(), pageNum, rowNum, false);

    for (int i = 0; i < _indexCount; i++) {
        _indexDatas.add(IndexData.create(this, tableBuffer, i, getFormat()));
    }

    int colOffset = getFormat().OFFSET_INDEX_DEF_BLOCK + _indexCount * getFormat().SIZE_INDEX_DEFINITION;
    int dispIndex = 0;
    for (int i = 0; i < columnCount; i++) {
        Column column = new Column(this, tableBuffer, colOffset + (i * getFormat().SIZE_COLUMN_HEADER),
                dispIndex++);
        _columns.add(column);
        if (column.isVariableLength()) {
            // also shove it in the variable columns list, which is ordered
            // differently from the _columns list
            _varColumns.add(column);
        }
    }
    tableBuffer.position(colOffset + (columnCount * getFormat().SIZE_COLUMN_HEADER));
    for (int i = 0; i < columnCount; i++) {
        Column column = _columns.get(i);
        column.setName(readName(tableBuffer));
    }
    Collections.sort(_columns);
    _autoNumColumns = getAutoNumberColumns(_columns);

    // setup the data index for the columns
    int colIdx = 0;
    for (Column col : _columns) {
        col.setColumnIndex(colIdx++);
    }

    // sort variable length columns based on their index into the variable
    // length offset table, because we will write the columns in this order
    Collections.sort(_varColumns, VAR_LEN_COLUMN_COMPARATOR);

    // read index column information
    for (int i = 0; i < _indexCount; i++) {
        _indexDatas.get(i).read(tableBuffer, _columns);
    }

    // read logical index info (may be more logical indexes than index datas)
    for (int i = 0; i < _logicalIndexCount; i++) {
        _indexes.add(new Index(tableBuffer, _indexDatas, getFormat()));
    }

    // read logical index names
    for (int i = 0; i < _logicalIndexCount; i++) {
        _indexes.get(i).setName(readName(tableBuffer));
    }

    Collections.sort(_indexes);

    // re-sort columns if necessary
    if (getDatabase().getColumnOrder() != ColumnOrder.DATA) {
        Collections.sort(_columns, DISPLAY_ORDER_COMPARATOR);
    }

    for (Column col : _columns) {
        // some columns need to do extra work after the table is completely
        // loaded
        col.postTableLoadInit();
    }
}
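Note that almost all of readTableDefinition() uses absolute reads such as getInt(offset), which never move the position; the only rewind() is in the debug branch, so the hex dump starts at the top of the block no matter what ran before it. The absolute/relative distinction in miniature:

import java.nio.ByteBuffer;

public class AbsoluteVsRelative {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        buf.putLong(42L).putLong(7L); // relative puts: position is now 16

        long a = buf.getLong(0);      // absolute get: position stays at 16

        buf.rewind();                 // required only before relative re-reads
        long b = buf.getLong();       // relative get: position is now 8

        System.out.println(a == b);   // true
    }
}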