Example usage for java.nio ByteBuffer clear

Introduction

On this page you can find usage examples for java.nio.ByteBuffer.clear().

Prototype

public final Buffer clear() 

Document

Clears this buffer: the position is set to zero, the limit is set to the capacity, and the mark is discarded. Note that clear() does not erase the data in the buffer; it only resets the indices so the buffer is ready to be filled again.
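
Before the real-world examples below, here is a minimal sketch of the standard fill/drain cycle in which clear() is typically called (the channel name and buffer size are illustrative): clear() resets position and limit so the buffer can be refilled, without zeroing the bytes already stored.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

public class ClearCycleSketch {

    // Drain a channel to EOF, reusing one buffer for every read.
    static void drain(ReadableByteChannel channel) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(1024);
        while (channel.read(buffer) != -1) {
            buffer.flip();                // switch from filling to draining
            while (buffer.hasRemaining()) {
                byte b = buffer.get();    // consume the bytes just read
            }
            buffer.clear();               // position = 0, limit = capacity: ready to refill;
                                          // the old bytes are not zeroed, only overwritten later
        }
    }
}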

Usage

From source file: edu.hawaii.soest.kilonalu.flntu.FLNTUSource.java

/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initializing occurs.  This method contains the detailed code for 
 * streaming the data and interpreting the stream.
 */
protected boolean execute() {
    logger.debug("FLNTUSource.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean failed = false;

    SocketChannel socket = getSocketConnection();

    // while data are being sent, read them into the buffer
    try {
        // create four byte placeholders used to evaluate up to a four-byte 
        // window.  The FIFO layout looks like:
        //           -------------------------
        //   in ---> | One | Two |Three|Four |  ---> out
        //           -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;

        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());

        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());

        // add a channel of data that will be pushed to the server.  
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        ChannelMap rbnbChannelMap = new ChannelMap();

        // while there are bytes to read from the socket ...
        while (socket.read(buffer) != -1 || buffer.position() > 0) {

            // prepare the buffer for reading
            buffer.flip();

            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();
                logger.debug("char: " + (char) byteOne + "\t" + "b1: "
                        + new String(Hex.encodeHex((new byte[] { byteOne }))) + "\t" + "b2: "
                        + new String(Hex.encodeHex((new byte[] { byteTwo }))) + "\t" + "b3: "
                        + new String(Hex.encodeHex((new byte[] { byteThree }))) + "\t" + "b4: "
                        + new String(Hex.encodeHex((new byte[] { byteFour }))) + "\t" + "sample pos: "
                        + sampleBuffer.position() + "\t" + "sample rem: " + sampleBuffer.remaining() + "\t"
                        + "sample cnt: " + sampleByteCount + "\t" + "buffer pos: " + buffer.position() + "\t"
                        + "buffer rem: " + buffer.remaining() + "\t" + "state: " + state);

                // Use a State Machine to process the byte stream.
                switch (state) {

                case 0:

                    // sample sets begin with 'mvs 1\r\n' and end with 'mvs 0\r\n'.  Find the 
                    // beginning of the sample set using the 4-byte window (' 1\r\n')
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x31 && byteFour == 0x20) {
                        // we've found the beginning of a sample set, move on
                        state = 1;
                        break;

                    } else {
                        break;
                    }

                case 1: // read the rest of the bytes to the next EOL characters

                    // sample line is terminated by the record delimiter bytes (\r\n)
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x0A && byteTwo == 0x0D && byteThree == 0x30 && byteFour == 0x20) {

                        // we've found the sample set ending, clear buffers and return
                        // to state 0 to wait for the next set
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        state = 0;

                        // if we're not at the sample set end, look for individual samples    
                    } else if (byteOne == 0x0A && byteTwo == 0x0D) {

                        // found the sample ending delimiter
                        // add in the sample delimiter to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                            sampleByteCount++;

                        }

                        // extract just the length of the sample bytes out of the
                        // sample buffer, and place it in the channel map as a 
                        // byte array.  Then, send it to the data turbine.
                        byte[] sampleArray = new byte[sampleByteCount];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);

                        // send the sample to the data turbine
                        rbnbChannelMap.PutTimeAuto("server");
                        String sampleString = new String(sampleArray, "US-ASCII");
                        int channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                        rbnbChannelMap.PutMime(channelIndex, "text/plain");
                        rbnbChannelMap.PutDataAsString(channelIndex, sampleString);
                        getSource().Flush(rbnbChannelMap);
                        logger.info("Sample: " + sampleString.substring(0, sampleString.length() - 2)
                                + " sent data to the DataTurbine. ");
                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        rbnbChannelMap.Clear();
                        logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        break;

                    } else { // no delimiter found yet

                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found

                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);

                        }

                        break;
                    } // end if for 0x0D20 EOL

                } // end switch statement

                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;

            } //end while (more unread bytes)

            // prepare the buffer to read in more bytes from the stream
            buffer.compact();

        } // end while (more socket bytes to read)
        socket.close();

    } catch (IOException e) {
        // In the event of an I/O exception, log the exception and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    }

    return !failed;
}
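
The detail worth noting above is that sampleBuffer is allocated once and reset with clear() after every completed sample rather than being reallocated. A stripped-down sketch of that reset-between-records idea (the class and method names here are hypothetical, not from the FLNTU source):

import java.nio.ByteBuffer;

class RecordAccumulator {

    private final ByteBuffer sampleBuffer = ByteBuffer.allocate(256);

    // Feed bytes one at a time; a 0x0A byte ends the current record.
    void onByte(byte b) {
        if (b == 0x0A) {
            sampleBuffer.flip();                  // expose the accumulated bytes for reading
            byte[] record = new byte[sampleBuffer.remaining()];
            sampleBuffer.get(record);
            handle(record);
            sampleBuffer.clear();                 // same buffer, fresh record
        } else if (sampleBuffer.hasRemaining()) { // drop bytes if the record overflows
            sampleBuffer.put(b);
        }
    }

    void handle(byte[] record) {
        // application-specific processing
    }
}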

From source file: edu.jhu.cvrg.services.nodeDataService.DataStaging.java

/** Reads the WFDB file from the brokerURL and stores it as the RdtData of a VisualizationData.
 * It assumes that the file is in RDT format, with 3 leads.
 *
 * @param tempFile - name of a local RDT file containing ECG data.
 * @param fileSize - used to size the file reading buffer.
 * @param offsetMilliSeconds - number of milliseconds from the beginning of the ECG at which to start the graph.
 * @param durationMilliSeconds - the requested length of the returned data subset, in milliseconds.
 * @param graphWidthPixels - width of the zoomed graph in pixels (zoom factor * unzoomed width), hence the maximum points needed in the returned VisualizationData.
 * @see org.cvrgrid.widgets.node.client.BrokerService#fetchSubjectVisualization(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, long, int, int)
 */
private VisualizationData fetchWFDBdataSegment(String tempFile, long fileSize, int offsetMilliSeconds,
        int durationMilliSeconds, int graphWidthPixels) {
    BufferedInputStream rdtBis = null;
    VisualizationData visualizationData = new VisualizationData();
    try {
        //******************************************
        try {
            FileInputStream isFile = new FileInputStream(tempFile);
            //*******************************************
            // skippedSamples - number of samples to skip after each one returned, to adjust for graph resolution.
            int samplesPerPixel, skippedSamples, durationInSamples;
            rdtBis = new BufferedInputStream(isFile);

            //Read the 4 header bytes
            byte[] header = new byte[HEADERBYTES];
            int result = rdtBis.read(header, 0, HEADERBYTES);

            if (result == HEADERBYTES) {
                ByteBuffer bbHead = ByteBuffer.wrap(header);
                bbHead.order(BYTEORDER);

                short channels = bbHead.getShort();
                short samplingRate = bbHead.getShort(); // replaced with subjectData.setSamplingRate() 
                float fRateMsec = (float) (samplingRate / 1000.0);
                if (offsetMilliSeconds < 0)
                    offsetMilliSeconds = 0; // cannot read before the beginning of the file.
                int vizOffset = (int) (offsetMilliSeconds * fRateMsec);

                //-------------------------------------------------
                // Calculate and Set Visualization parameters
                final int REALBUFFERSIZE = (int) fileSize - HEADERBYTES;
                if (REALBUFFERSIZE % (channels * SHORTBYTES) != 0) {
                    System.err.println("rdt file is not aligned.");
                }
                int counts = REALBUFFERSIZE / (channels * SHORTBYTES);
                byte[][] body = new byte[counts][(channels * SHORTBYTES)];
                byte[] sample = new byte[(channels * SHORTBYTES)]; /** A single reading from all leads. **/
                try {

                    int requestedMaxPoints;
                    durationInSamples = (int) (fRateMsec * durationMilliSeconds);
                    if (durationInSamples > graphWidthPixels) {
                        samplesPerPixel = durationInSamples / graphWidthPixels;
                        requestedMaxPoints = graphWidthPixels;
                    } else {
                        samplesPerPixel = 1;
                        requestedMaxPoints = durationInSamples;
                    }
                    skippedSamples = samplesPerPixel - 1;

                    int availableSamples = counts - vizOffset; // total number of remaining samples from this offset.
                    int availablePoints = availableSamples / samplesPerPixel; // total number of graphable points from this offset.
                    int maxPoints = 0; // maximum data points that can be returned.
                    // ensure that the copying loop doesn't try to go past the end of the data file.
                    if (availablePoints > requestedMaxPoints) {
                        maxPoints = requestedMaxPoints;
                    } else { // Requested duration is longer than the remainder after the offset.
                        if (durationInSamples < counts) { // Requested duration is less than the file contains.
                            // move the offset back so the requested amount of samples can be returned.
                            vizOffset = counts - durationInSamples;
                            maxPoints = requestedMaxPoints;
                        } else { // Requested duration is longer than the file contains.
                            maxPoints = availablePoints;
                        }
                    }
                    visualizationData.setRdtDataLength(maxPoints);
                    visualizationData.setRdtDataLeads(channels);
                    visualizationData.setOffset(vizOffset);
                    visualizationData.setSkippedSamples(skippedSamples);
                    int msDuration = (counts * 1000) / samplingRate;
                    visualizationData.setMsDuration(msDuration);

                    //------------------------------------------------
                    // Read the rest of the file to get the data.
                    ByteBuffer bbSample;
                    double[][] tempData = new double[maxPoints][channels];
                    int fileOffset = vizOffset * channels * SHORTBYTES; //offset in bytes from the beginning of the file.

                    int index1, index2, s, outSample = 0;
                    index2 = vizOffset; // index of the first sample to return data for, index is in samples not bytes.
                    int length, bisOffset, bisLen = sample.length;
                    // read the file one sample at a time into the byte array "sample"
                    for (index1 = 0; index1 < counts; index1++) {
                        bisOffset = HEADERBYTES + (index1 * bisLen);
                        s = 0;
                        for (int c = 0; c < (bisLen * 4); c++) { // make up to 4 attempts to read 
                            length = rdtBis.read(sample, s, 1);// read one byte into the byte array "sample", explicitly specifying which byte to read.
                            if (length == 1)
                                s++; // successfully read the byte, go to the next one.
                            if (s == bisLen)
                                break; // last byte has been read.
                        }

                        if (index1 == index2) { // add this sample to the output data
                            bbSample = ByteBuffer.wrap(sample);
                            bbSample.order(BYTEORDER);

                            for (int ch = 0; ch < channels; ch++) {
                                short value = bbSample.getShort(); // reads a Short, increments position() by 2 bytes.
                                tempData[outSample][ch] = (double) value;
                            }

                            bbSample.clear();
                            index2 = index2 + 1 + skippedSamples;
                            outSample++;
                            if (outSample == maxPoints)
                                break;
                        }
                    }

                    visualizationData.setRdtData(tempData);

                    //*******************************************
                    isFile.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            } else {
                System.err.println(
                        "fetchSubjectVisualization failed, error occured while reading header of the RDT file:"
                                + tempFile);
            }
            //*******************************************
        } catch (IOException e1) {
            e1.printStackTrace();
        } finally {
            try {
                rdtBis.close();
            } catch (IOException e2) {
                e2.printStackTrace();
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return visualizationData;
}
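
Since ByteBuffer.wrap(sample) already returns a buffer positioned at zero, the bbSample.clear() above is redundant in the per-row re-wrap form; it becomes useful if the view is created once and reused. A variant sketch that wraps once and relies on clear() to rewind the view for each row (the lead count and byte order here are illustrative assumptions):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class SampleDecoder {

    private final byte[] sample = new byte[3 * 2];   // e.g. 3 leads x 2 bytes each
    private final ByteBuffer bbSample =
            ByteBuffer.wrap(sample).order(ByteOrder.LITTLE_ENDIAN);

    // Decode one row after sample[] has been refilled from the stream.
    void decodeRow(double[] out) {
        bbSample.clear();                  // position = 0, limit = capacity
        for (int ch = 0; ch < out.length; ch++) {
            out[ch] = bbSample.getShort(); // advances position by 2 bytes
        }
    }
}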

From source file: edu.jhu.cvrg.services.nodeDataService.DataStaging.java

/** Reads the file from the brokerURL and stores it as the RdtData of a VisualizationData.
 * It assumes that the file is in RDT format, with 3 leads.
 *
 * @param tempFile - name of a local RDT file containing ECG data.
 * @param fileSize - used to size the file reading buffer.
 * @param offsetMilliSeconds - number of milliseconds from the beginning of the ECG at which to start the graph.
 * @param durationMilliSeconds - the requested length of the returned data subset, in milliseconds.
 * @param graphWidthPixels - width of the zoomed graph in pixels (zoom factor * unzoomed width), hence the maximum points needed in the returned VisualizationData.
 * @see org.cvrgrid.widgets.node.client.BrokerService#fetchSubjectVisualization(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, long, int, int)
 */
private VisualizationData fetchSubjectVisualization(String tempFile, long fileSize, int offsetMilliSeconds,
        int durationMilliSeconds, int graphWidthPixels) {
    BufferedInputStream rdtBis = null;
    VisualizationData visualizationData = new VisualizationData();
    try {
        //******************************************
        try {
            FileInputStream isFile = new FileInputStream(tempFile);
            //*******************************************

            int samplesPerPixel, skippedSamples, durationInSamples;
            rdtBis = new BufferedInputStream(isFile);

            //Read the 4 header bytes
            byte[] header = new byte[HEADERBYTES];
            int result = rdtBis.read(header, 0, HEADERBYTES);

            if (result == HEADERBYTES) {
                ByteBuffer bbHead = ByteBuffer.wrap(header);
                bbHead.order(BYTEORDER);

                short channels = bbHead.getShort();
                short samplingRate = bbHead.getShort(); // replaced with subjectData.setSamplingRate() 
                float fRateMsec = (float) (samplingRate / 1000.0);
                if (offsetMilliSeconds < 0)
                    offsetMilliSeconds = 0; // cannot read before the beginning of the file.
                int vizOffset = (int) (offsetMilliSeconds * fRateMsec);

                //-------------------------------------------------
                // Calculate and Set Visualization parameters
                final int REALBUFFERSIZE = (int) fileSize - HEADERBYTES;
                if (REALBUFFERSIZE % (channels * SHORTBYTES) != 0) {
                    System.err.println("rdt file is not aligned.");
                }
                int counts = REALBUFFERSIZE / (channels * SHORTBYTES);
                byte[][] body = new byte[counts][(channels * SHORTBYTES)];
                byte[] sample = new byte[(channels * SHORTBYTES)]; /** A single reading from all leads. **/
                try {

                    int requestedMaxPoints;
                    durationInSamples = (int) (fRateMsec * durationMilliSeconds);
                    if (durationInSamples > graphWidthPixels) {
                        samplesPerPixel = durationInSamples / graphWidthPixels;
                        requestedMaxPoints = graphWidthPixels;
                    } else {
                        samplesPerPixel = 1;
                        requestedMaxPoints = durationInSamples;
                    }
                    skippedSamples = samplesPerPixel - 1;

                    int availableSamples = counts - vizOffset; // total number of remaining samples from this offset.
                    int availablePoints = availableSamples / samplesPerPixel; // total number of graphable points from this offset.
                    int maxPoints = 0; // maximum data points that can be returned.
                    // ensure that the copying loop doesn't try to go past the end of the data file.
                    if (availablePoints > requestedMaxPoints) {
                        maxPoints = requestedMaxPoints;
                    } else { // Requested duration is longer than the remainder after the offset.
                        if (durationInSamples < counts) { // Requested duration is less than the file contains.
                            // move the offset back so the requested amount of samples can be returned.
                            vizOffset = counts - durationInSamples;
                            maxPoints = requestedMaxPoints;
                        } else { // Requested duration is longer than the file contains.
                            maxPoints = availablePoints;
                        }
                    }
                    visualizationData.setRdtDataLength(maxPoints);
                    visualizationData.setRdtDataLeads(channels);
                    visualizationData.setOffset(vizOffset);
                    visualizationData.setSkippedSamples(skippedSamples);
                    int msDuration = (counts * 1000) / samplingRate;
                    visualizationData.setMsDuration(msDuration);

                    //------------------------------------------------
                    // Read the rest of the file to get the data.
                    ByteBuffer bbSample;
                    double[][] tempData = new double[maxPoints][channels];
                    int fileOffset = vizOffset * channels * SHORTBYTES; //offset in bytes from the beginning of the file.

                    int index1, index2, s, outSample = 0;
                    index2 = vizOffset; // index of the first sample to return data for, index is in samples not bytes.
                    int length, bisOffset, bisLen = sample.length;
                    // read the file one sample at a time into the byte array "sample"
                    for (index1 = 0; index1 < counts; index1++) {
                        bisOffset = HEADERBYTES + (index1 * bisLen);
                        s = 0;
                        for (int c = 0; c < (bisLen * 4); c++) { // make up to 4 attempts to read 
                            length = rdtBis.read(sample, s, 1);// read one byte into the byte array "sample", explicitly specifying which byte to read.
                            if (length == 1)
                                s++; // successfully read the byte, go to the next one.
                            if (s == bisLen)
                                break; // last byte has been read.
                        }

                        if (index1 == index2) { // add this sample to the output data
                            bbSample = ByteBuffer.wrap(sample);
                            bbSample.order(BYTEORDER);

                            for (int ch = 0; ch < channels; ch++) {
                                short value = bbSample.getShort(); // reads a Short, increments position() by 2 bytes.
                                tempData[outSample][ch] = (double) value;
                            }

                            bbSample.clear();
                            index2 = index2 + 1 + skippedSamples;
                            outSample++;
                            if (outSample == maxPoints)
                                break;
                        }
                    }

                    visualizationData.setRdtData(tempData);

                    //*******************************************
                    isFile.close();
                    //             br.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            } else {
                System.err.println(
                        "fetchSubjectVisualization failed, error occured while reading header of the RDT file:"
                                + tempFile);
            }
            //*******************************************
        } catch (IOException e1) {
            e1.printStackTrace();
        } finally {
            try {
                rdtBis.close();
            } catch (IOException e2) {
                e2.printStackTrace();
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return visualizationData;
}

From source file: hivemall.GeneralLearnerBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = count;

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.GeneralLearnerBase$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int featureVectorLength = buf.getInt();
                    final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                    for (int j = 0; j < featureVectorLength; j++) {
                        featureVector[j] = readFeatureValue(buf, featureType);
                    }
                    float target = buf.getFloat();
                    train(featureVector, target);
                }
                buf.rewind();

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            for (int iter = 2; iter <= iterations; iter++) {
                cvState.next();
                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // read training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;

                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int featureVectorLength = buf.getInt();
                        final FeatureValue[] featureVector = new FeatureValue[featureVectorLength];
                        for (int j = 0; j < featureVectorLength; j++) {
                            featureVector[j] = readFeatureValue(buf, featureType);
                        }
                        float target = buf.getFloat();
                        train(featureVector, target);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                if (is_mini_batch) { // Update model with accumulated delta
                    batchUpdate();
                }

                if (cvState.isConverged(numTrainingExamples)) {
                    break;
                }
            }
            logger.info("Performed " + cvState.getCurrentIteration() + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * cvState.getCurrentIteration())
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}
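
The secondary-storage branch above follows a clear -> read -> flip -> drain -> compact cycle over a temporary file: clear() empties the buffer before each pass, flip() exposes what each read delivered, and compact() carries a partial record over to the next read. A self-contained sketch of that cycle for length-prefixed records (the record layout here is an assumption for illustration, not Hivemall's actual format):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

class RecordReplay {

    // Replays int-length-prefixed records from the start of a file.
    static void replay(FileChannel channel, ByteBuffer buf) throws IOException {
        buf.clear();                                    // start the pass with an empty buffer
        channel.position(0L);
        while (channel.read(buf) > 0) {
            buf.flip();                                 // switch to draining what arrived
            while (buf.remaining() >= Integer.BYTES) {
                int pos = buf.position();
                int recordBytes = buf.getInt();
                if (buf.remaining() < recordBytes) {
                    buf.position(pos);                  // partial record: rewind past the length
                    break;
                }
                buf.position(buf.position() + recordBytes); // consume (or parse) one record
            }
            buf.compact();                              // keep the partial tail for the next read
        }
    }
}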

From source file: org.eclipsetrader.directa.internal.core.BrokerConnector.java

@Override
public void run() {
    Selector socketSelector;
    ByteBuffer dst = ByteBuffer.wrap(new byte[2048]);
    List<Position> positions = new ArrayList<Position>();

    try {
        // Create a non-blocking socket channel
        socketChannel = SocketChannel.open();
        socketChannel.configureBlocking(false);

        socketChannel.socket().setReceiveBufferSize(32768);
        socketChannel.socket().setSoLinger(true, 1);
        socketChannel.socket().setSoTimeout(0x15f90);
        socketChannel.socket().setReuseAddress(true);

        // Kick off connection establishment
        socketChannel.connect(new InetSocketAddress(server, port));

        // Create a new selector
        socketSelector = SelectorProvider.provider().openSelector();

        // Register the server socket channel, indicating an interest in
        // accepting new connections
        socketChannel.register(socketSelector, SelectionKey.OP_READ | SelectionKey.OP_CONNECT);
    } catch (Exception e) {
        Status status = new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0, "Error connecting to orders monitor", //$NON-NLS-1$
                e);
        Activator.log(status);
        return;
    }

    for (;;) {
        try {
            if (socketSelector.select(30 * 1000) == 0) {
                logger.trace(">" + HEARTBEAT); //$NON-NLS-1$
                socketChannel.write(ByteBuffer.wrap(new String(HEARTBEAT + "\r\n").getBytes())); //$NON-NLS-1$
            }
        } catch (Exception e) {
            break;
        }

        // Iterate over the set of keys for which events are available
        Iterator<SelectionKey> selectedKeys = socketSelector.selectedKeys().iterator();
        while (selectedKeys.hasNext()) {
            SelectionKey key = selectedKeys.next();
            selectedKeys.remove();

            if (!key.isValid()) {
                continue;
            }

            try {
                // Check what event is available and deal with it
                if (key.isConnectable()) {
                    // Finish the connection. If the connection operation failed
                    // this will raise an IOException.
                    try {
                        socketChannel.finishConnect();
                    } catch (IOException e) {
                        // Cancel the channel's registration with our selector
                        key.cancel();
                        return;
                    }

                    // Register an interest in writing on this channel
                    key.interestOps(SelectionKey.OP_WRITE);
                }
                if (key.isWritable()) {
                    logger.trace(">" + LOGIN + WebConnector.getInstance().getUser()); //$NON-NLS-1$
                    socketChannel.write(ByteBuffer.wrap(
                            new String(LOGIN + WebConnector.getInstance().getUser() + "\r\n").getBytes())); //$NON-NLS-1$

                    // Register an interest in reading on this channel
                    key.interestOps(SelectionKey.OP_READ);
                }
                if (key.isReadable()) {
                    dst.clear();
                    int bytesRead = socketChannel.read(dst);
                    if (bytesRead > 0) {
                        String[] s = new String(dst.array(), 0, bytesRead).split("\r\n"); //$NON-NLS-1$
                        for (int i = 0; i < s.length; i++) {
                            logger.trace("<" + s[i]); //$NON-NLS-1$

                            if (s[i].endsWith(";" + WebConnector.getInstance().getUser() + ";")) { //$NON-NLS-1$ //$NON-NLS-2$
                                logger.trace(">" + UNKNOWN70); //$NON-NLS-1$
                                socketChannel.write(ByteBuffer.wrap(new String(UNKNOWN70 + "\r\n").getBytes())); //$NON-NLS-1$
                                logger.trace(">" + UNKNOWN55); //$NON-NLS-1$
                                socketChannel.write(ByteBuffer.wrap(new String(UNKNOWN55 + "\r\n").getBytes())); //$NON-NLS-1$
                            }

                            if (s[i].indexOf(";6;5;") != -1 || s[i].indexOf(";8;0;") != -1) { //$NON-NLS-1$ //$NON-NLS-2$
                                try {
                                    OrderMonitor monitor = parseOrderLine(s[i]);

                                    OrderDelta[] delta;
                                    synchronized (orders) {
                                        if (!orders.contains(monitor)) {
                                            orders.add(monitor);
                                            delta = new OrderDelta[] {
                                                    new OrderDelta(OrderDelta.KIND_ADDED, monitor) };
                                        } else {
                                            delta = new OrderDelta[] {
                                                    new OrderDelta(OrderDelta.KIND_UPDATED, monitor) };
                                        }
                                    }
                                    fireUpdateNotifications(delta);

                                    if (monitor.getFilledQuantity() != null
                                            && monitor.getAveragePrice() != null) {
                                        Account account = WebConnector.getInstance().getAccount();
                                        account.updatePosition(monitor);
                                    }
                                } catch (ParseException e) {
                                    Status status = new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0,
                                            "Error parsing line: " + s[i], e); //$NON-NLS-1$
                                    Activator.log(status);
                                }
                            }
                            if (s[i].indexOf(";6;0;") != -1) { //$NON-NLS-1$
                                updateStatusLine(s[i]);
                            }
                            if (s[i].indexOf(";7;0;") != -1) { //$NON-NLS-1$
                                try {
                                    positions.add(new Position(s[i]));
                                } catch (Exception e) {
                                    Status status = new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0,
                                            "Error parsing line: " + s[i], e); //$NON-NLS-1$
                                    Activator.log(status);
                                }
                            }
                            if (s[i].indexOf(";7;9;") != -1) { //$NON-NLS-1$
                                Account account = WebConnector.getInstance().getAccount();
                                account.setPositions(positions.toArray(new Position[positions.size()]));
                                positions.clear();
                            }
                        }
                    }
                }
            } catch (Exception e) {
                Status status = new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0, "Connection error", e); //$NON-NLS-1$
                Activator.log(status);
            }
        }
    }
}
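
In the read branch above, dst.clear() runs before every socket read so the full backing array is writable again, and dst.array() is then sliced to the byte count actually read. A minimal sketch of that per-read reset (the explicit charset is an assumption; the source relies on the platform default):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;

class ChunkReader {

    // Reads one chunk from a non-blocking channel into a reused heap buffer.
    static String readChunk(SocketChannel channel, ByteBuffer dst) throws IOException {
        dst.clear();                          // make the full capacity writable again
        int bytesRead = channel.read(dst);    // non-blocking: may be 0, -1, or > 0
        if (bytesRead <= 0) {
            return "";
        }
        // dst.array() is valid here because the buffer wraps a heap array
        return new String(dst.array(), 0, bytesRead, StandardCharsets.US_ASCII);
    }
}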

From source file: hivemall.topicmodel.ProbabilisticTopicModelBaseUDTF.java

protected final void runIterativeTraining(@Nonnegative final int iterations) throws HiveException {
    final ByteBuffer buf = this.inputBuf;
    final NioStatefulSegment dst = this.fileIO;
    assert (buf != null);
    assert (dst != null);
    final long numTrainingExamples = model.getDocCount();

    long numTrain = numTrainingExamples / miniBatchSize;
    if (numTrainingExamples % miniBatchSize != 0L) {
        numTrain++;
    }

    final Reporter reporter = getReporter();
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.topicmodel.ProbabilisticTopicModel$Counter", "iteration");

    try {
        if (dst.getPosition() == 0L) {// run iterations w/o temporary file
            if (buf.position() == 0) {
                return; // no training example
            }
            buf.flip();

            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                reportProgress(reporter);
                setCounterValue(iterCounter, iter);

                while (buf.remaining() > 0) {
                    int recordBytes = buf.getInt();
                    assert (recordBytes > 0) : recordBytes;
                    int wcLength = buf.getInt();
                    final String[] wordCounts = new String[wcLength];
                    for (int j = 0; j < wcLength; j++) {
                        wordCounts[j] = NIOUtils.getString(buf);
                    }
                    update(wordCounts);
                }
                buf.rewind();

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples) + " training examples on memory (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total) ");
        } else {// read training examples in the temporary file and invoke train for each example
            // write training examples in buffer to a temporary file
            if (buf.remaining() > 0) {
                writeBuffer(buf, dst);
            }
            try {
                dst.flush();
            } catch (IOException e) {
                throw new HiveException("Failed to flush a file: " + dst.getFile().getAbsolutePath(), e);
            }
            if (logger.isInfoEnabled()) {
                File tmpFile = dst.getFile();
                logger.info(
                        "Wrote " + numTrainingExamples + " records to a temporary file for iterative training: "
                                + tmpFile.getAbsolutePath() + " (" + FileUtils.prettyFileSize(tmpFile) + ")");
            }

            // run iterations
            int iter = 2;
            float perplexity = cumPerplexity / numTrain;
            float perplexityPrev;
            for (; iter <= iterations; iter++) {
                perplexityPrev = perplexity;
                cumPerplexity = 0.f;

                setCounterValue(iterCounter, iter);

                buf.clear();
                dst.resetPosition();
                while (true) {
                    reportProgress(reporter);
                    // TODO prefetch
                    // read training examples from the temporary file into the buffer
                    final int bytesRead;
                    try {
                        bytesRead = dst.read(buf);
                    } catch (IOException e) {
                        throw new HiveException("Failed to read a file: " + dst.getFile().getAbsolutePath(), e);
                    }
                    if (bytesRead == 0) { // reached file EOF
                        break;
                    }
                    assert (bytesRead > 0) : bytesRead;

                    // reads training examples from a buffer
                    buf.flip();
                    int remain = buf.remaining();
                    if (remain < SizeOf.INT) {
                        throw new HiveException("Illegal file format was detected");
                    }
                    while (remain >= SizeOf.INT) {
                        int pos = buf.position();
                        int recordBytes = buf.getInt();
                        remain -= SizeOf.INT;
                        if (remain < recordBytes) {
                            buf.position(pos);
                            break;
                        }

                        int wcLength = buf.getInt();
                        final String[] wordCounts = new String[wcLength];
                        for (int j = 0; j < wcLength; j++) {
                            wordCounts[j] = NIOUtils.getString(buf);
                        }
                        update(wordCounts);

                        remain -= recordBytes;
                    }
                    buf.compact();
                }

                // mean perplexity over `numTrain` mini-batches
                perplexity = cumPerplexity / numTrain;
                logger.info("Mean perplexity over mini-batches: " + perplexity);
                if (Math.abs(perplexityPrev - perplexity) < eps) {
                    break;
                }
            }
            logger.info("Performed " + Math.min(iter, iterations) + " iterations of "
                    + NumberUtils.formatNumber(numTrainingExamples)
                    + " training examples on a secondary storage (thus "
                    + NumberUtils.formatNumber(numTrainingExamples * Math.min(iter, iterations))
                    + " training updates in total)");
        }
    } catch (Throwable e) {
        throw new HiveException("Exception caused in the iterative training", e);
    } finally {
        // delete the temporary file and release resources
        try {
            dst.close(true);
        } catch (IOException e) {
            throw new HiveException("Failed to close a file: " + dst.getFile().getAbsolutePath(), e);
        }
        this.inputBuf = null;
        this.fileIO = null;
    }
}

From source file: com.esri.geoevent.solutions.adapter.geomessage.DefenseOutboundAdapter.java

@SuppressWarnings("incomplete-switch")
@Override
public synchronized void receive(GeoEvent geoEvent) {

    ByteBuffer byteBuffer = ByteBuffer.allocate(10 * 1024);
    Integer wkid = -1;
    String message = "";

    message += "<geomessage v=\"1.0\">\n\r";
    message += "<_type>";
    message += messageType;
    message += "</_type>\n\r";
    message += "<_action>";
    message += "update";
    message += "</_action>\n\r";
    String messageid = UUID.randomUUID().toString();
    message += "<_id>";
    message += "{" + messageid + "}";
    message += "</_id>\n\r";
    MapGeometry geom = geoEvent.getGeometry();
    if (geom.getGeometry().getType() == com.esri.core.geometry.Geometry.Type.Point) {
        Point p = (Point) geom.getGeometry();
        message += "<_control_points>";
        message += ((Double) p.getX()).toString();
        message += ",";
        message += ((Double) p.getY()).toString();
        message += "</_control_points>\n\r";
        wkid = ((Integer) geom.getSpatialReference().getID());
    }

    if (wkid > 0) {
        String wkidValue = wkid.toString();
        message += "<_wkid>";
        message += wkidValue.toString();
        message += "</_wkid>\n\r";
    }
    GeoEventDefinition definition = geoEvent.getGeoEventDefinition();
    for (FieldDefinition fieldDefinition : definition.getFieldDefinitions()) {

        String attributeName = fieldDefinition.getName();
        Object value = geoEvent.getField(attributeName);

        if (value == null || value.equals("null")) {
            continue;
        }
        FieldType t = fieldDefinition.getType();
        if (t != FieldType.Geometry) {
            message += "<" + attributeName + ">";

            switch (t) {
            case String:
                // if(((String)value).isEmpty())
                // continue;
                message += value;
                break;
            case Date:
                Date date = (Date) value;
                message += (formatter.format(date));
                break;
            case Double:
                Double doubleValue = (Double) value;
                message += doubleValue.toString();
                break;
            case Float:
                Float floatValue = (Float) value;
                message += floatValue.toString();
                break;

            case Integer:
                Integer intValue = (Integer) value;
                message += intValue.toString();
                break;
            case Long:
                Long longValue = (Long) value;
                message += longValue.toString();
                break;
            case Short:
                Short shortValue = (Short) value;
                message += shortValue.toString();
                break;
            case Boolean:
                Boolean booleanValue = (Boolean) value;
                message += booleanValue.toString();
                break;

            }
            message += "</" + attributeName + ">\n\r";
        } else {
            if (definition.getIndexOf(attributeName) == definition.getIndexOf("GEOMETRY")) {
                continue;
            } else {
                String json = GeometryEngine.geometryToJson(wkid, (Geometry) value);
                message += "<" + attributeName + ">";
                message += json;
                message += "</" + attributeName + ">\n\r";
            }
            break;
        }

    }
    message += "</geomessage>";
    // stringBuffer.append("</geomessages>");
    message += "\r\n";

    ByteBuffer buf = charset.encode(message);
    if (buf.position() > 0)
        buf.flip();

    try {
        byteBuffer.put(buf);
    } catch (BufferOverflowException ex) {
        LOG.error(
                "Csv Outbound Adapter does not have enough room in the buffer to hold the outgoing data.  Either the receiving transport object is too slow to process the data, or the data message is too big.");
    }
    byteBuffer.flip();
    super.receive(byteBuffer, geoEvent.getTrackId(), geoEvent);
    byteBuffer.clear();
}
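
The tail of receive() shows the write-side lifecycle: put() fills byteBuffer, flip() exposes the payload to the downstream transport, and clear() readies the same buffer for the next GeoEvent. A condensed sketch of that fill/flip/clear sequence (UTF-8 is an assumption here; the adapter uses its configured charset):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

class OutboundFramer {

    private final ByteBuffer byteBuffer = ByteBuffer.allocate(10 * 1024);

    // Frame one message and leave the buffer ready for the next call.
    byte[] frame(String message) {
        // put() throws BufferOverflowException for oversized messages, as guarded above
        byteBuffer.put(message.getBytes(StandardCharsets.UTF_8));
        byteBuffer.flip();                  // position = 0, limit = bytes written
        byte[] out = new byte[byteBuffer.remaining()];
        byteBuffer.get(out);                // hand the payload downstream
        byteBuffer.clear();                 // reuse the buffer for the next message
        return out;
    }
}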

From source file: com.linkedin.databus.core.DbusEventBuffer.java

/**
 * Clears the buffer, assumes that requisite locks have
 * been obtained outside this method.
 *
 */
private void lockFreeClear() {
    _scnIndex.clear();
    _head.setPosition(0L);
    _tail.setPosition(0L);
    _currentWritePosition.setPosition(0L);
    _prevScn = -1L;
    _empty = true;
    _lastWrittenSequence = -1L;
    _timestampOfFirstEvent = 0;
    // TODO (medium) DDSDBUS-56:
    // what happens to the iterators that might be iterating over this buffer?
    // should we call a notifyClear() on them?
    for (ByteBuffer buf : _buffers) {
        buf.clear();
    }
    _notFull.signalAll();
    //      notifyIterators(head, tail);
}
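
Note that the buf.clear() loop above resets each buffer's indices without touching its contents, which is exactly what a logical reset of the event buffer needs. A tiny runnable demonstration that clear() leaves the bytes in place:

import java.nio.ByteBuffer;

public class ClearDoesNotErase {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.put((byte) 42);
        buf.clear();                      // position = 0, limit = capacity, mark discarded
        System.out.println(buf.get(0));   // prints 42: the byte survives clear()
    }
}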

From source file: com.linkedin.databus.core.DbusEventBuffer.java

/**
 * Used by readEventsInternal to move the partial event at the end to the beginning of the
 * staging buffer so we can try to read more data.
 *
 * @param readPos
 * @param logDebugEnabled
 */
private void compactStgBuffer(ReadEventsReadPosition readPos, boolean logDebugEnabled) {
    final ByteBuffer readBuffer = readPos.getReadBuffer();

    readBuffer.clear();//despite its name, clear() does not remove the data
    if (readPos.hasNext()) {
        if (logDebugEnabled) {
            _log.debug("Copying " + readPos.bytesRemaining() + " bytes to the start of the readBuffer");
        }

        for (int i = 0; i < readPos.bytesRemaining(); ++i) {
            readBuffer.put(readBuffer.get(readPos.getPosition() + i));
        }
        readPos.startIteration();

        if (logDebugEnabled) {
            _log.debug("readBuffer after compaction: " + readBuffer + "; " + readPos);
        }
    }
}
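
Here clear() only resets the indices before the tail bytes are copied to the front by hand, because the unread region is tracked by the external readPos cursor rather than by position and limit. When position and limit do delimit the unread bytes, ByteBuffer.compact() performs the same move-tail-to-front step in one call, as in this sketch:

import java.nio.ByteBuffer;

class TailKeeper {

    // Precondition: position = first unread byte, limit = end of valid data.
    static void keepTail(ByteBuffer readBuffer) {
        readBuffer.compact();   // copies the remaining bytes to index 0;
                                // position ends up at the number of bytes kept
        // the buffer is now in fill mode, ready for the next channel read
    }
}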

From source file: edu.harvard.iq.dvn.ingest.dsb.impl.DvnNewJavaFieldCutter.java

public void cutColumns(InputStream in, int noCardsPerCase, int caseLength, String delimitor, String tabFileName)
        throws IOException {

    if (delimitor == null) {
        delimitor = defaultDelimitor;
    }

    OUT_LEN = colwidth; // calculated by parseList
    dbgLog.fine("out_len=" + OUT_LEN);

    String firstline = null;

    if (caseLength == 0) {

        int cread;
        int ccounter = 0;

        firstline = "";

        while (caseLength == 0 && (cread = in.read()) != -1) {
            ccounter++;
            if (cread == '\n') {
                caseLength = ccounter;
            }
            char c = (char) cread;
            firstline = firstline + c;
        }

    }

    if (caseLength == 0) {
        throw new IOException("Subsetting failed: could not read incoming byte stream. "
                + "(Requested file may be unavailable or missing)");

    }

    REC_LEN = caseLength;
    dbgLog.fine("REC_LEN=" + REC_LEN);

    for (int i = 0; i < cargSet.get(Long.valueOf(noCardsPerCase)).size(); i++) {
        int varEndOffset = cargSet.get(Long.valueOf(noCardsPerCase)).get(i).get(1);

        if (REC_LEN <= varEndOffset + 1) {
            throw new IOException("Failed to subset incoming byte stream. Invalid input. "
                    + "(Detected the first record of " + REC_LEN + " bytes; "
                    + "one of the columns requested ends at " + varEndOffset + " bytes).");
        }
    }

    Boolean dottednotation = false;
    Boolean foundData = false;

    // cutting a data file

    ReadableByteChannel rbc = Channels.newChannel(in);
    // input byte-buffer size = row-length + 1(=> new line char)
    ByteBuffer inbuffer = ByteBuffer.allocate(REC_LEN);

    OutputStream outs = new FileOutputStream(tabFileName);
    WritableByteChannel outc = Channels.newChannel(outs);
    ByteBuffer outbuffer = null;

    int pos = 0;
    int offset = 0;
    int outoffset = 0;

    int begin = 0;
    int end = 0;
    int blankoffset = 0;

    int blanktail = 0;
    int k;

    try {
        // lc: line counter
        int lc = 0;
        while (firstline != null || rbc.read(inbuffer) != -1) {

            if (firstline != null) {
                // we have the first line saved as a String:
                inbuffer.put(firstline.getBytes());
                firstline = null;
            }

            // calculate i-th card number
            lc++;
            k = lc % noCardsPerCase;
            if (k == 0) {
                k = noCardsPerCase;
            }
            //out.println("***** " +lc+ "-th line, recod k=" + k + " *****");
            byte[] line_read = new byte[OUT_LEN];
            byte[] junk = new byte[REC_LEN];
            byte[] line_final = new byte[OUT_LEN];

            //out.println("READ: " + offset);
            inbuffer.rewind();

            offset = 0;
            outoffset = 0;

            // how many variables are cut from this k-th card
            int noColumns = cargSet.get(Long.valueOf(k)).size();

            //out.println("noColumns=" + noColumns);
            //out.println("cargSet k =" + cargSet.get(Long.valueOf(k)));

            for (int i = 0; i < noColumns; i++) {
                //out.println("**** " + i +"-th col ****");
                begin = cargSet.get(Long.valueOf(k)).get(i).get(0); // bounds[2 * i];
                end = cargSet.get(Long.valueOf(k)).get(i).get(1); // bounds[2 * i + 1];

                //out.println("i: begin: " + begin + "\ti: end:" + end);

                try {
                    // throw away the offset bytes before the requested column
                    if (begin - offset - 1 > 0) {
                        inbuffer.get(junk, 0, (begin - offset - 1));
                    }
                    // get requested bytes
                    inbuffer.get(line_read, outoffset, (end - begin + 1));
                    // set outbound data
                    outbounds[2 * i] = outoffset;
                    outbounds[2 * i + 1] = outoffset + (end - begin);
                    // current position moved to outoffset
                    pos = outoffset;

                    dottednotation = false;
                    foundData = false;

                    blankoffset = 0;
                    blanktail = 0;

                    // as position increases
                    while (pos <= (outoffset + (end - begin))) {

                        //out.println("pos=" + pos + "\tline_read[pos]=" +
                        //    new String(line_read).replace("\000", "\052"));

                        // decimal octal
                        // 48 =>0 60
                        // 46 => . 56
                        // 32 = space 40

                        // dot: 
                        if (line_read[pos] == '\056') {
                            dottednotation = true;
                        }

                        // space:
                        if (line_read[pos] == '\040') {
                            if (foundData) {
                                blanktail = blanktail > 0 ? blanktail : pos - 1;
                            } else {
                                blankoffset = pos + 1;
                            }
                        } else {
                            foundData = true;
                            blanktail = 0;
                        }

                        pos++;
                    }
                    // increase the outoffset by width
                    outoffset += (end - begin + 1);
                    // dot false
                    if (!dottednotation) {
                        if (blankoffset > 0) {
                            // set outbound value to blankoffset
                            outbounds[2 * i] = blankoffset;
                        }
                        if (blanktail > 0) {
                            outbounds[2 * i + 1] = blanktail;
                        }
                    }

                } catch (BufferUnderflowException bufe) {
                    //bufe.printStackTrace();
                    throw new IOException(bufe.getMessage());
                }
                // set offset to the value of end-position
                offset = end;
            }

            outoffset = 0;
            // for each var
            for (int i = 0; i < noColumns; i++) {
                begin = outbounds[2 * i];
                end = outbounds[2 * i + 1];
                //out.println("begin=" + begin + "\t end=" + end);
                for (int j = begin; j <= end; j++) {
                    line_final[outoffset++] = line_read[j];
                }

                if (i < (noColumns - 1)) {
                    line_final[outoffset++] = '\011'; // tab x09
                } else {
                    if (k == cargSet.size()) {
                        line_final[outoffset++] = '\012'; // LF x0A
                    } else {
                        line_final[outoffset++] = '\011'; // tab x09
                    }
                }
            }
            //out.println("line_final=" +
            //    new String(line_final).replace("\000", "\052"));
            outbuffer = ByteBuffer.wrap(line_final, 0, outoffset);
            outc.write(outbuffer);
            inbuffer.clear();

        } // while loop
    } catch (IOException ex) {
        //ex.printStackTrace();
        throw new IOException("Failed to subset incoming fixed-field stream: " + ex.getMessage());
    }

}
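
The row loop above leans on two different resets: inbuffer.rewind() before parsing each record (re-read the same region without changing the limit) and inbuffer.clear() after the record has been written out (refill from scratch). A small runnable contrast of the three reset methods:

import java.nio.ByteBuffer;

public class ResetOps {
    public static void main(String[] args) {
        ByteBuffer b = ByteBuffer.allocate(8);
        b.put(new byte[] { 1, 2, 3 });

        b.flip();    // limit = 3 (end of data), position = 0: drain what was written
        b.rewind();  // position = 0, limit unchanged: re-read the same region
        b.clear();   // position = 0, limit = 8 (capacity): refill from scratch
    }
}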