Example usage for java.nio.ByteBuffer capacity()

List of usage examples for java.nio.ByteBuffer capacity()

Introduction

On this page you can find example usage for java.nio.ByteBuffer.capacity().

Prototype

public final int capacity() 

Document

Returns the capacity of this buffer.
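
Before the examples, a minimal sketch (the class name CapacityDemo is made up for illustration) showing that capacity() is fixed when the buffer is allocated and, unlike position() and limit(), is not changed by flip() or other repositioning calls:

import java.nio.ByteBuffer;

public class CapacityDemo {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(16); // capacity fixed at 16
        buffer.putInt(42);                           // position advances to 4
        System.out.println(buffer.capacity());       // 16
        System.out.println(buffer.position());       // 4
        System.out.println(buffer.limit());          // 16
        buffer.flip();                               // limit becomes 4, position 0
        System.out.println(buffer.capacity());       // still 16
    }
}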

Usage

From source file: com.skratchdot.electribe.model.esx.impl.SampleImpl.java

/**
 * <!-- begin-user-doc -->
 * @return Returns a byte array that combines/mixes channel1 and channel2
 * <!-- end-user-doc -->
 * @generated NOT
 */
public byte[] getAudioDataChannelBoth() {
    if (this.isEmpty()) {
        return new byte[0];
    }
    ByteBuffer bufferChannel1 = ByteBuffer.wrap(this.getAudioDataChannel1());
    ByteBuffer bufferChannel2 = ByteBuffer.wrap(this.getAudioDataChannel2());
    ByteBuffer bufferChannelBoth = ByteBuffer.allocate(bufferChannel1.capacity());
    int dataChannel1;
    int dataChannel2;
    short dataChannelBoth;
    for (int j = 0; j < this.getNumberOfSampleFrames(); j++) {
        dataChannel1 = bufferChannel1.getShort();
        dataChannel2 = bufferChannel2.getShort();
        dataChannelBoth = (short) ((dataChannel1 + dataChannel2) / 2);
        bufferChannelBoth.putShort(dataChannelBoth);
    }
    return bufferChannelBoth.array();
}
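
The allocation above works because capacity() is measured in bytes: the channel-1 array stores two bytes per 16-bit sample frame, so a buffer of the same capacity has room for exactly one mixed short per frame. A minimal sketch (with made-up sample values) of that bytes-versus-elements distinction:

import java.nio.ByteBuffer;

public class ShortCapacity {
    public static void main(String[] args) {
        short[] samples = {100, -200, 300, -400};                   // 4 sample frames
        ByteBuffer byteView = ByteBuffer.allocate(samples.length * 2);
        for (short s : samples) {
            byteView.putShort(s);                                   // each short occupies 2 bytes
        }
        System.out.println(byteView.capacity());                    // 8 bytes, not 4 elements
    }
}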

From source file: org.opendaylight.lispflowmapping.implementation.serializer.MapRequestSerializer.java

public ByteBuffer serialize(MapRequest mapRequest) {
    int size = Length.HEADER_SIZE;
    if (mapRequest.getSourceEid() != null && mapRequest.getSourceEid().getLispAddressContainer() != null) {
        size += LispAddressSerializer.getInstance()
                .getAddressSize(LispAFIConvertor.toAFI(mapRequest.getSourceEid().getLispAddressContainer()));
    } else {
        size += 2;
    }
    if (mapRequest.getItrRloc() != null) {
        for (ItrRloc address : mapRequest.getItrRloc()) {
            size += LispAddressSerializer.getInstance()
                    .getAddressSize(LispAFIConvertor.toAFI(address.getLispAddressContainer()));
        }
    }
    if (mapRequest.getEidRecord() != null) {
        for (EidRecord record : mapRequest.getEidRecord()) {
            size += 2 + LispAddressSerializer.getInstance()
                    .getAddressSize(LispAFIConvertor.toAFI(record.getLispAddressContainer()));
        }
    }
    ByteBuffer requestBuffer = ByteBuffer.allocate(size);
    requestBuffer.put((byte) ((byte) (LispMessageEnum.MapRequest.getValue() << 4)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isAuthoritative()), Flags.AUTHORITATIVE)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isMapDataPresent()), Flags.MAP_DATA_PRESENT)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isProbe()), Flags.PROBE)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isSmr()), Flags.SMR)));
    requestBuffer.put((byte) (ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isPitr()), Flags.PITR)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRequest.isSmrInvoked()), Flags.SMR_INVOKED)));
    if (mapRequest.getItrRloc() != null) {
        int IRC = mapRequest.getItrRloc().size();
        if (IRC > 0) {
            IRC--;
        }
        requestBuffer.put((byte) (IRC));
    } else {
        requestBuffer.put((byte) 0);

    }
    if (mapRequest.getEidRecord() != null) {
        requestBuffer.put((byte) mapRequest.getEidRecord().size());
    } else {
        requestBuffer.put((byte) 0);

    }
    requestBuffer.putLong(NumberUtil.asLong(mapRequest.getNonce()));
    if (mapRequest.getSourceEid() != null && mapRequest.getSourceEid().getLispAddressContainer() != null) {
        LispAddressSerializer.getInstance().serialize(requestBuffer,
                LispAFIConvertor.toAFI(mapRequest.getSourceEid().getLispAddressContainer()));
    } else {
        requestBuffer.putShort((short) 0);
    }
    if (mapRequest.getItrRloc() != null) {
        for (ItrRloc address : mapRequest.getItrRloc()) {
            LispAddressSerializer.getInstance().serialize(requestBuffer,
                    LispAFIConvertor.toAFI(address.getLispAddressContainer()));
        }
    }
    if (mapRequest.getEidRecord() != null) {
        for (EidRecord record : mapRequest.getEidRecord()) {
            requestBuffer.put((byte) 0);
            requestBuffer.put((byte) record.getMask().byteValue());
            LispAddressSerializer.getInstance().serialize(requestBuffer,
                    LispAFIConvertor.toAFI(record.getLispAddressContainer()));
        }
    }
    if (mapRequest.getMapReply() != null) {
        ByteBuffer replyBuffer = ByteBuffer.allocate(
                EidToLocatorRecordSerializer.getInstance().getSerializationSize(mapRequest.getMapReply()));
        EidToLocatorRecordSerializer.getInstance().serialize(replyBuffer, mapRequest.getMapReply());
        ByteBuffer combinedBuffer = ByteBuffer.allocate(requestBuffer.capacity() + replyBuffer.capacity());
        combinedBuffer.put(requestBuffer.array());
        combinedBuffer.put(replyBuffer.array());
        return combinedBuffer;
    }
    return requestBuffer;
}
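
The Map-Reply branch above sizes the combined buffer from the two capacities and then copies both backing arrays into it. A minimal, stand-alone sketch of that concatenation pattern; it assumes, as serialize() does, that both source buffers were filled to capacity:

import java.nio.ByteBuffer;

public class ConcatBuffers {
    public static void main(String[] args) {
        ByteBuffer first = ByteBuffer.allocate(8).putLong(1L);      // filled to capacity
        ByteBuffer second = ByteBuffer.allocate(4).putInt(2);       // filled to capacity
        ByteBuffer combined = ByteBuffer.allocate(first.capacity() + second.capacity());
        combined.put(first.array());                                // copy the first backing array
        combined.put(second.array());                               // append the second
        System.out.println(combined.capacity());                    // 12
    }
}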

From source file: org.opendaylight.lispflowmapping.lisp.serializer.MapRequestSerializer.java

public MapRequest deserialize(ByteBuffer requestBuffer) {
    try {
        MapRequestBuilder builder = new MapRequestBuilder();

        byte typeAndFlags = requestBuffer.get();
        builder.setAuthoritative(ByteUtil.extractBit(typeAndFlags, Flags.AUTHORITATIVE));
        builder.setMapDataPresent(ByteUtil.extractBit(typeAndFlags, Flags.MAP_DATA_PRESENT));
        builder.setProbe(ByteUtil.extractBit(typeAndFlags, Flags.PROBE));
        builder.setSmr(ByteUtil.extractBit(typeAndFlags, Flags.SMR));

        byte moreFlags = requestBuffer.get();
        builder.setPitr(ByteUtil.extractBit(moreFlags, Flags.PITR));
        builder.setSmrInvoked(ByteUtil.extractBit(moreFlags, Flags.SMR_INVOKED));

        int itrCount = ByteUtil.getUnsignedByte(requestBuffer) + 1;
        int recordCount = ByteUtil.getUnsignedByte(requestBuffer);
        builder.setNonce(requestBuffer.getLong());
        LispAddressSerializerContext ctx = new LispAddressSerializerContext(
                LispAddressSerializerContext.MASK_LEN_MISSING);
        builder.setSourceEid(new SourceEidBuilder()
                .setEid(LispAddressSerializer.getInstance().deserializeEid(requestBuffer, ctx)).build());

        if (builder.getItrRloc() == null) {
            builder.setItrRloc(new ArrayList<ItrRloc>());
        }
        for (int i = 0; i < itrCount; i++) {
            builder.getItrRloc().add(new ItrRlocBuilder()
                    .setRloc(LispAddressSerializer.getInstance().deserializeRloc(requestBuffer)).build());
        }

        if (builder.getEidItem() == null) {
            builder.setEidItem(new ArrayList<EidItem>());
        }
        for (int i = 0; i < recordCount; i++) {
            builder.getEidItem().add(new EidItemBuilder()
                    .setEid(EidRecordSerializer.getInstance().deserialize(requestBuffer)).build());
        }
        if (builder.isMapDataPresent() && requestBuffer.hasRemaining()) {
            try {
                builder.setMapReply(
                        new org.opendaylight.yang.gen.v1.urn.opendaylight.lfm.lisp.proto.rev151105.maprequest.MapReplyBuilder()
                                .setMappingRecord(
                                        MappingRecordSerializer.getInstance().deserialize(requestBuffer))
                                .build())
                        .build();
            } catch (RuntimeException re) {
                LOG.warn("Couldn't deserialize Map-Reply encapsulated in Map-Request", re);
            }
        }
        return builder.build();
    } catch (RuntimeException re) {
        throw new LispSerializationException(
                "Couldn't deserialize Map-Request (len=" + requestBuffer.capacity() + ")", re);
    }
}

From source file: com.healthmarketscience.jackcess.impl.TableImpl.java

/**
 * Returns a single ByteBuffer which contains the entire table definition
 * (which may span multiple database pages).
 */
private ByteBuffer loadCompleteTableDefinitionBuffer(ByteBuffer tableBuffer) throws IOException {
    int nextPage = tableBuffer.getInt(getFormat().OFFSET_NEXT_TABLE_DEF_PAGE);
    ByteBuffer nextPageBuffer = null;
    while (nextPage != 0) {
        if (nextPageBuffer == null) {
            nextPageBuffer = getPageChannel().createPageBuffer();
        }
        getPageChannel().readPage(nextPageBuffer, nextPage);
        nextPage = nextPageBuffer.getInt(getFormat().OFFSET_NEXT_TABLE_DEF_PAGE);
        ByteBuffer newBuffer = PageChannel.createBuffer(tableBuffer.capacity() + getFormat().PAGE_SIZE - 8);
        newBuffer.put(tableBuffer);
        newBuffer.put(nextPageBuffer.array(), 8, getFormat().PAGE_SIZE - 8);
        tableBuffer = newBuffer;
        tableBuffer.flip();
    }
    return tableBuffer;
}
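
Each pass of the loop above grows the table buffer by one page minus its 8-byte header: a new buffer is allocated from the old capacity() plus the extra payload, the old contents and the new page bytes are copied in, and the result is flipped for reading. A minimal sketch of that grow-and-append pattern (the helper class and the headerSize parameter are made up for illustration):

import java.nio.ByteBuffer;

final class BufferGrow {
    static ByteBuffer append(ByteBuffer current, byte[] page, int headerSize) {
        ByteBuffer grown = ByteBuffer.allocate(current.capacity() + page.length - headerSize);
        current.rewind();                                           // copy the old contents from the start
        grown.put(current);
        grown.put(page, headerSize, page.length - headerSize);      // append the payload, skipping the header
        grown.flip();                                               // prepare the result for reading
        return grown;
    }
}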

From source file: com.healthmarketscience.jackcess.Table.java

/**
 * @param database database which owns this table
 * @param tableBuffer Buffer to read the table with
 * @param pageNumber Page number of the table definition
 * @param name Table name
 * @param useBigIndex whether or not "big index support" should be enabled
 *                    for the table
 */
protected Table(Database database, ByteBuffer tableBuffer, int pageNumber, String name, int flags,
        boolean useBigIndex) throws IOException {
    _database = database;
    _tableDefPageNumber = pageNumber;
    _name = name;
    _flags = flags;
    _useBigIndex = useBigIndex;
    int nextPage = tableBuffer.getInt(getFormat().OFFSET_NEXT_TABLE_DEF_PAGE);
    ByteBuffer nextPageBuffer = null;
    while (nextPage != 0) {
        if (nextPageBuffer == null) {
            nextPageBuffer = getPageChannel().createPageBuffer();
        }
        getPageChannel().readPage(nextPageBuffer, nextPage);
        nextPage = nextPageBuffer.getInt(getFormat().OFFSET_NEXT_TABLE_DEF_PAGE);
        ByteBuffer newBuffer = getPageChannel()
                .createBuffer(tableBuffer.capacity() + getFormat().PAGE_SIZE - 8);
        newBuffer.put(tableBuffer);
        newBuffer.put(nextPageBuffer.array(), 8, getFormat().PAGE_SIZE - 8);
        tableBuffer = newBuffer;
        tableBuffer.flip();
    }
    readTableDefinition(tableBuffer);
    tableBuffer = null;
}

From source file: org.opendaylight.lispflowmapping.implementation.serializer.MapRequestSerializer.java

public MapRequest deserialize(ByteBuffer requestBuffer) {
    try {
        MapRequestBuilder builder = new MapRequestBuilder();

        byte typeAndFlags = requestBuffer.get();
        builder.setAuthoritative(ByteUtil.extractBit(typeAndFlags, Flags.AUTHORITATIVE));
        builder.setMapDataPresent(ByteUtil.extractBit(typeAndFlags, Flags.MAP_DATA_PRESENT));
        builder.setProbe(ByteUtil.extractBit(typeAndFlags, Flags.PROBE));
        builder.setSmr(ByteUtil.extractBit(typeAndFlags, Flags.SMR));

        byte moreFlags = requestBuffer.get();
        builder.setPitr(ByteUtil.extractBit(moreFlags, Flags.PITR));
        builder.setSmrInvoked(ByteUtil.extractBit(moreFlags, Flags.SMR_INVOKED));

        int itrCount = ByteUtil.getUnsignedByte(requestBuffer) + 1;
        int recordCount = ByteUtil.getUnsignedByte(requestBuffer);
        builder.setNonce(requestBuffer.getLong());
        builder.setSourceEid(
                new SourceEidBuilder()
                        .setLispAddressContainer(LispAFIConvertor
                                .toContainer(LispAddressSerializer.getInstance().deserialize(requestBuffer)))
                        .build());

        if (builder.getItrRloc() == null) {
            builder.setItrRloc(new ArrayList<ItrRloc>());
        }
        for (int i = 0; i < itrCount; i++) {
            builder.getItrRloc()
                    .add(new ItrRlocBuilder().setLispAddressContainer(LispAFIConvertor
                            .toContainer(LispAddressSerializer.getInstance().deserialize(requestBuffer)))
                            .build());
        }

        if (builder.getEidRecord() == null) {
            builder.setEidRecord(new ArrayList<EidRecord>());
        }
        for (int i = 0; i < recordCount; i++) {
            builder.getEidRecord().add(EidRecordSerializer.getInstance().deserialize(requestBuffer));
        }
        if (builder.isMapDataPresent() && requestBuffer.hasRemaining()) {
            try {
                builder.setMapReply(
                        new org.opendaylight.yang.gen.v1.urn.opendaylight.lfm.control.plane.rev150314.maprequest.MapReplyBuilder(
                                new EidToLocatorRecordBuilder(
                                        EidToLocatorRecordSerializer.getInstance().deserialize(requestBuffer))
                                                .build()).build());
            } catch (RuntimeException re) {
                LOG.warn("couldn't deserialize map reply encapsulated in map request. {}", re.getMessage());
            }
        }
        return builder.build();
    } catch (RuntimeException re) {
        throw new LispSerializationException(
                "Couldn't deserialize Map-Request (len=" + requestBuffer.capacity() + ")", re);
    }
}

From source file: edu.brown.hstore.HStoreSite.java

protected void invocationQueue(ByteBuffer buffer, ClientInputHandler handler, Connection c) {
    int messageSize = buffer.capacity();
    RpcCallback<ClientResponseImpl> callback = new ClientResponseCallback(this.clientInterface, c, messageSize);
    this.clientInterface.increaseBackpressure(messageSize);

    if (this.preProcessorQueue != null) {
        this.preProcessorQueue.add(Pair.of(buffer, callback));
    } else {
        this.invocationProcess(buffer, callback);
    }
}
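
Here capacity() stands in for the message size, which is reasonable only if the incoming buffer is sized to exactly one serialized invocation (an assumption about the caller, not something shown above). For a pooled or oversized buffer, remaining() gives the payload length while capacity() gives the backing-array size; a minimal sketch of the difference:

import java.nio.ByteBuffer;

public class MessageSize {
    public static void main(String[] args) {
        ByteBuffer exact = ByteBuffer.wrap(new byte[128]);          // buffer sized to the message
        ByteBuffer pooled = ByteBuffer.allocate(4096);              // reused, oversized buffer
        pooled.putInt(42);
        pooled.flip();                                              // only 4 bytes are payload
        System.out.println(exact.capacity());                       // 128, matches the message
        System.out.println(pooled.capacity());                      // 4096, backing size, not payload
        System.out.println(pooled.remaining());                     // 4, the actual payload length
    }
}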

From source file: org.opendaylight.lispflowmapping.lisp.serializer.MapRegisterSerializer.java

public MapRegister deserialize(ByteBuffer registerBuffer, InetAddress sourceRloc) {
    try {
        MapRegisterBuilder builder = new MapRegisterBuilder();
        builder.setMappingRecordItem(new ArrayList<MappingRecordItem>());

        byte typeAndFlags = registerBuffer.get();
        boolean xtrSiteIdPresent = ByteUtil.extractBit(typeAndFlags, Flags.XTRSITEID);
        builder.setProxyMapReply(ByteUtil.extractBit(typeAndFlags, Flags.PROXY));
        builder.setXtrSiteIdPresent(xtrSiteIdPresent);

        registerBuffer.position(registerBuffer.position() + Length.RES);
        byte mergeAndMapReply = registerBuffer.get();
        builder.setWantMapNotify(ByteUtil.extractBit(mergeAndMapReply, Flags.WANT_MAP_NOTIFY));
        builder.setMergeEnabled(ByteUtil.extractBit(mergeAndMapReply, Flags.MERGE_ENABLED));
        byte recordCount = (byte) ByteUtil.getUnsignedByte(registerBuffer);
        builder.setNonce(registerBuffer.getLong());
        builder.setKeyId(registerBuffer.getShort());
        short authenticationLength = registerBuffer.getShort();
        byte[] authenticationData = new byte[authenticationLength];
        registerBuffer.get(authenticationData);
        builder.setAuthenticationData(authenticationData);

        if (xtrSiteIdPresent) {
            List<MappingRecordBuilder> mrbs = new ArrayList<MappingRecordBuilder>();
            for (int i = 0; i < recordCount; i++) {
                mrbs.add(MappingRecordSerializer.getInstance().deserializeToBuilder(registerBuffer));
            }
            byte[] xtrIdBuf = new byte[Length.XTRID_SIZE];
            registerBuffer.get(xtrIdBuf);
            XtrId xtrId = new XtrId(xtrIdBuf);
            byte[] siteIdBuf = new byte[Length.SITEID_SIZE];
            registerBuffer.get(siteIdBuf);
            SiteId siteId = new SiteId(siteIdBuf);
            builder.setXtrId(xtrId);
            builder.setSiteId(siteId);
            for (MappingRecordBuilder mrb : mrbs) {
                mrb.setXtrId(xtrId);
                mrb.setSiteId(siteId);
                mrb.setSourceRloc(getSourceRloc(sourceRloc));
                builder.getMappingRecordItem()
                        .add(new MappingRecordItemBuilder().setMappingRecord(mrb.build()).build());
            }
        } else {
            for (int i = 0; i < recordCount; i++) {
                builder.getMappingRecordItem()
                        .add(new MappingRecordItemBuilder()
                                .setMappingRecord(
                                        MappingRecordSerializer.getInstance().deserialize(registerBuffer))
                                .build());
            }
        }

        registerBuffer.limit(registerBuffer.position());
        byte[] mapRegisterBytes = new byte[registerBuffer.position()];
        registerBuffer.position(0);
        registerBuffer.get(mapRegisterBytes);
        return builder.build();
    } catch (RuntimeException re) {
        throw new LispSerializationException(
                "Couldn't deserialize Map-Register (len=" + registerBuffer.capacity() + ")", re);
    }

}

From source file: edu.hawaii.soest.hioos.storx.StorXParser.java

/**
 * Parses the binary STOR-X file.  The binary file format is a sequence of
 * 'frames' that all begin with 'SAT'.  The parser creates a list with the
 * individual frames.  Some frames are StorX frames (SATSTX), some are from 
 * external sensors (ISUS: 'SATNLB', 'SATNDB'; SBE CTD: 'SATSBE')
 *
 * @param fileBuffer - the binary data file as a ByteBuffer
 */
public void parse(ByteBuffer fileBuffer) throws Exception {

    logger.debug("StorXParser.parse() called.");

    this.fileBuffer = fileBuffer;
    //logger.debug(this.fileBuffer.toString());

    try {

        // Create a buffer that will store a single frame of the file
        ByteBuffer frameBuffer = ByteBuffer.allocate(1024);

        // create four byte placeholders used to evaluate up to a four-byte 
        // window.  The FIFO layout looks like:
        //           ---------------------------
        //   in ---> | Four | Three | Two | One |  ---> out
        //           ---------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;

        int frameByteCount = 0; // keep track of bytes per frame
        int frameCount = 0; // keep track of frames

        this.fileBuffer.position(0);
        this.fileBuffer.limit(this.fileBuffer.capacity());

        while (this.fileBuffer.hasRemaining()) {

            // load the next byte into the FIFO window
            byteOne = fileBuffer.get();

            // show the byte stream coming in
            //logger.debug("b1: " + new String(Hex.encodeHex(new byte[]{byteOne}))   + "\t" +
            //             "b2: " + new String(Hex.encodeHex(new byte[]{byteTwo}))   + "\t" +
            //             "b3: " + new String(Hex.encodeHex(new byte[]{byteThree})) + "\t" +
            //             "b4: " + new String(Hex.encodeHex(new byte[]{byteFour}))  + "\t" +
            //             "st: " + Integer.toString(this.state)                     + "\t" +
            //             "po: " + this.fileBuffer.position()                       + "\t" +
            //             "cp: " + this.fileBuffer.capacity()
            //             );

            // evaluate the bytes, separate the file frame by frame (SAT ...)
            switch (this.state) {

            case 0: // find a frame beginning (SAT) 53 41 54

                if (byteOne == 0x54 && byteTwo == 0x41 && byteThree == 0x53) {

                    // found a line, add the beginning to the line buffer 
                    frameBuffer.put(byteThree);
                    frameBuffer.put(byteTwo);
                    frameBuffer.put(byteOne);

                    frameByteCount = frameByteCount + 3;

                    this.state = 1;
                    break;

                } else {
                    break;

                }

            case 1: // find the next frame beginning (SAT) 53 41 54

                if ((byteOne == 0x54 && byteTwo == 0x41 && byteThree == 0x53)
                        || fileBuffer.position() == fileBuffer.capacity()) {

                    // we have a line ending. store the line in the arrayList
                    frameBuffer.put(byteOne);
                    frameByteCount++;
                    frameBuffer.flip();
                    byte[] frameArray = frameBuffer.array();
                    ByteBuffer currentFrameBuffer;

                    if (fileBuffer.position() == fileBuffer.capacity()) {

                        // create a true copy of the byte array subset (no trailing 'SAT')
                        byte[] frameCopy = new byte[frameByteCount];
                        System.arraycopy(frameArray, 0, frameCopy, 0, frameByteCount);
                        currentFrameBuffer = ByteBuffer.wrap(frameCopy);

                    } else {

                        // create a true copy of the byte array subset (less the 'SAT')
                        byte[] frameCopy = new byte[frameByteCount - 3];
                        System.arraycopy(frameArray, 0, frameCopy, 0, frameByteCount - 3);
                        currentFrameBuffer = ByteBuffer.wrap(frameCopy);

                    }

                    // parse the current frame and add it to the frameMap

                    frameCount++;

                    // create a map to store frames as they are encountered
                    BasicHierarchicalMap frameMap = new BasicHierarchicalMap();

                    // peek at the first six header bytes as a string
                    byte[] sixBytes = new byte[6];
                    currentFrameBuffer.get(sixBytes);
                    currentFrameBuffer.position(0);
                    String frameHeader = new String(sixBytes, "US-ASCII");

                    // determine the frame type based on the header
                    if (frameHeader.matches(this.STOR_X_HEADER_ID)) {
                        frameMap.put("rawFrame", currentFrameBuffer);
                        frameMap.put("id", frameHeader);
                        frameMap.put("type", frameHeader.substring(3, 6));
                        frameMap.put("serialNumber", null);
                        frameMap.put("date", null);
                        String headerString = new String(currentFrameBuffer.array());
                        // trim trailing null characters and line endings
                        int nullIndex = headerString.indexOf(0);
                        headerString = headerString.substring(0, nullIndex).trim();
                        frameMap.put("parsedFrameObject", headerString);

                        // Add the frame to the frames map
                        this.framesMap.add("/frames/frame", (BasicHierarchicalMap) frameMap.clone());

                        frameMap.removeAll("frame");
                        currentFrameBuffer.clear();

                    } else if (frameHeader.matches(this.STOR_X_FRAME_ID)) {

                        // test if the frame is complete
                        if (currentFrameBuffer.capacity() == this.STOR_X_FRAME_SIZE) {

                            // convert the frame buffer to a StorXFrame
                            StorXFrame storXFrame = new StorXFrame(currentFrameBuffer);

                            frameMap.put("rawFrame", currentFrameBuffer);
                            frameMap.put("id", frameHeader);
                            frameMap.put("type", frameHeader.substring(3, 6));
                            frameMap.put("serialNumber", storXFrame.getSerialNumber());
                            frameMap.put("date", parseTimestamp(storXFrame.getTimestamp()));
                            frameMap.put("parsedFrameObject", storXFrame);

                            // Add the frame to the frames map
                            this.framesMap.add("/frames/frame", (BasicHierarchicalMap) frameMap.clone());

                            frameMap.removeAll("frame");
                            currentFrameBuffer.clear();

                        } else {
                            logger.debug(frameHeader + " frame " + frameCount + " length is "
                                    + currentFrameBuffer.capacity() + " not " + this.STOR_X_FRAME_SIZE);
                        }

                    } else if (frameHeader.matches(this.SBE_CTD_FRAME_ID)) {

                        // convert the frame buffer to a CTDFrame
                        CTDFrame ctdFrame = new CTDFrame(currentFrameBuffer);

                        // add in a sample if it matches a general data sample pattern
                        if (ctdFrame.getSample().matches(" [0-9].*[0-9]\r\n")) {

                            // extract the sample bytes from the frame
                            frameMap.put("rawFrame", currentFrameBuffer);
                            frameMap.put("id", frameHeader);
                            frameMap.put("type", frameHeader.substring(3, 6));
                            frameMap.put("serialNumber", ctdFrame.getSerialNumber());
                            frameMap.put("date", parseTimestamp(ctdFrame.getTimestamp()));
                            frameMap.put("parsedFrameObject", ctdFrame);

                            // Add the frame to the frames map
                            this.framesMap.add("/frames/frame", (BasicHierarchicalMap) frameMap.clone());

                        } else {
                            logger.debug("This CTD frame is not a data sample."
                                    + " Skipping it. The string is: " + ctdFrame.getSample());
                        }

                        frameMap.removeAll("frame");
                        currentFrameBuffer.clear();

                    } else if (frameHeader.matches(this.ISUS_DARK_FRAME_ID)) {

                        // test if the frame is complete
                        if (currentFrameBuffer.capacity() == this.ISUS_FRAME_SIZE) {

                            // convert the frame buffer to a ISUSFrame
                            ISUSFrame isusFrame = new ISUSFrame(currentFrameBuffer);

                            frameMap.put("rawFrame", currentFrameBuffer);
                            frameMap.put("id", frameHeader);
                            frameMap.put("type", frameHeader.substring(3, 6));
                            frameMap.put("serialNumber", isusFrame.getSerialNumber());
                            frameMap.put("date", parseTimestamp(isusFrame.getTimestamp()));
                            frameMap.put("parsedFrameObject", isusFrame);

                            // Add the frame to the frames map
                            this.framesMap.add("/frames/frame", (BasicHierarchicalMap) frameMap.clone());

                            frameMap.removeAll("frame");
                            currentFrameBuffer.clear();

                        } else {
                            logger.debug(frameHeader + " frame " + frameCount + " length is "
                                    + currentFrameBuffer.capacity() + " not " + this.ISUS_FRAME_SIZE);
                        }

                        currentFrameBuffer.clear();

                    } else if (frameHeader.matches(this.ISUS_LIGHT_FRAME_ID)) {

                        // test if the frame is complete
                        if (currentFrameBuffer.capacity() == this.ISUS_FRAME_SIZE) {

                            // convert the frame buffer to a ISUSFrame
                            ISUSFrame isusFrame = new ISUSFrame(currentFrameBuffer);

                            frameMap.put("rawFrame", currentFrameBuffer);
                            frameMap.put("id", frameHeader);
                            frameMap.put("type", frameHeader.substring(3, 6));
                            frameMap.put("serialNumber", isusFrame.getSerialNumber());
                            frameMap.put("date", parseTimestamp(isusFrame.getTimestamp()));
                            frameMap.put("parsedFrameObject", isusFrame);

                            // Add the frame to the frames map
                            this.framesMap.add("/frames/frame", (BasicHierarchicalMap) frameMap.clone());

                            frameMap.removeAll("frame");
                            currentFrameBuffer.clear();

                        } else {
                            logger.debug(frameHeader + " frame " + frameCount + " length is "
                                    + currentFrameBuffer.capacity() + " not " + this.ISUS_FRAME_SIZE);
                        }

                        currentFrameBuffer.clear();

                    } else {
                        logger.info("The current frame type is not recognized. "
                                + "Discarding it.  The header was: " + frameHeader);
                        currentFrameBuffer.clear();

                    }

                    // reset the frame buffer for the next frame, but add the 'SAT'
                    // bytes already encountered
                    frameBuffer.clear();
                    frameByteCount = 0;
                    this.fileBuffer.position(this.fileBuffer.position() - 3);
                    this.state = 0;
                    break;

                } else {

                    // no full line yet, keep adding bytes
                    frameBuffer.put(byteOne);
                    frameByteCount++;
                    break;

                }

            } // end switch()

            // shift the bytes in the FIFO window
            byteFour = byteThree;
            byteThree = byteTwo;
            byteTwo = byteOne;

        } // end while()

        logger.debug(this.framesMap.toXMLString(1000));

    } catch (Exception e) {
        logger.debug("Failed to parse the data file.  The error message was:" + e.getMessage());
        e.printStackTrace();

    }

}
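
Several branches above compare currentFrameBuffer.capacity() against an expected frame size to detect truncated frames; because each frame is copied into an exactly-sized array before wrapping, the capacity equals the frame length. A minimal sketch of that completeness check (EXPECTED_SIZE is a made-up constant for illustration):

import java.nio.ByteBuffer;

public class FrameCheck {
    static final int EXPECTED_SIZE = 610;                           // assumed frame length for illustration

    static boolean isComplete(byte[] frameCopy) {
        ByteBuffer frame = ByteBuffer.wrap(frameCopy);              // capacity == frameCopy.length
        return frame.capacity() == EXPECTED_SIZE;
    }

    public static void main(String[] args) {
        System.out.println(isComplete(new byte[610]));              // true
        System.out.println(isComplete(new byte[42]));               // false, truncated frame
    }
}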

From source file: edu.umass.cs.gigapaxos.SQLPaxosLogger.java

private static void compactLogfile(File file, PaxosPacketizer packetizer, MessageLogDiskMap msgLog,
        FileIDMap fidMap) throws IOException, JSONException {
    RandomAccessFile raf = null, rafTmp = null;
    File tmpFile = new File(file.toString() + TMP_FILE_SUFFIX);
    int tmpFileSize = 0;
    boolean compacted = false, neededAtAll = false;
    HashMap<String, ArrayList<LogIndexEntry>> logIndexEntries = new HashMap<String, ArrayList<LogIndexEntry>>();

    // quick delete
    if (fidMap.isRemovable(file.toString(), msgLog)) {
        deleteFile(file, msgLog);
        log.log(Level.INFO, "{0} quick-garbage-collected file {1}", new Object[] { msgLog.disk, file });
        return;
    } else
        log.log(Level.FINE, "{0} not quick-GCing file {1} because dependent paxosIDs = {2}",
                new Object[] { msgLog.disk, file, fidMap.fidMap.get(file.toString()) });

    if (System.currentTimeMillis() - file.lastModified() < LOGFILE_AGE_THRESHOLD * 1000)
        return;

    try {
        long t = System.currentTimeMillis();
        raf = new RandomAccessFile(file.toString(), "r");
        rafTmp = new RandomAccessFile(tmpFile.toString(), "rw");
        while (raf.getFilePointer() < raf.length()) {
            long offset = rafTmp.getFilePointer();
            int length = raf.readInt();
            byte[] msg = new byte[length];
            raf.readFully(msg);
            PaxosPacket pp = packetizer != null ? packetizer.stringToPaxosPacket(msg
            // new String(msg, CHARSET)
            ) : PaxosPacket.getPaxosPacket(new String(msg, CHARSET));
            if (!logIndexEntries.containsKey(pp.getPaxosID()))
                logIndexEntries.put(pp.getPaxosID(), new ArrayList<LogIndexEntry>());
            logIndexEntries.get(pp.getPaxosID()).add(new LogIndexEntry(getSlot(pp), getBallot(pp).ballotNumber,
                    getBallot(pp).coordinatorID, pp.getType().getInt(), file.toString(), offset, length));

            if (isLogMsgNeeded(pp, msgLog)) {
                ByteBuffer bbuf = ByteBuffer.allocate(length + 4);
                bbuf.putInt(length);
                bbuf.put(msg);
                rafTmp.write(bbuf.array());
                neededAtAll = true;
                tmpFileSize += bbuf.capacity();
            } else {
                compacted = true;
                log.log(Level.FINE, "From logfile {0} garbage collecting {1}",
                        new Object[] { file, pp.getSummary() });
            }
        }
        DelayProfiler.updateDelay("compact", t);
    } finally {
        if (raf != null)
            raf.close();
        if (rafTmp != null) {
            rafTmp.getChannel().force(true);
            rafTmp.close();
        }
    }
    assert (tmpFile.exists() && tmpFile.length() == tmpFileSize) : tmpFile.length() + " != " + tmpFileSize;
    if (compacted && neededAtAll)
        synchronized (msgLog) {
            modifyLogfileAndLogIndex(file, tmpFile, logIndexEntries, msgLog, fidMap);
            log.log(Level.INFO, "{0} compacted logfile {1}", new Object[] { msgLog, file });
        }
    else if (!neededAtAll) {
        log.log(Level.INFO, "Deleting logfile {0} as its log entries are no longer needed",
                new Object[] { file });
        deleteFile(file, msgLog);
    } else
        // !compacted
        log.log(Level.INFO, "Logfile {0} unchanged upon compaction attempt", new Object[] { file });
    assert (tmpFile.toString().endsWith(TMP_FILE_SUFFIX));
    if (tmpFile.exists())
        deleteFile(tmpFile, msgLog);
}
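
tmpFileSize above is accumulated from bbuf.capacity(), which works because each buffer is allocated to exactly the 4-byte length prefix plus the message, so its capacity equals the number of bytes written to the temporary file. A minimal sketch of that size-tracking idiom:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class SizeTracking {
    public static void main(String[] args) {
        byte[] msg = "hello".getBytes(StandardCharsets.UTF_8);
        ByteBuffer bbuf = ByteBuffer.allocate(msg.length + 4);      // 4-byte length prefix + payload
        bbuf.putInt(msg.length);
        bbuf.put(msg);
        int bytesWritten = bbuf.capacity();                         // 9: everything in the buffer
        System.out.println(bytesWritten);
    }
}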