Example usage for java.nio ByteBuffer getLong

List of usage examples for java.nio ByteBuffer getLong

Introduction

On this page you can find example usage of java.nio ByteBuffer getLong.

Prototype

public abstract long getLong();

Source Link

Document

Returns the long at the current position and increases the position by 8.

Usage

From source file:com.healthmarketscience.jackcess.impl.DatabaseImpl.java

/**
 * Returns the password mask retrieved from the given header page and
 * format, or {@code null} if this format does not use a password mask.
 */
static byte[] getPasswordMask(ByteBuffer buffer, JetFormat format) {
    // The extra password mask is derived from the database creation date
    // stored in the header; a negative offset means this format has no mask.
    int dateOffset = format.OFFSET_HEADER_DATE;
    if (dateOffset < 0) {
        return null;
    }

    buffer.position(dateOffset);
    long rawDateBits = buffer.getLong();
    double creationDate = Double.longBitsToDouble(rawDateBits);

    byte[] mask = new byte[4];
    PageChannel.wrap(mask).putInt((int) creationDate);
    return mask;
}

From source file:com.sonymobile.android.media.internal.VUParser.java

/**
 * Parses the ODSM (Object Descriptor Stream) sample of the given track and
 * extracts Marlin DRM information (SINF/IPMP descriptors) from it.
 *
 * The parsed IPMP data is attached per-track as metadata and also assembled
 * into a single combined IPMP metadata blob ({@code mIpmpMetaData}).
 *
 * @param odsmTrack the ODSM track whose single sample is parsed
 * @return true on success (or best-effort success after an IOException),
 *         false if the data does not match the expected descriptor layout
 */
protected boolean parseODSMData(IsoTrack odsmTrack) {
    // Fixed byte length of one object descriptor entry as laid out below
    // (tags, lengths, ES reference, IPMP pointer, IPMP descriptor id).
    int kObjectSize = 11;
    SampleTable sampleTable = odsmTrack.getSampleTable();
    if (sampleTable.getSampleCount() > 1) {
        // TODO: Should multiple entries be supported?
        return false;
    }
    mSinfList = new ArrayList<SinfData>(2);

    // stsz box: read the (single) sample's size.
    ByteBuffer stszData = sampleTable.getStszData();
    stszData.rewind();
    stszData.getInt(); // version and flags

    int dataSize = stszData.getInt();
    if (dataSize == 0) {
        // sample_size == 0 means per-sample sizes follow the sample_count.
        stszData.getInt(); // sample_count
        dataSize = stszData.getInt();
    }

    byte[] data = new byte[dataSize];
    try {
        // stco/co64 box: read the chunk offset of the sample.
        ByteBuffer stcoData = sampleTable.getStcoData();
        stcoData.rewind();

        stcoData.getInt(); // version and flags
        stcoData.getInt(); // entry_count

        long sampleOffset = 0;
        if (sampleTable.isUsingLongChunkOffsets()) {
            sampleOffset = stcoData.getLong();
        } else {
            // Mask to treat the 32-bit offset as unsigned.
            sampleOffset = 0xFFFFFFFFL & stcoData.getInt();
        }

        mDataSource.readAt(sampleOffset, data, dataSize);
        ByteBuffer dataBuffer = ByteBuffer.wrap(data);
        byte updateTag = dataBuffer.get();
        if (updateTag != 1) {
            // Not an ObjectDescriptorUpdate tag.
            return false;
        }
        int size = 0;
        int sizePart = 0;
        // Expandable size field: 7 bits of payload per byte, high bit is the
        // continuation flag.
        // NOTE(review): the loop condition `sizePart > 128` looks off by one —
        // a byte of exactly 0x80 (128) has the continuation bit set but would
        // terminate the loop; `sizePart >= 128` seems intended. Confirm against
        // ISO/IEC 14496-1 before changing. Same pattern recurs twice below.
        do {
            sizePart = (dataBuffer.get() & 0xFF);
            size = ((size << 7) & 0xFFFFFF80) | (sizePart & 0x7F);
        } while (sizePart > 128);
        // Walk the list of object descriptors; each one links an elementary
        // stream (by track reference index) to an IPMP descriptor id.
        while (size >= kObjectSize) {
            byte descriptorTag = dataBuffer.get();
            if (descriptorTag != 17) {
                // not mp4 descriptor
                return false;
            }
            dataBuffer.get(); // ODLength
            dataBuffer.getShort(); // 10 bit ObjectDescriptorID, 1 bit
                                   // URL_FLAG and 5 bit reserved

            byte esTag = dataBuffer.get();
            if (esTag != 0x0F) {
                return false;
            }
            dataBuffer.get(); // ES Length
            short esTrackReferenceIndex = dataBuffer.getShort();
            byte ipmpDescriptorPointer = dataBuffer.get();
            if (ipmpDescriptorPointer != 0x0A) {
                // unexpected pointer
                return false;
            }
            dataBuffer.get(); // ipmpLength
            byte ipmpDescriptorId = dataBuffer.get();
            SinfData sinfData = new SinfData();
            sinfData.esIdReference = esTrackReferenceIndex;
            sinfData.ipmpDescriptorId = ipmpDescriptorId;
            mSinfList.add(sinfData);
            size -= kObjectSize;
        }
        dataBuffer.get(); // IPMP Descriptor Update Tag
        int sinfCount = mSinfList.size();
        size = 0;
        sizePart = 0;
        // Expandable size of the IPMP descriptor update section (same
        // encoding and same NOTE(review) caveat as above).
        do {
            sizePart = (dataBuffer.get() & 0xFF);
            size = ((size << 7) & 0xFFFFFF80) | (sizePart & 0x7F);
        } while (sizePart > 128);
        // Walk the IPMP descriptors and attach their payloads to the
        // matching SinfData entries collected above.
        while (size > 0) {
            dataBuffer.get(); // IPMP Descriptor Tag
            int ipmpByteCount = 1;
            int ipmpLength = 0;
            sizePart = 0;
            do {
                sizePart = (dataBuffer.get() & 0xFF);
                ipmpByteCount++;
                ipmpLength = ((ipmpLength << 7) & 0xFFFFFF80) | (sizePart & 0x7F);
            } while (sizePart > 128);
            // ipmpByteCount now covers the tag + length bytes + payload.
            ipmpByteCount += ipmpLength;
            byte ipmpDescriptorId = dataBuffer.get();
            dataBuffer.getShort(); // IPMPS Type
            // Payload minus the 3 bytes (descriptor id + IPMPS type) just read.
            byte[] ipmpData = new byte[ipmpLength - 3];
            dataBuffer.get(ipmpData);
            SinfData sinfData = null;
            for (int i = 0; i < sinfCount; i++) {
                sinfData = mSinfList.get(i);
                if (sinfData.ipmpDescriptorId == ipmpDescriptorId) {
                    sinfData.ipmpData = new byte[ipmpData.length];
                    for (int j = 0; j < ipmpData.length; j++) {
                        sinfData.ipmpData[j] = ipmpData[j];
                    }
                    break;
                }
            }
            size -= ipmpByteCount;
        }
        // Total payload size across all SINF entries, used to size the
        // combined metadata blob below.
        int ipmpDataLength = 0;
        for (int i = 0; i < sinfCount; i++) {
            SinfData sinfData = mSinfList.get(i);
            ipmpDataLength += sinfData.ipmpData.length;
        }

        int ipmpMetaDataLength = 16 // MARLIN_SYSTEM_ID
                + 4 // size of all SINF data
                + 4 // size of SINF box id
                + 4 * sinfCount // trackIndex * sinfCount
                + 4 * sinfCount // ipmpLength * sinfCount
                + ipmpDataLength; // size of ipmpData
        byte[] ipmpMetaData = new byte[ipmpMetaDataLength];
        int offset = 16;

        // First 16 bytes: the Marlin system id, decoded from its hex string.
        for (int i = 0; i < offset; i++) {
            int hexVal = Integer.parseInt(Util.MARLIN_SYSTEM_ID.substring(i * 2, i * 2 + 2), 16);
            ipmpMetaData[i] = (byte) hexVal;
        }
        // Big-endian total payload length, then the ASCII tag "sinf".
        ipmpMetaData[offset++] = (byte) ((ipmpDataLength >> 24) & 0xFF);
        ipmpMetaData[offset++] = (byte) ((ipmpDataLength >> 16) & 0xFF);
        ipmpMetaData[offset++] = (byte) ((ipmpDataLength >> 8) & 0xFF);
        ipmpMetaData[offset++] = (byte) (ipmpDataLength & 0xFF);
        ipmpMetaData[offset++] = 0x73; // S
        ipmpMetaData[offset++] = 0x69; // I
        ipmpMetaData[offset++] = 0x6E; // N
        ipmpMetaData[offset++] = 0x66; // F

        // Match each SINF entry to its track (by ES id == track id), append
        // per-track records to the combined blob, and attach per-track
        // metadata / Marlin JSON.
        int numTracks = mTracks.size();
        for (int i = 0; i < numTracks; i++) {
            IsoTrack track = (IsoTrack) mTracks.get(i);
            for (int j = 0; j < sinfCount; j++) {
                SinfData sinfData = mSinfList.get(j);
                if (sinfData.esIdReference == track.getTrackId()) {
                    track.getMetaData().addValue(MetaData.KEY_IPMP_DATA, sinfData.ipmpData);
                    // track index
                    ipmpMetaData[offset++] = (byte) ((i >> 24) & 0xFF);
                    ipmpMetaData[offset++] = (byte) ((i >> 16) & 0xFF);
                    ipmpMetaData[offset++] = (byte) ((i >> 8) & 0xFF);
                    ipmpMetaData[offset++] = (byte) (i & 0xFF);

                    // sinf data length
                    ipmpMetaData[offset++] = (byte) ((sinfData.ipmpData.length >> 24) & 0xFF);
                    ipmpMetaData[offset++] = (byte) ((sinfData.ipmpData.length >> 16) & 0xFF);
                    ipmpMetaData[offset++] = (byte) ((sinfData.ipmpData.length >> 8) & 0xFF);
                    ipmpMetaData[offset++] = (byte) (sinfData.ipmpData.length & 0xFF);

                    System.arraycopy(sinfData.ipmpData, 0, ipmpMetaData, offset, sinfData.ipmpData.length);

                    // Length-prefixed copy of the IPMP data for JSON conversion.
                    byte[] tempData = new byte[4 + sinfData.ipmpData.length];
                    tempData[0] = (byte) ((sinfData.ipmpData.length >> 24) & 0xFF);
                    tempData[1] = (byte) ((sinfData.ipmpData.length >> 16) & 0xFF);
                    tempData[2] = (byte) ((sinfData.ipmpData.length >> 8) & 0xFF);
                    tempData[3] = (byte) (sinfData.ipmpData.length & 0xFF);
                    System.arraycopy(sinfData.ipmpData, 0, tempData, 4, sinfData.ipmpData.length);

                    // Create JSON for this track
                    String jsonData = null;
                    try {
                        jsonData = Util.getJSONIPMPData(tempData);
                    } catch (JSONException e) {
                        if (LOGS_ENABLED)
                            Log.e(TAG, "Exception when creating JSON object" + e);
                        return false;
                    }
                    track.getMediaFormat().setString(KEY_DRM_UUID, Util.MARLIN_SYSTEM_ID);
                    track.getMediaFormat().setString(KEY_MARLIN_JSON, jsonData);

                    offset += sinfData.ipmpData.length;
                    break;
                }
            }
        }

        mIpmpMetaData = ipmpMetaData;

        addMetaDataValue(KEY_IPMP_DATA, mIpmpMetaData);

        mCurrentTrack.getMetaData().addValue(KEY_MIME_TYPE, MimeType.OCTET_STREAM);

    } catch (IOException e) {
        // Best-effort: a read failure is logged but still reported as success.
        if (LOGS_ENABLED)
            Log.e(TAG, "IOException when parsing ODSM data");
    }
    return true;
}

From source file:com.l2jfree.loginserver.L2LoginIdentifier.java

/**
 * Loads this login server's persistent UID from {@code ~/FILENAME}, or
 * generates and stores a new random UID when the file is missing or has
 * the wrong size. Sets {@code _loaded} once a UID has been established.
 *
 * Fixes over the previous version: both the channel write and the channel
 * read are looped, because a single {@link FileChannel} call may transfer
 * fewer than the requested 8 bytes; the stored value is only trusted when
 * all 8 bytes were read (the old {@code position() > 0} check could throw
 * a BufferUnderflowException on a short read).
 */
private synchronized void load() {
    if (isLoaded())
        return;

    File f = new File(System.getProperty("user.home", null), FILENAME);
    ByteBuffer bb = ByteBuffer.allocateDirect(8);

    if (!f.exists() || f.length() != 8) {
        // No stored UID (or a corrupt/truncated file): generate a new one
        // and persist it for future runs.
        _uid = getRandomUID();
        _loaded = true;
        _log.info("A new UID has been generated for this login server.");

        FileOutputStream fos = null;
        try {
            f.createNewFile();
            fos = new FileOutputStream(f);
            FileChannel fc = fos.getChannel();
            bb.putLong(getUID());
            bb.flip();
            // A single write() is not guaranteed to drain the buffer.
            while (bb.hasRemaining())
                fc.write(bb);
            fos.flush();
        } catch (IOException e) {
            _log.warn("Could not store login server's UID!", e);
        } finally {
            IOUtils.closeQuietly(fos);
            f.setReadOnly();
        }
    } else {
        FileInputStream fis = null;
        try {
            fis = new FileInputStream(f);
            FileChannel fc = fis.getChannel();
            // Read until the buffer is full or EOF; a single read() may
            // return fewer than 8 bytes.
            while (bb.hasRemaining() && fc.read(bb) != -1) {
                // keep reading
            }
        } catch (IOException e) {
            _log.warn("Could not read stored login server's UID!", e);
        } finally {
            IOUtils.closeQuietly(fis);
        }

        // Only trust the stored value if all 8 bytes were actually read.
        if (!bb.hasRemaining()) {
            bb.flip();
            _uid = bb.getLong();
        } else
            _uid = getRandomUID();
        _loaded = true;
    }
}

From source file:org.apache.carbondata.core.util.CarbonUtil.java

/**
 * Deserializes a {@link ValueEncoderMeta} from its compact byte form.
 *
 * Layout: a 2-byte measure-type char, followed by type-specific
 * max/min/unique statistics (doubles, zero placeholders, or longs),
 * then a 4-byte decimal count and a single "data type selected" byte.
 *
 * @param encodeMeta serialized encoder metadata bytes
 * @return the reconstructed ValueEncoderMeta
 * @throws IllegalArgumentException if the measure type is not recognized
 */
public static ValueEncoderMeta deserializeEncoderMetaNew(byte[] encodeMeta) {
    ByteBuffer metaBuffer = ByteBuffer.wrap(encodeMeta);
    char measureType = metaBuffer.getChar();
    ValueEncoderMeta decodedMeta = new ValueEncoderMeta();
    decodedMeta.setType(measureType);
    if (measureType == CarbonCommonConstants.DOUBLE_MEASURE) {
        decodedMeta.setMaxValue(metaBuffer.getDouble());
        decodedMeta.setMinValue(metaBuffer.getDouble());
        decodedMeta.setUniqueValue(metaBuffer.getDouble());
    } else if (measureType == CarbonCommonConstants.BIG_DECIMAL_MEASURE) {
        // Big-decimal statistics are not carried in the byte form;
        // zero placeholders are used instead.
        decodedMeta.setMaxValue(0.0);
        decodedMeta.setMinValue(0.0);
        decodedMeta.setUniqueValue(0.0);
    } else if (measureType == CarbonCommonConstants.BIG_INT_MEASURE) {
        decodedMeta.setMaxValue(metaBuffer.getLong());
        decodedMeta.setMinValue(metaBuffer.getLong());
        decodedMeta.setUniqueValue(metaBuffer.getLong());
    } else {
        throw new IllegalArgumentException("invalid measure type");
    }
    decodedMeta.setDecimal(metaBuffer.getInt());
    decodedMeta.setDataTypeSelected(metaBuffer.get());
    return decodedMeta;
}

From source file:edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java

/**
 * Reads the offset header of a rotated image file: one 8-byte big-endian
 * offset per variable, stored at the start of the file.
 *
 * Fixes over the previous version: the stream is opened in
 * try-with-resources so it is closed even when an exception is thrown,
 * and the header is decoded by a single ByteBuffer wrap instead of one
 * array copy per variable.
 *
 * @param rotatedImageFile the rotated (column-major) image file
 * @param varcount number of variables (columns) expected in the header
 * @param casecount number of cases (unused here; kept for interface parity)
 * @return the byte offsets, one per variable
 * @throws IOException if fewer than {@code varcount * 8} header bytes
 *         could be read
 */
private long[] extractColumnOffsets(File rotatedImageFile, int varcount, int casecount) throws IOException {
    long[] byteOffsets = new long[varcount];

    try (BufferedInputStream rotfileStream = new BufferedInputStream(new FileInputStream(rotatedImageFile))) {
        byte[] offsetHeader = new byte[varcount * 8];
        int readlen = rotfileStream.read(offsetHeader);

        if (readlen != varcount * 8) {
            throw new IOException("Could not read " + varcount * 8 + " header bytes from the rotated file.");
        }

        ByteBuffer offsetByteBuffer = ByteBuffer.wrap(offsetHeader);
        for (int varindex = 0; varindex < varcount; varindex++) {
            byteOffsets[varindex] = offsetByteBuffer.getLong();
        }
    }

    return byteOffsets;
}

From source file:edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java

/**
 * Reads a rotated (column-major) image file and prints its contents back in
 * row-major order (one case per line, values tab-separated) to stdout.
 *
 * The file starts with an offset header ({@code varcount} 8-byte offsets);
 * each column is a newline-delimited run of string values between
 * consecutive offsets.
 *
 * Fixes over the previous version: the stream is closed via
 * try-with-resources even on exceptions, the short-read error message now
 * reports the expected byte count (it previously printed the byte-array
 * reference), and a large block of commented-out dead code was removed.
 *
 * @param rotfile the rotated image file
 * @param varcount number of variables (columns)
 * @param casecount number of cases (rows) expected per column
 * @throws IOException on read failure or when a column's token count does
 *         not match {@code casecount}
 */
private void reverseRotatedImage(File rotfile, int varcount, int casecount) throws IOException {
    try (BufferedInputStream rotfileStream = new BufferedInputStream(new FileInputStream(rotfile))) {
        // Read in the offset header.
        byte[] offsetHeader = new byte[varcount * 8];
        long[] byteOffsets = new long[varcount];

        int readlen = rotfileStream.read(offsetHeader);

        if (readlen != varcount * 8) {
            throw new IOException("Could not read " + varcount * 8 + " header bytes from the rotated file.");
        }

        ByteBuffer offsetByteBuffer = ByteBuffer.wrap(offsetHeader);
        for (int varindex = 0; varindex < varcount; varindex++) {
            byteOffsets[varindex] = offsetByteBuffer.getLong();
        }

        String[][] reversedMatrix = new String[casecount][varcount];

        long offset = varcount * 8;

        for (int varindex = 0; varindex < varcount; varindex++) {
            long columnLength = byteOffsets[varindex] - offset;

            byte[] columnBytes = new byte[(int) columnLength];
            readlen = rotfileStream.read(columnBytes);

            if (readlen != columnLength) {
                throw new IOException("Could not read " + columnLength + " bytes for column " + varindex);
            }

            // Split the column on newline bytes into per-case tokens.
            int bytecount = 0;
            int byteoffset = 0;
            int caseindex = 0;
            while (bytecount < columnLength) {
                if (columnBytes[bytecount] == '\n') {
                    String token = new String(columnBytes, byteoffset, bytecount - byteoffset);
                    reversedMatrix[caseindex++][varindex] = token;
                    byteoffset = bytecount + 1;
                }
                bytecount++;
            }

            if (caseindex != casecount) {
                throw new IOException("count mismatch: " + caseindex + " tokens found for column " + varindex);
            }
            offset = byteOffsets[varindex];
        }

        // Emit the matrix row by row, tab-separated, newline after each case.
        for (int caseindex = 0; caseindex < casecount; caseindex++) {
            for (int varindex = 0; varindex < varcount; varindex++) {
                System.out.print(reversedMatrix[caseindex][varindex]);
                if (varindex < varcount - 1) {
                    System.out.print("\t");
                } else {
                    System.out.print("\n");
                }
            }
        }
    }
}

From source file:com.moz.fiji.schema.FormattedEntityId.java

/**
 * Decode a byte array containing an hbase row key into an ordered list corresponding to
 * the key format in the layout file.
 *
 * @param format The row key format as specified in the layout file.
 * @param hbaseRowKey A byte array containing the hbase row key.
 * @return An ordered list of component values in the key, or {@code null} when the
 *     format suppresses key materialization.
 */
private static List<Object> makeFijiRowKey(RowKeyFormat2 format, byte[] hbaseRowKey) {
    if (hbaseRowKey.length == 0) {
        throw new EntityIdException("Invalid hbase row key");
    }
    List<Object> fijiRowKey = new ArrayList<Object>();
    // skip over the hash
    int pos = format.getSalt().getHashSize();
    // we are suppressing materialization, so the components cannot be retrieved.
    int fijiRowElem = 0;
    if (format.getSalt().getSuppressKeyMaterialization()) {
        if (pos < hbaseRowKey.length) {
            // Fixed: the old concatenation produced "...materialization issuppressed".
            throw new EntityIdException("Extra bytes in key after hash when materialization is suppressed");
        }
        return null;
    }
    ByteBuffer buf;

    // Decode one component at a time until either all components are
    // consumed or the key bytes run out.
    while (fijiRowElem < format.getComponents().size() && pos < hbaseRowKey.length) {
        switch (format.getComponents().get(fijiRowElem).getType()) {
        case STRING:
            // Read the row key until we encounter a Null (0) byte or end.
            int endpos = pos;
            while (endpos < hbaseRowKey.length && (hbaseRowKey[endpos] != (byte) 0)) {
                endpos += 1;
            }
            String str = null;
            try {
                str = new String(hbaseRowKey, pos, endpos - pos, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                LOG.error(e.toString());
                throw new EntityIdException(String.format("UnsupportedEncoding for component %d", fijiRowElem));
            }
            fijiRowKey.add(str);
            pos = endpos + 1;
            break;
        case INTEGER:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Integer.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            fijiRowKey.add(Integer.valueOf(buf.getInt()));
            pos = pos + Integer.SIZE / Byte.SIZE;
            break;
        case LONG:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Long.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            fijiRowKey.add(Long.valueOf(buf.getLong()));
            pos = pos + Long.SIZE / Byte.SIZE;
            break;
        default:
            throw new RuntimeException("Invalid code path");
        }
        fijiRowElem += 1;
    }

    // Fail if there are extra bytes in hbase row key.
    if (pos < hbaseRowKey.length) {
        throw new EntityIdException("Extra bytes in hbase row key cannot be mapped to any " + "component");
    }

    // Fail if we encounter nulls before it is legal to do so.
    if (fijiRowElem < format.getNullableStartIndex()) {
        throw new EntityIdException("Too few components decoded from hbase row key. Component " + "number "
                + fijiRowElem + " cannot be null");
    }

    // finish up with nulls for everything that wasn't in the key
    for (; fijiRowElem < format.getComponents().size(); fijiRowElem++) {
        fijiRowKey.add(null);
    }

    return fijiRowKey;
}

From source file:org.kiji.schema.FormattedEntityId.java

/**
 * Decode a byte array containing an hbase row key into an ordered list corresponding to
 * the key format in the layout file./*from   ww w  . j av a  2s .c o m*/
 *
 * @param format The row key format as specified in the layout file.
 * @param hbaseRowKey A byte array containing the hbase row key.
 * @return An ordered list of component values in the key.
 */
private static List<Object> makeKijiRowKey(RowKeyFormat2 format, byte[] hbaseRowKey) {
    if (hbaseRowKey.length == 0) {
        throw new EntityIdException("Invalid hbase row key");
    }
    List<Object> kijiRowKey = new ArrayList<Object>();
    // skip over the hash
    int pos = format.getSalt().getHashSize();
    // we are suppressing materialization, so the components cannot be retrieved.
    int kijiRowElem = 0;
    if (format.getSalt().getSuppressKeyMaterialization()) {
        if (pos < hbaseRowKey.length) {
            throw new EntityIdException("Extra bytes in key after hash when materialization is" + "suppressed");
        }
        return null;
    }
    ByteBuffer buf;

    while (kijiRowElem < format.getComponents().size() && pos < hbaseRowKey.length) {
        switch (format.getComponents().get(kijiRowElem).getType()) {
        case STRING:
            // Read the row key until we encounter a Null (0) byte or end.
            int endpos = pos;
            while (endpos < hbaseRowKey.length && (hbaseRowKey[endpos] != (byte) 0)) {
                endpos += 1;
            }
            String str = null;
            try {
                str = new String(hbaseRowKey, pos, endpos - pos, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                LOG.error(e.toString());
                throw new EntityIdException(String.format("UnsupportedEncoding for component %d", kijiRowElem));
            }
            kijiRowKey.add(str);
            pos = endpos + 1;
            break;
        case INTEGER:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Integer.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            kijiRowKey.add(Integer.valueOf(buf.getInt()));
            pos = pos + Integer.SIZE / Byte.SIZE;
            break;
        case LONG:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Long.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            kijiRowKey.add(Long.valueOf(buf.getLong()));
            pos = pos + Long.SIZE / Byte.SIZE;
            break;
        default:
            throw new RuntimeException("Invalid code path");
        }
        kijiRowElem += 1;
    }

    // Fail if there are extra bytes in hbase row key.
    if (pos < hbaseRowKey.length) {
        throw new EntityIdException("Extra bytes in hbase row key cannot be mapped to any " + "component");
    }

    // Fail if we encounter nulls before it is legal to do so.
    if (kijiRowElem < format.getNullableStartIndex()) {
        throw new EntityIdException("Too few components decoded from hbase row key. Component " + "number "
                + kijiRowElem + " cannot be null");
    }

    // finish up with nulls for everything that wasn't in the key
    for (; kijiRowElem < format.getComponents().size(); kijiRowElem++) {
        kijiRowKey.add(null);
    }

    return kijiRowKey;
}

From source file:org.apache.bookkeeper.bookie.Bookie.java

/**
 * Replays the bookie journal on startup, re-applying every logged record to
 * the ledger storage, and logs how long the replay took.
 *
 * Each journal record begins with two longs: the ledger id and the entry id.
 * Sentinel entry ids mark metadata records (master key, fence) rather than
 * data entries.
 *
 * @throws IOException on journal read failure or version mismatch
 * @throws BookieException on bookie-level replay failure
 */
void readJournal() throws IOException, BookieException {
    long startTs = MathUtils.now();
    journal.replay(new JournalScanner() {
        @Override
        public void process(int journalVersion, long offset, ByteBuffer recBuff) throws IOException {
            // Record header: ledger id then entry id.
            long ledgerId = recBuff.getLong();
            long entryId = recBuff.getLong();
            try {
                LOG.debug("Replay journal - ledger id : {}, entry id : {}.", ledgerId, entryId);
                if (entryId == METAENTRY_ID_LEDGER_KEY) {
                    // Master-key metadata record: length-prefixed key bytes.
                    // Only valid from journal layout V3 onwards.
                    if (journalVersion >= JournalChannel.V3) {
                        int masterKeyLen = recBuff.getInt();
                        byte[] masterKey = new byte[masterKeyLen];

                        recBuff.get(masterKey);
                        masterKeyCache.put(ledgerId, masterKey);
                    } else {
                        throw new IOException("Invalid journal. Contains journalKey " + " but layout version ("
                                + journalVersion + ") is too old to hold this");
                    }
                } else if (entryId == METAENTRY_ID_FENCE_KEY) {
                    // Fence metadata record: re-fence the ledger.
                    // Only valid from journal layout V4 onwards.
                    if (journalVersion >= JournalChannel.V4) {
                        byte[] key = masterKeyCache.get(ledgerId);
                        if (key == null) {
                            // Fall back to the key persisted in ledger storage.
                            key = ledgerStorage.readMasterKey(ledgerId);
                        }
                        LedgerDescriptor handle = handles.getHandle(ledgerId, key);
                        handle.setFenced();
                    } else {
                        throw new IOException("Invalid journal. Contains fenceKey " + " but layout version ("
                                + journalVersion + ") is too old to hold this");
                    }
                } else {
                    // Regular data entry: re-add the whole record to the ledger.
                    byte[] key = masterKeyCache.get(ledgerId);
                    if (key == null) {
                        key = ledgerStorage.readMasterKey(ledgerId);
                    }
                    LedgerDescriptor handle = handles.getHandle(ledgerId, key);

                    // addEntry expects the full record, so rewind past the
                    // header we consumed above.
                    recBuff.rewind();
                    handle.addEntry(recBuff);
                }
            } catch (NoLedgerException nsle) {
                LOG.debug("Skip replaying entries of ledger {} since it was deleted.", ledgerId);
            } catch (BookieException be) {
                throw new IOException(be);
            }
        }
    });
    long elapsedTs = MathUtils.now() - startTs;
    LOG.info("Finished replaying journal in {} ms.", elapsedTs);
}

From source file:org.apache.bookkeeper.bookie.Bookie.java

/**
 * Retrieve the ledger descriptor for the ledger which entry should be added to.
 * The LedgerDescriptor returned from this method should be eventually freed with
 * #putHandle().
 *
 * On first sight of a ledger, its master key is also written to the journal
 * as a metadata record so the key survives a restart.
 *
 * @throws BookieException if masterKey does not match the master key of the ledger
 */
private LedgerDescriptor getLedgerForEntry(ByteBuffer entry, byte[] masterKey)
        throws IOException, BookieException {
    long ledgerId = entry.getLong();
    LedgerDescriptor descriptor = handles.getHandle(ledgerId, masterKey);
    if (!masterKeyCache.containsKey(ledgerId)) {
        // New handle: journal the master key so the ledger can be rebuilt.
        // Record layout: ledgerId (8) + sentinel entryId (8) + keyLen (4) + key.
        ByteBuffer keyEntry = ByteBuffer.allocate(8 + 8 + 4 + masterKey.length);
        keyEntry.putLong(ledgerId);
        keyEntry.putLong(METAENTRY_ID_LEDGER_KEY);
        keyEntry.putInt(masterKey.length);
        keyEntry.put(masterKey);
        keyEntry.flip();

        // Only the thread that wins the cache insert logs the record,
        // so the key is journalled at most once.
        if (masterKeyCache.putIfAbsent(ledgerId, masterKey) == null) {
            journal.logAddEntry(keyEntry, new NopWriteCallback(), null);
        }
    }
    return descriptor;
}