Example usage for java.nio ByteBuffer getInt

List of usage examples for java.nio ByteBuffer getInt

Introduction

On this page you can find example usages of java.nio ByteBuffer getInt.

Prototype

public abstract int getInt();

Source Link

Document

Returns the int at the current position and increases the position by 4.

Usage

From source file:com.moz.fiji.schema.FormattedEntityId.java

/**
 * Decode a byte array containing an hbase row key into an ordered list corresponding to
 * the key format in the layout file./*from   w  w  w  .  j av  a  2s.co  m*/
 *
 * @param format The row key format as specified in the layout file.
 * @param hbaseRowKey A byte array containing the hbase row key.
 * @return An ordered list of component values in the key.
 */
private static List<Object> makeFijiRowKey(RowKeyFormat2 format, byte[] hbaseRowKey) {
    if (hbaseRowKey.length == 0) {
        throw new EntityIdException("Invalid hbase row key");
    }
    List<Object> fijiRowKey = new ArrayList<Object>();
    // skip over the hash
    int pos = format.getSalt().getHashSize();
    // we are suppressing materialization, so the components cannot be retrieved.
    int fijiRowElem = 0;
    if (format.getSalt().getSuppressKeyMaterialization()) {
        if (pos < hbaseRowKey.length) {
            throw new EntityIdException("Extra bytes in key after hash when materialization is" + "suppressed");
        }
        return null;
    }
    ByteBuffer buf;

    while (fijiRowElem < format.getComponents().size() && pos < hbaseRowKey.length) {
        switch (format.getComponents().get(fijiRowElem).getType()) {
        case STRING:
            // Read the row key until we encounter a Null (0) byte or end.
            int endpos = pos;
            while (endpos < hbaseRowKey.length && (hbaseRowKey[endpos] != (byte) 0)) {
                endpos += 1;
            }
            String str = null;
            try {
                str = new String(hbaseRowKey, pos, endpos - pos, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                LOG.error(e.toString());
                throw new EntityIdException(String.format("UnsupportedEncoding for component %d", fijiRowElem));
            }
            fijiRowKey.add(str);
            pos = endpos + 1;
            break;
        case INTEGER:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Integer.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            fijiRowKey.add(Integer.valueOf(buf.getInt()));
            pos = pos + Integer.SIZE / Byte.SIZE;
            break;
        case LONG:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Long.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            fijiRowKey.add(Long.valueOf(buf.getLong()));
            pos = pos + Long.SIZE / Byte.SIZE;
            break;
        default:
            throw new RuntimeException("Invalid code path");
        }
        fijiRowElem += 1;
    }

    // Fail if there are extra bytes in hbase row key.
    if (pos < hbaseRowKey.length) {
        throw new EntityIdException("Extra bytes in hbase row key cannot be mapped to any " + "component");
    }

    // Fail if we encounter nulls before it is legal to do so.
    if (fijiRowElem < format.getNullableStartIndex()) {
        throw new EntityIdException("Too few components decoded from hbase row key. Component " + "number "
                + fijiRowElem + " cannot be null");
    }

    // finish up with nulls for everything that wasn't in the key
    for (; fijiRowElem < format.getComponents().size(); fijiRowElem++) {
        fijiRowKey.add(null);
    }

    return fijiRowKey;
}

From source file:org.kiji.schema.FormattedEntityId.java

/**
 * Decode a byte array containing an hbase row key into an ordered list corresponding to
 * the key format in the layout file.
 *
 * <p>Note: the sign bit of INTEGER and LONG components is toggled in place, so this
 * method mutates {@code hbaseRowKey}.</p>
 *
 * @param format The row key format as specified in the layout file.
 * @param hbaseRowKey A byte array containing the hbase row key.
 * @return An ordered list of component values in the key, or {@code null} when key
 *     materialization is suppressed.
 */
private static List<Object> makeKijiRowKey(RowKeyFormat2 format, byte[] hbaseRowKey) {
    if (hbaseRowKey.length == 0) {
        throw new EntityIdException("Invalid hbase row key");
    }
    List<Object> kijiRowKey = new ArrayList<Object>();
    // Skip over the leading hash (salt) bytes.
    int pos = format.getSalt().getHashSize();
    int kijiRowElem = 0;
    // When materialization is suppressed, the components cannot be retrieved:
    // the key must consist of the hash only.
    if (format.getSalt().getSuppressKeyMaterialization()) {
        if (pos < hbaseRowKey.length) {
            // Fixed: the concatenated literals previously rendered as "issuppressed".
            throw new EntityIdException("Extra bytes in key after hash when materialization is suppressed");
        }
        return null;
    }
    ByteBuffer buf;

    while (kijiRowElem < format.getComponents().size() && pos < hbaseRowKey.length) {
        switch (format.getComponents().get(kijiRowElem).getType()) {
        case STRING:
            // Read the row key until we encounter a Null (0) byte or end.
            int endpos = pos;
            while (endpos < hbaseRowKey.length && (hbaseRowKey[endpos] != (byte) 0)) {
                endpos += 1;
            }
            String str = null;
            try {
                str = new String(hbaseRowKey, pos, endpos - pos, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                // Unreachable in practice: UTF-8 is a mandatory charset on every JVM.
                LOG.error(e.toString());
                throw new EntityIdException(String.format("UnsupportedEncoding for component %d", kijiRowElem));
            }
            kijiRowKey.add(str);
            // Skip past the NUL terminator.
            pos = endpos + 1;
            break;
        case INTEGER:
            // Toggle highest order bit to return to original 2's complement
            // (the encoder flipped it to make the byte ordering sortable).
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Integer.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            kijiRowKey.add(Integer.valueOf(buf.getInt()));
            pos = pos + Integer.SIZE / Byte.SIZE;
            break;
        case LONG:
            // Toggle highest order bit to return to original 2's complement.
            hbaseRowKey[pos] = (byte) ((int) hbaseRowKey[pos] ^ (int) Byte.MIN_VALUE);
            try {
                buf = ByteBuffer.wrap(hbaseRowKey, pos, Long.SIZE / Byte.SIZE);
            } catch (IndexOutOfBoundsException e) {
                throw new EntityIdException("Malformed hbase Row Key");
            }
            kijiRowKey.add(Long.valueOf(buf.getLong()));
            pos = pos + Long.SIZE / Byte.SIZE;
            break;
        default:
            throw new RuntimeException("Invalid code path");
        }
        kijiRowElem += 1;
    }

    // Fail if there are extra bytes in hbase row key.
    if (pos < hbaseRowKey.length) {
        throw new EntityIdException("Extra bytes in hbase row key cannot be mapped to any " + "component");
    }

    // Fail if we encounter nulls before it is legal to do so.
    if (kijiRowElem < format.getNullableStartIndex()) {
        throw new EntityIdException("Too few components decoded from hbase row key. Component " + "number "
                + kijiRowElem + " cannot be null");
    }

    // Finish up with nulls for every trailing component that wasn't in the key.
    for (; kijiRowElem < format.getComponents().size(); kijiRowElem++) {
        kijiRowKey.add(null);
    }

    return kijiRowKey;
}

From source file:org.voltdb.utils.CatalogUtil.java

/**
 * Retrieve the catalog and deployment configuration from zookeeper.
 * NOTE: In general, people who want the catalog and/or deployment should
 * be getting it from the current CatalogContext, available from
 * VoltDB.instance().  This is primarily for startup and for use by
 * @UpdateApplicationCatalog.  If you think this is where you need to
 * be getting catalog or deployment from, consider carefully if that's
 * really what you want to do. --izzy 12/8/2014
 *
 * <p>Wire layout read here: int version, long txnId, long uniqueId,
 * 20-byte catalog SHA-1, 20-byte deployment SHA-1, then two
 * length-prefixed byte arrays (catalog, deployment).</p>
 *
 * @param zk the ZooKeeper client to read from
 * @return the decoded catalog/deployment bundle
 * @throws KeeperException if the ZK read fails
 * @throws InterruptedException if the ZK read is interrupted
 */
public static CatalogAndIds getCatalogFromZK(ZooKeeper zk) throws KeeperException, InterruptedException {
    ByteBuffer versionAndBytes = ByteBuffer.wrap(zk.getData(VoltZK.catalogbytes, false, null));
    int version = versionAndBytes.getInt();
    long catalogTxnId = versionAndBytes.getLong();
    long catalogUniqueId = versionAndBytes.getLong();
    final int sha1HashSize = 20; // both hashes are raw SHA-1 digests
    byte[] catalogHash = new byte[sha1HashSize];
    versionAndBytes.get(catalogHash);
    byte[] deploymentHash = new byte[sha1HashSize];
    versionAndBytes.get(deploymentHash);
    int catalogLength = versionAndBytes.getInt();
    byte[] catalogBytes = new byte[catalogLength];
    versionAndBytes.get(catalogBytes);
    int deploymentLength = versionAndBytes.getInt();
    byte[] deploymentBytes = new byte[deploymentLength];
    versionAndBytes.get(deploymentBytes);
    // (Removed a dead store that nulled the local buffer just before return.)
    return new CatalogAndIds(catalogTxnId, catalogUniqueId, version, catalogHash, deploymentHash, catalogBytes,
            deploymentBytes);
}

From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // The 16-byte graph metadata record sits just before the tar index entry.
    int graphEnd = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining());
    ByteBuffer metadata = access.read(graphEnd - 16, 16);
    int expectedChecksum = metadata.getInt();
    int uuidCount = metadata.getInt();
    int graphSize = metadata.getInt();
    int magicWord = metadata.getInt();

    if (magicWord != GRAPH_MAGIC) {
        // No graph entry present in this file.
        return null;
    }

    if (uuidCount < 0 || graphSize < uuidCount * 16 + 16 || BLOCK_SIZE + graphSize > graphEnd) {
        // Impossible uuid and/or byte counts: treat the entry as absent.
        log.warn("Invalid graph metadata in tar file {}", file);
        return null;
    }

    // Seeking backwards in the file might not perform well, but that's OK
    // since we only do this once per file.
    ByteBuffer graph = access.read(graphEnd - graphSize, graphSize);

    // Checksum covers everything except the trailing 16-byte metadata record.
    byte[] payload = new byte[graphSize - 16];
    graph.mark();
    graph.get(payload);
    graph.reset();

    CRC32 crc = new CRC32();
    crc.update(payload);
    if (expectedChecksum != (int) crc.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null;
    }

    return graph;
}

From source file:net.pms.util.AudioUtils.java

/**
 * Parses the old RealAudio 1.0 and 2.0 formats that's not supported by
 * neither {@link org.jaudiotagger} nor MediaInfo. Returns {@code false} if
 * {@code channel} isn't one of these formats or the parsing fails.
 * <p>/*from w w  w . jav  a2  s.co m*/
 * Primary references:
 * <ul>
 * <li><a href="https://wiki.multimedia.cx/index.php/RealMedia">RealAudio on
 * MultimediaWiki</a></li>
 * <li><a
 * href="https://github.com/FFmpeg/FFmpeg/blob/master/libavformat/rmdec.c"
 * >FFmpeg rmdec.c</a></li>
 * </ul>
 *
 * @param channel the {@link Channel} containing the input. Size will only
 *            be parsed if {@code channel} is a {@link FileChannel}
 *            instance.
 * @param media the {@link DLNAMediaInfo} instance to write the parsing
 *            results to.
 * @return {@code true} if the {@code channel} input is in RealAudio 1.0 or
 *         2.0 format and the parsing succeeds; false otherwise
 */
public static boolean parseRealAudio(ReadableByteChannel channel, DLNAMediaInfo media) {
    final byte[] magicBytes = { 0x2E, 0x72, 0x61, (byte) 0xFD };
    ByteBuffer buffer = ByteBuffer.allocate(8);
    buffer.order(ByteOrder.BIG_ENDIAN);
    DLNAMediaAudio audio = new DLNAMediaAudio();
    try {
        int count = channel.read(buffer);
        if (count < 4) {
            LOGGER.trace("Input is too short to be RealAudio");
            return false;
        }
        buffer.flip();
        byte[] signature = new byte[4];
        buffer.get(signature);
        if (!Arrays.equals(magicBytes, signature)) {
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Input signature ({}) mismatches RealAudio version 1.0 or 2.0",
                        new String(signature, StandardCharsets.US_ASCII));
            }
            return false;
        }
        media.setContainer(FormatConfiguration.RA);
        short version = buffer.getShort();
        int reportedHeaderSize = 0;
        int reportedDataSize = 0;
        if (version == 3) {
            audio.setCodecA(FormatConfiguration.REALAUDIO_14_4);
            audio.getAudioProperties().setNumberOfChannels(1);
            audio.getAudioProperties().setSampleFrequency(8000);
            short headerSize = buffer.getShort();

            buffer = ByteBuffer.allocate(headerSize);
            channel.read(buffer);
            buffer.flip();
            buffer.position(8);
            int bytesPerMinute = buffer.getShort() & 0xFFFF;
            reportedDataSize = buffer.getInt();
            byte b = buffer.get();
            if (b != 0) {
                byte[] title = new byte[b & 0xFF];
                buffer.get(title);
                String titleString = new String(title, StandardCharsets.US_ASCII);
                audio.setSongname(titleString);
                audio.setAudioTrackTitleFromMetadata(titleString);
            }
            b = buffer.get();
            if (b != 0) {
                byte[] artist = new byte[b & 0xFF];
                buffer.get(artist);
                audio.setArtist(new String(artist, StandardCharsets.US_ASCII));
            }
            audio.setBitRate(bytesPerMinute * 8 / 60);
            media.setBitrate(bytesPerMinute * 8 / 60);
        } else if (version == 4 || version == 5) {
            buffer = ByteBuffer.allocate(14);
            channel.read(buffer);
            buffer.flip();
            buffer.get(signature);
            if (!".ra4".equals(new String(signature, StandardCharsets.US_ASCII))) {
                LOGGER.debug("Invalid RealAudio 2.0 signature \"{}\"",
                        new String(signature, StandardCharsets.US_ASCII));
                return false;
            }
            reportedDataSize = buffer.getInt();
            buffer.getShort(); //skip version repeated
            reportedHeaderSize = buffer.getInt();

            buffer = ByteBuffer.allocate(reportedHeaderSize);
            channel.read(buffer);
            buffer.flip();
            buffer.getShort(); // skip codec flavor
            buffer.getInt(); // skip coded frame size
            buffer.getInt(); // skip unknown
            long bytesPerMinute = buffer.getInt() & 0xFFFFFFFFL;
            buffer.getInt(); // skip unknown
            buffer.getShort(); // skip sub packet
            buffer.getShort(); // skip frame size
            buffer.getShort(); // skip sub packet size
            buffer.getShort(); // skip unknown
            if (version == 5) {
                buffer.position(buffer.position() + 6); // skip unknown
            }
            short sampleRate = buffer.getShort();
            buffer.getShort(); // skip unknown
            short sampleSize = buffer.getShort();
            short nrChannels = buffer.getShort();
            byte[] fourCC;
            if (version == 4) {
                buffer.position(buffer.get() + buffer.position()); // skip interleaver id
                fourCC = new byte[buffer.get()];
                buffer.get(fourCC);
            } else {
                buffer.getFloat(); // skip deinterlace id
                fourCC = new byte[4];
                buffer.get(fourCC);
            }
            String fourCCString = new String(fourCC, StandardCharsets.US_ASCII).toLowerCase(Locale.ROOT);
            switch (fourCCString) {
            case "lpcJ":
                audio.setCodecA(FormatConfiguration.REALAUDIO_14_4);
                break;
            case "28_8":
                audio.setCodecA(FormatConfiguration.REALAUDIO_28_8);
                break;
            case "dnet":
                audio.setCodecA(FormatConfiguration.AC3);
                break;
            case "sipr":
                audio.setCodecA(FormatConfiguration.SIPRO);
                break;
            case "cook":
                audio.setCodecA(FormatConfiguration.COOK);
            case "atrc":
                audio.setCodecA(FormatConfiguration.ATRAC);
            case "ralf":
                audio.setCodecA(FormatConfiguration.RALF);
            case "raac":
                audio.setCodecA(FormatConfiguration.AAC_LC);
            case "racp":
                audio.setCodecA(FormatConfiguration.HE_AAC);
            default:
                LOGGER.debug("Unknown RealMedia codec FourCC \"{}\" - parsing failed", fourCCString);
                return false;
            }

            if (buffer.hasRemaining()) {
                parseRealAudioMetaData(buffer, audio, version);
            }

            audio.setBitRate((int) (bytesPerMinute * 8 / 60));
            media.setBitrate((int) (bytesPerMinute * 8 / 60));
            audio.setBitsperSample(sampleSize);
            audio.getAudioProperties().setNumberOfChannels(nrChannels);
            audio.getAudioProperties().setSampleFrequency(sampleRate);
        } else {
            LOGGER.error("Could not parse RealAudio format - unknown format version {}", version);
            return false;
        }

        media.getAudioTracksList().add(audio);
        long fileSize = 0;
        if (channel instanceof FileChannel) {
            fileSize = ((FileChannel) channel).size();
            media.setSize(fileSize);
        }
        // Duration is estimated based on bitrate and might not be accurate
        if (audio.getBitRate() > 0) {
            int dataSize;
            if (fileSize > 0 && reportedHeaderSize > 0) {
                int fullHeaderSize = reportedHeaderSize + (version == 3 ? 8 : 16);
                if (reportedDataSize > 0) {
                    dataSize = (int) Math.min(reportedDataSize, fileSize - fullHeaderSize);
                } else {
                    dataSize = (int) (fileSize - fullHeaderSize);
                }
            } else {
                dataSize = reportedDataSize;
            }
            media.setDuration((double) dataSize / audio.getBitRate() * 8);
        }

    } catch (IOException e) {
        LOGGER.debug("Error while trying to parse RealAudio version 1 or 2: {}", e.getMessage());
        LOGGER.trace("", e);
        return false;
    }
    if (PMS.getConfiguration() != null
            && !PMS.getConfiguration().getAudioThumbnailMethod().equals(CoverSupplier.NONE)
            && (StringUtils.isNotBlank(media.getFirstAudioTrack().getSongname())
                    || StringUtils.isNotBlank(media.getFirstAudioTrack().getArtist()))) {
        ID3v1Tag tag = new ID3v1Tag();
        if (StringUtils.isNotBlank(media.getFirstAudioTrack().getSongname())) {
            tag.setTitle(media.getFirstAudioTrack().getSongname());
        }
        if (StringUtils.isNotBlank(media.getFirstAudioTrack().getArtist())) {
            tag.setArtist(media.getFirstAudioTrack().getArtist());
        }
        try {
            media.setThumb(DLNAThumbnail.toThumbnail(CoverUtil.get().getThumbnail(tag), 640, 480, ScaleType.MAX,
                    ImageFormat.SOURCE, false));
        } catch (IOException e) {
            LOGGER.error("An error occurred while generating thumbnail for RealAudio source: [\"{}\", \"{}\"]",
                    tag.getFirstTitle(), tag.getFirstArtist());
        }
    }
    media.setThumbready(true);
    media.setMediaparsed(true);

    return true;
}

From source file:com.linkedin.pinot.common.utils.DataTable.java

/**
 * Constructs a DataTable by deserializing the given serialized form.
 *
 * @param buffer the serialized data-table bytes
 */
public DataTable(byte[] buffer) {
    dataTableSerDe = DataTableSerDeRegistry.getInstance().get();

    final ByteBuffer wrapped = ByteBuffer.wrap(buffer);
    // Assert that version can be de-serialized.
    version = Version.valueOf(wrapped.getInt());
    deserializeDataTable(wrapped);
}

From source file:au.org.ala.delta.intkey.model.IntkeyDatasetFileReader.java

/**
 * Read header information from the characters file.
 *
 * <p>The first record of the characters file is a directory of integer
 * slots; each {@code getInt()} below consumes the next 4-byte slot in
 * order, so the sequence of reads must not be reordered.</p>
 *
 * @param charBinFile
 *            The characters file
 * @param charFileHeader
 *            The object to store header information in
 */
private static void readCharactersFileHeader(BinFile charBinFile, CharactersFileHeader charFileHeader) {
    // read first record which contains header file information;

    ByteBuffer headerBytes = readRecord(charBinFile, 1);

    // read first record of characters file
    charFileHeader.setNC(headerBytes.getInt()); // 0 - number of characters

    headerBytes.getInt(); // 1 - maxDes - not used.

    charFileHeader.setRpCdes(headerBytes.getInt()); // 2
    charFileHeader.setRpStat(headerBytes.getInt()); // 3
    charFileHeader.setRpChlp(headerBytes.getInt()); // 4
    charFileHeader.setRpChlpGrp(headerBytes.getInt()); // 5
    charFileHeader.setRpChlpFmt1(headerBytes.getInt()); // 6
    charFileHeader.setRpChlpFmt2(headerBytes.getInt()); // 7
    charFileHeader.setRpCImagesC(headerBytes.getInt()); // 8
    charFileHeader.setRpStartupImages(headerBytes.getInt()); // 9
    charFileHeader.setRpCKeyImages(headerBytes.getInt()); // 10
    charFileHeader.setRpTKeyImages(headerBytes.getInt()); // 11
    charFileHeader.setRpHeading(headerBytes.getInt()); // 12
    charFileHeader.setRpRegSubHeading(headerBytes.getInt()); // record
                                                             // pointer to
                                                             // registration
                                                             // subheading
                                                             // (13)
    charFileHeader.setRpValidationString(headerBytes.getInt()); // record
                                                                // pointer
                                                                // to
                                                                // validation
                                                                // string
                                                                // for
                                                                // registered
                                                                // dataset
                                                                // (14)

    headerBytes.getInt(); // 15 - record number for character mask - not
                          // used.

    charFileHeader.setRpOrWord(headerBytes.getInt()); // 16
    charFileHeader.setRpCheckForCd(headerBytes.getInt()); // 17
    charFileHeader.setRpFont(headerBytes.getInt()); // 18
    charFileHeader.setRpItemSubHead(headerBytes.getInt()); // 19

    // NOTE(review): ByteBuffer.position() takes a byte offset, but
    // RECORD_LENGTH_INTEGERS - 1 looks like an integer-slot index; confirm
    // the intended offset of the final Cptr field against the file format.
    headerBytes.position(Constants.RECORD_LENGTH_INTEGERS - 1);

    charFileHeader.setCptr(headerBytes.getInt());
}

From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // read the 16-byte graph metadata record located just before the tar
    // index entry (this variant reserves an extra 16 bytes in the entry size)
    int pos = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining() + 16);
    ByteBuffer meta = access.read(pos - 16, 16);
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != GRAPH_MAGIC) {
        return null; // magic byte mismatch: no graph entry in this file
    }

    if (count < 0 || bytes < count * 16 + 16 || BLOCK_SIZE + bytes > pos) {
        log.warn("Invalid graph metadata in tar file {}", file);
        return null; // impossible uuid and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer graph = access.read(pos - bytes, bytes);

    // checksum covers everything except the trailing 16-byte metadata record
    byte[] b = new byte[bytes - 16];
    graph.mark();
    graph.get(b);
    graph.reset();

    CRC32 checksum = new CRC32();
    checksum.update(b);
    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null; // checksum mismatch
    }

    // record that this reader found a valid graph entry
    hasGraph = true;
    return graph;
}

From source file:org.commoncrawl.hadoop.mergeutils.SequenceFileSpillWriter.java

/**
 * Constructs a spill writer that serializes queued record buffers to a
 * SequenceFile on a dedicated background thread.
 *
 * <p>Each queued buffer is a packed sequence of
 * {@code [int keyLen][key bytes][int valueLen][value bytes]} records; the
 * writer thread drains the queue and forwards each record to
 * {@code spillRawRecord2}. A buffer item with a {@code null} buffer acts
 * as the shutdown sentinel.</p>
 *
 * @param fileSystem the target file system
 * @param conf configuration supplying queue capacity and spill buffer size
 * @param outputFilePath path of the sequence file to create
 * @param keyClass record key class
 * @param valueClass record value class
 * @param optionalIndexWriter optional index writer, may be {@code null}
 * @param codec optional compression codec; {@code null} disables compression
 * @param replicationFactor HDFS replication factor for the output file
 * @throws IOException if the output stream or writer cannot be created
 */
public SequenceFileSpillWriter(FileSystem fileSystem, Configuration conf, Path outputFilePath,
        Class<KeyType> keyClass, Class<ValueType> valueClass,
        SequenceFileIndexWriter<KeyType, ValueType> optionalIndexWriter, CompressionCodec codec,
        short replicationFactor) throws IOException {

    _bufferQueue = new LinkedBlockingQueue<QueuedBufferItem>(
            conf.getInt(QUEUE_CAPACITY_PARAM, BUFFER_QUEUE_CAPACITY));
    _spillBufferSize = conf.getInt(SPILL_WRITER_BUFFER_SIZE_PARAM, DEFAULT_SPILL_BUFFER_SIZE);
    _outputStream = fileSystem.create(outputFilePath, true, 10 * 1024 * 1024, replicationFactor,
            fileSystem.getDefaultBlockSize());
    // allocate buffer ...
    _activeBuffer = ByteBuffer.allocate(_spillBufferSize);
    // assign index writer ..
    _indexWriter = optionalIndexWriter;

    if (codec != null) {
        writer = SequenceFile.createWriter(conf, _outputStream, keyClass, valueClass, CompressionType.BLOCK,
                codec);
    } else {
        writer = SequenceFile.createWriter(conf, _outputStream, keyClass, valueClass, CompressionType.NONE,
                null);
    }

    _writerThread = new Thread(new Runnable() {

        @Override
        public void run() {
            // LOG.info("Writer Thread Starting");

            while (true) {

                QueuedBufferItem queuedBufferItem = null;

                try {
                    queuedBufferItem = _bufferQueue.take();
                } catch (InterruptedException e) {
                    // FIX: previously the interrupt was swallowed
                    // (printStackTrace only) and execution fell through to
                    // dereference the null item, causing an NPE. Restore the
                    // interrupt status and stop the writer thread instead.
                    Thread.currentThread().interrupt();
                    return;
                }
                if (queuedBufferItem._buffer == null) {
                    // null buffer is the shutdown sentinel
                    // LOG.info("Writer Thread received empty buffer item. Exiting");
                    return;
                } else {

                    ByteBuffer theBuffer = queuedBufferItem._buffer;

                    // LOG.info("Writer Thread received item. Limit:" +
                    // theBuffer.limit());

                    // get byte pointer
                    byte[] bufferAsBytes = theBuffer.array();

                    // kept only for the commented-out diagnostics below
                    int itemsWritten = 0;
                    long timeStart = System.currentTimeMillis();

                    while (theBuffer.remaining() != 0) {

                        // now read in key length
                        int keyLen = theBuffer.getInt();
                        // mark key position
                        int keyPos = theBuffer.position();
                        // now skip past key length
                        theBuffer.position(keyPos + keyLen);
                        // read value length
                        int valueLen = theBuffer.getInt();
                        // mark value position
                        int valuePosition = theBuffer.position();
                        // now skip past it ...
                        theBuffer.position(valuePosition + valueLen);
                        // now write this out to the sequence file ...

                        try {
                            spillRawRecord2(bufferAsBytes, keyPos, keyLen, bufferAsBytes, valuePosition,
                                    valueLen);
                        } catch (IOException e) {
                            LOG.error("Writer Thread Failed with Error:" + CCStringUtils.stringifyException(e));
                            _writerException = e;
                            return;
                        }
                        itemsWritten++;
                    }
                    // LOG.info("Writer Thread Finished With Buffer. Wrote:"+
                    // itemsWritten + " in:" + (System.currentTimeMillis() -
                    // timeStart));
                }
            }
        }

    });
    _writerThread.start();
}

From source file:au.org.ala.delta.intkey.model.IntkeyDatasetFileReader.java

/**
 * Read header information from the items (taxa) file
 * //from   w w  w. ja  va 2s  .  com
 * @param itemBinFile
 *            The items (taxa) file
 * @param itemFileHeader
 *            The object to store header information in
 */
private static void readItemsFileHeader(BinFile itemBinFile, ItemsFileHeader itemFileHeader) {

    ByteBuffer headerBytes = readRecord(itemBinFile, 1);

    // The first parameter record is a fixed-order sequence of 32 ints.
    // Read them all up front, then assign by index so each value's slot
    // in the record is explicit.
    int[] slot = new int[32];
    for (int i = 0; i < slot.length; i++) {
        slot[i] = headerBytes.getInt();
    }

    itemFileHeader.setNItem(slot[0]); // number of items
    itemFileHeader.setNChar(slot[1]); // number of characters
    itemFileHeader.setMs(slot[2]); // maximum number of states
    // slot[3] (MaxDat) is unused.
    itemFileHeader.setLRec(slot[4]); // record length used in items file
    itemFileHeader.setRpTnam(slot[5]); // record pointer to taxon names
    itemFileHeader.setRpSpec(slot[6]); // record pointer to specifications
    itemFileHeader.setRpMini(slot[7]); // record pointer to minima of integer characters
    itemFileHeader.setLDep(slot[8]); // length of dependency array
    itemFileHeader.setRpCdep(slot[9]); // record pointer to character dependency array
    itemFileHeader.setLinvdep(slot[10]); // length of inverted dependency array
    itemFileHeader.setRpInvdep(slot[11]); // record pointer to inverted dependency array
    itemFileHeader.setRpCdat(slot[12]); // record pointer to data for each character
    itemFileHeader.setLSbnd(slot[13]); // length of state bounds array
    itemFileHeader.setLkstat(Math.max(1, slot[14])); // length of key states array (never below 1)
    itemFileHeader.setMajorVer(slot[15]); // major format version
    itemFileHeader.setRpNkbd(slot[16]); // record pointer to key state bounds array
    itemFileHeader.setMaxInt(slot[17]); // maximum integer value
    // slot[18] (Maxtxt1) and slot[19] (Maxtxt2) are unused.
    itemFileHeader.setMinorVer(slot[20]); // minor format version
    itemFileHeader.setTaxonImageChar(slot[21]); // character specifying taxon images
    itemFileHeader.setRpCimagesI(slot[22]); // pointer to character images
    itemFileHeader.setRpTimages(slot[23]); // pointer to taxon images
    itemFileHeader.setEnableDeltaOutput(slot[24]); // allow DELTA output via OUTPUT SUMMARY command
    itemFileHeader.setChineseFmt(slot[25]); // whether chinese character set
    itemFileHeader.setRpCsynon(slot[26]); // record pointer to characters for synonymy
    itemFileHeader.setRpOmitOr(slot[27]); // record pointer to "omit or" list of characters
    itemFileHeader.setRpNext(slot[28]); // pointer to second parameter record
    itemFileHeader.setDupItemPtr(slot[29]); // pointer to duplicated item name mask
    itemFileHeader.setTptr(slot[30]); // pointer to b-tree and image masks appended to items file
    itemFileHeader.setLbtree(slot[31]); // length of btree in bytes

    // A positive RpNext points at a second parameter record holding six more
    // ints; when absent, those fields default to zero.
    if (itemFileHeader.getRpNext() > 0) {
        ByteBuffer secondHeaderBytes = readRecord(itemBinFile, itemFileHeader.getRpNext());

        itemFileHeader.setRpUseCc(secondHeaderBytes.getInt());
        int rpTlinks1 = secondHeaderBytes.getInt();
        itemFileHeader.setRpOmitPeriod(secondHeaderBytes.getInt());
        itemFileHeader.setRpNewPara(secondHeaderBytes.getInt());
        itemFileHeader.setRpNonAutoCc(secondHeaderBytes.getInt());
        int rpTlinks2 = secondHeaderBytes.getInt();

        itemFileHeader.setRpTlinks(new int[] { rpTlinks1, rpTlinks2 });
    } else {
        itemFileHeader.setRpUseCc(0);
        itemFileHeader.setRpTlinks(new int[] { 0, 0 });
        itemFileHeader.setRpOmitPeriod(0);
        itemFileHeader.setRpNewPara(0);
        itemFileHeader.setRpNonAutoCc(0);
    }
}