Example usage for java.nio ByteBuffer putInt

List of usage examples for java.nio ByteBuffer putInt

Introduction

On this page you can find example usage for java.nio ByteBuffer putInt.

Prototype

public abstract ByteBuffer putInt(int index, int value);

Source Link

Document

Writes the given int to the specified index of this buffer.

Usage

From source file:com.unister.semweb.drums.api.SearchForTest.java

/**
 * Converts the given {@code value} into a byte array of length {@code overallBytes},
 * storing the int (big-endian, ByteBuffer default) in the LAST four bytes; all
 * leading bytes are zero.
 *
 * @param value        the int to encode
 * @param overallBytes total length of the returned array; must be at least 4
 * @return a new byte array of length {@code overallBytes}
 * @throws IllegalArgumentException if {@code overallBytes < 4}
 */
private byte[] convert(int value, int overallBytes) {
    if (overallBytes < 4) {
        throw new IllegalArgumentException("overallBytes must be >= 4, got " + overallBytes);
    }
    ByteBuffer buffer = ByteBuffer.allocate(overallBytes);
    // Absolute putInt(index, value) does not advance the position, and array()
    // returns the whole backing array regardless of position/limit, so the
    // original flip() here was a no-op that misleadingly set the limit to 0.
    buffer.putInt(overallBytes - 4, value);
    return buffer.array();
}

From source file:edu.uci.ics.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppenderTest.java

/**
 * Builds a test frame: tuple records are appended from the start of the buffer
 * while frame metadata grows backwards from the end, 4 bytes per slot — first
 * {@code deletedBytes}, then {@code count}, then one slot per tuple holding the
 * buffer position reached after writing that tuple.
 *
 * NOTE(review): assumes {@code builder} is (re)populated by
 * {@code makeARecord(builder, i)} before each use — confirm in the enclosing class.
 *
 * @param capacity     total frame size in bytes
 * @param count        number of records to append
 * @param deletedBytes value stored in the last 4 bytes of the frame
 * @return the populated frame buffer (position left after the last record)
 * @throws HyracksDataException if record construction fails
 */
ByteBuffer makeAFrame(int capacity, int count, int deletedBytes) throws HyracksDataException {
    ByteBuffer buffer = ByteBuffer.allocate(capacity);
    int metaOffset = capacity - 4;
    buffer.putInt(metaOffset, deletedBytes);
    metaOffset -= 4;
    buffer.putInt(metaOffset, count);
    metaOffset -= 4;
    for (int i = 0; i < count; i++, metaOffset -= 4) {
        makeARecord(builder, i);
        // Relative putInt: field end-offsets precede the record bytes.
        for (int x = 0; x < builder.getFieldEndOffsets().length; x++) {
            buffer.putInt(builder.getFieldEndOffsets()[x]);
        }
        buffer.put(builder.getByteArray(), 0, builder.getSize());
        // Tail-growing metadata must not collide with head-growing record data.
        assert (metaOffset > buffer.position());
        buffer.putInt(metaOffset, buffer.position());

    }
    return buffer;
}

From source file:de.micromata.genome.logging.spi.ifiles.IndexHeader.java

/**
 * Writes the index-file header: file type, version, creation timestamp,
 * index-directory slot, the offset of the first index entry, and a table of
 * fixed-width header names each followed by its 4-byte value.
 *
 * @param os             destination stream (flushed, not closed)
 * @param indexFile      index file being registered with the directory
 * @param indexDirectory directory that allocates this file's slot
 * @throws IOException on write failure
 */
public void writeFileHeader(OutputStream os, File indexFile, IndexDirectory indexDirectory) throws IOException {
    indexDirectoryIdx = indexDirectory.createNewLogIdxFile(indexFile);

    // Scratch buffers reused for fixed-width big-endian encoding.
    ByteBuffer longScratch = ByteBuffer.wrap(new byte[Long.BYTES]);
    ByteBuffer intScratch = ByteBuffer.wrap(new byte[Integer.BYTES]);

    // Fixed preamble.
    os.write(INDEX_FILE_TYPE);
    os.write(INDEX_FILE_VERSION);
    longScratch.putLong(0, System.currentTimeMillis());
    os.write(longScratch.array());
    intScratch.putInt(0, indexDirectoryIdx);
    os.write(intScratch.array());

    // Header table: padded name followed by its value.
    ByteArrayOutputStream table = new ByteArrayOutputStream();
    for (Pair<String, Integer> header : headerOrder) {
        table.write(StringUtils.rightPad(header.getFirst(), HEADER_NAME_LENGTH).getBytes());
        intScratch.putInt(0, header.getSecond());
        table.write(intScratch.array());
    }
    byte[] tableBytes = table.toByteArray();

    // Offset of the first index entry, counted from the start of the file.
    int idxOffset = FILE_TYPE_LENGTH + FILE_VERSION_LENGTH
            + Long.BYTES /* timestamp */
            + Integer.BYTES /* indexDirectoryIdx */
            + Integer.BYTES /* idxOffset field itself */
            + tableBytes.length;
    intScratch.putInt(0, idxOffset);
    os.write(intScratch.array());
    os.write(tableBytes);
    os.flush();
}

From source file:org.cablelabs.playready.cryptfile.PlayReadyPSSH.java

/**
 * Builds the DASH ContentProtection element. For CENC the standard CENC child
 * is appended; for MSPRO a base64-encoded PlayReady Object (PRO) is emitted:
 * a little-endian 32-bit size, a 16-bit record count, then one
 * (type=1, length, data) record per WRM header.
 */
@Override
public Element generateContentProtection(Document d) throws IOException {
    Element e = super.generateContentProtection(d);

    switch (cpType) {

    case CENC:
        e.appendChild(generateCENCContentProtectionData(d));
        break;

    case MSPRO:
        Element pro = d.createElement(MSPRO_ELEMENT);

        // Reusable little-endian scratch buffer for fixed-width fields.
        ByteBuffer scratch = ByteBuffer.allocate(4);
        scratch.order(ByteOrder.LITTLE_ENDIAN);
        ByteArrayOutputStream proBytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(proBytes);

        // PlayReady Header Object Size field (32 bits).
        scratch.putInt(0, proSize);
        out.write(scratch.array());

        // Number of Records field (16 bits).
        scratch.putShort(0, (short) wrmHeaders.size());
        out.write(scratch.array(), 0, 2);

        for (WRMHeader header : wrmHeaders) {
            byte[] wrmData = header.getWRMHeaderData();

            // Record Type — always 1 for WRM Headers (16 bits).
            scratch.putShort(0, (short) 1);
            out.write(scratch.array(), 0, 2);

            // Record Length (16 bits).
            scratch.putShort(0, (short) wrmData.length);
            out.write(scratch.array(), 0, 2);

            // Record Data.
            out.write(wrmData);
        }

        pro.setTextContent(Base64.encodeBase64String(proBytes.toByteArray()));
        e.appendChild(pro);
        break;
    }

    return e;
}

From source file:au.org.ala.delta.translation.dist.DistItemsFileWriter.java

/**
 * Sets one bit per present state of {@code attribute} into the packed bit
 * vector stored in {@code work}, 32 bits per int word starting at
 * {@code wordOffset} (word index) with the first state at {@code bitOffset}.
 * The current word is flushed and the offsets advance whenever a state's bit
 * falls past the end of the word.
 */
private void encodeUnorderedMultistateAttribute(ByteBuffer work, int wordOffset, int bitOffset,
        IdentificationKeyCharacter keyChar, MultiStateAttribute attribute) {
    List<Integer> states = keyChar.getPresentStates(attribute);

    int word = work.getInt(wordOffset * 4);

    for (int state : states) {
        int bit = bitOffset + state - 1;
        // Advance to the word that contains this bit. The original used "if"
        // here, which silently corrupted data whenever a state landed more
        // than one word ahead (Java's << masks the shift count mod 32, so
        // 1 << 64 == 1); looping handles any state number correctly and is
        // identical for states within the next word.
        while (bit > 31) {
            work.putInt(wordOffset * 4, word);

            wordOffset++;
            word = work.getInt(wordOffset * 4);
            bitOffset -= 32;
            bit = bitOffset + state - 1;
        }
        word |= (1 << bit);
    }
    work.putInt(wordOffset * 4, word);
}

From source file:com.github.ambry.utils.UtilsTest.java

/**
 * Exercises {@code Utils.readIntBuffer} and {@code Utils.readShortBuffer}:
 * the input stream carries a length prefix (int or short) followed by payload
 * bytes, and the helpers must return exactly that payload. Zero-length input
 * must yield an empty buffer; a negative length must be rejected with
 * IllegalArgumentException.
 */
@Test
public void testReadBuffers() throws IOException {
    // 4-byte int length prefix + 40000 random payload bytes.
    byte[] buf = new byte[40004];
    new Random().nextBytes(buf);
    ByteBuffer inputBuf = ByteBuffer.wrap(buf);
    inputBuf.putInt(0, 40000);
    ByteBuffer outputBuf = Utils.readIntBuffer(new DataInputStream(new ByteBufferInputStream(inputBuf)));
    for (int i = 0; i < 40000; i++) {
        Assert.assertEquals(buf[i + 4], outputBuf.array()[i]);
    }
    // 0 size
    inputBuf.rewind();
    inputBuf.putInt(0, 0);
    outputBuf = Utils.readIntBuffer(new DataInputStream(new ByteBufferInputStream(inputBuf)));
    Assert.assertEquals("Output should be of length 0", 0, outputBuf.array().length);
    // negative size
    inputBuf.rewind();
    inputBuf.putInt(0, -1);
    try {
        Utils.readIntBuffer(new DataInputStream(new ByteBufferInputStream(inputBuf)));
        Assert.fail("Should have encountered exception with negative length.");
    } catch (IllegalArgumentException e) {
        // expected: negative length must be rejected
    }

    // 2-byte short length prefix + 8 payload bytes.
    buf = new byte[10];
    new Random().nextBytes(buf);
    inputBuf = ByteBuffer.wrap(buf);
    inputBuf.putShort(0, (short) 8);
    outputBuf = Utils.readShortBuffer(new DataInputStream(new ByteBufferInputStream(inputBuf)));
    for (int i = 0; i < 8; i++) {
        Assert.assertEquals(buf[i + 2], outputBuf.array()[i]);
    }
    // 0 size
    inputBuf.rewind();
    inputBuf.putShort(0, (short) 0);
    outputBuf = Utils.readShortBuffer(new DataInputStream(new ByteBufferInputStream(inputBuf)));
    Assert.assertEquals("Output should be of length 0", 0, outputBuf.array().length);
    // negative size
    inputBuf.rewind();
    inputBuf.putShort(0, (short) -1);
    try {
        Utils.readShortBuffer(new DataInputStream(new ByteBufferInputStream(inputBuf)));
        Assert.fail("Should have encountered exception with negative length.");
    } catch (IllegalArgumentException e) {
        // expected: negative length must be rejected
    }
}

From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffReader.java

/**
 * Appends the index map to the file at {@code filePosition}, then writes an
 * 8-byte back-pointer record at file position 8 so readers can locate it.
 * Map layout: 4-byte header constant, 4-byte entry count, then 20 bytes per
 * entry — the ints parsed from the "_"-separated label followed by the image's
 * file offset.
 *
 * NOTE(review): the 20-bytes-per-entry allocation assumes each label splits
 * into exactly four numeric components — confirm against the writer's label
 * format.
 *
 * @param filePosition file offset at which the map is written
 * @return number of bytes written for the map (the buffer capacity)
 * @throws IOException on write failure
 */
private int writeIndexMap(long filePosition) throws IOException {
    //Write 4 byte header, 4 byte number of entries, and 20 bytes for each entry
    int numMappings = indexMap_.size();
    ByteBuffer buffer = ByteBuffer.allocate(8 + 20 * numMappings).order(byteOrder_);
    buffer.putInt(0, MultipageTiffWriter.INDEX_MAP_HEADER);
    buffer.putInt(4, numMappings);
    int position = 2; // int-slot index; slots 0 and 1 hold header and count
    for (String label : indexMap_.keySet()) {
        String[] indecies = label.split("_");
        for (String index : indecies) {
            buffer.putInt(4 * position, Integer.parseInt(index));
            position++;
        }
        buffer.putInt(4 * position, indexMap_.get(label).intValue());
        position++;
    }
    fileChannel_.write(buffer, filePosition);

    // Back-pointer: header constant + offset of the map, at fixed position 8.
    ByteBuffer header = ByteBuffer.allocate(8).order(byteOrder_);
    header.putInt(0, MultipageTiffWriter.INDEX_MAP_OFFSET_HEADER);
    header.putInt(4, (int) filePosition);
    fileChannel_.write(header, 8);
    return buffer.capacity();
}

From source file:edu.mbl.jif.imaging.mmtiff.MultipageTiffReader.java

/**
 * Attempts to repair a dataset that terminated before properly closing,
 * which prevents the multipage tiff writer from putting the index map,
 * comments, channels, and OME XML in the ImageDescription tag location.
 * After user confirmation, walks the IFD chain from {@code firstIFD},
 * rebuilds {@code indexMap_}, appends a fresh index map to the file, zeroes
 * the dangling next-IFD offset, truncates the file, and reopens the channel.
 *
 * @param firstIFD file offset of the first IFD to scan
 * @throws IOException   on file access failure
 * @throws JSONException if image tags cannot be parsed
 */
private void fixIndexMap(long firstIFD) throws IOException, JSONException {
    int choice = JOptionPane.showConfirmDialog(null,
            "This dataset cannot be opened because it appears to have \n"
                    + "been improperly saved.  Would you like Micro-Manager to attempt to fix it?",
            "Micro-Manager", JOptionPane.YES_NO_OPTION);
    if (choice == JOptionPane.NO_OPTION) {
        return;
    }
    long filePosition = firstIFD;
    indexMap_ = new HashMap<String, Long>();
    long nextIFDOffsetLocation = 0;
    IFDData data = null;
    // Walk the IFD chain, indexing every labelled image we can still read.
    while (filePosition > 0) {
        try {
            data = readIFD(filePosition);
            TaggedImage ti = readTaggedImage(data);
            if (ti.tags == null) { // Blank placeholder image; don't add to index map.
                filePosition = data.nextIFD;
                nextIFDOffsetLocation = data.nextIFDOffsetLocation;
                continue;
            }
            String label = MDUtils.getLabel(ti.tags);
            if (label == null) {
                break;
            }
            indexMap_.put(label, filePosition);
            filePosition = data.nextIFD;
            nextIFDOffsetLocation = data.nextIFDOffsetLocation;
        } catch (Exception e) {
            // Stop at the first unreadable IFD; everything indexed so far is kept.
            break;
        }
    }

    filePosition += writeIndexMap(filePosition);

    // Terminate the IFD chain by zeroing the dangling next-IFD offset.
    ByteBuffer buffer = ByteBuffer.allocate(4);
    buffer.order(byteOrder_);
    buffer.putInt(0, 0);
    fileChannel_.write(buffer, nextIFDOffsetLocation);
    raFile_.setLength(filePosition + 8);

    fileChannel_.close();
    raFile_.close();
    // Reopen so subsequent reads see the repaired file.
    createFileChannel();

    ReportingUtils.showMessage("Dataset successfully repaired! Resave file to regain full functionality");
}

From source file:org.apache.hadoop.raid.JRSDecoder.java

/**
 * Decodes an erased block: reads packets from the surviving inputs and
 * dispatches them to decoder worker queues until {@code limit} bytes have
 * been processed, then blocks until every worker signals completion.
 *
 * NOTE(review): relies on enclosing-class state — {@code bufSize},
 * {@code stripeSize}, {@code threadNum}, work queues {@code q} and
 * completion queues {@code p} — confirm their invariants in the class.
 *
 * @param inputs          streams for the surviving stripe members
 * @param erasedLocations indices of erased members; the first is the target
 * @param limit           number of bytes to decode
 * @param outBuf          buffer receiving the recovered data
 * @param sf              stream factory used while reading packets
 * @throws IOException on read failure
 */
void writeFixedBlock(FSDataInputStream[] inputs, int[] erasedLocations, long limit, byte[] outBuf,
        JRSStreamFactory sf) throws IOException {

    int seq = 0;
    int target = erasedLocations[0];
    // Set when an InterruptedException is swallowed by a retry loop below,
    // so the interrupt status can be restored before returning (the original
    // code silently dropped it).
    boolean interrupted = false;

    // Loop while the number of skipped + read bytes is less than the max.
    for (long read = 0; read < limit;) {

        int bufOffset = bufSize * stripeSize;
        ByteBuffer buf = ByteBuffer.allocate(bufOffset + 64);
        // Trailer area past the packet data carries the sequence number.
        buf.putInt(bufOffset, seq);

        // Mark the last threadNum packets so workers know decoding is ending.
        boolean important = false;
        if ((limit - read + bufSize - 1) / bufSize <= threadNum) {
            important = true;
            buf.put(bufOffset + 4, (byte) 1);
        } else {
            buf.put(bufOffset + 4, (byte) 0);
        }
        LOG.info("anchor Decode_stripe " + seq + " Data_reading " + System.nanoTime());
        // Read packets; the erased-location set may grow if more inputs fail.
        buf.rewind();
        erasedLocations = readFromInputs(inputs, erasedLocations, buf, sf, seq);
        LOG.info("anchor Decode_stripe " + seq + " Data_read " + System.nanoTime());

        int toRead = (int) Math.min((long) bufSize, limit - read);

        buf.rewind();

        // Pick the ring buffer with the most remaining capacity.
        int remain = -1;
        int chosen = -1;
        for (int i = 0; i < threadNum; i++) {
            int rc = q[i].remainingCapacity();
            if (remain < rc) {
                remain = rc;
                chosen = i;
            }
        }
        // Final packets are routed to a fixed worker derived from the
        // remaining packet count.
        if (important) {
            chosen = (((int) (limit - read) + bufSize - 1) / bufSize - 1) % threadNum;
        }
        DecodePackage dp = (new DecodePackage(erasedLocations, buf)).limit(limit).outputBuffer(outBuf)
                .target(target);
        // Dispatch; retry until the put succeeds.
        for (;;) {
            try {
                q[chosen].put(dp);
                break;
            } catch (InterruptedException e) {
                interrupted = true;
            }
        }
        LOG.info("anchor Decode_stripe " + seq + " Data_pushed " + System.nanoTime());

        seq++;
        read += toRead;
    }

    // Wait for every worker to signal the end of its decode.
    for (int i = 0; i < threadNum; i++) {
        for (;;) {
            try {
                p[i].take();
                break;
            } catch (InterruptedException e) {
                interrupted = true;
            }
        }
    }
    if (interrupted) {
        Thread.currentThread().interrupt();
    }
}

From source file:org.apache.hadoop.raid.JRSDecoder.java

/**
 * Decodes erased blocks across a corrupt stripe: the overall limit is the
 * maximum of the per-block limits; packets are read from the surviving
 * inputs and dispatched to decoder worker queues, then this method blocks
 * until every worker signals completion. Recovered data is written to the
 * given local files rather than a single output buffer.
 *
 * NOTE(review): relies on enclosing-class state — {@code bufSize},
 * {@code stripeSize}, {@code threadNum}, work queues {@code q} and
 * completion queues {@code p} — confirm their invariants in the class.
 *
 * @param inputs          streams for the surviving stripe members
 * @param erasedLocations indices of erased members
 * @param corruptStripe   located blocks of the corrupt stripe (unused here;
 *                        kept for interface compatibility)
 * @param lbfs            local files receiving the recovered blocks
 * @param limits          per-block byte limits
 * @param sf              stream factory used while reading packets
 * @throws IOException on read failure
 */
void writeFixedBlock(FSDataInputStream[] inputs, int[] erasedLocations,
        Map<Integer, LocatedBlock> corruptStripe, File[] lbfs, long[] limits, JRSStreamFactory sf)
        throws IOException {

    // Decode up to the largest per-block limit.
    long limit = 0;
    for (int i = 0; i < limits.length; i++)
        if (limit < limits[i])
            limit = limits[i];

    int seq = 0;
    // Set when an InterruptedException is swallowed by a retry loop below,
    // so the interrupt status can be restored before returning (the original
    // code silently dropped it).
    boolean interrupted = false;

    // Loop while the number of skipped + read bytes is less than the max.
    for (long read = 0; read < limit;) {

        int bufOffset = bufSize * stripeSize;
        ByteBuffer buf = ByteBuffer.allocate(bufOffset + 64);
        // Trailer area past the packet data carries the sequence number.
        buf.putInt(bufOffset, seq);

        // Mark the last threadNum packets so workers know decoding is ending.
        boolean important = false;
        if ((limit - read + bufSize - 1) / bufSize <= threadNum) {
            important = true;
            buf.put(bufOffset + 4, (byte) 1);
        } else {
            buf.put(bufOffset + 4, (byte) 0);
        }
        LOG.info("anchor Decode_stripe " + seq + " Data_reading " + System.nanoTime());
        // Read packets; the erased-location set may grow if more inputs fail.
        buf.rewind();
        erasedLocations = readFromInputs(inputs, erasedLocations, buf, sf, seq);
        LOG.info("anchor Decode_stripe " + seq + " Data_read " + System.nanoTime());

        int toRead = (int) Math.min((long) bufSize, limit - read);

        buf.rewind();

        // Pick the ring buffer with the most remaining capacity.
        int remain = -1;
        int chosen = -1;
        for (int i = 0; i < threadNum; i++) {
            int rc = q[i].remainingCapacity();
            if (remain < rc) {
                remain = rc;
                chosen = i;
            }
        }
        // Final packets are routed to a fixed worker derived from the
        // remaining packet count.
        if (important) {
            chosen = (((int) (limit - read) + bufSize - 1) / bufSize - 1) % threadNum;
        }

        DecodePackage dp = (new DecodePackage(erasedLocations, buf)).limits(limits).localFiles(lbfs);

        // Dispatch; retry until the put succeeds.
        for (;;) {
            try {
                q[chosen].put(dp);
                break;
            } catch (InterruptedException e) {
                interrupted = true;
            }
        }
        LOG.info("anchor Decode_stripe " + seq + " Data_pushed " + System.nanoTime());

        seq++;
        read += toRead;
    }

    // Wait for every worker to signal the end of its decode.
    for (int i = 0; i < threadNum; i++) {
        for (;;) {
            try {
                p[i].take();
                break;
            } catch (InterruptedException e) {
                interrupted = true;
            }
        }
    }
    if (interrupted) {
        Thread.currentThread().interrupt();
    }
}