Example usage for java.nio.channels ReadableByteChannel read

List of usage examples for java.nio.channels ReadableByteChannel read

Introduction

On this page you can find example usage of java.nio.channels ReadableByteChannel read.

Prototype

public int read(ByteBuffer dst) throws IOException;

Document

Reads a sequence of bytes from this channel into the given buffer.
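Before the project examples below, here is a minimal sketch of the usual calling pattern: call read in a loop until it returns -1 (end of stream), flipping the buffer between filling and draining. The class and method names (ReadAllBytesExample, readFully) are illustrative only, not part of any library.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

public class ReadAllBytesExample {

    // Drains a channel into a byte array. read() may fill the buffer only
    // partially (or return 0), so keep looping until it returns -1.
    static byte[] readFully(ReadableByteChannel channel) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ByteBuffer buffer = ByteBuffer.allocate(8192);
        while (channel.read(buffer) != -1) {
            buffer.flip();                  // switch the buffer from filling to draining
            while (buffer.hasRemaining()) {
                out.write(buffer.get());
            }
            buffer.clear();                 // make room for the next read
        }
        return out.toByteArray();
    }
}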

Usage

From source file:org.alfresco.contentstore.AbstractContentStore.java

protected int applyPatch(ReadableByteChannel inChannel, WritableByteChannel outChannel,
        PatchDocument patchDocument) throws IOException {
    InChannel c = new InChannel(inChannel, patchDocument.getMatchedBlocks(), patchDocument.getBlockSize());

    int totalWritten = 0;

    int blockIndex = -1;

    //        int blockIndex = c.nextBlock();
    //        if(blockIndex > -1)
    //        {
    for (Patch patch : patchDocument.getPatches()) {
        int lastMatchingBlockIndex = patch.getLastMatchIndex();

        blockIndex = c.nextBlock();
        while (blockIndex != -1 && blockIndex <= lastMatchingBlockIndex) {
            int bytesWritten = outChannel.write(c.currentBlock);
            totalWritten += bytesWritten;
            if (bytesWritten != c.bytesRead) {
                throw new RuntimeException("Wrote too few bytes, expected " + c.bytesRead + ", got " + bytesWritten);
            }

            blockIndex = c.nextBlock();
            if (blockIndex == -1) {
                break;
            }
        }

        // apply patch
        int patchSize = patch.getSize();
        ReadableByteChannel patchChannel = Channels.newChannel(patch.getStream());
        ByteBuffer patchBB = ByteBuffer.allocate(patchSize);
        int bytesRead = patchChannel.read(patchBB);
        patchBB.flip();
        int bytesWritten = outChannel.write(patchBB);
        totalWritten += bytesWritten;
        if (bytesWritten != bytesRead) {
            throw new RuntimeException("Wrote too few bytes, expected " + bytesRead + ", got " + bytesWritten);
        }
    }

    // we're done with all the patches, add the remaining blocks
    while (blockIndex != -1) {
        int bytesWritten = outChannel.write(c.currentBlock);
        totalWritten += bytesWritten;
        if (bytesWritten != c.bytesRead) {
            throw new RuntimeException("Wrote too few bytes");
        }

        blockIndex = c.nextBlock();
    }
    //        }

    return totalWritten;
}

From source file:org.alfresco.repo.content.AbstractWritableContentStoreTest.java

/**
 * Tests random access writing.
 * <p>
 * Only executes if the writer implements {@link RandomAccessContent}.
 */
@Test
public void testRandomAccessWrite() throws Exception {
    ContentWriter writer = getWriter();

    FileChannel fileChannel = writer.getFileChannel(true);
    assertNotNull("No channel given", fileChannel);

    // check that no other content access is allowed
    try {
        writer.getWritableChannel();
        fail("Second channel access allowed");
    } catch (RuntimeException e) {
        // expected
    }

    // write some content in a random fashion (reverse order)
    byte[] content = new byte[] { 1, 2, 3 };
    for (int i = content.length - 1; i >= 0; i--) {
        ByteBuffer buffer = ByteBuffer.wrap(content, i, 1);
        fileChannel.write(buffer, i);
    }

    // close the channel
    fileChannel.close();
    assertTrue("Writer not closed", writer.isClosed());

    // check the content
    ContentReader reader = writer.getReader();
    ReadableByteChannel channelReader = reader.getReadableChannel();
    ByteBuffer buffer = ByteBuffer.allocateDirect(3);
    int count = channelReader.read(buffer);
    assertEquals("Incorrect number of bytes read", 3, count);
    for (int i = 0; i < content.length; i++) {
        assertEquals("Content doesn't match", content[i], buffer.get(i));
    }

    // get a new writer from the store, using the existing content and perform a truncation check
    ContentContext writerTruncateCtx = new ContentContext(writer.getReader(), null);
    ContentWriter writerTruncate = getStore().getWriter(writerTruncateCtx);
    assertEquals("Content size incorrect", 0, writerTruncate.getSize());
    // get the channel with truncation
    FileChannel fcTruncate = writerTruncate.getFileChannel(true);
    fcTruncate.close();
    assertEquals("Content not truncated", 0, writerTruncate.getSize());

    // get a new writer from the store, using the existing content and perform a non-truncation check
    ContentContext writerNoTruncateCtx = new ContentContext(writer.getReader(), null);
    ContentWriter writerNoTruncate = getStore().getWriter(writerNoTruncateCtx);
    assertEquals("Content size incorrect", 0, writerNoTruncate.getSize());
    // get the channel without truncation
    FileChannel fcNoTruncate = writerNoTruncate.getFileChannel(false);
    fcNoTruncate.close();
    assertEquals("Content was truncated", writer.getSize(), writerNoTruncate.getSize());
}

From source file:com.github.jinahya.verbose.codec.BinaryCodecTest.java

protected final void encodeDecode(final ReadableByteChannel expectedChannel) throws IOException {

    if (expectedChannel == null) {
        throw new NullPointerException("null expectedChannel");
    }

    final Path encodedPath = Files.createTempFile("test", null);
    getRuntime().addShutdownHook(new Thread(() -> {
        try {
            Files.delete(encodedPath);
        } catch (final IOException ioe) {
            ioe.printStackTrace(System.err);
        }
    }));
    final WritableByteChannel encodedChannel = FileChannel.open(encodedPath, StandardOpenOption.WRITE);

    final ByteBuffer decodedBuffer = ByteBuffer.allocate(128);
    final ByteBuffer encodedBuffer = ByteBuffer.allocate(decodedBuffer.capacity() << 1);

    while (expectedChannel.read(decodedBuffer) != -1) {
        decodedBuffer.flip(); // limit = position; position = 0 (switch from filling to draining)
        encoder.encode(decodedBuffer, encodedBuffer);
        encodedBuffer.flip();
        encodedChannel.write(encodedBuffer);
        encodedBuffer.compact(); // keep any unconsumed bytes; position = remaining, limit = capacity
        decodedBuffer.compact();
    }

    decodedBuffer.flip();
    while (decodedBuffer.hasRemaining()) {
        encoder.encode(decodedBuffer, encodedBuffer);
        encodedBuffer.flip();
        encodedChannel.write(encodedBuffer);
        encodedBuffer.compact();
    }

    encodedBuffer.flip();
    while (encodedBuffer.hasRemaining()) {
        encodedChannel.write(encodedBuffer);
    }
}
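The test above pairs read with compact() rather than clear(), because the encoder may leave unconsumed bytes behind in the source buffer. Below is a reduced sketch of that pumping loop, with a hypothetical Transform interface standing in for the project's encoder; for brevity it omits the final flush of any partial tail left in the source buffer.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;

public class ChannelPumpExample {

    // Hypothetical stand-in for the encoder used in the test above.
    interface Transform {
        void apply(ByteBuffer src, ByteBuffer dst);
    }

    static void pump(ReadableByteChannel in, WritableByteChannel out, Transform t) throws IOException {
        ByteBuffer src = ByteBuffer.allocate(128);
        ByteBuffer dst = ByteBuffer.allocate(256);
        while (in.read(src) != -1) {
            src.flip();               // drain what was just read
            t.apply(src, dst);        // may consume only part of src
            dst.flip();
            while (dst.hasRemaining()) {
                out.write(dst);       // a single write may be partial, so loop
            }
            dst.clear();
            src.compact();            // keep unconsumed bytes for the next round
        }
        // a full implementation would flush any bytes still left in src here
    }
}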

From source file:org.bytesoft.openjtcc.supports.logger.DbTransactionLoggerImpl.java

private byte[] streamToByteArray(InputStream input) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ReadableByteChannel in = null;
    WritableByteChannel out = null;
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    try {
        in = Channels.newChannel(input);
        out = Channels.newChannel(baos);
        while (in.read(buffer) != -1) {
            buffer.flip();
            out.write(buffer);
            buffer.clear();
        }
    } catch (IOException ex) {
        // ignore
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // ignore
            }
        }
        if (baos != null) {
            try {
                baos.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return baos.toByteArray();
}
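A variant of the conversion above, sketched with try-with-resources so both channels are closed even when a read or write fails; unlike the original, it propagates the IOException instead of swallowing it and returning a partial result. Only standard JDK classes are used.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;

public class StreamToBytesExample {

    static byte[] toByteArray(InputStream input) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ByteBuffer buffer = ByteBuffer.allocate(1024);
        try (ReadableByteChannel in = Channels.newChannel(input);
                WritableByteChannel out = Channels.newChannel(baos)) {
            while (in.read(buffer) != -1) {
                buffer.flip();
                while (buffer.hasRemaining()) {
                    out.write(buffer);      // loop in case of a partial write
                }
                buffer.clear();
            }
        }
        return baos.toByteArray();
    }
}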

From source file:org.alfresco.contentstore.ChecksumTest.java

@Test
public void test11() throws IOException {
    try (InputStream in = getClass().getClassLoader().getResourceAsStream("marbles-uncompressed.tif");
            InputStream in1 = getClass().getClassLoader().getResourceAsStream("marbles-uncompressed1.tif")) {
        ByteBuffer buf1 = ByteBuffer.allocate(8192);
        ByteBuffer buf2 = ByteBuffer.allocate(8192);
        ReadableByteChannel channel1 = Channels.newChannel(in);
        ReadableByteChannel channel2 = Channels.newChannel(in1);
        int numRead1 = -1;
        int numRead2 = -1;
        int total = 0;
        int same = 0;
        int i = 0;
        do {
            total = 0;
            same = 0;

            numRead1 = channel1.read(buf1);
            numRead2 = channel2.read(buf2);
            i += 8192;
            //                buf1.clear();
            //                buf2.clear();
            //
            //                numRead1 = channel1.read(buf1);
            //                numRead2 = channel2.read(buf2);

            buf1.flip();
            buf2.flip();

            if (numRead1 > 0 && numRead2 > 0) {
                if (numRead1 <= numRead2) {
                    while (buf1.hasRemaining()) {
                        total++;
                        byte b1 = buf1.get();
                        byte b2 = buf2.get();
                        if (b1 == b2) {
                            same++;
                        }
                    }
                } else {
                    while (buf2.hasRemaining()) {
                        total++;
                        byte b1 = buf1.get();
                        byte b2 = buf2.get();
                        if (b1 == b2) {
                            same++;
                        }
                    }
                }
            }

            buf1.clear();
            buf2.clear();
        } while (numRead1 > 0 && numRead2 > 0 && same < total);
        //            while(numRead1 > 0 && numRead1 == numRead2);

        System.out.println(i + ", " + numRead1 + ", " + numRead2 + ", " + total + ", " + same + ", "
                + (double) same / total);
    }
}

From source file:edu.northwestern.jcr.adapter.fedora.persistence.FedoraConnectorREST.java

/**
 * Wrapper of getDatastreamDissemination in REST.
 *
 * @param pid pid of the object
 * @param dsID id of the datastream
 * @return byte content of the data stream
 */
public byte[] getDataStream(String pid, String dsID) {
    HttpInputStream inputStream;
    ReadableByteChannel channel;
    ByteBuffer buf;
    byte[] bytes;
    int numRead = 0;
    int length = 0;

    try {
        inputStream = fc.get(
                String.format("/objects/%s/datastreams/%s/content", URLEncoder.encode(pid, "UTF-8"), dsID),
                true, false);
    } catch (Exception e) {
        return null;
    }

    channel = Channels.newChannel(inputStream);
    // Create a fixed-size direct ByteBuffer; the whole datastream must fit in it (read() returns 0, not -1, once it is full)
    buf = ByteBuffer.allocateDirect(10 * 1024 * 1024);

    while (numRead >= 0) {
        // Read bytes from the channel
        try {
            numRead = channel.read(buf);
        } catch (Exception e) {
            return null;
        }

        if (numRead > 0) {
            length += numRead;
        }
    }

    bytes = new byte[length];
    // reset the position of the buffer to zero
    buf.rewind();
    buf.get(bytes);

    return bytes;
}

From source file:org.commoncrawl.util.ArcFileReader.java

@Test
public void testReader(InputStream stream) throws IOException {

    setIOTimeoutValue(30000);

    resetState();

    Thread thread = new Thread(new Runnable() {

        public void run() {
            try {

                while (hasMoreItems()) {
                    ArcFileItem item = new ArcFileItem();

                    getNextItem(item);

                    LOG.info("GOT Item URL:" + item.getUri() + " StreamPos:" + item.getArcFilePos()
                            + " Content Length:" + item.getContent().getCount());
                    for (ArcFileHeaderItem headerItem : item.getHeaderItems()) {
                        if (headerItem.isFieldDirty(ArcFileHeaderItem.Field_ITEMKEY)) {
                            // LOG.info("Header Item:" + headerItem.getItemKey() + " :" +
                            // headerItem.getItemValue());
                        } else {
                            // LOG.info("Header Item:" + headerItem.getItemValue());
                        }
                    }
                    // LOG.info("Content Length:" + item.getContent().getCount());
                    // LOG.info("Content:");
                    /*
                     * ByteArrayInputStream inputStream = new
                     * ByteArrayInputStream(item.getContent
                     * ().getReadOnlyBytes(),0,item.getContent().getCount());
                     * BufferedReader reader = new BufferedReader(new
                     * InputStreamReader(inputStream,Charset.forName("ASCII"))); String
                     * line = null; while ((line = reader.readLine()) != null) {
                     * LOG.info(line); }
                     */
                }
                LOG.info("NO MORE ITEMS... BYE");
            } catch (IOException e) {
                LOG.error(StringUtils.stringifyException(e));
            }
        }

    });

    // run the thread ...
    thread.start();

    ReadableByteChannel channel = Channels.newChannel(stream);

    try {

        int totalBytesRead = 0;
        for (;;) {

            ByteBuffer buffer = ByteBuffer.allocate(32768);

            int bytesRead = channel.read(buffer);
            // LOG.info("Read "+bytesRead + " From File");

            if (bytesRead == -1) {
                finished();
                break;
            } else {
                buffer.flip();
                totalBytesRead += buffer.remaining();
                available(buffer);
            }
        }
    } finally {
        channel.close();
    }

    // now wait for thread to die ...
    LOG.info("Done Reading File.... Waiting for ArcFileThread to DIE");
    try {
        thread.join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    LOG.info("Done Reading File.... ArcFileThread DIED");
}

From source file:org.commoncrawl.hadoop.io.deprecated.ArcFileReader.java

@Test
public void testReader(File file) throws Exception {

    checkCRLFStateMachine();

    setIOTimeoutValue(30000);

    resetState();

    Thread thread = new Thread(new Runnable() {

        public void run() {
            try {

                while (hasMoreItems()) {
                    ArcFileItem item = new ArcFileItem();

                    getNextItem(item);

                    LOG.info("GOT Item URL:" + item.getUri() + " StreamPos:" + item.getArcFilePos()
                            + " Content Length:" + item.getContent().getCount());
                    for (ArcFileHeaderItem headerItem : item.getHeaderItems()) {
                        if (headerItem.isFieldDirty(ArcFileHeaderItem.Field_ITEMKEY)) {
                            // LOG.info("Header Item:" + headerItem.getItemKey() + " :" +
                            // headerItem.getItemValue());
                        } else {
                            // LOG.info("Header Item:" + headerItem.getItemValue());
                        }
                    }
                    // LOG.info("Content Length:" + item.getContent().getCount());
                    // LOG.info("Content:");
                    /*
                     * ByteArrayInputStream inputStream = new
                     * ByteArrayInputStream(item.getContent
                     * ().getReadOnlyBytes(),0,item.getContent().getCount());
                     * BufferedReader reader = new BufferedReader(new
                     * InputStreamReader(inputStream,Charset.forName("ASCII"))); String
                     * line = null; while ((line = reader.readLine()) != null) {
                     * LOG.info(line); }
                     */
                }
                LOG.info("NO MORE ITEMS... BYE");
            } catch (IOException e) {
                LOG.error(StringUtils.stringifyException(e));
            }
        }

    });

    // run the thread ...
    thread.start();

    ReadableByteChannel channel = Channels.newChannel(new FileInputStream(file));

    try {

        int totalBytesRead = 0;
        for (;;) {

            ByteBuffer buffer = ByteBuffer.allocate(32768);

            int bytesRead = channel.read(buffer);
            // LOG.info("Read "+bytesRead + " From File");

            if (bytesRead == -1) {
                finished();
                break;
            } else {
                buffer.flip();
                totalBytesRead += buffer.remaining();
                available(buffer);
            }
        }
    } finally {
        channel.close();
    }

    // now wait for thread to die ...
    LOG.info("Done Reading File.... Waiting for ArcFileThread to DIE");
    thread.join();
    LOG.info("Done Reading File.... ArcFileThread DIED");
}

From source file:edu.harvard.iq.dvn.ingest.dsb.impl.DvnNewJavaFieldCutter.java

public void cutColumns(InputStream in, int noCardsPerCase, int caseLength, String delimitor, String tabFileName)
        throws IOException {

    if (delimitor == null) {
        delimitor = defaultDelimitor;
    }

    OUT_LEN = colwidth; // calculated by parseList
    dbgLog.fine("out_len=" + OUT_LEN);

    String firstline = null;

    if (caseLength == 0) {

        int cread;
        int ccounter = 0;

        firstline = "";

        while (caseLength == 0 && (cread = in.read()) != -1) {
            ccounter++;
            if (cread == '\n') {
                caseLength = ccounter;
            }
            char c = (char) cread;
            firstline = firstline + c;
        }

    }

    if (caseLength == 0) {
        throw new IOException("Subsetting failed: could not read incoming byte stream. "
                + "(Requested file may be unavailable or missing)");

    }

    REC_LEN = caseLength;
    dbgLog.fine("REC_LEN=" + REC_LEN);

    for (int i = 0; i < cargSet.get(Long.valueOf(noCardsPerCase)).size(); i++) {
        int varEndOffset = cargSet.get(Long.valueOf(noCardsPerCase)).get(i).get(1);

        if (REC_LEN <= varEndOffset + 1) {
            throw new IOException("Failed to subset incoming byte stream. Invalid input. "
                    + "(Detected the first record of " + REC_LEN + " bytes; "
                    + "one of the columns requested ends at " + varEndOffset + " bytes).");
        }
    }

    Boolean dottednotation = false;
    Boolean foundData = false;

    // cutting a data file

    ReadableByteChannel rbc = Channels.newChannel(in);
    // input byte-buffer size = row length + 1 (for the newline char)
    ByteBuffer inbuffer = ByteBuffer.allocate(REC_LEN);

    OutputStream outs = new FileOutputStream(tabFileName);
    WritableByteChannel outc = Channels.newChannel(outs);
    ByteBuffer outbuffer = null;

    int pos = 0;
    int offset = 0;
    int outoffset = 0;

    int begin = 0;
    int end = 0;
    int blankoffset = 0;

    int blanktail = 0;
    int k;

    try {
        // lc: line counter
        int lc = 0;
        while (firstline != null || rbc.read(inbuffer) != -1) {

            if (firstline != null) {
                // we have the first line saved as a String:
                inbuffer.put(firstline.getBytes());
                firstline = null;
            }

            // calculate i-th card number
            lc++;
            k = lc % noCardsPerCase;
            if (k == 0) {
                k = noCardsPerCase;
            }
            //out.println("***** " +lc+ "-th line, recod k=" + k + " *****");
            byte[] line_read = new byte[OUT_LEN];
            byte[] junk = new byte[REC_LEN];
            byte[] line_final = new byte[OUT_LEN];

            //out.println("READ: " + offset);
            inbuffer.rewind();

            offset = 0;
            outoffset = 0;

            // how many variables are cut from this k-th card
            int noColumns = cargSet.get(Long.valueOf(k)).size();

            //out.println("noColumns=" + noColumns);
            //out.println("cargSet k =" + cargSet.get(Long.valueOf(k)));

            for (int i = 0; i < noColumns; i++) {
                //out.println("**** " + i +"-th col ****");
                begin = cargSet.get(Long.valueOf(k)).get(i).get(0); // bounds[2 * i];
                end = cargSet.get(Long.valueOf(k)).get(i).get(1); // bounds[2 * i + 1];

                //out.println("i: begin: " + begin + "\ti: end:" + end);

                try {
                    // skip (throw away) the bytes before this column begins
                    if (begin - offset - 1 > 0) {
                        inbuffer.get(junk, 0, (begin - offset - 1));
                    }
                    // get requested bytes
                    inbuffer.get(line_read, outoffset, (end - begin + 1));
                    // set outbound data
                    outbounds[2 * i] = outoffset;
                    outbounds[2 * i + 1] = outoffset + (end - begin);
                    // current position moved to outoffset
                    pos = outoffset;

                    dottednotation = false;
                    foundData = false;

                    blankoffset = 0;
                    blanktail = 0;

                    // as position increases
                    while (pos <= (outoffset + (end - begin))) {

                        //out.println("pos=" + pos + "\tline_read[pos]=" +
                        //    new String(line_read).replace("\000", "\052"));

                        // char   decimal   octal
                        // '0'    48        060
                        // '.'    46        056
                        // ' '    32        040

                        // dot: 
                        if (line_read[pos] == '\056') {
                            dottednotation = true;
                        }

                        // space:
                        if (line_read[pos] == '\040') {
                            if (foundData) {
                                blanktail = blanktail > 0 ? blanktail : pos - 1;
                            } else {
                                blankoffset = pos + 1;
                            }
                        } else {
                            foundData = true;
                            blanktail = 0;
                        }

                        pos++;
                    }
                    // increase the outoffset by width
                    outoffset += (end - begin + 1);
                    // dot false
                    if (!dottednotation) {
                        if (blankoffset > 0) {
                            // set outbound value to blankoffset
                            outbounds[2 * i] = blankoffset;
                        }
                        if (blanktail > 0) {
                            outbounds[2 * i + 1] = blanktail;
                        }
                    }

                } catch (BufferUnderflowException bufe) {
                    //bufe.printStackTrace();
                    throw new IOException(bufe.getMessage());
                }
                // set offset to the value of end-position
                offset = end;
            }

            outoffset = 0;
            // for each var
            for (int i = 0; i < noColumns; i++) {
                begin = outbounds[2 * i];
                end = outbounds[2 * i + 1];
                //out.println("begin=" + begin + "\t end=" + end);
                for (int j = begin; j <= end; j++) {
                    line_final[outoffset++] = line_read[j];
                }

                if (i < (noColumns - 1)) {
                    line_final[outoffset++] = '\011'; // tab x09
                } else {
                    if (k == cargSet.size()) {
                        line_final[outoffset++] = '\012'; // LF x0A
                    } else {
                        line_final[outoffset++] = '\011'; // tab x09
                    }
                }
            }
            //out.println("line_final=" +
            //    new String(line_final).replace("\000", "\052"));
            outbuffer = ByteBuffer.wrap(line_final, 0, outoffset);
            outc.write(outbuffer);
            inbuffer.clear();

        } // while loop
    } catch (IOException ex) {
        //ex.printStackTrace();
        throw new IOException("Failed to subset incoming fixed-field stream: " + ex.getMessage());
    }

}

From source file:org.callimachusproject.server.AccessLog.java

InputStream logOnClose(final String addr, final String username, final String line, final int code,
        final long length, final Header referer, final Header agent, InputStream in) {
    final ReadableByteChannel delegate = ChannelUtil.newChannel(in);
    return ChannelUtil.newInputStream(new ReadableByteChannel() {
        private long size = 0;
        private boolean complete;
        private boolean error;

        public boolean isOpen() {
            return delegate.isOpen();
        }

        public synchronized void close() throws IOException {
            delegate.close();
            if (!complete) {
                complete = true;
                if (error) {
                    log(addr, username, line, 599, size, referer, agent);
                } else if (size < length) {
                    log(addr, username, line, 499, size, referer, agent);
                } else {
                    log(addr, username, line, code, size, referer, agent);
                }
            }
        }

        public synchronized int read(ByteBuffer dst) throws IOException {
            error = true;
            int read = delegate.read(dst);
            if (read < 0) {
                complete = true;
                log(addr, username, line, code, size, referer, agent);
            } else {
                size += read;
            }
            error = false;
            return read;
        }
    });
}