Example usage for java.nio ByteBuffer clear

List of usage examples for java.nio ByteBuffer clear

Introduction

On this page you can find example usage for java.nio ByteBuffer clear.

Prototype

public final Buffer clear() 

Source Link

Document

Clears this buffer.

Usage

From source file:cn.tc.ulife.platform.msg.http.util.HttpUtil.java

/**
 * ??/* w  w  w.  j  a v  a  2  s .  c  o m*/
 *
 * @param is
 * @return
 * @throws IOException
 */
public static String readStreamAsStr(InputStream is) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    WritableByteChannel dest = Channels.newChannel(bos);
    ReadableByteChannel src = Channels.newChannel(is);
    ByteBuffer bb = ByteBuffer.allocate(4096);

    while (src.read(bb) != -1) {
        bb.flip();
        dest.write(bb);
        bb.clear();
    }
    src.close();
    dest.close();

    return new String(bos.toByteArray(), Constants.ENCODING);
}

From source file:com.sunchenbin.store.feilong.core.io.IOWriteUtil.java

/**
 * NIO API ?? ()./*from w  w w .j  a  v  a  2s .co m*/
 * 
 * <h3>NIO</h3>
 * 
 * <blockquote>
 * <p>
 * nionew io,jdk1.4,??<br>
 * nio??<br>
 * </p>
 * 
 * <p>
 * ??,<br>
 * ?,,??? ????cpu,?cpu? <br>
 * ? Java IO ,? IO,NIO ?? IO
 * </p>
 * 
 * <p>
 * The new I/O (NIO) APIs introduced in v 1.4 provide new features and improved performance in the areas of buffer management, scalable
 * network and file I/O, character-set support, and regular-expression matching. The NIO APIs supplement the I/O facilities in the
 * java.io package.
 * </p>
 * 
 * <p>
 * The NIO APIs include the following features:
 * </p>
 * 
 * <ol>
 * <li>Buffers for data of primitive types</li>
 * <li>Character-set encoders and decoders</li>
 * <li>A pattern-matching facility based on Perl-style regular expressions</li>
 * <li>Channels, a new primitive I/O abstraction</li>
 * <li>A file interface that supports locks and memory mapping</li>
 * <li>A multiplexed, non-blocking I/O facility for writing scalable servers</li>
 * </ol>
 * </blockquote>
 * 
 * <p>
 * As creme de la creme with regard to performance,you could use NIO {@link java.nio.channels.Channels} and {@link java.nio.ByteBuffer}.
 * </p>
 *
 * @param bufferLength
 *            the buffer length
 * @param inputStream
 *            the input stream
 * @param outputStream
 *            the output stream
 * @since 1.0.8
 * @since jdk1.4
 */
private static void writeUseNIO(int bufferLength, InputStream inputStream, OutputStream outputStream) {
    Date beginDate = new Date();
    ReadableByteChannel readableByteChannel = Channels.newChannel(inputStream);
    WritableByteChannel writableByteChannel = Channels.newChannel(outputStream);
    ByteBuffer byteBuffer = ByteBuffer.allocate(bufferLength);

    try {
        int loopCount = 0;
        int sumSize = 0;
        while (readableByteChannel.read(byteBuffer) != -1) {
            byteBuffer.flip();
            sumSize += writableByteChannel.write(byteBuffer);
            byteBuffer.clear();
            loopCount++;
        }
        if (LOGGER.isDebugEnabled()) {
            String formatSize = FileUtil.formatSize(sumSize);
            String time = DateExtensionUtil.getIntervalForView(beginDate, new Date());
            LOGGER.debug("Write data over,sumSize:[{}],bufferLength:[{}],loopCount:[{}],time:{}", formatSize,
                    bufferLength, loopCount, time);
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    } finally {
        IOUtils.closeQuietly(outputStream);
        IOUtils.closeQuietly(writableByteChannel);
        IOUtils.closeQuietly(inputStream);
        IOUtils.closeQuietly(readableByteChannel);
    }
}

From source file:com.feilong.commons.core.io.IOWriteUtil.java

/**
 * NIO API ?? ()./*from  www  .  j  av a  2 s.co m*/
 *
 * @param bufferLength
 *            the buffer length
 * @param inputStream
 *            the input stream
 * @param outputStream
 *            the output stream
 * @throws UncheckedIOException
 *             the unchecked io exception
 * @since 1.0.8
 */
private static void writeUseNIO(int bufferLength, InputStream inputStream, OutputStream outputStream)
        throws UncheckedIOException {
    int i = 0;
    int sumSize = 0;
    int j = 0;

    ///2 
    //As creme de la creme with regard to performance, you could use NIO Channels and ByteBuffer. 

    ReadableByteChannel readableByteChannel = Channels.newChannel(inputStream);
    WritableByteChannel writableByteChannel = Channels.newChannel(outputStream);

    ByteBuffer byteBuffer = ByteBuffer.allocate(bufferLength);

    try {
        while (readableByteChannel.read(byteBuffer) != -1) {
            byteBuffer.flip();
            j = writableByteChannel.write(byteBuffer);
            sumSize += j;
            byteBuffer.clear();
            i++;
        }

        if (log.isDebugEnabled()) {
            log.debug("Write data over,sumSize:[{}],bufferLength:[{}],loopCount:[{}]",
                    FileUtil.formatSize(sumSize), bufferLength, i);
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    } finally {
        try {
            if (writableByteChannel != null) {
                outputStream.close();
                writableByteChannel.close();
            }
            if (readableByteChannel != null) {
                inputStream.close();
                readableByteChannel.close();
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

From source file:fi.johannes.kata.ocr.utils.files.CFileOperations.java

/**
 * Splits the input file into chunk files of at most {@code rows} lines each,
 * written to {@code outputFolder} as {@code <chunkIndex>-<originalFilename>}.
 *
 * <p>NOTE(review): lines are accumulated into a fixed-size buffer; if {@code rows}
 * lines exceed {@code bufferSize} bytes, {@code buffer.put} will throw
 * {@code BufferOverflowException} — same as the original. The buffer is handed to
 * {@code writeToFile} un-flipped, which presumably flips/drains it; confirm against
 * that helper.</p>
 *
 * @param inputFile    path of the file to split
 * @param outputFolder destination folder/prefix for the chunk files
 * @param bufferSize   capacity in bytes of the per-chunk buffer
 * @param rows         maximum number of lines per chunk
 * @throws FileNotFoundException if the input file does not exist
 * @throws IOException           if reading or writing fails
 */
public static void createChunksByRowsAndBytes(String inputFile, String outputFolder, int bufferSize, int rows)
        throws FileNotFoundException, IOException {
    File f = new File(inputFile);
    String filename = f.getName();
    ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
    int chunkIndex = 0;
    // try-with-resources: the original never closed the reader (resource leak).
    try (BufferedReader reader = new BufferedReader(new FileReader(f))) {
        int rowsInChunk = 0;
        String lineStr;
        while ((lineStr = reader.readLine()) != null) {
            if (rowsInChunk == rows) {
                // Current chunk is full: flush it before starting the next one.
                String outputfile = outputFolder + chunkIndex + "-" + filename;
                writeToFile(buffer, outputfile);
                buffer.clear();
                chunkIndex++;
                rowsInChunk = 0;
            }
            buffer.put(lineStr.getBytes(StandardCharsets.UTF_8));
            buffer.put(System.getProperty("line.separator").getBytes(StandardCharsets.UTF_8));
            rowsInChunk++;
        }
        // Flush the trailing partial chunk; the original silently dropped any lines
        // buffered when EOF arrived before the chunk was full.
        if (buffer.position() > 0) {
            writeToFile(buffer, outputFolder + chunkIndex + "-" + filename);
        }
    }
}

From source file:yui.classes.utils.IOUtils.java

/**
 * Reads the whole file {@code name} into a byte array using an NIO file channel.
 *
 * <p>Fixes in this revision: the original allocated a 1024-byte DIRECT buffer and
 * returned {@code bb.array()} — direct buffers have no accessible backing array, so
 * that call throws {@code UnsupportedOperationException}; even with a heap buffer it
 * would have returned at most the last chunk read. It also dereferenced a
 * possibly-null stream/buffer when the open failed.</p>
 *
 * @param name path of the file to read
 * @return the file contents, or an empty array if the file cannot be read
 *         (the failure is logged)
 */
public static byte[] fileReadNIO(String name) {
    // try-with-resources closes the channel and stream on every path; the original's
    // finally block could NPE when the FileInputStream constructor had thrown.
    try (FileInputStream f = new FileInputStream(name);
            FileChannel ch = f.getChannel()) {
        // Size a HEAP buffer to the whole file so array() is valid and complete.
        // Caveat: the int cast limits this to files < 2 GiB, as any byte[] return must be.
        ByteBuffer bb = ByteBuffer.allocate((int) ch.size());
        while (bb.hasRemaining() && ch.read(bb) != -1) {
            // keep reading until the buffer is full or EOF
        }
        return bb.array();
    } catch (FileNotFoundException ex) {
        logger.error(ex.getMessage());
    } catch (IOException ex) {
        logger.error(ex.getMessage());
    }
    // Failure path: return an empty array rather than crashing on a null buffer.
    return new byte[0];
}

From source file:org.apache.kylin.engine.mr.common.CuboidStatsUtil.java

/**
 * Writes cuboid sampling statistics to a Hadoop {@code SequenceFile} under
 * {@code outputPath}. Two reserved keys precede the per-cuboid records:
 * key -1 carries the mapper overlap ratio and key 0 the sampling percentage;
 * each remaining entry maps a cuboid id to its serialized HLL registers.
 *
 * @param conf                Hadoop configuration used to create the writer
 * @param outputPath          directory that receives the statistics file
 * @param cuboidHLLMap        cuboid id to HyperLogLog counter
 * @param samplingPercentage  sampling percentage recorded at key 0
 * @param mapperOverlapRatio  overlap ratio recorded at key -1
 * @throws IOException if the sequence file cannot be created or written
 */
public static void writeCuboidStatistics(Configuration conf, Path outputPath, //
        Map<Long, HyperLogLogPlusCounter> cuboidHLLMap, int samplingPercentage, double mapperOverlapRatio)
        throws IOException {
    Path seqFilePath = new Path(outputPath, BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);

    // Emit cuboids in ascending id order for a deterministic file layout.
    List<Long> sortedCuboids = new ArrayList<Long>(cuboidHLLMap.keySet());
    Collections.sort(sortedCuboids);

    ByteBuffer valueBuf = ByteBuffer.allocate(BufferedMeasureEncoder.DEFAULT_BUFFER_SIZE);
    SequenceFile.Writer writer = SequenceFile.createWriter(conf, SequenceFile.Writer.file(seqFilePath),
            SequenceFile.Writer.keyClass(LongWritable.class),
            SequenceFile.Writer.valueClass(BytesWritable.class));
    try {
        // mapper overlap ratio at key -1
        writer.append(new LongWritable(-1), new BytesWritable(Bytes.toBytes(mapperOverlapRatio)));

        // sampling percentage at key 0
        writer.append(new LongWritable(0L), new BytesWritable(Bytes.toBytes(samplingPercentage)));

        for (Long cuboid : sortedCuboids) {
            valueBuf.clear();                                  // reuse the buffer per record
            cuboidHLLMap.get(cuboid).writeRegisters(valueBuf);
            valueBuf.flip();                                   // drain mode for append
            writer.append(new LongWritable(cuboid), new BytesWritable(valueBuf.array(), valueBuf.limit()));
        }
    } finally {
        IOUtils.closeQuietly(writer);
    }
}

From source file:it.geosolutions.tools.io.file.IOUtils.java

/**
 * Copies the content of the source channel onto the destination channel.
 *
 * @param bufferSize
 *            size of the temporary direct buffer used for the copy.
 * @param source
 *            the source {@link ReadableByteChannel}; must be open.
 * @param destination
 *            the destination {@link WritableByteChannel}; must be open.
 * @throws IOException
 *             in case something bad happens.
 * @throws IllegalStateException
 *             if either channel is closed.
 */
public static void copyChannel(int bufferSize, ReadableByteChannel source, WritableByteChannel destination)
        throws IOException {

    Objects.notNull(source, destination);
    if (!source.isOpen() || !destination.isOpen()) {
        throw new IllegalStateException("Source and destination channels must be open.");
    }

    final java.nio.ByteBuffer transferBuffer = java.nio.ByteBuffer.allocateDirect(bufferSize);
    for (;;) {
        if (source.read(transferBuffer) == -1) {
            break; // end of input
        }

        transferBuffer.flip(); // prepare the buffer for draining

        // Drain completely: a single write() need not consume the whole buffer.
        while (transferBuffer.hasRemaining()) {
            destination.write(transferBuffer);
        }

        transferBuffer.clear(); // back to fill mode for the next read
    }
}

From source file:com.unister.semweb.drums.TestUtils.java

/**
 * This function checks, if the file with the given filename contains exactly the given LinkData-objects.
 * /*from   w w  w  .  ja  v a 2  s .  c  o  m*/
 * @param dbFileName
 *            the name of the file
 * @param linkDataList
 *            the array, containing LinkData
 * @throws IOException
 * @throws FileLockException
 */
public static boolean checkContentFile(String dbFileName, DummyKVStorable[] linkDataList)
        throws IOException, FileLockException {
    // load file
    DummyKVStorable prototype = gp.getPrototype();
    HeaderIndexFile<DummyKVStorable> dbfile = new HeaderIndexFile<DummyKVStorable>(dbFileName, 1, TestUtils.gp);
    ByteBuffer buffer = ByteBuffer.allocate(prototype.getSize());
    long offset = 0;
    int k = 0;
    while (offset < dbfile.getFilledUpFromContentStart()) {
        dbfile.read(offset, buffer);
        buffer.flip();
        DummyKVStorable newLinkData = (DummyKVStorable) prototype.fromByteBuffer(buffer);
        if (!newLinkData.equals(linkDataList[k])) {
            return false;
        }
        k++;
        offset += buffer.limit();
        buffer.clear();
    }
    dbfile.close();
    return true;
}

From source file:com.flexive.shared.FxFileUtils.java

/**
 * Copy data from source to destination nio channel
 *
 * @param source      source channel
 * @param destination destination channel
 * @return total number of bytes copied
 * @throws java.io.IOException on errors
 */
public static long copyNIOChannel(ReadableByteChannel source, WritableByteChannel destination)
        throws IOException {
    // Standard read/flip/write/compact loop. This fixes two defects in the previous
    // version: (1) its retry loop called write() on a buffer it had just compact()ed
    // back into fill mode, emitting stale bytes from the wrong region; (2) it returned
    // as soon as read() hit EOF, silently dropping bytes still held after a compact().
    ByteBuffer xferBuffer = ByteBuffer.allocateDirect(4096);
    long count = 0;
    boolean endOfInput = false;
    // Keep going until the source is exhausted AND the buffer is fully drained.
    while (!endOfInput || xferBuffer.position() > 0) {
        if (!endOfInput && source.read(xferBuffer) < 0) {
            endOfInput = true;
        }
        xferBuffer.flip(); // drain mode
        int written = destination.write(xferBuffer);
        count += written;
        boolean stillPending = xferBuffer.hasRemaining();
        xferBuffer.compact(); // keep unwritten bytes, back to fill mode
        if (written == 0 && stillPending) {
            // A non-blocking destination accepted nothing; back off briefly
            // (5 ms, as before) instead of spinning hot.
            try {
                Thread.sleep(5);
            } catch (InterruptedException e) {
                // Preserve the interrupt status rather than logging and swallowing it.
                Thread.currentThread().interrupt();
            }
        }
    }
    return count;
}

From source file:org.apache.hadoop.hdfs.StripedFileTestUtil.java

/**
 * Reads the file at srcPath via stateful (positional-stream) reads into {@code buf}
 * and asserts that exactly {@code fileLength} bytes matching {@code expected} come back.
 *
 * @param fs         filesystem to open the file on
 * @param srcPath    path of the file under test
 * @param fileLength expected total number of bytes in the file
 * @param expected   expected file contents
 * @param buf        caller-supplied transfer buffer; cleared after each copy
 * @throws IOException if opening or reading the file fails
 */
static void verifyStatefulRead(FileSystem fs, Path srcPath, int fileLength, byte[] expected, ByteBuffer buf)
        throws IOException {
    try (FSDataInputStream in = fs.open(srcPath)) {
        ByteBuffer result = ByteBuffer.allocate(fileLength);
        int totalRead = 0;
        for (int bytesRead = in.read(buf); bytesRead >= 0; bytesRead = in.read(buf)) {
            totalRead += bytesRead;
            buf.flip();        // drain mode: copy what was just read
            result.put(buf);
            buf.clear();       // ready for the next read
        }
        assertEquals("The length of file should be the same to write size", fileLength, totalRead);
        Assert.assertArrayEquals(expected, result.array());
    }
}