Example usage for java.nio ByteBuffer clear

List of usage examples for java.nio ByteBuffer clear

Introduction

On this page you can find example usages of java.nio.ByteBuffer.clear().

Prototype

public final Buffer clear() 

Document

Clears this buffer. The position is set to zero, the limit is set to the capacity, and the mark is discarded. This method does not actually erase the data in the buffer.
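
Since clear() only resets the position and limit, the usual pattern is to read into the buffer, flip() it, drain it, then clear() it before the next read. Below is a minimal, self-contained sketch of that cycle (the file names and buffer size are placeholders, not taken from the examples that follow):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ByteBufferClearExample {
    public static void main(String[] args) throws IOException {
        try (FileChannel in = FileChannel.open(Paths.get("input.bin"), StandardOpenOption.READ);
                FileChannel out = FileChannel.open(Paths.get("output.bin"), StandardOpenOption.WRITE,
                        StandardOpenOption.CREATE)) {
            ByteBuffer buffer = ByteBuffer.allocate(8192);
            while (in.read(buffer) != -1) {
                buffer.flip();                 // switch from filling to draining
                while (buffer.hasRemaining()) {
                    out.write(buffer);         // drain the bytes just read
                }
                buffer.clear();                // position = 0, limit = capacity, mark discarded
            }
        }
    }
}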

Usage

From source file: com.mycustomloader.vsamloader.VSAMLoader.java

private void readField(ByteBuffer buf, int fieldID) {
    if (mRequiredColumns == null || (mRequiredColumns.length > fieldID && mRequiredColumns[fieldID])) {
        byte[] bytes = new byte[buf.position()];
        buf.rewind();
        buf.get(bytes, 0, bytes.length);
        mProtoTuple.add(new DataByteArray(bytes));
    }
    buf.clear();
}

From source file: org.apache.hadoop.hdfs.server.datanode.BlockSender.java

/**
 * Write packet header into {@code pkt}
 */
private void writePacketHeader(ByteBuffer pkt, int dataLen, int packetLen) {
    pkt.clear();
    PacketHeader header = new PacketHeader(packetLen, offset, seqno, (dataLen == 0), dataLen);
    header.putInBuffer(pkt);
}

From source file: com.turn.ttorrent.common.Torrent.java

private static String hashFiles(List<File> files, int pieceLenght)
        throws InterruptedException, IOException, NoSuchAlgorithmException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ByteBuffer buffer = ByteBuffer.allocate(pieceLenght);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        logger.info("Hashing data from {} with {} threads ({} pieces)...", new Object[] { file.getName(),
                threads, (int) (Math.ceil((double) file.length() / pieceLenght)) });

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10;

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                if (channel.position() / (double) channel.size() * 100f > step) {
                    logger.info("  ... {}% complete", step);
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / pieceLenght));
    logger.info("Hashed {} file(s) ({} bytes) in {} pieces ({} expected) in {}ms.", new Object[] { files.size(),
            length, pieces, expectedPieces, String.format("%.1f", elapsed / 1e6), });

    return hashes.toString();
}

From source file: de.tiqsolutions.hdfs.HadoopFileSystemProvider.java

@Override
public void copy(Path source, Path target, CopyOption... options) throws IOException {
    List<CopyOption> optionList = Arrays.asList(options);
    if (!optionList.contains(StandardCopyOption.REPLACE_EXISTING)) {
        if (Files.exists(target))
            throw new java.nio.file.FileAlreadyExistsException(source.toString(), target.toString(),
                    "could not copy file to destination");
    } else {
        Files.deleteIfExists(target);
    }

    FileSystem sourceFS = source.getFileSystem();
    FileSystem targetFS = target.getFileSystem();

    if (optionList.contains(HadoopCopyOption.REMOTE_COPY) && sourceFS.equals(targetFS)) {

        remoteCopy(source, target, options);
        return;

    }
    try (SeekableByteChannel sourceChannel = sourceFS.provider().newByteChannel(source,
            EnumSet.of(StandardOpenOption.READ))) {

        Set<StandardOpenOption> openOptions = EnumSet.of(StandardOpenOption.WRITE);

        if (optionList.contains(StandardCopyOption.REPLACE_EXISTING))
            openOptions.add(StandardOpenOption.CREATE);
        else
            openOptions.add(StandardOpenOption.CREATE_NEW);
        List<FileAttribute<?>> fileAttributes = new ArrayList<>();
        if (optionList.contains(StandardCopyOption.COPY_ATTRIBUTES)) {

            Set<String> sourceAttrViews = sourceFS.supportedFileAttributeViews();
            Set<String> targetAttrViews = targetFS.supportedFileAttributeViews();
            if (sourceAttrViews.contains(PosixFileAttributeViewImpl.NAME)
                    && targetAttrViews.contains(PosixFileAttributeViewImpl.NAME)) {
                PosixFileAttributes posixAttributes = sourceFS.provider().readAttributes(source,
                        PosixFileAttributes.class);
                fileAttributes.add(PosixFilePermissions.asFileAttribute(posixAttributes.permissions()));
            }

            if (sourceAttrViews.contains(HadoopFileAttributeViewImpl.NAME)
                    && targetAttrViews.contains(HadoopFileAttributeViewImpl.NAME)) {
                final HadoopFileAttributes hdfsAttributes = sourceFS.provider().readAttributes(source,
                        HadoopFileAttributes.class);
                fileAttributes.add(new FileAttribute<Long>() {
                    @Override
                    public String name() {
                        return HadoopFileAttributeViewImpl.NAME + ":blockSize";
                    }

                    @Override
                    public Long value() {
                        return hdfsAttributes.getBlockSize();
                    }
                });
                fileAttributes.add(new FileAttribute<Short>() {
                    @Override
                    public String name() {
                        return HadoopFileAttributeViewImpl.NAME + ":replication";
                    }

                    @Override
                    public Short value() {
                        return hdfsAttributes.getReplication();
                    }
                });

            }
        }

        FileAttribute<?>[] attributes = fileAttributes.toArray(new FileAttribute<?>[fileAttributes.size()]);

        try (SeekableByteChannel targetChannel = targetFS.provider().newByteChannel(target, openOptions,
                attributes)) {
            int buffSize = getConfiguration().getInt(DFSConfigKeys.DFS_STREAM_BUFFER_SIZE_KEY,
                    DFSConfigKeys.DFS_STREAM_BUFFER_SIZE_DEFAULT);
            ByteBuffer buffer = ByteBuffer.allocate(buffSize);
            buffer.clear();
            while (sourceChannel.read(buffer) > 0) {
                buffer.flip();
                targetChannel.write(buffer);
                buffer.clear();
            }

        }
        if (optionList.contains(StandardCopyOption.COPY_ATTRIBUTES)) {
            BasicFileAttributes attrs = sourceFS.provider().readAttributes(source, BasicFileAttributes.class);
            BasicFileAttributeView view = targetFS.provider().getFileAttributeView(target,
                    BasicFileAttributeView.class);
            view.setTimes(attrs.lastModifiedTime(), attrs.lastAccessTime(), attrs.creationTime());

        }

    }

}

From source file: com.twinsoft.convertigo.beans.steps.WriteXMLStep.java

protected void writeFile(String filePath, NodeList nodeList) throws EngineException {
    if (nodeList == null) {
        throw new EngineException("Unable to write to xml file: element is Null");
    }

    String fullPathName = getAbsoluteFilePath(filePath);
    synchronized (Engine.theApp.filePropertyManager.getMutex(fullPathName)) {
        try {
            String encoding = getEncoding();
            encoding = encoding.length() > 0 && Charset.isSupported(encoding) ? encoding : "UTF-8";
            if (!isReallyAppend(fullPathName)) {
                String tTag = defaultRootTagname.length() > 0 ? StringUtils.normalize(defaultRootTagname)
                        : "document";
                FileUtils.write(new File(fullPathName),
                        "<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>\n<" + tTag + "/>", encoding);
            }

            StringBuffer content = new StringBuffer();

            /* do the content, only append child element */
            for (int i = 0; i < nodeList.getLength(); i++) {
                if (nodeList.item(i).getNodeType() == Node.ELEMENT_NODE) {
                    content.append(XMLUtils.prettyPrintElement((Element) nodeList.item(i), true, true));
                }
            }

            /* detect current xml encoding */
            RandomAccessFile randomAccessFile = null;
            try {
                randomAccessFile = new RandomAccessFile(fullPathName, "rw");
                FileChannel fc = randomAccessFile.getChannel();
                ByteBuffer buf = ByteBuffer.allocate(60);
                int nb = fc.read(buf);
                String sbuf = new String(buf.array(), 0, nb, "ASCII");
                String enc = sbuf.replaceFirst("^.*encoding=\"", "").replaceFirst("\"[\\d\\D]*$", "");

                if (!Charset.isSupported(enc)) {
                    enc = encoding;
                }

                buf.clear();

                /* retrieve last header tag*/
                long pos = fc.size() - buf.capacity();
                if (pos < 0) {
                    pos = 0;
                }

                nb = fc.read(buf, pos);

                boolean isUTF8 = Charset.forName(enc) == Charset.forName("UTF-8");

                if (isUTF8) {
                    for (int i = 0; i < buf.capacity(); i++) {
                        sbuf = new String(buf.array(), i, nb - i, enc);
                        if (!sbuf.startsWith("\uFFFD")) { // '\uFFFD' (replacement character): assumed check for a valid UTF-8 boundary
                            pos += i;
                            break;
                        }
                    }
                } else {
                    sbuf = new String(buf.array(), 0, nb, enc);
                }

                int lastTagIndex = sbuf.lastIndexOf("</");
                if (lastTagIndex == -1) {
                    int iend = sbuf.lastIndexOf("/>");
                    if (iend != -1) {
                        lastTagIndex = sbuf.lastIndexOf("<", iend);
                        String tagname = sbuf.substring(lastTagIndex + 1, iend);
                        content = new StringBuffer(
                                "<" + tagname + ">\n" + content.toString() + "</" + tagname + ">");
                    } else {
                        throw new EngineException("Malformed XML file");
                    }
                } else {
                    content.append(sbuf.substring(lastTagIndex));

                    if (isUTF8) {
                        String before = sbuf.substring(0, lastTagIndex);
                        lastTagIndex = before.getBytes(enc).length;
                    }
                }
                fc.write(ByteBuffer.wrap(content.toString().getBytes(enc)), pos + lastTagIndex);
            } finally {
                if (randomAccessFile != null) {
                    randomAccessFile.close();
                }
            }
        } catch (IOException e) {
            throw new EngineException("Unable to write to xml file", e);
        } finally {
            Engine.theApp.filePropertyManager.releaseMutex(fullPathName);
        }
    }
}

From source file: org.gephi.desktop.importer.DesktopImportControllerUI.java

/**
 * Uncompress a GZIP file.
 */
private static File getGzFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    GZIPInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        inputStream = new GZIPInputStream(new FileInputStream(in.getPath()));
        outStream = new FileOutputStream(out);

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}

From source file: org.gephi.desktop.importer.DesktopImportControllerUI.java

/**
 * Uncompress a Bzip2 file.
 */
private static File getBzipFile(FileObject in, File out, boolean isTar) throws IOException {

    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];

    BZip2CompressorInputStream inputStream = null;
    FileOutputStream outStream = null;

    try {
        FileInputStream is = new FileInputStream(in.getPath());
        inputStream = new BZip2CompressorInputStream(is);
        outStream = new FileOutputStream(out.getAbsolutePath());

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }

    return out;
}

From source file: org.apache.http.HC4.impl.nio.client.MainClientExec.java

@Override
public void consumeContent(final InternalState state, final ContentDecoder decoder, final IOControl ioctrl)
        throws IOException {
    if (this.log.isDebugEnabled()) {
        this.log.debug("[exchange: " + state.getId() + "] Consume content");
    }
    if (state.getFinalResponse() != null) {
        final HttpAsyncResponseConsumer<?> responseConsumer = state.getResponseConsumer();
        responseConsumer.consumeContent(decoder, ioctrl);
    } else {
        final ByteBuffer tmpbuf = state.getTmpbuf();
        tmpbuf.clear();
        decoder.read(tmpbuf);
    }
}

From source file: com.bittorrent.mpetazzoni.common.Torrent.java

private static String hashFiles(List<File> files) throws InterruptedException, IOException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ByteBuffer buffer = ByteBuffer.allocate(Torrent.PIECE_LENGTH);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        logger.info("Hashing data from {} with {} threads ({} pieces)...", new Object[] { file.getName(),
                threads, (int) (Math.ceil((double) file.length() / Torrent.PIECE_LENGTH)) });

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10;

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                if (channel.position() / (double) channel.size() * 100f > step) {
                    logger.info("  ... {}% complete", step);
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / Torrent.PIECE_LENGTH));
    logger.info("Hashed {} file(s) ({} bytes) in {} pieces ({} expected) in {}ms.", new Object[] { files.size(),
            length, pieces, expectedPieces, String.format("%.1f", elapsed / 1e6), });

    return hashes.toString();
}

From source file: org.darkware.wpman.security.ChecksumDatabase.java

/**
 * Perform a checksum calculation on the given {@link ReadableByteChannel}. Other code should not create
 * implementations which are dependent on any particular characteristics of the checksum, but the checksum
 * is very likely to be based on a cryptographic-strength hash. The results of the checksum are encoded as
 * a base64 {@code String}.
 *
 * @param channel The {@code ReadableByteChannel} to read data from.
 * @return A Base64 encoded {@code String} representing the checksum.
 * @throws IOException If there was an error while reading data from the channel.
 * @see Base64#encodeBase64String(byte[])
 */
protected String doChecksum(ReadableByteChannel channel) throws IOException {
    Hasher hasher = Hashing.sha256().newHasher();

    final ByteBuffer block = ByteBuffer.allocate(4096);
    while (channel.isOpen()) {
        int bytesRead = channel.read(block);
        if (bytesRead > 0) {
            block.flip();
            hasher.putBytes(block.array(), 0, block.limit());
            block.clear();
        } else if (bytesRead == -1) {
            channel.close();
        }
    }

    return Base64.encodeBase64String(hasher.hash().asBytes());
}