Example usage for java.nio.channels FileChannel read

List of usage examples for java.nio.channels FileChannel read

Introduction

On this page you can find usage examples for java.nio.channels FileChannel read.

Prototype

public final long read(ByteBuffer[] dsts) throws IOException 

Document

Reads a sequence of bytes from this channel into the given buffers.
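
A minimal sketch of a scattering read using this method. The file name data.bin, the 16-byte header size, and the 1 KB body size are illustrative assumptions, not taken from any of the examples below.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ScatteringReadExample {
    public static void main(String[] args) throws IOException {
        try (FileChannel channel = FileChannel.open(Paths.get("data.bin"), StandardOpenOption.READ)) {
            // Two destination buffers: read(ByteBuffer[]) fills them in order.
            ByteBuffer header = ByteBuffer.allocate(16);
            ByteBuffer body = ByteBuffer.allocate(1024);
            ByteBuffer[] dsts = { header, body };

            // Returns the total number of bytes read, or -1 at end-of-stream.
            long bytesRead = channel.read(dsts);
            System.out.println("Read " + bytesRead + " bytes");
        }
    }
}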

Usage

From source file:Main.java

public static long getCommentLength(final FileChannel fileChannel) throws IOException {
    // End of central directory record (EOCD)
    // Offset    Bytes     Description[23]
    // 0           4       End of central directory signature = 0x06054b50
    // 4           2       Number of this disk
    // 6           2       Disk where central directory starts
    // 8           2       Number of central directory records on this disk
    // 10          2       Total number of central directory records
    // 12          4       Size of central directory (bytes)
    // 16          4       Offset of start of central directory, relative to start of archive
    // 20          2       Comment length (n)
    // 22          n       Comment
    // For a zip with no archive comment, the
    // end-of-central-directory record will be 22 bytes long, so
    // we expect to find the EOCD marker 22 bytes from the end.

    final long archiveSize = fileChannel.size();
    if (archiveSize < ZIP_EOCD_REC_MIN_SIZE) {
        throw new IOException("APK too small for ZIP End of Central Directory (EOCD) record");
    }
    // ZIP End of Central Directory (EOCD) record is located at the very end of the ZIP archive.
    // The record can be identified by its 4-byte signature/magic which is located at the very
    // beginning of the record. A complication is that the record is variable-length because of
    // the comment field.
    // The algorithm for locating the ZIP EOCD record is as follows. We search backwards from
    // end of the buffer for the EOCD record signature. Whenever we find a signature, we check
    // the candidate record's comment length is such that the remainder of the record takes up
    // exactly the remaining bytes in the buffer. The search is bounded because the maximum
    // size of the comment field is 65535 bytes because the field is an unsigned 16-bit number.
    final long maxCommentLength = Math.min(archiveSize - ZIP_EOCD_REC_MIN_SIZE, UINT16_MAX_VALUE);
    final long eocdWithEmptyCommentStartPosition = archiveSize - ZIP_EOCD_REC_MIN_SIZE;
    for (int expectedCommentLength = 0; expectedCommentLength <= maxCommentLength; expectedCommentLength++) {
        final long eocdStartPos = eocdWithEmptyCommentStartPosition - expectedCommentLength;

        final ByteBuffer byteBuffer = ByteBuffer.allocate(4);
        fileChannel.position(eocdStartPos);
        fileChannel.read(byteBuffer);
        byteBuffer.order(ByteOrder.LITTLE_ENDIAN);

        if (byteBuffer.getInt(0) == ZIP_EOCD_REC_SIG) {
            final ByteBuffer commentLengthByteBuffer = ByteBuffer.allocate(2);
            fileChannel.position(eocdStartPos + ZIP_EOCD_COMMENT_LENGTH_FIELD_OFFSET);
            fileChannel.read(commentLengthByteBuffer);
            commentLengthByteBuffer.order(ByteOrder.LITTLE_ENDIAN);

            final int actualCommentLength = commentLengthByteBuffer.getShort(0);
            if (actualCommentLength == expectedCommentLength) {
                return actualCommentLength;
            }
        }
    }
    throw new IOException("ZIP End of Central Directory (EOCD) record not found");
}
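
The helper above relies on a few constants that are not shown. Based on the EOCD layout documented in its comments, they would look roughly as follows; this is a sketch of assumed values, not code copied from the original source file.

// Values follow directly from the EOCD layout described in the comments above.
private static final int ZIP_EOCD_REC_MIN_SIZE = 22;                // EOCD record with an empty comment
private static final int ZIP_EOCD_REC_SIG = 0x06054b50;             // EOCD signature ("PK\005\006")
private static final int ZIP_EOCD_COMMENT_LENGTH_FIELD_OFFSET = 20; // offset of the comment length field
private static final int UINT16_MAX_VALUE = 0xffff;                 // comment length is an unsigned 16-bit value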

From source file:eu.stratosphere.nephele.services.iomanager.IOManagerPerformanceBenchmark.java

@SuppressWarnings("resource")
private final void speedTestNIO(int bufferSize, boolean direct) throws IOException {
    final Channel.ID tmpChannel = ioManager.createChannel();

    File tempFile = null;
    FileChannel fs = null;

    try {
        tempFile = new File(tmpChannel.getPath());

        RandomAccessFile raf = new RandomAccessFile(tempFile, "rw");
        fs = raf.getChannel();

        ByteBuffer buf = direct ? ByteBuffer.allocateDirect(bufferSize) : ByteBuffer.allocate(bufferSize);

        long writeStart = System.currentTimeMillis();

        int valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            if (buf.remaining() < 4) {
                buf.flip();
                fs.write(buf);
                buf.clear();
            }
            buf.putInt(valsLeft);
        }

        if (buf.position() > 0) {
            buf.flip();
            fs.write(buf);
        }

        fs.close();
        raf.close();
        fs = null;

        long writeElapsed = System.currentTimeMillis() - writeStart;

        // ----------------------------------------------------------------

        raf = new RandomAccessFile(tempFile, "r");
        fs = raf.getChannel();
        buf.clear();

        long readStart = System.currentTimeMillis();

        fs.read(buf);
        buf.flip();

        valsLeft = NUM_INTS_WRITTEN;
        while (valsLeft-- > 0) {
            if (buf.remaining() < 4) {
                buf.compact();
                fs.read(buf);
                buf.flip();
            }
            if (buf.getInt() != valsLeft) {
                throw new IOException();
            }
        }

        fs.close();
        raf.close();

        long readElapsed = System.currentTimeMillis() - readStart;

        LOG.info("NIO Channel with buffer " + bufferSize + ": write " + writeElapsed + " msecs, read "
                + readElapsed + " msecs.");
    } finally {
        // close if possible
        if (fs != null) {
            fs.close();
            fs = null;
        }
        // try to delete the file
        if (tempFile != null) {
            tempFile.delete();
        }
    }
}

From source file:com.clustercontrol.agent.job.PublicKeyThread.java

/**
 * Deletes the given public key from the authorized_keys file.
 *
 * @param publicKey the public key entry to remove
 * @return true if the key was removed successfully, false otherwise
 */
private synchronized boolean deleteKey(String publicKey) {
    m_log.debug("delete key start");

    if (SKIP_KEYFILE_UPDATE) {
        m_log.info("skipped deleting publicKey");
        return true;
    }

    Charset charset = Charset.forName("UTF-8");
    CharsetEncoder encoder = charset.newEncoder();
    CharsetDecoder decoder = charset.newDecoder();

    // Get the authorized_keys file path from the agent properties
    String fileName = AgentProperties.getProperty(execUser.toLowerCase() + AUTHORIZED_KEY_PATH);
    if (fileName == null || fileName.length() == 0)
        return false;

    // Create a File object for the authorized_keys file
    File fi = new File(fileName);

    RandomAccessFile randomAccessFile = null;
    FileChannel channel = null;
    FileLock lock = null;
    boolean delete = false;
    try {
        // Open the file with a RandomAccessFile in read/write mode
        randomAccessFile = new RandomAccessFile(fi, "rw");
        // Get the FileChannel from the RandomAccessFile
        channel = randomAccessFile.getChannel();

        // Try to acquire an exclusive file lock, retrying until the timeout expires
        for (int i = 0; i < (FILELOCK_TIMEOUT / FILELOCK_WAIT); i++) {
            if (null != (lock = channel.tryLock())) {
                break;
            }
            m_log.info("waiting for locked file... [" + (i + 1) + "/" + (FILELOCK_TIMEOUT / FILELOCK_WAIT)
                    + " : " + fileName + "]");
            Thread.sleep(FILELOCK_WAIT);
        }
        if (null == lock) {
            m_log.warn("file locking timeout.");
            return false;
        }

        // Rewrite the authorized_keys contents (synchronized on authKeyLock)
        synchronized (authKeyLock) {
            // Allocate a buffer large enough for the whole file
            ByteBuffer buffer = ByteBuffer.allocate((int) channel.size());

            // Read the file contents into the buffer
            channel.read(buffer);

            // Flip the buffer so it can be read from position 0
            buffer.flip();

            // Decode the buffer contents into a String
            String contents = decoder.decode(buffer).toString();

            // Log the contents for debugging
            m_log.debug("contents " + contents.length() + " : " + contents);

            // Split the contents into individual key lines
            List<String> keyCheck = new ArrayList<String>();
            StringTokenizer tokenizer = new StringTokenizer(contents, "\n");
            while (tokenizer.hasMoreTokens()) {
                keyCheck.add(tokenizer.nextToken());
            }

            // Find the public key to delete and remove it from the list
            int s = keyCheck.lastIndexOf(publicKey);
            if (s != -1) {
                // Remove the matching key
                m_log.debug("remove key : " + keyCheck.get(s));
                keyCheck.remove(s);
            }

            // Re-encode the remaining keys into the buffer
            encoder.reset();
            buffer.clear();

            int i;
            if (keyCheck.size() > 0) {
                for (i = 0; i < keyCheck.size() - 1; i++) {
                    encoder.encode(CharBuffer.wrap(keyCheck.get(i) + "\n"), buffer, false);
                }
                encoder.encode(CharBuffer.wrap(keyCheck.get(i)), buffer, true);
            }

            // Write the updated contents back to the file
            buffer.flip();
            channel.truncate(0);
            channel.position(0);
            channel.write(buffer);
        }

        delete = true;
    } catch (IOException e) {
        m_log.error(e.getMessage(), e);
    } catch (RuntimeException e) {
        m_log.error(e.getMessage(), e);
    } catch (InterruptedException e) {
        m_log.error(e.getMessage(), e);
    } finally {
        try {
            if (channel != null) {
                channel.close();
            }
            if (randomAccessFile != null) {
                randomAccessFile.close();
            }
            // Release the file lock
            if (lock != null) {
                lock.release();
            }
        } catch (Exception e) {
        }
    }

    return delete;
}

From source file:com.servoy.j2db.util.Utils.java

public static String getTXTFileContent(File f, Charset charset) {
    if (f != null /* && f.exists() */) {
        if (Thread.currentThread().isInterrupted()) {
            Thread.interrupted(); // reset the interrupted flag of the current thread; otherwise FileChannel.read() will throw an exception.
        }
        FileInputStream fis = null;
        try {
            int length = (int) f.length();
            if (f.exists()) {
                fis = new FileInputStream(f);
                FileChannel fc = fis.getChannel();
                ByteBuffer bb = ByteBuffer.allocate(length);
                fc.read(bb);
                bb.rewind();
                CharBuffer cb = charset.decode(bb);
                return cb.toString();
            }
        } catch (Exception e) {
            Debug.error("Error reading txt file: " + f, e); //$NON-NLS-1$
        } finally {
            closeInputStream(fis);
        }
    }
    return null;
}
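
Note that a single FileChannel.read call is not guaranteed to fill the buffer, so the example above can return truncated contents if the file is not read in one pass. A minimal sketch of a read loop, reusing the fc and bb variables from the example (not part of the original utility):

// Keep reading until the buffer is full or end-of-file is reached.
while (bb.hasRemaining()) {
    if (fc.read(bb) == -1) {
        break; // EOF before the buffer was filled
    }
}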

From source file:com.servoy.j2db.util.Utils.java

public static byte[] readFile(File f, long size) {
    if (f != null && f.exists()) {

        FileInputStream fis = null;
        try {
            int length = (int) f.length();
            fis = new FileInputStream(f);
            FileChannel fc = fis.getChannel();
            if (size > length || size < 0)
                size = length;
            ByteBuffer bb = ByteBuffer.allocate((int) size);
            fc.read(bb);
            bb.rewind();
            byte[] bytes = null;
            if (bb.hasArray()) {
                bytes = bb.array();
            } else {
                bytes = new byte[(int) size];
                bb.get(bytes, 0, (int) size);
            }
            return bytes;
        } catch (Exception e) {
            Debug.error("Error reading file: " + f, e); //$NON-NLS-1$
        } finally {
            try {
                if (fis != null)
                    fis.close();
            } catch (Exception ex) {
            }
        }

        //         ByteArrayOutputStream sb = new ByteArrayOutputStream();
        //         try
        //         {
        //            FileInputStream is = new FileInputStream(f);
        //            BufferedInputStream bis = new BufferedInputStream(is);
        //            streamCopy(bis, sb);
        //            closeInputStream(bis);
        //         }
        //         catch (Exception e)
        //         {
        //            Debug.error(e);
        //         }
        //         return sb.toByteArray();
    }
    return null;
}

From source file:org.apache.nifi.processors.standard.TailFile.java

/**
 * Read new lines from the given FileChannel, copying it to the given Output
 * Stream. The Checksum is used in order to later determine whether or not
 * data has been consumed.
 *
 * @param reader The FileChannel to read data from
 * @param buffer the buffer to use for copying data
 * @param out the OutputStream to copy the data to
 * @param checksum the Checksum object to use in order to calculate checksum
 * for recovery purposes
 *
 * @return The new position after the lines have been read
 * @throws java.io.IOException if an I/O error occurs.
 */
private long readLines(final FileChannel reader, final ByteBuffer buffer, final OutputStream out,
        final Checksum checksum) throws IOException {
    getLogger().debug("Reading lines starting at position {}", new Object[] { reader.position() });

    try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
        long pos = reader.position();
        long rePos = pos; // position to re-read

        int num;
        int linesRead = 0;
        boolean seenCR = false;
        buffer.clear();

        while (((num = reader.read(buffer)) != -1)) {
            buffer.flip();

            for (int i = 0; i < num; i++) {
                byte ch = buffer.get(i);

                switch (ch) {
                case '\n': {
                    baos.write(ch);
                    seenCR = false;
                    baos.writeTo(out);
                    final byte[] baosBuffer = baos.toByteArray();
                    checksum.update(baosBuffer, 0, baos.size());
                    if (getLogger().isTraceEnabled()) {
                        getLogger().trace("Checksum updated to {}", new Object[] { checksum.getValue() });
                    }

                    baos.reset();
                    rePos = pos + i + 1;
                    linesRead++;
                    break;
                }
                case '\r': {
                    baos.write(ch);
                    seenCR = true;
                    break;
                }
                default: {
                    if (seenCR) {
                        seenCR = false;
                        baos.writeTo(out);
                        final byte[] baosBuffer = baos.toByteArray();
                        checksum.update(baosBuffer, 0, baos.size());
                        if (getLogger().isTraceEnabled()) {
                            getLogger().trace("Checksum updated to {}", new Object[] { checksum.getValue() });
                        }

                        linesRead++;
                        baos.reset();
                        baos.write(ch);
                        rePos = pos + i;
                    } else {
                        baos.write(ch);
                    }
                }
                }
            }

            pos = reader.position();
        }

        if (rePos < reader.position()) {
            getLogger().debug("Read {} lines; repositioning reader from {} to {}",
                    new Object[] { linesRead, pos, rePos });
            reader.position(rePos); // Ensure we can re-read if necessary
        }

        return rePos;
    }
}

From source file:edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java

public Object[] subsetObjectVector(File tabfile, int column, int varcount, int casecount, int columntype,
        boolean compatmode) throws IOException {

    Object[] retVector = null;

    boolean isString = false;
    boolean isDouble = false;
    boolean isLong = false;
    boolean isFloat = false;

    //Locale loc = new Locale("en", "US");

    if (columntype == COLUMN_TYPE_STRING) {
        isString = true;
        retVector = new String[casecount];
    } else if (columntype == COLUMN_TYPE_DOUBLE) {
        isDouble = true;
        retVector = new Double[casecount];
    } else if (columntype == COLUMN_TYPE_LONG) {
        isLong = true;
        retVector = new Long[casecount];
    } else if (columntype == COLUMN_TYPE_FLOAT) {
        isFloat = true;
        retVector = new Float[casecount];
    } else {
        throw new IOException("Unsupported column type: " + columntype);
    }

    File rotatedImageFile = getRotatedImage(tabfile, varcount, casecount);
    long[] columnEndOffsets = extractColumnOffsets(rotatedImageFile, varcount, casecount);
    long columnOffset = 0;
    long columnLength = 0;

    if (column > 0) {
        columnOffset = columnEndOffsets[column - 1];
        columnLength = columnEndOffsets[column] - columnEndOffsets[column - 1];
    } else {
        columnOffset = varcount * 8;
        columnLength = columnEndOffsets[0] - varcount * 8;
    }

    FileChannel fc = (FileChannel.open(Paths.get(rotatedImageFile.getAbsolutePath()), StandardOpenOption.READ));
    fc.position(columnOffset);
    int MAX_COLUMN_BUFFER = 8192;

    ByteBuffer in = ByteBuffer.allocate(MAX_COLUMN_BUFFER);

    if (columnLength < MAX_COLUMN_BUFFER) {
        in.limit((int) (columnLength));
    }

    long bytesRead = 0;
    long bytesReadTotal = 0;
    int caseindex = 0;
    int byteoffset = 0;
    byte[] leftover = null;

    while (bytesReadTotal < columnLength) {
        bytesRead = fc.read(in);
        byte[] columnBytes = in.array();
        int bytecount = 0;

        while (bytecount < bytesRead) {
            if (columnBytes[bytecount] == '\n') {
                /*
                String token = new String(columnBytes, byteoffset, bytecount-byteoffset, "UTF8");
                        
                if (leftover != null) {
                String leftoverString = new String (leftover, "UTF8");
                token = leftoverString + token;
                leftover = null;
                }
                */
                /* 
                 * Note that the way I was doing it at first - above - 
                 * was not quite the correct way - because I was creating UTF8
                 * strings from the leftover bytes, and the bytes in the 
                 * current buffer *separately*; which means, if a multi-byte
                 * UTF8 character got split in the middle between one buffer
                 * and the next, both chunks of it would become junk 
                 * characters, on each side!
                 * The correct way of doing it, of course, is to create a
                 * merged byte buffer, and then turn it into a UTF8 string. 
                 *      -- L.A. 4.0
                 */
                String token = null;

                if (leftover == null) {
                    token = new String(columnBytes, byteoffset, bytecount - byteoffset, "UTF8");
                } else {
                    byte[] merged = new byte[leftover.length + bytecount - byteoffset];

                    System.arraycopy(leftover, 0, merged, 0, leftover.length);
                    System.arraycopy(columnBytes, byteoffset, merged, leftover.length, bytecount - byteoffset);
                    token = new String(merged, "UTF8");
                    leftover = null;
                    merged = null;
                }

                if (isString) {
                    if ("".equals(token)) {
                        // An empty string is a string missing value!
                        // An empty string in quotes is an empty string!
                        retVector[caseindex] = null;
                    } else {
                        // Strip the outer quotes:
                        token = token.replaceFirst("^\\\"", "");
                        token = token.replaceFirst("\\\"$", "");

                        // We need to restore the special characters that 
                        // are stored in tab files escaped - quotes, new lines 
                        // and tabs. Before we do that however, we need to 
                        // take care of any escaped backslashes stored in 
                        // the tab file. I.e., "foo\t" should be transformed 
                        // to "foo<TAB>"; but "foo\\t" should be transformed 
                        // to "foo\t". This way new lines and tabs that were
                        // already escaped in the original data are not 
                        // going to be transformed to unescaped tab and 
                        // new line characters!

                        String[] splitTokens = token.split(Matcher.quoteReplacement("\\\\"), -2);

                        // (note that it's important to use the 2-argument version 
                        // of String.split(), and set the limit argument to a
                        // negative value; otherwise any trailing backslashes 
                        // are lost.)

                        for (int i = 0; i < splitTokens.length; i++) {
                            splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\\""), "\"");
                            splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\t"), "\t");
                            splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\n"), "\n");
                            splitTokens[i] = splitTokens[i].replaceAll(Matcher.quoteReplacement("\\r"), "\r");
                        }
                        // TODO: 
                        // Make (some of?) the above optional; for ex., we 
                        // do need to restore the newlines when calculating UNFs;
                        // But if we are subsetting these vectors in order to 
                        // create a new tab-delimited file, they will 
                        // actually break things! -- L.A. Jul. 28 2014

                        token = StringUtils.join(splitTokens, '\\');

                        // "compatibility mode" - a hack, to be able to produce
                        // unfs identical to those produced by the "early" 
                        // unf5 jar; will be removed in production 4.0. 
                        // -- L.A. (TODO: ...)
                        if (compatmode && !"".equals(token)) {
                            if (token.length() > 128) {
                                if ("".equals(token.trim())) {
                                    // don't ask... 
                                    token = token.substring(0, 129);
                                } else {
                                    token = token.substring(0, 128);
                                    //token = String.format(loc, "%.128s", token);
                                    token = token.trim();
                                    //dbgLog.info("formatted and trimmed: "+token);
                                }
                            } else {
                                if ("".equals(token.trim())) {
                                    // again, don't ask; 
                                    // - this replicates some bugginness 
                                    // that happens inside unf5;
                                    token = "null";
                                } else {
                                    token = token.trim();
                                }
                            }
                        }

                        retVector[caseindex] = token;
                    }
                } else if (isDouble) {
                    try {
                        // TODO: verify that NaN and +-Inf are 
                        // handled correctly here! -- L.A.
                        // Verified: new Double("nan") works correctly, 
                        // resulting in Double.NaN;
                        // Double("[+-]Inf") doesn't work however; 
                        // (the constructor appears to be expecting it
                        // to be spelled as "Infinity", "-Infinity", etc. 
                        if ("inf".equalsIgnoreCase(token) || "+inf".equalsIgnoreCase(token)) {
                            retVector[caseindex] = java.lang.Double.POSITIVE_INFINITY;
                        } else if ("-inf".equalsIgnoreCase(token)) {
                            retVector[caseindex] = java.lang.Double.NEGATIVE_INFINITY;
                        } else if (token == null || token.equals("")) {
                            // missing value:
                            retVector[caseindex] = null;
                        } else {
                            retVector[caseindex] = new Double(token);
                        }
                    } catch (NumberFormatException ex) {
                        dbgLog.warning("NumberFormatException thrown for " + token + " as Double");

                        retVector[caseindex] = null; // missing value
                        // TODO: ?
                    }
                } else if (isLong) {
                    try {
                        retVector[caseindex] = new Long(token);
                    } catch (NumberFormatException ex) {
                        retVector[caseindex] = null; // assume missing value
                    }
                } else if (isFloat) {
                    try {
                        if ("inf".equalsIgnoreCase(token) || "+inf".equalsIgnoreCase(token)) {
                            retVector[caseindex] = java.lang.Float.POSITIVE_INFINITY;
                        } else if ("-inf".equalsIgnoreCase(token)) {
                            retVector[caseindex] = java.lang.Float.NEGATIVE_INFINITY;
                        } else if (token == null || token.equals("")) {
                            // missing value:
                            retVector[caseindex] = null;
                        } else {
                            retVector[caseindex] = new Float(token);
                        }
                    } catch (NumberFormatException ex) {
                        dbgLog.warning("NumberFormatException thrown for " + token + " as Float");
                        retVector[caseindex] = null; // assume missing value (TODO: ?)
                    }
                }
                caseindex++;

                if (bytecount == bytesRead - 1) {
                    byteoffset = 0;
                } else {
                    byteoffset = bytecount + 1;
                }
            } else {
                if (bytecount == bytesRead - 1) {
                    // We've reached the end of the buffer; 
                    // This means we'll save whatever unused bytes left in 
                    // it - i.e., the bytes between the last new line 
                    // encountered and the end - in the leftover buffer. 

                    // *EXCEPT*, there may be a case of a very long String
                    // that is actually longer than MAX_COLUMN_BUFFER, in 
                    // which case it is possible that we've read through
                    // an entire buffer of bytes without finding any 
                    // new lines... in this case we may need to add this
                    // entire byte buffer to an already existing leftover 
                    // buffer!
                    if (leftover == null) {
                        leftover = new byte[(int) bytesRead - byteoffset];
                        System.arraycopy(columnBytes, byteoffset, leftover, 0, (int) bytesRead - byteoffset);
                    } else {
                        if (byteoffset != 0) {
                            throw new IOException(
                                    "Reached the end of the byte buffer, with some leftover left from the last read; yet the offset is not zero!");
                        }
                        byte[] merged = new byte[leftover.length + (int) bytesRead];

                        System.arraycopy(leftover, 0, merged, 0, leftover.length);
                        System.arraycopy(columnBytes, byteoffset, merged, leftover.length, (int) bytesRead);
                        //leftover = null;
                        leftover = merged;
                        merged = null;
                    }
                    byteoffset = 0;

                }
            }
            bytecount++;
        }

        bytesReadTotal += bytesRead;
        in.clear();
        if (columnLength - bytesReadTotal < MAX_COLUMN_BUFFER) {
            in.limit((int) (columnLength - bytesReadTotal));
        }
    }

    fc.close();

    if (caseindex != casecount) {
        throw new IOException("Faile to read " + casecount + " tokens for column " + column);
        //System.out.println("read "+caseindex+" tokens instead of expected "+casecount+".");
    }

    return retVector;
}

From source file:au.org.theark.core.service.ArkCommonServiceImpl.java

/**
 * {@inheritDoc}
 */
public void copyArkLargeFileAttachments(String sourceFilePath, String destinationFilePath) throws IOException {
    FileChannel source = null;
    FileChannel destination = null;

    try {
        source = new FileInputStream(new File(sourceFilePath)).getChannel();
        destination = new FileOutputStream(new File(destinationFilePath)).getChannel();

        // This fails with Map Failed exception on large files
        // destination.transferFrom(source, 0, source.size());

        ByteBuffer buf = ByteBuffer.allocateDirect(DEFAULT_BUFFER_SIZE);
        while ((source.read(buf)) != -1) {
            buf.flip();
            destination.write(buf);
            buf.clear();
        }
    } finally {
        if (source != null) {
            source.close();
        }
        if (destination != null) {
            destination.close();
        }
    }
}
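
As the comment in the method notes, a single transferFrom call over the whole file failed with a "Map failed" error on large files, which is why the example falls back to a read/write loop. Another commonly used workaround, sketched here as an assumption rather than taken from the original class, is to transfer in bounded chunks:

// Sketch: copy the file in bounded chunks instead of one large transferFrom call.
long size = source.size();
long position = 0;
final long CHUNK = 64L * 1024 * 1024; // 64 MB per call; the chunk size is arbitrary
while (position < size) {
    long transferred = destination.transferFrom(source, position, Math.min(CHUNK, size - position));
    if (transferred <= 0) {
        break; // nothing more could be transferred
    }
    position += transferred;
}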

From source file:MyZone.Settings.java

public byte[] readXML(String filename) {
    byte[] readIn = null;
    FileChannel channel = null;
    FileLock lock = null;
    FileInputStream fis = null;
    ByteArrayOutputStream baos = null;
    try {
        File file = new File(filename);
        if (!file.exists()) {
            return null;
        }
        fis = new FileInputStream(file);
        channel = fis.getChannel();
        while ((lock = channel.tryLock(0L, Long.MAX_VALUE, true)) == null) {
            Thread.yield();
        }
        baos = new ByteArrayOutputStream();
        byte[] b = new byte[1024];
        ByteBuffer buf = ByteBuffer.wrap(b);
        int count = 0;
        long fileLength = file.length();
        while (fileLength > 0) {
            count = channel.read(buf);
            if (count >= 0) {
                fileLength -= count;
                baos.write(b, 0, count);
                buf.rewind();
            }
        }
        readIn = baos.toByteArray();
    } catch (Exception e) {
        if (DEBUG) {
            e.printStackTrace();
        }
        readIn = null;
    } finally {
        try {
            if (lock != null) {
                lock.release();
            }
            if (channel != null) {
                channel.close();
            }
            if (fis != null) {
                fis.close();
            }
            if (baos != null) {
                baos.close();
            }
        } catch (Exception e) {
            if (DEBUG) {
                e.printStackTrace();
            }
            readIn = null;
        }
    }
    return readIn;
}

From source file:com.MainFiles.Functions.java

public int createStan() {
    int x = 0;

    String filename = COUNT_FILE;
    File inwrite = new File(filename);

    // Get a file channel for the file
    try {
        FileChannel channel = new RandomAccessFile(inwrite, "rw").getChannel();
        // Use the file channel to create a lock on the file.
        // This method blocks until it can retrieve the lock.
        FileLock lock = channel.lock();
        //  if(!inwrite.exists()) {
        String s = "";
        try {
            int fileSize = (int) channel.size();
            //    System.out.println("int is" + fileSize);
            ByteBuffer bafa = ByteBuffer.allocate(fileSize);
            int numRead = 0;
            while (numRead >= 0) {
                numRead = channel.read(bafa);
                bafa.rewind();
                for (int i = 0; i < numRead; i++) {
                    int b = (int) bafa.get();
                    char c = (char) b;
                    s = s + c;
                }
            }

            x = Integer.parseInt(s);
            if (x > 999999) {
                x = 100000;
            } else if (x < 100000) {
                x = 100000;
            }
            x = ++x;
            String xx = String.valueOf(x);
            byte[] yy = xx.getBytes();
            channel.truncate(0);
            channel.write(ByteBuffer.wrap(yy));
            // channel.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        lock.release();
        // Close the file
        channel.close();
    } catch (FileNotFoundException e) {
        String message = "The file " + inwrite.getName() + " does not exist. So no input can be written on it";
        System.out.println(message);
        e.printStackTrace();
        //log to error file
    } catch (IOException e) {
        System.out.println("Problem writing to the logfile " + inwrite.getName());

    }

    filename = "";
    return x;
}