Example usage for java.nio.channels FileChannel size

List of usage examples for java.nio.channels FileChannel size

Introduction

On this page, you can find example usage for java.nio.channels FileChannel size.

Prototype

public abstract long size() throws IOException;

Source Link

Document

Returns the current size of this channel's file.

Usage

From source file:org.dataconservancy.dcs.access.server.TransformerServiceImpl.java

@Override
public String fgdcToHtml(String inputUrl, String format) {

    if (format.contains("fgdc")) {
        // FGDC metadata: fetch the document, run it through the XSL transform,
        // and return the generated HTML.
        TransformerFactory factory = TransformerFactory.newInstance();
        Source xslt = new StreamSource(new File(homeDir + "queryFgdcResult.xsl"));
        try {
            Transformer transformer = factory.newTransformer(xslt);
            String inputPath = homeDir + UUID.randomUUID().toString() + "fgdcinput.xml";
            saveUrl(inputPath, inputUrl);
            Source text = new StreamSource(new File(inputPath));
            String outputPath = homeDir + UUID.randomUUID().toString() + "fgdcoutput.html";
            File outputFile = new File(outputPath);
            transformer.transform(text, new StreamResult(outputFile));
            return readFileAsString(outputPath);
        } catch (TransformerConfigurationException e) {
            e.printStackTrace();
        } catch (TransformerException e) {
            e.printStackTrace();
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        // Non-FGDC input: fetch the document and return its raw content unchanged.
        try {
            String inputPath = homeDir + UUID.randomUUID().toString() + "fgdcinput.xml";
            saveUrl(inputPath, inputUrl);
            // The original leaked this stream (it was never closed on this path);
            // the helper below always closes it.
            return readFileAsString(inputPath);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Any failure above falls through to null (callers must handle a null result).
    return null;
}

/**
 * Reads the entire file at {@code path} into a String by memory-mapping it.
 *
 * @param path absolute path of the file to read
 * @return the file content decoded with the platform default charset
 * @throws IOException if the file cannot be opened or mapped
 */
private String readFileAsString(String path) throws IOException {
    FileInputStream stream = new FileInputStream(new File(path));
    try {
        FileChannel fc = stream.getChannel();
        MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        // NOTE(review): decodes with the platform default charset, as the original
        // did; pass an explicit Charset here if the document encoding is known.
        return Charset.defaultCharset().decode(bb).toString();
    } finally {
        stream.close();
    }
}

From source file:de.cosmocode.palava.store.FileSystemStore.java

/**
 * Memory-maps the stored file identified by {@code identifier} and returns the
 * read-only mapping.
 *
 * @param identifier the store key; must not be null and must refer to an existing file
 * @return a read-only {@link ByteBuffer} over the whole file
 * @throws IOException if the file cannot be opened or mapped
 */
@Override
public ByteBuffer view(String identifier) throws IOException {
    Preconditions.checkNotNull(identifier, "Identifier");
    final File file = getFile(identifier);
    Preconditions.checkState(file.exists(), "%s does not exist", file);
    LOG.trace("Reading file from {}", file);
    final FileChannel channel = new RandomAccessFile(file, "r").getChannel();
    try {
        // Per FileChannel#map, the mapping remains valid after the channel is closed.
        return channel.map(MapMode.READ_ONLY, 0, channel.size());
    } finally {
        // The original leaked the channel (and its RandomAccessFile); close it here.
        channel.close();
    }
}

From source file:com.thinkberg.vfs.s3.jets3t.Jets3tFileObject.java

/**
 * Returns an output stream that writes into the local cache file; when the stream
 * is closed, the cached content is uploaded to S3 storage.
 *
 * NOTE(review): bAppend is ignored here — the cache file's channel is obtained the
 * same way regardless; confirm whether append mode is handled by getCacheFile().
 */
protected OutputStream doGetOutputStream(boolean bAppend) throws Exception {
    return new MonitorOutputStream(Channels.newOutputStream(getCacheFile().getChannel())) {
        // Invoked by MonitorOutputStream once the wrapped stream has been closed.
        protected void onClose() throws IOException {
            try {
                LOG.debug(String.format("sending '%s' to storage (cached=%b)", object.getKey(), cacheFile));
                if (cacheFile != null) {
                    // Use the cache file's channel both to set the exact content
                    // length and as the data source for the upload.
                    FileChannel cacheFc = getCacheFile().getChannel();
                    object.setContentLength(cacheFc.size());
                    object.setDataInputStream(Channels.newInputStream(cacheFc));
                }
                service.putObject(bucket, object);
            } catch (S3ServiceException e) {
                // Upload failures are logged, not propagated to the closing caller.
                LOG.error(String.format("can't send object '%s' to storage", object), e);
            }
        }
    };
}

From source file:org.granite.grails.web.GrailsWebSWFServlet.java

/**
 * Serves the SWF file matching the request path, or sends a 404 if it is missing.
 * Response headers (content type, length, buffer size, Expires) are set before the
 * body is streamed.
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    request.setAttribute(GrailsApplicationAttributes.REQUEST_SCOPE_ID, grailsAttributes);

    // Get the name of the Groovy script (intern the name so that we can lock on it)
    String pageName = "/swf" + request.getServletPath();
    Resource requestedFile = getResourceForUri(pageName);
    File swfFile = requestedFile.getFile();
    if (swfFile == null || !swfFile.exists()) {
        response.sendError(404, "\"" + pageName + "\" not found.");
        return;
    }
    response.setContentType("application/x-shockwave-flash");
    response.setContentLength((int) swfFile.length());
    response.setBufferSize((int) swfFile.length());
    response.setDateHeader("Expires", 0);

    FileInputStream is = null;
    try {
        is = new FileInputStream(swfFile);
        OutputStream os = response.getOutputStream();
        // Stream in fixed-size chunks instead of memory-mapping the whole file and
        // copying it into one byte[]: that approach held the entire file in memory
        // and broke for files larger than Integer.MAX_VALUE bytes.
        byte[] buf = new byte[64 * 1024];
        int read;
        while ((read = is.read(buf)) != -1) {
            os.write(buf, 0, read);
        }
    } finally {
        if (is != null) {
            // Closing the stream also closes its channel.
            IOUtils.closeQuietly(is);
        }
    }
}

From source file:org.opencastproject.util.FileSupport.java

/**
 * Copies the specified <code>sourceLocation</code> to <code>targetLocation</code> and returns a reference to the
 * newly created file or directory./*from  w  ww  . j a  va 2s. c  o  m*/
 * <p/>
 * If <code>targetLocation</code> is an existing directory, then the source file or directory will be copied into this
 * directory, otherwise the source file will be copied to the file identified by <code>targetLocation</code>.
 * <p/>
 * If <code>overwrite</code> is set to <code>false</code>, this method throws an {@link IOException} if the target
 * file already exists.
 * <p/>
 * Note that if <code>targetLocation</code> is a directory than the directory itself, not only its content is copied.
 * 
 * @param sourceFile
 *          the source file or directory
 * @param targetFile
 *          the directory to copy the source file or directory to
 * @param overwrite
 *          <code>true</code> to overwrite existing files
 * @return the created copy
 * @throws IOException
 *           if copying of the file or directory failed
 */
public static File copy(File sourceFile, File targetFile, boolean overwrite) throws IOException {

    // This variable is used when the channel copy files, and stores the maximum size of the file parts copied from source to target
    final int chunk = 1024 * 1024 * 512; // 512 MB

    // This variable is used when the cannel copy fails completely, as the size of the memory buffer used to copy the data from one stream to the other.
    final int bufferSize = 1024 * 1024; // 1 MB 

    File dest = determineDestination(targetFile, sourceFile, overwrite);

    // We are copying a directory
    if (sourceFile.isDirectory()) {
        if (!dest.exists()) {
            dest.mkdirs();
        }
        File[] children = sourceFile.listFiles();
        for (File child : children) {
            copy(child, dest, overwrite);
        }
    }
    // We are copying a file
    else {
        // If dest is not an "absolute file", getParentFile may return null, even if there *is* a parent file.
        // That's why "getAbsoluteFile" is used here
        dest.getAbsoluteFile().getParentFile().mkdirs();
        if (dest.exists())
            delete(dest);

        FileChannel sourceChannel = null;
        FileChannel targetChannel = null;
        FileInputStream sourceStream = null;
        FileOutputStream targetStream = null;
        long size = 0;

        try {
            sourceStream = new FileInputStream(sourceFile);
            targetStream = new FileOutputStream(dest);
            try {
                sourceChannel = sourceStream.getChannel();
                targetChannel = targetStream.getChannel();
                size = targetChannel.transferFrom(sourceChannel, 0, sourceChannel.size());
            } catch (IOException ioe) {
                logger.warn("Got IOException using Channels for copying.");
            } finally {
                // This has to be in "finally", because in 64-bit machines the channel copy may fail to copy the whole file without causing a exception
                if ((sourceChannel != null) && (targetChannel != null) && (size < sourceFile.length()))
                    // Failing back to using FileChannels *but* with chunks and not altogether
                    logger.info("Trying to copy the file in chunks using Channels");
                if (size != sourceFile.length()) {
                    while (size < sourceFile.length())
                        size += targetChannel.transferFrom(sourceChannel, size, chunk);
                }
            }
        } catch (IOException ioe) {
            if ((sourceStream != null) && (targetStream != null) && (size < sourceFile.length())) {
                logger.warn(
                        "Got IOException using Channels for copying in chunks. Trying to use stream copy instead...");
                int copied = 0;
                byte[] buffer = new byte[bufferSize];
                while ((copied = sourceStream.read(buffer, 0, buffer.length)) != -1)
                    targetStream.write(buffer, 0, copied);
            } else
                throw ioe;
        } finally {
            if (sourceChannel != null)
                sourceChannel.close();
            if (sourceStream != null)
                sourceStream.close();
            if (targetChannel != null)
                targetChannel.close();
            if (targetStream != null)
                targetStream.close();
        }

        if (sourceFile.length() != dest.length()) {
            logger.warn("Source " + sourceFile + " and target " + dest + " do not have the same length");
            // TOOD: Why would this happen?
            // throw new IOException("Source " + sourceLocation + " and target " +
            // dest + " do not have the same length");
        }
    }
    return dest;
}

From source file:org.apache.solr.core.CoreContainer.java

/** Copies a src file to a dest file:
 *  used to circumvent the platform discrepancies regarding renaming files.
 *
 *  @param src the file to read from
 *  @param dest the file to create or overwrite
 *  @throws IOException if opening, reading or writing either file fails
 */
public static void fileCopy(File src, File dest) throws IOException {
    // try-with-resources closes both streams (and thereby their channels) in all
    // cases, replacing the original's four hand-rolled close blocks. Unlike the
    // original, a failure while closing is no longer silently swallowed.
    try (FileInputStream fis = new FileInputStream(src);
            FileOutputStream fos = new FileOutputStream(dest)) {
        FileChannel fcin = fis.getChannel();
        FileChannel fcout = fos.getChannel();
        // do the file copy 32Mb at a time; transferTo may move fewer bytes than
        // requested per call, so loop until the whole file has been copied
        final int MB32 = 32 * 1024 * 1024;
        long size = fcin.size();
        long position = 0;
        while (position < size) {
            position += fcin.transferTo(position, MB32, fcout);
        }
    }
}

From source file:ga.rugal.jpt.common.tracker.common.Torrent.java

/**
 * Reads the given files piece by piece and submits each piece to a thread pool for
 * hashing, returning the concatenation of all piece hash strings.
 *
 * The read buffer is carried over between files, so a piece may span a file boundary.
 *
 * @param files the files to hash, in order
 * @param pieceLenght the piece size in bytes
 * @return the concatenated piece hashes produced by CallableChunkHasher
 * @throws InterruptedException if interrupted while waiting for hashing tasks
 * @throws IOException if reading any of the files fails
 */
private static String hashFiles(List<File> files, int pieceLenght) throws InterruptedException, IOException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    // Shared read buffer holding exactly one piece's worth of data.
    ByteBuffer buffer = ByteBuffer.allocate(pieceLenght);
    List<Future<String>> results = new LinkedList<>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        LOG.info("Hashing data from {} with {} threads ({} pieces)...", new Object[] { file.getName(), threads,
                (int) (Math.ceil((double) file.length() / pieceLenght)) });

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10; // next progress percentage to log

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    // A full piece has been read; clear() resets the position so the
                    // hasher can consume it from the start. NOTE(review): this assumes
                    // CallableChunkHasher copies the buffer contents in its constructor,
                    // since the same buffer is immediately reused for the next read —
                    // confirm against that class.
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                // Back-pressure: drain finished hashes once enough tasks are queued.
                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                // Log progress for the current file in ~10% increments.
                if (channel.position() / (double) channel.size() * 100f > step) {
                    LOG.info("  ... {}% complete", step);
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any (the final piece may be shorter than pieceLenght).
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / pieceLenght));
    LOG.info("Hashed {} file(s) ({} bytes) in {} pieces ({} expected) in {}ms.", new Object[] { files.size(),
            length, pieces, expectedPieces, String.format("%.1f", elapsed / 1e6), });

    return hashes.toString();
}

From source file:org.rzo.yajsw.os.ms.win.w32.WindowsJavaHome.java

/**
 * Copies the contents of one file to another using a channel transfer.
 * 
 * @param in
 *            the source file
 * @param out
 *            the destination file (created or overwritten)
 * 
 * @throws IOException
 *             if opening, reading or writing either file fails
 */
void copyFile(File in, File out) throws IOException {
    System.out.println("copying : " + in.getAbsolutePath() + " -> " + out.getAbsolutePath());
    FileChannel inChannel = null;
    FileChannel outChannel = null;
    try {
        inChannel = new FileInputStream(in).getChannel();
        // Opened after inChannel: if this constructor throws, the finally block
        // still closes inChannel (the original leaked it in that case).
        outChannel = new FileOutputStream(out).getChannel();
        // transferTo may transfer fewer bytes than requested in a single call,
        // so loop until the whole file has been copied.
        long size = inChannel.size();
        long position = 0;
        while (position < size) {
            position += inChannel.transferTo(position, size - position, outChannel);
        }
    } finally {
        if (inChannel != null)
            inChannel.close();
        if (outChannel != null)
            outChannel.close();
    }
}

From source file:com.p2p.peercds.common.Torrent.java

/**
 * Reads the given files piece by piece and submits each piece to a thread pool for
 * hashing, returning the concatenation of all piece hash strings.
 *
 * The read buffer is carried over between files, so a piece may span a file boundary.
 *
 * @param files the files to hash, in order
 * @return the concatenated piece hashes produced by CallableChunkHasher
 * @throws InterruptedException if interrupted while waiting for hashing tasks
 * @throws IOException if reading any of the files fails
 */
private static String hashFiles(List<File> files) throws InterruptedException, IOException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    // Shared read buffer holding exactly one piece's worth of data.
    ByteBuffer buffer = ByteBuffer.allocate(PIECE_LENGTH);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        logger.info("Hashing data from {} with {} threads ({} pieces)...", new Object[] { file.getName(),
                threads, (int) (Math.ceil((double) file.length() / PIECE_LENGTH)) });

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10; // next progress percentage to log

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    // A full piece has been read; clear() resets the position so the
                    // hasher can consume it from the start. NOTE(review): this assumes
                    // CallableChunkHasher copies the buffer contents in its constructor,
                    // since the same buffer is immediately reused for the next read —
                    // confirm against that class.
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                // Back-pressure: drain finished hashes once enough tasks are queued.
                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                // Log progress for the current file in ~10% increments.
                if (channel.position() / (double) channel.size() * 100f > step) {
                    logger.info("  ... {}% complete", step);
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any (the final piece may be shorter than PIECE_LENGTH).
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / PIECE_LENGTH));
    logger.info("Hashed {} file(s) ({} bytes) in {} pieces ({} expected) in {}ms.", new Object[] { files.size(),
            length, pieces, expectedPieces, String.format("%.1f", elapsed / 1e6), });

    return hashes.toString();
}

From source file:org.cytobank.io.LargeFile.java

/**
 * Creates a LargeFile spanning the whole of the given channel: delegates to the
 * main constructor with offset 0 and length {@code fileChannel.size()}.
 *
 * @param fileChannel the open channel to read from
 * @param bytesPerRead read chunk size forwarded to the delegate constructor
 * @param mode access mode string forwarded to the delegate constructor
 *             (presumably a RandomAccessFile-style mode — confirm in the delegate)
 * @throws IOException if the channel size cannot be determined
 */
protected LargeFile(FileChannel fileChannel, int bytesPerRead, String mode) throws IOException {
    this(fileChannel, 0, fileChannel.size(), bytesPerRead, mode);
}