Example usage for org.apache.commons.io IOUtils copyLarge

List of usage examples for org.apache.commons.io IOUtils copyLarge

Introduction

On this page you can find example usages of org.apache.commons.io IOUtils copyLarge.

Prototype

public static long copyLarge(Reader input, Writer output) throws IOException 

Source Link

Document

Copy chars from a large (over 2GB) Reader to a Writer.

Usage

From source file:org.codice.solr.factory.ConfigurationFileProxy.java

/**
 * Writes the Solr configuration files out of the bundle onto the disk. This method requires
 * that the dataDirectoryPath has been set. If the code is run in an OSGi container, it will
 * automatically have a default dataDirectory location set and will not require setting
 * dataDirectory ahead of time.
 *
 * @param configDir destination directory for the configuration files; created if absent.
 *                  A null value (or a missing bundle context) makes this a no-op.
 */
public void writeBundleFilesTo(File configDir) {
    BundleContext bundleContext = getContext();
    if (bundleContext != null && configDir != null) {
        boolean directoriesMade = configDir.mkdirs();
        LOGGER.debug("Solr Config directories made?  {}", directoriesMade);

        @SuppressWarnings("rawtypes")
        Enumeration entries = bundleContext.getBundle().findEntries(SOLR_CONFIG_LOCATION_IN_BUNDLE, "*.*",
                false);

        // Bundle.findEntries returns null (not an empty enumeration) when no entries match,
        // so guard against an NPE on hasMoreElements().
        if (entries == null) {
            LOGGER.debug("No Solr configuration entries found at {}", SOLR_CONFIG_LOCATION_IN_BUNDLE);
            return;
        }

        while (entries.hasMoreElements()) {
            URL resourceURL = (URL) (entries.nextElement());
            LOGGER.debug("Found {}", resourceURL);

            try (InputStream inputStream = resourceURL.openStream()) {
                String fileName = FilenameUtils.getName(resourceURL.getPath());
                File currentFile = new File(configDir, fileName);

                // Never overwrite a file the user may have customized.
                if (!currentFile.exists()) {
                    try (FileOutputStream outputStream = new FileOutputStream(currentFile)) {
                        long byteCount = IOUtils.copyLarge(inputStream, outputStream);
                        LOGGER.debug("Wrote out {} bytes.", byteCount);
                    }
                }
            } catch (IOException e) {
                LOGGER.warn("IO exception copying out file", e);
            }
        }
    }
}

From source file:org.codice.solr.factory.impl.ConfigurationFileProxy.java

/**
 * Writes the Solr configuration files for a core from the classpath to disk.
 * Files that already exist on disk — or that have a ".bak" backup — are left untouched.
 *
 * @param core name of the Solr core whose "conf" directory should be populated
 */
void writeSolrConfiguration(String core) {
    File configDir = Paths.get(this.dataDirectory.getAbsolutePath(), core, "conf").toFile();
    boolean directoriesMade = configDir.mkdirs();
    LOGGER.debug("Solr Config directories made?  {}", directoriesMade);

    for (String filename : SOLR_CONFIG_FILES) {
        File currentFile = new File(configDir, filename);
        File backupFile = new File(configDir, filename + ".bak");
        if (!currentFile.exists() && !backupFile.exists()) {
            try (InputStream inputStream = ConfigurationFileProxy.class.getClassLoader()
                    .getResourceAsStream("solr/conf/" + filename)) {
                // getResourceAsStream returns null when the resource is missing; without
                // this guard, IOUtils.copyLarge would throw a NullPointerException.
                if (inputStream == null) {
                    LOGGER.warn("Missing Solr configuration resource: solr/conf/{}", filename);
                    continue;
                }
                try (FileOutputStream outputStream = new FileOutputStream(currentFile)) {
                    long byteCount = IOUtils.copyLarge(inputStream, outputStream);
                    LOGGER.debug("Wrote out {} bytes for [{}].", byteCount, filename);
                }
            } catch (IOException e) {
                LOGGER.warn("Unable to copy Solr configuration file: " + filename, e);
            }
        }
    }
}

From source file:org.cryptomator.crypto.aes256.Aes256Cryptor.java

/**
 * Verifies the HMAC-SHA256 authentication tag of an encrypted file.
 *
 * <p>Reads the stored MAC from the file header (starting at byte offset 16), then
 * recomputes the MAC over all content from offset 64 onward and compares the two
 * in constant time.
 *
 * @param encryptedFile channel positioned anywhere; this method repositions it
 * @return true if the computed MAC matches the stored MAC
 * @throws IOException if the header cannot be read fully or the content read fails
 */
@Override
public boolean authenticateContent(SeekableByteChannel encryptedFile) throws IOException {
    // init mac:
    final Mac calculatedMac = this.hmacSha256(hMacMasterKey);

    // read stored mac (header layout: MAC begins at byte 16):
    encryptedFile.position(16);
    final ByteBuffer storedMac = ByteBuffer.allocate(calculatedMac.getMacLength());
    final int numMacBytesRead = encryptedFile.read(storedMac);

    // check validity of header:
    if (numMacBytesRead != calculatedMac.getMacLength()) {
        throw new IOException("Failed to read file header.");
    }

    // read all encrypted data (content starts at byte 64) and calculate mac;
    // MacInputStream updates calculatedMac as bytes flow through, and the
    // NullOutputStream sink discards them — we only want the MAC side effect.
    encryptedFile.position(64);
    final InputStream in = new SeekableByteChannelInputStream(encryptedFile);
    final InputStream macIn = new MacInputStream(in, calculatedMac);
    IOUtils.copyLarge(macIn, new NullOutputStream());

    // compare (in constant time, to avoid timing side channels):
    return MessageDigest.isEqual(storedMac.array(), calculatedMac.doFinal());
}

From source file:org.cryptomator.crypto.aes256.Aes256Cryptor.java

/**
 * Encrypts a plaintext stream into the given channel using AES-CTR, writing a header
 * of IV (16 bytes), MAC (32 bytes) and encrypted file size (16 bytes) followed by the
 * ciphertext, block padding, and a random amount of fake trailing data.
 *
 * @param plaintextFile source of the plaintext bytes; read to exhaustion
 * @param encryptedFile destination channel; truncated before writing
 * @return the number of plaintext bytes that were encrypted
 * @throws IOException if reading or writing fails
 */
@Override
public Long encryptFile(InputStream plaintextFile, SeekableByteChannel encryptedFile) throws IOException {
    // truncate file
    encryptedFile.truncate(0);

    // use an IV, whose last 8 bytes store a long used in counter mode and write initial value to file.
    final ByteBuffer countingIv = ByteBuffer.wrap(randomData(AES_BLOCK_LENGTH));
    countingIv.putLong(AES_BLOCK_LENGTH - Long.BYTES, 0L);
    countingIv.position(0);
    encryptedFile.write(countingIv);

    // init crypto stuff:
    final Mac mac = this.hmacSha256(hMacMasterKey);
    final Cipher cipher = this.aesCtrCipher(primaryMasterKey, countingIv.array(), Cipher.ENCRYPT_MODE);

    // init mac buffer and skip 32 bytes (placeholder; real MAC written below)
    final ByteBuffer macBuffer = ByteBuffer.allocate(mac.getMacLength());
    encryptedFile.write(macBuffer);

    // init filesize buffer and skip 16 bytes (placeholder; encrypted size written below)
    final ByteBuffer encryptedFileSizeBuffer = ByteBuffer.allocate(AES_BLOCK_LENGTH);
    encryptedFile.write(encryptedFileSizeBuffer);

    // write content:
    final OutputStream out = new SeekableByteChannelOutputStream(encryptedFile);
    final OutputStream macOut = new MacOutputStream(out, mac);
    final OutputStream cipheredOut = new CipherOutputStream(macOut, cipher);
    final OutputStream blockSizeBufferedOut = new BufferedOutputStream(cipheredOut, AES_BLOCK_LENGTH);
    final Long plaintextSize = IOUtils.copyLarge(plaintextFile, blockSizeBufferedOut);

    // ensure total byte count is a multiple of the block size, in CTR mode:
    final int remainderToFillLastBlock = AES_BLOCK_LENGTH - (int) (plaintextSize % AES_BLOCK_LENGTH);
    blockSizeBufferedOut.write(new byte[remainderToFillLastBlock]);

    // append a few blocks of fake data (up to 10% of the plaintext block count).
    // Fix: the previous integer division plaintextSize / AES_BLOCK_LENGTH truncated
    // BEFORE Math.ceil could round up, making the ceil a no-op; compute a proper
    // ceiling with integer arithmetic instead.
    final int numberOfPlaintextBlocks = (int) ((plaintextSize + AES_BLOCK_LENGTH - 1) / AES_BLOCK_LENGTH);
    // NOTE(review): Math.random() is not a CSPRNG; only the COUNT of fake blocks is
    // derived from it, not key material — confirm this is acceptable.
    final int upToTenPercentFakeBlocks = (int) Math.ceil(Math.random() * 0.1 * numberOfPlaintextBlocks);
    final byte[] emptyBytes = new byte[AES_BLOCK_LENGTH];
    // Fix: each iteration writes one full block, so increment by 1 — the previous
    // "i += AES_BLOCK_LENGTH" wrote only ~1/16th of the intended number of blocks.
    for (int i = 0; i < upToTenPercentFakeBlocks; i++) {
        blockSizeBufferedOut.write(emptyBytes);
    }
    blockSizeBufferedOut.flush();

    // write MAC of total ciphertext:
    macBuffer.position(0);
    macBuffer.put(mac.doFinal());
    macBuffer.position(0);
    encryptedFile.position(16); // right behind the IV
    encryptedFile.write(macBuffer); // 256 bit MAC

    // encrypt and write plaintextSize
    try {
        final ByteBuffer fileSizeBuffer = ByteBuffer.allocate(Long.BYTES);
        fileSizeBuffer.putLong(plaintextSize);
        final Cipher sizeCipher = aesEcbCipher(primaryMasterKey, Cipher.ENCRYPT_MODE);
        final byte[] encryptedFileSize = sizeCipher.doFinal(fileSizeBuffer.array());
        encryptedFileSizeBuffer.position(0);
        encryptedFileSizeBuffer.put(encryptedFileSize);
        encryptedFileSizeBuffer.position(0);
        encryptedFile.position(48); // right behind the IV and MAC
        encryptedFile.write(encryptedFileSizeBuffer);
    } catch (IllegalBlockSizeException | BadPaddingException e) {
        throw new IllegalStateException(
                "Block size must be valid, as padding is requested. BadPaddingException not possible in encrypt mode.",
                e);
    }

    return plaintextSize;
}

From source file:org.datavec.api.util.ArchiveUtils.java

/**
 * Extracts files to the specified destination
 * @param file the file to extract to/*  www .j  a v  a 2  s.  c  o m*/
 * @param dest the destination directory
 * @throws java.io.IOException
 */
public static void unzipFileTo(String file, String dest) throws IOException {
    File target = new File(file);
    if (!target.exists())
        throw new IllegalArgumentException("Archive doesnt exist");
    FileInputStream fin = new FileInputStream(target);
    int BUFFER = 2048;
    byte data[] = new byte[BUFFER];

    if (file.endsWith(".zip") || file.endsWith(".jar")) {
        //getFromOrigin the zip file content
        ZipInputStream zis = new ZipInputStream(fin);
        //getFromOrigin the zipped file list entry
        ZipEntry ze = zis.getNextEntry();

        while (ze != null) {
            String fileName = ze.getName();

            File newFile = new File(dest + File.separator + fileName);

            if (ze.isDirectory()) {
                newFile.mkdirs();
                zis.closeEntry();
                ze = zis.getNextEntry();
                continue;
            }

            log.info("file unzip : " + newFile.getAbsoluteFile());

            //create all non exists folders
            //else you will hit FileNotFoundException for compressed folder

            FileOutputStream fos = new FileOutputStream(newFile);

            int len;
            while ((len = zis.read(data)) > 0) {
                fos.write(data, 0, len);
            }

            fos.flush();
            fos.close();
            zis.closeEntry();
            ze = zis.getNextEntry();
        }

        zis.close();

    }

    else if (file.endsWith(".tar")) {

        BufferedInputStream in = new BufferedInputStream(fin);
        TarArchiveInputStream tarIn = new TarArchiveInputStream(in);

        TarArchiveEntry entry = null;

        /** Read the tar entries using the getNextEntry method **/

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

            log.info("Extracting: " + entry.getName());

            /** If the entry is a directory, createComplex the directory. **/

            if (entry.isDirectory()) {

                File f = new File(dest + File.separator + entry.getName());
                f.mkdirs();
            }
            /**
             * If the entry is a file,write the decompressed file to the disk
             * and close destination stream.
             **/
            else {
                int count;

                FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName());
                BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);
                while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                    destStream.write(data, 0, count);
                }

                destStream.flush();
                ;

                IOUtils.closeQuietly(destStream);
            }
        }

        /** Close the input stream **/

        tarIn.close();
    }

    else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) {

        BufferedInputStream in = new BufferedInputStream(fin);
        GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
        TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn);

        TarArchiveEntry entry = null;

        /** Read the tar entries using the getNextEntry method **/

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

            log.info("Extracting: " + entry.getName());

            /** If the entry is a directory, createComplex the directory. **/

            if (entry.isDirectory()) {

                File f = new File(dest + File.separator + entry.getName());
                f.mkdirs();
            }
            /**
             * If the entry is a file,write the decompressed file to the disk
             * and close destination stream.
             **/
            else {
                int count;

                FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName());
                BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);
                while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                    destStream.write(data, 0, count);
                }

                destStream.flush();

                IOUtils.closeQuietly(destStream);
            }
        }

        /** Close the input stream **/

        tarIn.close();
    }

    else if (file.endsWith(".gz")) {
        GZIPInputStream is2 = new GZIPInputStream(fin);
        File extracted = new File(target.getParent(), target.getName().replace(".gz", ""));
        if (extracted.exists())
            extracted.delete();
        extracted.createNewFile();
        OutputStream fos = FileUtils.openOutputStream(extracted);
        IOUtils.copyLarge(is2, fos);
        is2.close();
        fos.flush();
        fos.close();
    }

}

From source file:org.deeplearning4j.util.ArchiveUtils.java

/**
 * Extracts an archive (.zip, .tar.gz, .tgz, or .gz) to the specified destination
 * and deletes the archive afterwards.
 *
 * <p>Streams are managed with try-with-resources so they are closed even on failure;
 * the previous version leaked the input stream (and output streams on error).
 *
 * @param file the path of the archive to extract
 * @param dest the destination directory
 * @throws IOException if reading the archive or writing an entry fails
 */
public static void unzipFileTo(String file, String dest) throws IOException {
    File target = new File(file);
    if (!target.exists())
        throw new IllegalArgumentException("Archive doesnt exist");
    int BUFFER = 2048;
    byte data[] = new byte[BUFFER];

    if (file.endsWith(".zip")) {
        //getFromOrigin the zip file content
        try (ZipInputStream zis = new ZipInputStream(new FileInputStream(target))) {
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                String fileName = ze.getName();
                File newFile = new File(dest + File.separator + fileName);

                // Directory entries (names ending in '/') must be created, not opened
                // as files — the old code tried new FileOutputStream on them and failed.
                if (ze.isDirectory()) {
                    newFile.mkdirs();
                    continue;
                }

                log.info("file unzip : " + newFile.getAbsoluteFile());

                //createComplex all non exists folders
                //else you will hit FileNotFoundException for compressed folder
                new File(newFile.getParent()).mkdirs();

                try (FileOutputStream fos = new FileOutputStream(newFile)) {
                    int len;
                    while ((len = zis.read(data)) > 0) {
                        fos.write(data, 0, len);
                    }
                }
            }
        }
    } else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) {
        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(
                new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(target))))) {
            TarArchiveEntry entry;
            // Read the tar entries using the getNextEntry method
            while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {
                log.info("Extracting: " + entry.getName());

                File entryFile = new File(dest + File.separator + entry.getName());

                // If the entry is a directory, create it; otherwise write the
                // decompressed file to disk.
                if (entry.isDirectory()) {
                    entryFile.mkdirs();
                    continue;
                }

                // Ensure parent folders exist for entries nested in unseen directories.
                File parent = entryFile.getParentFile();
                if (parent != null) {
                    parent.mkdirs();
                }

                try (BufferedOutputStream destStream = new BufferedOutputStream(
                        new FileOutputStream(entryFile), BUFFER)) {
                    int count;
                    while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                        destStream.write(data, 0, count);
                    }
                }
            }
        }
    } else if (file.endsWith(".gz")) {
        File extracted = new File(target.getParent(), target.getName().replace(".gz", ""));
        if (extracted.exists())
            extracted.delete();
        extracted.createNewFile();
        try (GZIPInputStream is2 = new GZIPInputStream(new FileInputStream(target));
                OutputStream fos = FileUtils.openOutputStream(extracted)) {
            IOUtils.copyLarge(is2, fos);
        }
    }

    // Intentionally removes the source archive after extraction (existing behavior).
    target.delete();
}

From source file:org.digidoc4j.impl.BDocContainer.java

/**
 * Writes the signed document's bytes to the given output stream.
 *
 * @param out stream the container content is copied to; not closed by this method
 * @throws DigiDoc4JException wrapping any IOException raised while copying
 */
@Override
public void save(OutputStream out) {
    logger.debug("");
    // try-with-resources: the previous version never closed the stream
    // returned by openStream(), leaking it on every save.
    try (InputStream in = signedDocument.openStream()) {
        IOUtils.copyLarge(in, out);
    } catch (IOException e) {
        logger.error(e.getMessage());
        throw new DigiDoc4JException(e);
    }
}

From source file:org.duracloud.chunk.writer.FilesystemContentWriter.java

/**
 * Streams the entire contents of a chunk into the given output stream,
 * converting any IOException into an unchecked DuraCloudRuntimeException.
 * Neither stream is closed here; the caller owns both.
 */
private void copyLarge(InputStream chunk, OutputStream outStream) {
    try {
        IOUtils.copyLarge(chunk, outStream);
    } catch (IOException ioe) {
        final String msg = "Error in copy: " + chunk.toString() + ": ";
        log.error(msg, ioe);
        throw new DuraCloudRuntimeException(msg + ioe.getMessage(), ioe);
    }
}

From source file:org.duracloud.retrieval.mgmt.RetrievalWorker.java

/**
 * Transfers the remote file stream to the local file, retrying the initial
 * content fetch up to 5 times, and verifies the result against the remote
 * checksum.
 *
 * @param localFile destination file for the retrieved content
 * @param listener  notified of retrieval progress by the content source
 * @return the content properties of the retrieved item
 * @throws IOException if the fetch fails after all retries, the copy fails,
 *                     or the local file's checksum does not match the remote
 *                     properties checksum (the partial file is deleted first)
 */
protected Map<String, String> retrieveToFile(File localFile, RetrievalListener listener) throws IOException {

    // Fetch the remote content with retries (5 attempts, 4s base wait, factor 3);
    // any failure is rethrown as IOException per this method's contract.
    try {
        contentStream = new Retrier(5, 4000, 3).execute(() -> {
            return source.getSourceContent(contentItem, listener);
        });
    } catch (Exception ex) {
        throw new IOException(ex);
    }

    // Copy the remote stream to the local file; on failure, best-effort delete
    // the partial file, then propagate the original exception.
    try (InputStream inStream = contentStream.getStream();
            OutputStream outStream = new FileOutputStream(localFile);) {
        IOUtils.copyLarge(inStream, outStream);
    } catch (IOException e) {
        try {
            deleteFile(localFile);
        } catch (IOException ioe) {
            logger.error("Exception deleting local file " + localFile.getAbsolutePath() + " due to: "
                    + ioe.getMessage());
        }
        throw e;
    }

    // Successful retrieval requires the local checksum to match the remote one;
    // otherwise remove the corrupt file and fail.
    if (!checksumsMatch(localFile, contentStream.getChecksum())) {
        deleteFile(localFile);
        throw new IOException(
                "Calculated checksum value for retrieved " + "file does not match properties checksum.");
    }

    // Set time stamps
    if (applyTimestamps) {
        applyTimestamps(contentStream, localFile);
    }
    return contentStream.getProperties();
}

From source file:org.duracloud.stitch.FileStitcherDriver.java

/**
 * Writes a content item to a file named after its ID inside the given directory,
 * creating any needed subdirectories first.
 *
 * @param content content item whose stream is written out
 * @param toDir   target directory for the output file
 * @throws DuraCloudRuntimeException wrapping any IOException raised while writing
 */
private void writeContentToDir(Content content, File toDir) {
    File outFile = new File(toDir, content.getId());
    log.info("Writing to '{}'.", outFile.getAbsolutePath());

    // Create any needed subdirectories before opening the output file
    File parentDir = outFile.getParentFile();
    if (!parentDir.exists()) {
        parentDir.mkdirs();
        parentDir.setWritable(true);
    }

    // try-with-resources closes both streams; the previous version never
    // closed the stream returned by content.getStream().
    try (InputStream in = content.getStream();
            OutputStream outputStream = new FileOutputStream(outFile)) {
        IOUtils.copyLarge(in, outputStream);
    } catch (IOException e) {
        StringBuilder msg = new StringBuilder();
        msg.append("Error writing content: ");
        msg.append(content.getId());
        msg.append(" to output file: ");
        msg.append(outFile.getAbsolutePath());
        throw new DuraCloudRuntimeException(msg.toString(), e);
    }
}