Example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream close

Introduction

On this page you can find example usages of org.apache.commons.compress.archivers.tar.TarArchiveOutputStream.close().

Prototype

public void close() throws IOException 

Document

Closes the underlying OutputStream.
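
For reference, here is a minimal, self-contained sketch of the typical call order (the file name and entry content are illustrative assumptions, not taken from the examples below). close() finishes the archive, writing its terminating records, and then closes the underlying OutputStream, so it must be the last call on the stream; since TarArchiveOutputStream is AutoCloseable, try-with-resources is the idiomatic way to guarantee it runs:

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public class TarCloseSketch {
    public static void main(String[] args) throws IOException {
        // try-with-resources invokes close() automatically; close() first
        // finishes the archive and then closes the wrapped stream.
        try (TarArchiveOutputStream tar = new TarArchiveOutputStream(
                new BufferedOutputStream(new FileOutputStream("example.tar")))) {
            byte[] body = "hello\n".getBytes(StandardCharsets.UTF_8);
            TarArchiveEntry entry = new TarArchiveEntry("hello.txt");
            entry.setSize(body.length);
            tar.putArchiveEntry(entry);
            tar.write(body);
            tar.closeArchiveEntry();
        } // tar.close() runs here
    }
}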

Usage

From source file:gdt.data.entity.ArchiveHandler.java

/**
 * Compress the entities into the tgz archive file.
 * @param entigrator entigrator instance
 * @param locator$ container of arguments in string form
 * @return true on success, false otherwise
 */
public boolean compressEntitiesToTgz(Entigrator entigrator, String locator$) {
    try {
        Properties locator = Locator.toProperties(locator$);
        archiveType$ = locator.getProperty(ARCHIVE_TYPE);
        archiveFile$ = locator.getProperty(ARCHIVE_FILE);
        String entityList$ = locator.getProperty(EntityHandler.ENTITY_LIST);
        String[] sa = Locator.toArray(entityList$);
        String tgzFile$ = archiveFile$;
        File tgzFile = new File(tgzFile$);
        if (!tgzFile.exists())
            tgzFile.createNewFile();
        // String userHome$=System.getProperty("user.home");
        File tarFile = new File(tgzFile$.replace(".tgz", "") + ".tar");
        if (!tarFile.exists())
            tarFile.createNewFile();
        String entityBody$ = null;
        String entityHome$ = null;
        TarArchiveOutputStream aos = (TarArchiveOutputStream) new ArchiveStreamFactory()
                .createArchiveOutputStream("tar", new FileOutputStream(tarFile));
        aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        String entihome$ = entigrator.getEntihome();
        String entitiesHome$ = entihome$ + "/" + Entigrator.ENTITY_BASE + "/data/";
        String iconsHome$ = entihome$ + "/" + Entigrator.ICONS + "/";
        String icon$;
        for (String aSa : sa) {
            entityBody$ = entitiesHome$ + aSa;
            append(entigrator, entigrator.getEntihome(), entityBody$, aos);
            entityHome$ = entigrator.ent_getHome(aSa);
            if (new File(entityHome$).exists())
                append(entigrator, entigrator.getEntihome(), entityHome$, aos);
            icon$ = entigrator.indx_getIcon(aSa);
            if (icon$ != null)
                append(entigrator, entigrator.getEntihome(), iconsHome$ + icon$, aos);

        }
        aos.close();
        compressGzipFile(tarFile.getPath(), tgzFile.getPath());
        tarFile.delete();
        return true;
    } catch (Exception e) {
        LOGGER.severe(e.toString());
        return false;
    }
}

From source file:gdt.data.entity.ArchiveHandler.java

/**
 * Compress the entities into the tar archive file.
 * @param entigrator entigrator instance
 * @param locator$ container of arguments in string form
 * @return true on success, false otherwise
 */
public boolean compressEntitiesToTar(Entigrator entigrator, String locator$) {
    try {
        //       System.out.println("ArchiveHandler:compressEntitiesToTar:locator="+locator$);
        Properties locator = Locator.toProperties(locator$);
        archiveType$ = locator.getProperty(ARCHIVE_TYPE);
        archiveFile$ = locator.getProperty(ARCHIVE_FILE);
        String entityList$ = locator.getProperty(EntityHandler.ENTITY_LIST);
        String[] sa = Locator.toArray(entityList$);
        System.out.println("ArchiveHandler:compressEntitiesToTar:sa=" + sa.length);
        String tarfile$ = archiveFile$;
        File tarfile = new File(tarfile$);
        if (!tarfile.exists())
            tarfile.createNewFile();
        String entityBody$ = null;
        String entityHome$ = null;
        TarArchiveOutputStream aos = (TarArchiveOutputStream) new ArchiveStreamFactory()
                .createArchiveOutputStream("tar", new FileOutputStream(tarfile$));
        aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        String entihome$ = entigrator.getEntihome();
        String entitiesHome$ = entihome$ + "/" + Entigrator.ENTITY_BASE + "/data/";
        //  System.out.println("ArchiveHandler:append:entities home=" + entitiesHome$);
        String iconsHome$ = entihome$ + "/" + Entigrator.ICONS + "/";
        String icon$;
        for (String aSa : sa) {
            entityBody$ = entitiesHome$ + aSa;
            append(entigrator, entigrator.getEntihome(), entityBody$, aos);
            entityHome$ = entigrator.ent_getHome(aSa);
            if (new File(entityHome$).exists()) {
                append(entigrator, entigrator.getEntihome(), entityHome$, aos);
            }
            icon$ = entigrator.indx_getIcon(aSa);
            if (icon$ != null)
                append(entigrator, entigrator.getEntihome(), iconsHome$ + icon$, aos);
        }
        aos.close();
        return true;
    } catch (Exception e) {
        LOGGER.severe(e.toString());
        return false;
    }
}

From source file:gov.noaa.pfel.coastwatch.util.FileVisitorDNLS.java

/** 
 * This makes a .tgz or .tar.gz file.
 *
 * @param tResultName is the full result file name, usually 
 *   the name of the dir being archived, and ending in .tgz or .tar.gz.
 */
public static void makeTgz(String tDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
        String tResultName) throws Exception {
    TarArchiveOutputStream tar = null;
    String outerDir = File2.getDirectory(tDir.substring(0, tDir.length() - 1));
    tar = new TarArchiveOutputStream(
            new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(tResultName))));

    // Add data to out and flush stream
    Table filesTable = oneStep(tDir, tFileNameRegex, tRecursive, tPathRegex, false); //tDirectoriesToo
    StringArray directoryPA = (StringArray) filesTable.getColumn(DIRECTORY);
    StringArray namePA = (StringArray) filesTable.getColumn(NAME);
    LongArray lastModifiedPA = (LongArray) filesTable.getColumn(LASTMODIFIED);
    LongArray sizePA = (LongArray) filesTable.getColumn(SIZE);
    byte buffer[] = new byte[32768];
    int nBytes;
    for (int fi = 0; fi < namePA.size(); fi++) {
        String fullName = directoryPA.get(fi) + namePA.get(fi);
        TarArchiveEntry entry = new TarArchiveEntry(new File(fullName.substring(outerDir.length())));
        entry.setSize(sizePA.get(fi));
        entry.setModTime(lastModifiedPA.get(fi));
        tar.putArchiveEntry(entry);
        FileInputStream fis = new FileInputStream(fullName);
        while ((nBytes = fis.read(buffer)) > 0)
            tar.write(buffer, 0, nBytes);
        fis.close();
        tar.closeArchiveEntry();
    }
    tar.close();
}

From source file:com.baidu.rigel.biplatform.tesseract.util.FileUtils.java

/**
 * Perform file compression.
 * 
 * @param inFileName
 *            Name of the file to be compressed
 * @throws IOException
 */
public static String doCompressFile(String inFileName, String outFileName) throws IOException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "doCompressFile",
            "[inFileName:" + inFileName + "]"));
    FileOutputStream fOut = null;
    BufferedOutputStream bOut = null;
    GzipCompressorOutputStream gzOut = null;
    TarArchiveOutputStream tOut = null;
    if (StringUtils.isEmpty(inFileName)) {
        throw new IllegalArgumentException();
    }
    String compressedFileName = outFileName;

    FileInputStream fi = null;
    BufferedInputStream sourceStream = null;

    try {

        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_PROCESS,
                "Creating the GZIP output stream"));

        /** Step: 1 ---> create a TarArchiveOutputStream object. **/
        fOut = new FileOutputStream(new File(compressedFileName));
        bOut = new BufferedOutputStream(fOut);
        gzOut = new GzipCompressorOutputStream(bOut);
        tOut = new TarArchiveOutputStream(gzOut);

        /**
         * Step: 2 --->Open the source data and get a list of files from
         * given directory.
         */
        File source = new File(inFileName);
        if (!source.exists()) {
            LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_ERROR,
                    "File not found. " + inFileName));
            return null;
        }
        File[] files = null;
        if (source.isDirectory()) {
            files = source.listFiles();
        } else {
            files = new File[1];
            files[0] = source;
        }

        for (int i = 0; i < files.length; i++) {
            LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_PROCESS,
                    "Adding File:" + source.getParentFile().toURI().relativize(files[i].toURI()).getPath()));
            /**
             * Step: 3 ---> Create a tar entry for each file that is read.
             */
            /**
             * relativize is used to add a file to a tar, without
             * including the entire path from root.
             */

            TarArchiveEntry entry = new TarArchiveEntry(files[i],
                    source.getParentFile().toURI().relativize(files[i].toURI()).getPath());
            /**
             * Step: 4 ---> Put the tar entry using putArchiveEntry.
             */
            tOut.putArchiveEntry(entry);

            /**
             * Step: 5 ---> Write the data to the tar file and close the
             * input stream.
             */

            fi = new FileInputStream(files[i]);
            sourceStream = new BufferedInputStream(fi, TesseractConstant.FILE_BLOCK_SIZE);
            int count;
            byte[] data = new byte[TesseractConstant.FILE_BLOCK_SIZE];
            while ((count = sourceStream.read(data, 0, TesseractConstant.FILE_BLOCK_SIZE)) != -1) {
                tOut.write(data, 0, count);
            }

            sourceStream.close();

            /**
             * Step: 6 --->close the archive entry.
             */

            tOut.closeArchiveEntry();

        }

        /**
         * Step: 7 --->close the output stream.
         */

        tOut.close();

    } catch (IOException e) {
        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_ERROR, "IOException"));
        LOGGER.error(e.getMessage(), e);
        throw e;

    } finally {
        try {
            // Close the innermost streams first and only the ones that were
            // actually opened. Closing tOut flushes the tar trailer and the
            // gzip footer and cascades down to fOut, so the lower streams
            // only need to be closed directly if tOut was never created.
            if (sourceStream != null) {
                sourceStream.close();
            }
            if (fi != null) {
                fi.close();
            }
            if (tOut != null) {
                tOut.close();
            } else if (gzOut != null) {
                gzOut.close();
            } else if (bOut != null) {
                bOut.close();
            } else if (fOut != null) {
                fOut.close();
            }
        } catch (IOException e) {
            LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_ERROR,
                    "IOException occurred when closing streams"));
            LOGGER.error(e.getMessage(), e);
            throw e;
        }

    }

    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "doCompressFile",
            "[inFileName:" + inFileName + "][compressedFileName:" + compressedFileName + "]"));

    return compressedFileName;
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.processors.FilePackager.java

void makeArchive(final String readmeTempFilename) throws IOException {
    final byte[] buffer = new byte[1024];
    File archiveFile = null;
    TarArchiveOutputStream out = null;
    //in case we need to write to an external server
    final String archiveName = prefixPathForExternalServer(
            filePackagerBean.getArchivePhysicalName() + ".tar.gz");
    try {
        archiveFile = new File(archiveName);
        out = makeTarGzOutputStream(archiveFile);
        copyManifestToArchive(out);
        copyReadmeToArchive(out, readmeTempFilename);
        int i = 0;
        for (final DataFile fileInfo : filePackagerBean.getSelectedFiles()) {
            final File file = new File(fileInfo.getPath());
            if (!file.exists()) {
                throw new IOException("Data file does not exist: " + fileInfo.getPath());
            }
            logger.logToLogger(Level.DEBUG,
                    "tarring file " + (++i) + ":" + fileInfo.getPath() + " into " + archiveName);
            //"synthetic" file path, as we want it to appear in the tar
            final String archiveFilePath = constructInternalFilePath(fileInfo);
            final TarArchiveEntry tarAdd = new TarArchiveEntry(file);
            tarAdd.setModTime(file.lastModified());
            tarAdd.setName(archiveFilePath);
            out.putArchiveEntry(tarAdd);
            FileInputStream in = null;
            try {
                in = new FileInputStream(file);
                int nRead = in.read(buffer, 0, buffer.length);
                while (nRead >= 0) {
                    out.write(buffer, 0, nRead);
                    nRead = in.read(buffer, 0, buffer.length);
                }
            } finally {
                if (in != null) {
                    in.close();
                }
            }
            out.closeArchiveEntry();
            if (fileInfo.getCacheFileToGenerate() != null) {
                //a special case where there should be a cache file but it doesn't exist -
                // Send email with error message
                //filePackagerFactory.getErrorMailSender().send(Messages.CACHE_ERROR, MessageFormat.format(Messages.CACHE_FILE_NOT_FOUND, fileInfo.getCacheFileToGenerate()));
            }
        }
    } catch (IOException ex) {
        //delete the out file if it exists
        if (out != null) {
            out.close();
            out = null;
        }
        if (archiveFile != null && archiveFile.exists()) {
            // give OS time to delete file handle
            try {
                Thread.sleep(100);
            } catch (InterruptedException ie) {
                // it's ok
            }
            // keep track of uncompressed size
            this.actualUncompressedSize = archiveFile.length();
            //noinspection ResultOfMethodCallIgnored
            archiveFile.delete();
        }
        throw ex;
    } finally {
        if (out != null) {
            out.close();
        }
    }
    logger.logToLogger(Level.DEBUG, "Created tar " + archiveName);
}

From source file:com.st.maven.debian.DebianPackageMojo.java

private void fillControlTar(Config config, ArFileOutputStream output) throws MojoExecutionException {
    TarArchiveOutputStream tar = null;
    try {
        tar = new TarArchiveOutputStream(new GZIPOutputStream(new ArWrapper(output)));
        tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        TarArchiveEntry rootDir = new TarArchiveEntry("./");
        tar.putArchiveEntry(rootDir);
        tar.closeArchiveEntry();

        byte[] controlData = processTemplate(freemarkerConfig, config, "control.ftl");
        TarArchiveEntry controlEntry = new TarArchiveEntry("./control");
        controlEntry.setSize(controlData.length);
        tar.putArchiveEntry(controlEntry);
        tar.write(controlData);
        tar.closeArchiveEntry();

        byte[] preinstBaseData = processTemplate("preinst", freemarkerConfig, config,
                combine("preinst.ftl", BASE_DIR + File.separator + "preinst", false));
        long size = preinstBaseData.length;
        TarArchiveEntry preinstEntry = new TarArchiveEntry("./preinst");
        preinstEntry.setSize(size);
        preinstEntry.setMode(0755);
        tar.putArchiveEntry(preinstEntry);
        tar.write(preinstBaseData);
        tar.closeArchiveEntry();

        byte[] postinstBaseData = processTemplate("postinst", freemarkerConfig, config,
                combine("postinst.ftl", BASE_DIR + File.separator + "postinst", true));
        size = postinstBaseData.length;
        TarArchiveEntry postinstEntry = new TarArchiveEntry("./postinst");
        postinstEntry.setSize(size);
        postinstEntry.setMode(0755);
        tar.putArchiveEntry(postinstEntry);
        tar.write(postinstBaseData);
        tar.closeArchiveEntry();

        byte[] prermBaseData = processTemplate("prerm", freemarkerConfig, config,
                combine("prerm.ftl", BASE_DIR + File.separator + "prerm", false));
        size = prermBaseData.length;
        TarArchiveEntry prermEntry = new TarArchiveEntry("./prerm");
        prermEntry.setSize(size);
        prermEntry.setMode(0755);
        tar.putArchiveEntry(prermEntry);
        tar.write(prermBaseData);
        tar.closeArchiveEntry();

        byte[] postrmBaseData = processTemplate("postrm", freemarkerConfig, config,
                combine("postrm.ftl", BASE_DIR + File.separator + "postrm", false));
        size = postrmBaseData.length;
        TarArchiveEntry postrmEntry = new TarArchiveEntry("./postrm");
        postrmEntry.setSize(size);
        postrmEntry.setMode(0755);
        tar.putArchiveEntry(postrmEntry);
        tar.write(postrmBaseData);
        tar.closeArchiveEntry();

    } catch (Exception e) {
        throw new MojoExecutionException("unable to create control tar", e);
    } finally {
        if (tar != null) {
            try {
                tar.close();
            } catch (IOException e) {
                getLog().error("unable to finish tar", e);
            }
        }
    }
}

From source file:io.anserini.index.IndexUtils.java

public void dumpDocumentVectors(String reqDocidsPath, DocVectorWeight weight) throws IOException {
    String outFileName = weight == null ? reqDocidsPath + ".docvector.tar.gz"
            : reqDocidsPath + ".docvector." + weight + ".tar.gz";
    LOG.info("Start dump document vectors with weight " + weight);

    InputStream in = getReadFileStream(reqDocidsPath);
    BufferedReader bRdr = new BufferedReader(new InputStreamReader(in));
    FileOutputStream fOut = new FileOutputStream(new File(outFileName));
    BufferedOutputStream bOut = new BufferedOutputStream(fOut);
    GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut);
    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);

    Map<Term, Integer> docFreqMap = new HashMap<>();

    int numNonEmptyDocs = reader.getDocCount(LuceneDocumentGenerator.FIELD_BODY);

    String docid;
    int counter = 0;
    while ((docid = bRdr.readLine()) != null) {
        counter++;

        // get term frequency
        Terms terms = reader.getTermVector(convertDocidToLuceneDocid(docid),
                LuceneDocumentGenerator.FIELD_BODY);
        if (terms == null) {
            // We do not throw an exception here because in some collections
            // part of the documents do not have stored document vectors.
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        TermsEnum te = terms.iterator();
        if (te == null) {
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        Term term;
        long freq;

        // iterate over every term and store its frequency in the map
        Map<String, String> docVectors = new HashMap<>();
        while ((te.next()) != null) {
            term = new Term(LuceneDocumentGenerator.FIELD_BODY, te.term());
            freq = te.totalTermFreq();

            // Default a null weight to NONE (matching the outFileName logic
            // above) so the switch cannot throw a NullPointerException.
            switch (weight == null ? DocVectorWeight.NONE : weight) {
            case NONE:
                docVectors.put(term.bytes().utf8ToString(), String.valueOf(freq));
                break;

            case TF_IDF:
                int docFreq;
                if (docFreqMap.containsKey(term)) {
                    docFreq = docFreqMap.get(term);
                } else {
                    try {
                        docFreq = reader.docFreq(term);
                    } catch (Exception e) {
                        LOG.error("Cannot find term " + term.toString() + " in indexing file.");
                        continue;
                    }
                    docFreqMap.put(term, docFreq);
                }
                float tfIdf = (float) (freq * Math.log(numNonEmptyDocs * 1.0 / docFreq));
                docVectors.put(term.bytes().utf8ToString(), String.format("%.6f", tfIdf));
                break;
            }
        }

        // Count size and write
        byte[] bytesOut = docVectors.entrySet().stream().map(e -> e.getKey() + " " + e.getValue())
                .collect(joining("\n")).getBytes(StandardCharsets.UTF_8);

        TarArchiveEntry tarEntry = new TarArchiveEntry(new File(docid));
        tarEntry.setSize(bytesOut.length + String.format("<DOCNO>%s</DOCNO>\n", docid).length());
        tOut.putArchiveEntry(tarEntry);
        tOut.write(String.format("<DOCNO>%s</DOCNO>\n", docid).getBytes());
        tOut.write(bytesOut);
        tOut.closeArchiveEntry();

        if (counter % 100000 == 0) {
            LOG.info(counter + " files have been dumped.");
        }
    }
    tOut.close();
    LOG.info("Document Vectors are output to: " + outFileName);
}

From source file:net.zyuiop.remoteworldloader.utils.CompressionUtils.java

private static void writeZipFile(File directoryToZip, List<File> fileList, File target) {
    try {
        FileOutputStream fos = new FileOutputStream(target);
        GzipParameters parameters = new GzipParameters();
        parameters.setCompressionLevel(9);
        GzipCompressorOutputStream gzip = new GzipCompressorOutputStream(fos, parameters);
        TarArchiveOutputStream stream = new TarArchiveOutputStream(gzip);

        for (File file : fileList) {
            if (!file.isDirectory()) { // we only zip files, not directories
                int retryCount = 0;
                while (retryCount < 10) {
                    try {
                        addToZip(directoryToZip, file, stream);
                        break;
                    } catch (Exception e) {
                        retryCount++;
                        if (retryCount > 9)
                            e.printStackTrace();
                    }
                }
            }
        }

        stream.close();
        gzip.close();
        fos.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.camel.processor.aggregate.TarAggregationStrategy.java

@Override
public void onCompletion(Exchange exchange) {
    List<Exchange> list = exchange.getProperty(Exchange.GROUPED_EXCHANGE, List.class);
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        TarArchiveOutputStream tout = new TarArchiveOutputStream(bout);
        for (Exchange item : list) {
            String name = item.getProperty(TAR_ENTRY_NAME,
                    item.getProperty(Exchange.FILE_NAME, item.getExchangeId(), String.class), String.class);
            byte[] body = item.getIn().getBody(byte[].class);
            TarArchiveEntry entry = new TarArchiveEntry(name);
            entry.setSize(body.length);
            tout.putArchiveEntry(entry);
            tout.write(body);
            tout.closeArchiveEntry();
        }
        tout.close();
        exchange.getIn().setBody(bout.toByteArray());
        exchange.removeProperty(Exchange.GROUPED_EXCHANGE);
    } catch (Exception e) {
        throw new RuntimeException("Unable to tar exchanges!", e);
    }
}

From source file:org.apache.hadoop.hive.common.CompressionUtils.java

/**
 * Archive all the files in the inputFiles into outputFile
 *
 * @param inputFiles
 * @param outputFile
 * @throws IOException
 */
public static void tar(String parentDir, String[] inputFiles, String outputFile) throws IOException {

    FileOutputStream out = null;
    try {
        out = new FileOutputStream(new File(parentDir, outputFile));
        TarArchiveOutputStream tOut = new TarArchiveOutputStream(
                new GzipCompressorOutputStream(new BufferedOutputStream(out)));

        for (int i = 0; i < inputFiles.length; i++) {
            File f = new File(parentDir, inputFiles[i]);
            TarArchiveEntry tarEntry = new TarArchiveEntry(f, f.getName());
            tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            tOut.putArchiveEntry(tarEntry);
            FileInputStream input = new FileInputStream(f);
            try {
                IOUtils.copy(input, tOut); // copies with an 8K buffer; does not close either stream
            } finally {
                input.close();
            }
            tOut.closeArchiveEntry();
        }
        tOut.close(); // close() calls finish() internally before closing the stream
    } finally {
        // TarArchiveOutputStream seemed not to close files properly in error situation
        org.apache.hadoop.io.IOUtils.closeStream(out);
    }
}