Example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream putArchiveEntry

Introduction

This page collects example usages of org.apache.commons.compress.archivers.tar.TarArchiveOutputStream#putArchiveEntry, drawn from a range of source files.

Prototype

public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException 

Documentation

Put an entry on the output stream. This writes the entry's header record and positions the stream for writing the entry's contents; once the contents are written, closeArchiveEntry() must be called to ensure that all buffered data is flushed to the output stream.
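
The typical call sequence is: put the entry, write the entry's data to the stream, then close the entry before finishing the archive. Below is a minimal, self-contained sketch of that sequence; the file names input.txt and example.tar and the class name PutArchiveEntryExample are hypothetical and used only for illustration.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;

public class PutArchiveEntryExample {
    public static void main(String[] args) throws IOException {
        File input = new File("input.txt"); // hypothetical input file
        try (TarArchiveOutputStream tOut = new TarArchiveOutputStream(
                new FileOutputStream("example.tar"))) {
            // putArchiveEntry writes the entry header; the entry data follows.
            TarArchiveEntry entry = new TarArchiveEntry(input, input.getName());
            tOut.putArchiveEntry(entry);
            try (FileInputStream in = new FileInputStream(input)) {
                IOUtils.copy(in, tOut);
            }
            // Close the current entry before finishing the archive.
            tOut.closeArchiveEntry();
            // finish() writes the trailing records; close() calls it if needed.
            tOut.finish();
        }
    }
}

Note that a TarArchiveOutputStream in its default long-file mode rejects entry names longer than 100 characters; some of the examples below call setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU) or setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX) before putArchiveEntry to allow longer names.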

Usage

From source file:com.lizardtech.expresszip.model.Job.java

private void writeTarFile(File baseDir, File archive, List<String> files) throws IOException {
    FileOutputStream fOut = null;
    BufferedOutputStream bOut = null;
    GzipCompressorOutputStream gzOut = null;
    TarArchiveOutputStream tOut = null;
    try {
        fOut = new FileOutputStream(archive);
        bOut = new BufferedOutputStream(fOut);
        gzOut = new GzipCompressorOutputStream(bOut);
        tOut = new TarArchiveOutputStream(gzOut);

        for (String f : files) {
            File myfile = new File(baseDir, f);
            String entryName = myfile.getName();
            logger.info(String.format("Writing %s to TAR archive %s", f, archive));

            TarArchiveEntry tarEntry = new TarArchiveEntry(myfile, entryName);
            tOut.putArchiveEntry(tarEntry);

            FileInputStream fis = new FileInputStream(myfile);
            IOUtils.copy(fis, tOut);
            fis.close();
            tOut.closeArchiveEntry();
        }
    } finally {
        // Closing the outermost stream also closes the streams it wraps;
        // null checks guard against a stream that was never opened.
        if (tOut != null) {
            tOut.finish();
            tOut.close();
        } else if (gzOut != null) {
            gzOut.close();
        } else if (bOut != null) {
            bOut.close();
        } else if (fOut != null) {
            fOut.close();
        }
    }
}

From source file:com.linuxbox.enkive.workspace.searchFolder.SearchFolder.java

/**
 * Writes a tar.gz archive of this search folder to the provided output stream.
 *
 * @param outputStream
 * @throws IOException
 */
public void exportSearchFolder(OutputStream outputStream) throws IOException {
    BufferedOutputStream out = new BufferedOutputStream(outputStream);
    GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(out);
    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);

    File mboxFile = File.createTempFile("mbox-export", ".mbox");
    BufferedWriter mboxWriter = new BufferedWriter(new FileWriter(mboxFile));
    // Write mbox to tempfile?
    for (String messageId : getMessageIds()) {
        try {
            Message message = retrieverService.retrieve(messageId);

            mboxWriter.write("From " + message.getDateStr() + "\r\n");
            BufferedReader reader = new BufferedReader(new StringReader(message.getReconstitutedEmail()));
            String tmpLine;
            while ((tmpLine = reader.readLine()) != null) {
                if (tmpLine.startsWith("From "))
                    mboxWriter.write(">" + tmpLine);
                else
                    mboxWriter.write(tmpLine);
                mboxWriter.write("\r\n");
            }
        } catch (CannotRetrieveException e) {
            // Add errors to report
            // if (LOGGER.isErrorEnabled())
            // LOGGER.error("Could not retrieve message with id"
            // + messageId);
        }
    }
    mboxWriter.flush();
    mboxWriter.close();
    // Add mbox to tarfile
    TarArchiveEntry mboxEntry = new TarArchiveEntry(mboxFile, "filename.mbox");
    tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
    tOut.putArchiveEntry(mboxEntry);
    FileInputStream mboxIn = new FileInputStream(mboxFile);
    IOUtils.copy(mboxIn, tOut);
    mboxIn.close();
    tOut.flush();
    tOut.closeArchiveEntry();
    mboxFile.delete();
    // Create report in tempfile?

    // Add report to tarfile

    // Close out stream
    tOut.finish();
    outputStream.flush();
    tOut.close();
    outputStream.close();

}

From source file:com.baidu.rigel.biplatform.tesseract.util.FileUtils.java

/**
 * Perform file compression.
 * 
 * @param inFileName
 *            Name of the file or directory to be compressed
 * @param outFileName
 *            Name of the compressed output file
 * @throws IOException
 */
public static String doCompressFile(String inFileName, String outFileName) throws IOException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "doCompressFile",
            "[inFileName:" + inFileName + "]"));
    FileOutputStream fOut = null;
    BufferedOutputStream bOut = null;
    GzipCompressorOutputStream gzOut = null;
    TarArchiveOutputStream tOut = null;
    if (StringUtils.isEmpty(inFileName)) {
        throw new IllegalArgumentException();
    }
    String compressedFileName = outFileName;

    FileInputStream fi = null;
    BufferedInputStream sourceStream = null;

    try {

        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_PROCESS,
                "Creating the GZIP output stream"));

        /** Step: 1 ---> create a TarArchiveOutputStream object. **/
        fOut = new FileOutputStream(new File(compressedFileName));
        bOut = new BufferedOutputStream(fOut);
        gzOut = new GzipCompressorOutputStream(bOut);
        tOut = new TarArchiveOutputStream(gzOut);

        /**
         * Step: 2 --->Open the source data and get a list of files from
         * given directory.
         */
        File source = new File(inFileName);
        if (!source.exists()) {
            LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_ERROR,
                    "File not found. " + inFileName));
            return null;
        }
        File[] files = null;
        if (source.isDirectory()) {
            files = source.listFiles();
        } else {
            files = new File[1];
            files[0] = source;
        }

        for (int i = 0; i < files.length; i++) {
            LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_PROCESS,
                    "Adding File:" + source.getParentFile().toURI().relativize(files[i].toURI()).getPath()));
            /**
             * Step: 3 ---> Create a tar entry for each file that is read.
             */
            /**
             * relativize is used to add a file to the tar without
             * including the entire path from the root.
             */

            TarArchiveEntry entry = new TarArchiveEntry(files[i],
                    source.getParentFile().toURI().relativize(files[i].toURI()).getPath());
            /**
             * Step: 4 ---> Put the tar entry using putArchiveEntry.
             */
            tOut.putArchiveEntry(entry);

            /**
             * Step: 5 ---> Write the data to the tar file and close the
             * input stream.
             */

            fi = new FileInputStream(files[i]);
            sourceStream = new BufferedInputStream(fi, TesseractConstant.FILE_BLOCK_SIZE);
            int count;
            byte[] data = new byte[TesseractConstant.FILE_BLOCK_SIZE];
            while ((count = sourceStream.read(data, 0, TesseractConstant.FILE_BLOCK_SIZE)) != -1) {
                tOut.write(data, 0, count);
            }

            sourceStream.close();

            /**
             * Step: 6 --->close the archive entry.
             */

            tOut.closeArchiveEntry();

        }

        /**
         * Step: 7 --->close the output stream.
         */

        tOut.close();

    } catch (IOException e) {
        LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_ERROR, "IOException"));
        LOGGER.error(e.getMessage(), e);
        throw e;

    } finally {
        try {
            // Close the outermost streams only; they close what they wrap,
            // and null checks guard against streams that were never opened.
            if (tOut != null) {
                tOut.close();
            } else if (gzOut != null) {
                gzOut.close();
            } else if (bOut != null) {
                bOut.close();
            } else if (fOut != null) {
                fOut.close();
            }
            if (sourceStream != null) {
                sourceStream.close();
            } else if (fi != null) {
                fi.close();
            }
        } catch (IOException e) {
            LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_COMPRESS_ERROR,
                    "IOException occur when closing fd"));
            LOGGER.error(e.getMessage(), e);
            throw e;
        }

    }

    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "doCompressFile",
            "[inFileName:" + inFileName + "][compressedFileName:" + compressedFileName + "]"));

    return compressedFileName;
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.processors.FilePackager.java

void makeArchive(final String readmeTempFilename) throws IOException {
    final byte[] buffer = new byte[1024];
    File archiveFile = null;
    TarArchiveOutputStream out = null;
    //in case we need to write to an external server
    final String archiveName = prefixPathForExternalServer(
            filePackagerBean.getArchivePhysicalName() + ".tar.gz");
    try {
        archiveFile = new File(archiveName);
        out = makeTarGzOutputStream(archiveFile);
        copyManifestToArchive(out);
        copyReadmeToArchive(out, readmeTempFilename);
        int i = 0;
        for (final DataFile fileInfo : filePackagerBean.getSelectedFiles()) {
            final File file = new File(fileInfo.getPath());
            if (!file.exists()) {
                throw new IOException("Data file does not exist: " + fileInfo.getPath());
            }
            logger.logToLogger(Level.DEBUG,
                    "tarring file " + (++i) + ":" + fileInfo.getPath() + " into " + archiveName);
            //"synthetic" file path, as we want it to appear in the tar
            final String archiveFilePath = constructInternalFilePath(fileInfo);
            final TarArchiveEntry tarAdd = new TarArchiveEntry(file);
            tarAdd.setModTime(file.lastModified());
            tarAdd.setName(archiveFilePath);
            out.putArchiveEntry(tarAdd);
            FileInputStream in = null;
            try {
                in = new FileInputStream(file);
                int nRead = in.read(buffer, 0, buffer.length);
                while (nRead >= 0) {
                    out.write(buffer, 0, nRead);
                    nRead = in.read(buffer, 0, buffer.length);
                }
            } finally {
                if (in != null) {
                    in.close();
                }
            }
            out.closeArchiveEntry();
            if (fileInfo.getCacheFileToGenerate() != null) {
                //a special case where there should be a cache file but it doesn't exist -
                // Send email with error message
                //filePackagerFactory.getErrorMailSender().send(Messages.CACHE_ERROR, MessageFormat.format(Messages.CACHE_FILE_NOT_FOUND, fileInfo.getCacheFileToGenerate()));
            }
        }
    } catch (IOException ex) {
        //delete the out file if it exists
        if (out != null) {
            out.close();
            out = null;
        }
        if (archiveFile != null && archiveFile.exists()) {
            // give OS time to delete file handle
            try {
                Thread.sleep(100);
            } catch (InterruptedException ie) {
                // it's ok
            }
            // keep track of uncompressed size
            this.actualUncompressedSize = archiveFile.length();
            //noinspection ResultOfMethodCallIgnored
            archiveFile.delete();
        }
        throw ex;
    } finally {
        if (out != null) {
            out.close();
        }
    }
    logger.logToLogger(Level.DEBUG, "Created tar " + archiveName);
}

From source file:gov.noaa.pfel.coastwatch.util.FileVisitorDNLS.java

/** 
 * This makes a .tgz or .tar.gz file.
 *
 * @param tResultName is the full result file name, usually 
 *   the name of the dir being archived, and ending in .tgz or .tar.gz.
 */
public static void makeTgz(String tDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
        String tResultName) throws Exception {
    TarArchiveOutputStream tar = null;
    String outerDir = File2.getDirectory(tDir.substring(0, tDir.length() - 1));
    tar = new TarArchiveOutputStream(
            new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(tResultName))));

    // Add data to out and flush stream
    Table filesTable = oneStep(tDir, tFileNameRegex, tRecursive, tPathRegex, false); //tDirectoriesToo
    StringArray directoryPA = (StringArray) filesTable.getColumn(DIRECTORY);
    StringArray namePA = (StringArray) filesTable.getColumn(NAME);
    LongArray lastModifiedPA = (LongArray) filesTable.getColumn(LASTMODIFIED);
    LongArray sizePA = (LongArray) filesTable.getColumn(SIZE);
    byte buffer[] = new byte[32768];
    int nBytes;
    for (int fi = 0; fi < namePA.size(); fi++) {
        String fullName = directoryPA.get(fi) + namePA.get(fi);
        TarArchiveEntry entry = new TarArchiveEntry(new File(fullName.substring(outerDir.length())));
        entry.setSize(sizePA.get(fi));
        entry.setModTime(lastModifiedPA.get(fi));
        tar.putArchiveEntry(entry);
        FileInputStream fis = new FileInputStream(fullName);
        while ((nBytes = fis.read(buffer)) > 0)
            tar.write(buffer, 0, nBytes);
        fis.close();
        tar.closeArchiveEntry();
    }
    tar.close();
}

From source file:gdt.data.entity.ArchiveHandler.java

private boolean append(Entigrator entigrator, String root$, String source$, TarArchiveOutputStream aos) {
    try {

        File[] fa = null;
        File source = new File(source$);
        if (source.exists())
            if (source.isFile())
                fa = new File[] { source };
            else
                fa = source.listFiles();
        if (fa == null)
            return true;
        File recordFile = null;

        Stack<TarArchiveEntry> s = new Stack<TarArchiveEntry>();
        int cnt = 0;

        TarArchiveEntry entry = null;
        for (File aFa : fa) {
            recordFile = aFa;
            entry = new TarArchiveEntry(recordFile);
            entry.setSize(recordFile.length());
            s.clear();
            getTarEntries(entry, s, root$);
            cnt = s.size();
            //  System.out.println("EximpExpert:append:cnt=" + cnt);
            File nextFile = null;
            for (int j = 0; j < cnt; j++) {
                entry = (TarArchiveEntry) s.pop();
                try {
                    String nextFile$ = entigrator.getEntihome() + "/" + entry.getName();
                    //            System.out.println("EximpExpert:append:try next file=" + nextFile$);
                    nextFile = new File(nextFile$);
                    if (!nextFile.exists() || nextFile.length() < 1) {
                        System.out.println("ArchiveHandler:append:wrong next file=" + nextFile$);
                        continue;
                    }
                    aos.putArchiveEntry(entry);
                    FileInputStream nextIn = new FileInputStream(nextFile);
                    IOUtils.copy(nextIn, aos);
                    nextIn.close();
                    // System.out.println("EximpExpert:tar_write:j="+j);
                    aos.closeArchiveEntry();
                } catch (Exception ee) {
                    //   System.out.println("EximpExpert:append:" + ee.toString());
                    LOGGER.severe(":append:" + ee.toString());
                }
            }
        }
        //System.out.println("EximpExpert:tar_write:finish");
        return true;

        //System.out.println("EximpExpert:tar_write:exit");
    } catch (Exception e) {
        LOGGER.severe(":append:" + e.toString());
        return false;
    }
}

From source file:io.anserini.index.IndexUtils.java

public void dumpDocumentVectors(String reqDocidsPath, DocVectorWeight weight) throws IOException {
    String outFileName = weight == null ? reqDocidsPath + ".docvector.tar.gz"
            : reqDocidsPath + ".docvector." + weight + ".tar.gz";
    LOG.info("Start dump document vectors with weight " + weight);

    InputStream in = getReadFileStream(reqDocidsPath);
    BufferedReader bRdr = new BufferedReader(new InputStreamReader(in));
    FileOutputStream fOut = new FileOutputStream(new File(outFileName));
    BufferedOutputStream bOut = new BufferedOutputStream(fOut);
    GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut);
    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);

    Map<Term, Integer> docFreqMap = new HashMap<>();

    int numNonEmptyDocs = reader.getDocCount(LuceneDocumentGenerator.FIELD_BODY);

    String docid;
    int counter = 0;
    while ((docid = bRdr.readLine()) != null) {
        counter++;

        // get term frequency
        Terms terms = reader.getTermVector(convertDocidToLuceneDocid(docid),
                LuceneDocumentGenerator.FIELD_BODY);
        if (terms == null) {
            // We do not throw exception here because there are some
            //  collections in which part of documents don't have document vectors
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        TermsEnum te = terms.iterator();
        if (te == null) {
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        Term term;
        long freq;

        // iterate every term and write and store in Map
        Map<String, String> docVectors = new HashMap<>();
        while ((te.next()) != null) {
            term = new Term(LuceneDocumentGenerator.FIELD_BODY, te.term());
            freq = te.totalTermFreq();

            switch (weight) {
            case NONE:
                docVectors.put(term.bytes().utf8ToString(), String.valueOf(freq));
                break;

            case TF_IDF:
                int docFreq;
                if (docFreqMap.containsKey(term)) {
                    docFreq = docFreqMap.get(term);
                } else {
                    try {
                        docFreq = reader.docFreq(term);
                    } catch (Exception e) {
                        LOG.error("Cannot find term " + term.toString() + " in indexing file.");
                        continue;
                    }
                    docFreqMap.put(term, docFreq);
                }
                float tfIdf = (float) (freq * Math.log(numNonEmptyDocs * 1.0 / docFreq));
                docVectors.put(term.bytes().utf8ToString(), String.format("%.6f", tfIdf));
                break;
            }
        }

        // Count size and write
        byte[] bytesOut = docVectors.entrySet().stream().map(e -> e.getKey() + " " + e.getValue())
                .collect(joining("\n")).getBytes(StandardCharsets.UTF_8);

        TarArchiveEntry tarEntry = new TarArchiveEntry(new File(docid));
        tarEntry.setSize(bytesOut.length + String.format("<DOCNO>%s</DOCNO>\n", docid).length());
        tOut.putArchiveEntry(tarEntry);
        tOut.write(String.format("<DOCNO>%s</DOCNO>\n", docid).getBytes());
        tOut.write(bytesOut);
        tOut.closeArchiveEntry();

        if (counter % 100000 == 0) {
            LOG.info(counter + " files have been dumped.");
        }
    }
    tOut.close();
    LOG.info("Document Vectors are output to: " + outFileName);
}

From source file:net.zyuiop.remoteworldloader.utils.CompressionUtils.java

private static void addToZip(File directoryToZip, File file, TarArchiveOutputStream zos) throws IOException {

    FileInputStream fis = new FileInputStream(file);

    String filePath = file.getCanonicalPath().substring(directoryToZip.getCanonicalPath().length() + 1,
            file.getCanonicalPath().length());
    Bukkit.getLogger().info(filePath);
    ArchiveEntry zipEntry = zos.createArchiveEntry(file, filePath);
    zos.putArchiveEntry(zipEntry);

    final byte[] buf = new byte[8192];
    int bytesRead;
    while (-1 != (bytesRead = fis.read(buf)))
        zos.write(buf, 0, bytesRead);

    zos.closeArchiveEntry();
    fis.close();
}

From source file:org.apache.camel.dataformat.tarfile.TarFileDataFormat.java

@Override
public void marshal(Exchange exchange, Object graph, OutputStream stream) throws Exception {
    String filename = exchange.getIn().getHeader(FILE_NAME, String.class);
    Long filelength = exchange.getIn().getHeader(FILE_LENGTH, Long.class);
    if (filename != null) {
        filename = new File(filename).getName(); // remove any path elements
    } else {
        // generate the file name as the camel file component would do
        filename = StringHelper.sanitize(exchange.getIn().getMessageId());
    }

    TarArchiveOutputStream tos = new TarArchiveOutputStream(stream);
    tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);

    InputStream is = exchange.getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, graph);
    if (filelength == null) {
        filelength = new Long(is.available());
    }

    TarArchiveEntry entry = new TarArchiveEntry(filename);
    entry.setSize(filelength);
    tos.putArchiveEntry(entry);

    try {
        IOHelper.copy(is, tos);
    } finally {
        tos.closeArchiveEntry();
        IOHelper.close(is, tos);
    }

    String newFilename = filename + ".tar";
    exchange.getOut().setHeader(FILE_NAME, newFilename);
}

From source file:org.apache.camel.dataformat.tarfile.TarFileDataFormatTest.java

private static byte[] getTaredText(String entryName) throws IOException {
    ByteArrayInputStream bais = new ByteArrayInputStream(TEXT.getBytes("UTF-8"));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TarArchiveOutputStream tos = new TarArchiveOutputStream(baos);
    try {
        TarArchiveEntry entry = new TarArchiveEntry(entryName);
        entry.setSize(bais.available());
        tos.putArchiveEntry(entry);
        IOHelper.copy(bais, tos);
    } finally {
        tos.closeArchiveEntry();
        IOHelper.close(bais, tos);
    }
    return baos.toByteArray();
}