Example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream TarArchiveOutputStream

Introduction

On this page you can find example usage of the TarArchiveOutputStream(OutputStream os) constructor from org.apache.commons.compress.archivers.tar.TarArchiveOutputStream.

Prototype

public TarArchiveOutputStream(OutputStream os) 

Document

Constructor for TarArchiveOutputStream.
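
The project examples under Usage are extracted from larger code bases and are not self-contained. As a quick orientation, here is a minimal sketch of the constructor in isolation, assuming only Commons Compress on the classpath; the class name TarExample and the file names example.tar and example.txt are illustrative placeholders, not taken from the examples below.

import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public class TarExample {
    public static void main(String[] args) throws Exception {
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        // Wrap any OutputStream; here a plain FileOutputStream managed by try-with-resources.
        try (TarArchiveOutputStream tar = new TarArchiveOutputStream(new FileOutputStream("example.tar"))) {
            TarArchiveEntry entry = new TarArchiveEntry("example.txt"); // illustrative entry name
            entry.setSize(data.length);   // the entry size must be set before putArchiveEntry()
            tar.putArchiveEntry(entry);
            tar.write(data);
            tar.closeArchiveEntry();      // every entry must be closed explicitly
            tar.finish();                 // writes the trailing end-of-archive records
        }
    }
}

Calling finish() explicitly is optional here, since close() (invoked by try-with-resources) finishes the archive if that has not already been done.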

Usage

From source file:com.st.maven.debian.DebianPackageMojo.java

private void fillDataTar(Config config, ArFileOutputStream output) throws MojoExecutionException {
    TarArchiveOutputStream tar = null;
    try {
        tar = new TarArchiveOutputStream(new GZIPOutputStream(new ArWrapper(output)));
        tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        if (Boolean.TRUE.equals(javaServiceWrapper)) {
            byte[] daemonData = processTemplate(freemarkerConfig, config, "daemon.ftl");
            TarArchiveEntry initScript = new TarArchiveEntry("etc/init.d/" + project.getArtifactId());
            initScript.setSize(daemonData.length);
            initScript.setMode(040755);
            tar.putArchiveEntry(initScript);
            tar.write(daemonData);
            tar.closeArchiveEntry();
        }
        String packageBaseDir = "home/" + unixUserId + "/" + project.getArtifactId() + "/";
        if (fileSets != null && !fileSets.isEmpty()) {
            writeDirectory(tar, packageBaseDir);

            Collections.sort(fileSets, MappingPathComparator.INSTANCE);
            for (Fileset curPath : fileSets) {
                curPath.setTarget(packageBaseDir + curPath.getTarget());
                addRecursively(config, tar, curPath);
            }
        }

    } catch (Exception e) {
        throw new MojoExecutionException("unable to create data tar", e);
    } finally {
        IOUtils.closeQuietly(tar);
    }
}

From source file:co.cask.cdap.internal.app.runtime.LocalizationUtilsTest.java

private File createTgzFile(String tgzFileName, File... filesToAdd) throws IOException {
    File target = TEMP_FOLDER.newFile(tgzFileName + ".tgz");
    try (TarArchiveOutputStream tos = new TarArchiveOutputStream(
            new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(target))))) {
        addFilesToTar(tos, filesToAdd);
    }
    return target;
}

From source file:freenet.client.async.ContainerInserter.java

/**
** OutputStream os will be close()d if this method returns successfully.
*/
private String createTarBucket(OutputStream os) throws IOException {
    if (logMINOR)
        Logger.minor(this, "Create a TAR Bucket");

    TarArchiveOutputStream tarOS = new TarArchiveOutputStream(os);
    try {
        tarOS.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        TarArchiveEntry ze;

        for (ContainerElement ph : containerItems) {
            if (logMINOR)
                Logger.minor(this, "Putting into tar: " + ph + " data length " + ph.data.size() + " name "
                        + ph.targetInArchive);
            ze = new TarArchiveEntry(ph.targetInArchive);
            ze.setModTime(0);
            long size = ph.data.size();
            ze.setSize(size);
            tarOS.putArchiveEntry(ze);
            BucketTools.copyTo(ph.data, tarOS, size);
            tarOS.closeArchiveEntry();
        }
    } finally {
        tarOS.close();
    }

    return ARCHIVE_TYPE.TAR.mimeTypes[0];
}

From source file:io.anserini.index.IndexUtils.java

public void dumpDocumentVectors(String reqDocidsPath, DocVectorWeight weight) throws IOException {
    String outFileName = weight == null ? reqDocidsPath + ".docvector.tar.gz"
            : reqDocidsPath + ".docvector." + weight + ".tar.gz";
    LOG.info("Start dump document vectors with weight " + weight);

    InputStream in = getReadFileStream(reqDocidsPath);
    BufferedReader bRdr = new BufferedReader(new InputStreamReader(in));
    FileOutputStream fOut = new FileOutputStream(new File(outFileName));
    BufferedOutputStream bOut = new BufferedOutputStream(fOut);
    GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut);
    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);

    Map<Term, Integer> docFreqMap = new HashMap<>();

    int numNonEmptyDocs = reader.getDocCount(LuceneDocumentGenerator.FIELD_BODY);

    String docid;
    int counter = 0;
    while ((docid = bRdr.readLine()) != null) {
        counter++;

        // get term frequency
        Terms terms = reader.getTermVector(convertDocidToLuceneDocid(docid),
                LuceneDocumentGenerator.FIELD_BODY);
        if (terms == null) {
            // We do not throw exception here because there are some
            //  collections in which part of documents don't have document vectors
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        TermsEnum te = terms.iterator();
        if (te == null) {
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        Term term;
        long freq;

        // iterate every term and write and store in Map
        Map<String, String> docVectors = new HashMap<>();
        while ((te.next()) != null) {
            term = new Term(LuceneDocumentGenerator.FIELD_BODY, te.term());
            freq = te.totalTermFreq();

            switch (weight) {
            case NONE:
                docVectors.put(term.bytes().utf8ToString(), String.valueOf(freq));
                break;

            case TF_IDF:
                int docFreq;
                if (docFreqMap.containsKey(term)) {
                    docFreq = docFreqMap.get(term);
                } else {
                    try {
                        docFreq = reader.docFreq(term);
                    } catch (Exception e) {
                        LOG.error("Cannot find term " + term.toString() + " in indexing file.");
                        continue;
                    }
                    docFreqMap.put(term, docFreq);
                }
                float tfIdf = (float) (freq * Math.log(numNonEmptyDocs * 1.0 / docFreq));
                docVectors.put(term.bytes().utf8ToString(), String.format("%.6f", tfIdf));
                break;
            }
        }

        // Count size and write
        byte[] bytesOut = docVectors.entrySet().stream().map(e -> e.getKey() + " " + e.getValue())
                .collect(joining("\n")).getBytes(StandardCharsets.UTF_8);

        TarArchiveEntry tarEntry = new TarArchiveEntry(new File(docid));
        tarEntry.setSize(bytesOut.length + String.format("<DOCNO>%s</DOCNO>\n", docid).length());
        tOut.putArchiveEntry(tarEntry);
        tOut.write(String.format("<DOCNO>%s</DOCNO>\n", docid).getBytes());
        tOut.write(bytesOut);
        tOut.closeArchiveEntry();

        if (counter % 100000 == 0) {
            LOG.info(counter + " files have been dumped.");
        }
    }
    tOut.close();
    LOG.info("Document Vectors are output to: " + outFileName);
}

From source file:com.dotcms.publisher.myTest.PushPublisher.java

/**
 * Compress (tar.gz) the input files to the output file
 *
 * @param files The files to compress
 * @param output The resulting output file (should end in .tar.gz)
 * @param bundleRoot
 * @throws IOException
 */
private void compressFiles(Collection<File> files, File output, String bundleRoot) throws IOException {
    Logger.info(this.getClass(), "Compressing " + files.size() + " to " + output.getAbsoluteFile());
    // Create the output stream for the output file
    FileOutputStream fos = new FileOutputStream(output);
    // Wrap the output file stream in streams that will tar and gzip everything
    TarArchiveOutputStream taos = new TarArchiveOutputStream(
            new GZIPOutputStream(new BufferedOutputStream(fos)));

    // TAR originally didn't support long file names, so enable the support for it
    taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);

    // Get to putting all the files in the compressed output file
    for (File f : files) {
        addFilesToCompression(taos, f, ".", bundleRoot);
    }

    // Close everything up
    taos.close();
    fos.close();
}

From source file:co.cask.cdap.internal.app.runtime.LocalizationUtilsTest.java

private File createTarGzFile(String tarGzFileName, File... filesToAdd) throws IOException {
    File target = TEMP_FOLDER.newFile(tarGzFileName + ".tar.gz");
    try (TarArchiveOutputStream tos = new TarArchiveOutputStream(
            new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(target))))) {
        addFilesToTar(tos, filesToAdd);
    }
    return target;
}

From source file:io.fabric8.docker.client.impl.BuildImage.java

@Override
public OutputHandle fromFolder(String path) {
    try {
        final Path root = Paths.get(path);
        final Path dockerIgnore = root.resolve(DOCKER_IGNORE);
        final List<String> ignorePatterns = new ArrayList<>();
        if (dockerIgnore.toFile().exists()) {
            for (String p : Files.readAllLines(dockerIgnore, UTF_8)) {
                ignorePatterns.add(path.endsWith(File.separator) ? path + p : path + File.separator + p);
            }
        }

        final DockerIgnorePathMatcher dockerIgnorePathMatcher = new DockerIgnorePathMatcher(ignorePatterns);

        File tempFile = Files.createTempFile(Paths.get(DEFAULT_TEMP_DIR), DOCKER_PREFIX, BZIP2_SUFFIX).toFile();

        try (FileOutputStream fout = new FileOutputStream(tempFile);
                BufferedOutputStream bout = new BufferedOutputStream(fout);
                BZip2CompressorOutputStream bzout = new BZip2CompressorOutputStream(bout);
                final TarArchiveOutputStream tout = new TarArchiveOutputStream(bzout)) {
            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {

                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                        throws IOException {
                    if (dockerIgnorePathMatcher.matches(dir)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (dockerIgnorePathMatcher.matches(file)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }

                    final Path relativePath = root.relativize(file);
                    final TarArchiveEntry entry = new TarArchiveEntry(file.toFile());
                    entry.setName(relativePath.toString());
                    entry.setMode(TarArchiveEntry.DEFAULT_FILE_MODE);
                    entry.setSize(attrs.size());
                    tout.putArchiveEntry(entry);
                    Files.copy(file, tout);
                    tout.closeArchiveEntry();
                    return FileVisitResult.CONTINUE;
                }
            });
            fout.flush();
        }
        return fromTar(tempFile.getAbsolutePath());

    } catch (IOException e) {
        throw DockerClientException.launderThrowable(e);
    }
}

From source file:com.francetelecom.clara.cloud.mvn.consumer.maven.MavenDeployer.java

private File populateTgzArchive(File archive, List<FileRef> fileset) throws IOException {
    archive.getParentFile().mkdirs();
    CompressorOutputStream zip = new GzipCompressorOutputStream(new FileOutputStream(archive));
    TarArchiveOutputStream tar = new TarArchiveOutputStream(zip);
    for (FileRef fileRef : fileset) {
        TarArchiveEntry entry = new TarArchiveEntry(new File(fileRef.getRelativeLocation()));
        byte[] bytes = fileRef.getContent().getBytes();
        entry.setSize(bytes.length);
        tar.putArchiveEntry(entry);
        tar.write(bytes);
        tar.closeArchiveEntry();
    }
    tar.close();
    return archive;
}

From source file:com.facebook.buck.jvm.java.DefaultJavaLibraryIntegrationTest.java

/**
 * writeTarZst writes a .tar.zst file to 'file'.
 *
 * <p>For each key:value in archiveContents, a file named 'key' with contents 'value' will be
 * created in the archive. File names ending with "/" are considered directories.
 */
private void writeTarZst(Path file, Map<String, byte[]> archiveContents) throws IOException {
    try (OutputStream o = new BufferedOutputStream(Files.newOutputStream(file));
            OutputStream z = new ZstdCompressorOutputStream(o);
            TarArchiveOutputStream archive = new TarArchiveOutputStream(z)) {
        archive.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        for (Entry<String, byte[]> mapEntry : archiveContents.entrySet()) {
            String fileName = mapEntry.getKey();
            byte[] fileContents = mapEntry.getValue();
            boolean isRegularFile = !fileName.endsWith("/");

            TarArchiveEntry e = new TarArchiveEntry(fileName);
            if (isRegularFile) {
                e.setSize(fileContents.length);
                archive.putArchiveEntry(e);
                archive.write(fileContents);
            } else {
                archive.putArchiveEntry(e);
            }
            archive.closeArchiveEntry();
        }
        archive.finish();
    }
}

From source file:com.lizardtech.expresszip.model.Job.java

private void writeTarFile(File baseDir, File archive, List<String> files) throws IOException {
    FileOutputStream fOut = null;
    BufferedOutputStream bOut = null;
    GzipCompressorOutputStream gzOut = null;
    TarArchiveOutputStream tOut = null;
    try {
        fOut = new FileOutputStream(archive);
        bOut = new BufferedOutputStream(fOut);
        gzOut = new GzipCompressorOutputStream(bOut);
        tOut = new TarArchiveOutputStream(gzOut);

        for (String f : files) {
            File myfile = new File(baseDir, f);
            String entryName = myfile.getName();
            logger.info(String.format("Writing %s to TAR archive %s", f, archive));

            TarArchiveEntry tarEntry = new TarArchiveEntry(myfile, entryName);
            tOut.putArchiveEntry(tarEntry);

            FileInputStream fis = new FileInputStream(myfile);
            IOUtils.copy(fis, tOut);
            fis.close();
            tOut.closeArchiveEntry();
        }
    } finally {
        tOut.finish();
        tOut.close();
        gzOut.close();
        bOut.close();
        fOut.close();
    }
}