Example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream write

Introduction

On this page you can find example usage for org.apache.commons.compress.archivers.tar TarArchiveOutputStream write.

Prototype

public void write(byte[] b) throws IOException 

Document

Writes the given bytes to the current tar archive entry.
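
Before the project examples below, here is a minimal, self-contained sketch of the usual call pattern: set the entry size, put the entry, write the bytes, then close the entry. The file names and contents are illustrative placeholders only, not taken from any of the projects listed.

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public class TarWriteSketch {
    public static void main(String[] args) throws IOException {
        byte[] data = "hello tar".getBytes(StandardCharsets.UTF_8);

        // "example.tar" and "hello.txt" are placeholder names for this sketch
        try (TarArchiveOutputStream tar = new TarArchiveOutputStream(new FileOutputStream("example.tar"))) {
            TarArchiveEntry entry = new TarArchiveEntry("hello.txt");
            entry.setSize(data.length);  // the size must be set before putArchiveEntry
            tar.putArchiveEntry(entry);
            tar.write(data);             // writes the bytes into the current archive entry
            tar.closeArchiveEntry();     // pads the entry to the tar record boundary
            tar.finish();                // writes the end-of-archive records
        }
    }
}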

Usage

From source file:com.gitblit.servlet.PtServlet.java

@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    try {
        response.setContentType("application/octet-stream");
        response.setDateHeader("Last-Modified", lastModified);
        response.setHeader("Cache-Control", "none");
        response.setHeader("Pragma", "no-cache");
        response.setDateHeader("Expires", 0);

        boolean windows = false;
        try {
            String useragent = request.getHeader("user-agent").toString();
            windows = useragent.toLowerCase().contains("windows");
        } catch (Exception e) {
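            // a missing or malformed user-agent header simply leaves windows == false,
            // so the unix tar.gz branch is used below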
        }

        byte[] pyBytes;
        File file = runtimeManager.getFileOrFolder("tickets.pt", "${baseFolder}/pt.py");
        if (file.exists()) {
            // custom script
            pyBytes = readAll(new FileInputStream(file));
        } else {
            // default script
            pyBytes = readAll(getClass().getResourceAsStream("/pt.py"));
        }

        if (windows) {
            // windows: download zip file with pt.py and pt.cmd
            response.setHeader("Content-Disposition", "attachment; filename=\"pt.zip\"");

            OutputStream os = response.getOutputStream();
            ZipArchiveOutputStream zos = new ZipArchiveOutputStream(os);

            // add the Python script
            ZipArchiveEntry pyEntry = new ZipArchiveEntry("pt.py");
            pyEntry.setSize(pyBytes.length);
            pyEntry.setUnixMode(FileMode.EXECUTABLE_FILE.getBits());
            pyEntry.setTime(lastModified);
            zos.putArchiveEntry(pyEntry);
            zos.write(pyBytes);
            zos.closeArchiveEntry();

            // add a Python launch cmd file
            byte[] cmdBytes = readAll(getClass().getResourceAsStream("/pt.cmd"));
            ZipArchiveEntry cmdEntry = new ZipArchiveEntry("pt.cmd");
            cmdEntry.setSize(cmdBytes.length);
            cmdEntry.setUnixMode(FileMode.REGULAR_FILE.getBits());
            cmdEntry.setTime(lastModified);
            zos.putArchiveEntry(cmdEntry);
            zos.write(cmdBytes);
            zos.closeArchiveEntry();

            // add a brief readme
            byte[] txtBytes = readAll(getClass().getResourceAsStream("/pt.txt"));
            ZipArchiveEntry txtEntry = new ZipArchiveEntry("readme.txt");
            txtEntry.setSize(txtBytes.length);
            txtEntry.setUnixMode(FileMode.REGULAR_FILE.getBits());
            txtEntry.setTime(lastModified);
            zos.putArchiveEntry(txtEntry);
            zos.write(txtBytes);
            zos.closeArchiveEntry();

            // cleanup
            zos.finish();
            zos.close();
            os.flush();
        } else {
            // unix: download a tar.gz file with pt.py set with execute permissions
            response.setHeader("Content-Disposition", "attachment; filename=\"pt.tar.gz\"");

            OutputStream os = response.getOutputStream();
            CompressorOutputStream cos = new CompressorStreamFactory()
                    .createCompressorOutputStream(CompressorStreamFactory.GZIP, os);
            TarArchiveOutputStream tos = new TarArchiveOutputStream(cos);
            tos.setAddPaxHeadersForNonAsciiNames(true);
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);

            // add the Python script
            TarArchiveEntry pyEntry = new TarArchiveEntry("pt");
            pyEntry.setMode(FileMode.EXECUTABLE_FILE.getBits());
            pyEntry.setModTime(lastModified);
            pyEntry.setSize(pyBytes.length);
            tos.putArchiveEntry(pyEntry);
            tos.write(pyBytes);
            tos.closeArchiveEntry();

            // add a brief readme
            byte[] txtBytes = readAll(getClass().getResourceAsStream("/pt.txt"));
            TarArchiveEntry txtEntry = new TarArchiveEntry("README");
            txtEntry.setMode(FileMode.REGULAR_FILE.getBits());
            txtEntry.setModTime(lastModified);
            txtEntry.setSize(txtBytes.length);
            tos.putArchiveEntry(txtEntry);
            tos.write(txtBytes);
            tos.closeArchiveEntry();

            // cleanup
            tos.finish();
            tos.close();
            cos.close();
            os.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:edu.wisc.doit.tcrypt.BouncyCastleFileEncrypter.java

/**
 * Encrypts and encodes the string and writes it to the tar output stream with the specified file name
 */
protected void encryptAndWrite(TarArchiveOutputStream tarArchiveOutputStream, String contents, String fileName)
        throws InvalidCipherTextException, IOException {
    final byte[] contentBytes = contents.getBytes(CHARSET);

    //Encrypt contents
    final AsymmetricBlockCipher encryptCipher = this.getEncryptCipher();
    final byte[] encryptedContentBytes = encryptCipher.processBlock(contentBytes, 0, contentBytes.length);
    final byte[] encryptedContentBase64Bytes = Base64.encodeBase64(encryptedContentBytes);

    //Write encrypted contents to tar output stream
    final TarArchiveEntry contentEntry = new TarArchiveEntry(fileName);
    contentEntry.setSize(encryptedContentBase64Bytes.length);
    tarArchiveOutputStream.putArchiveEntry(contentEntry);
    tarArchiveOutputStream.write(encryptedContentBase64Bytes);
    tarArchiveOutputStream.closeArchiveEntry();
}

From source file:com.st.maven.debian.DebianPackageMojo.java

private void fillDataTar(Config config, ArFileOutputStream output) throws MojoExecutionException {
    TarArchiveOutputStream tar = null;
    try {
        tar = new TarArchiveOutputStream(new GZIPOutputStream(new ArWrapper(output)));
        tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        if (Boolean.TRUE.equals(javaServiceWrapper)) {
            byte[] daemonData = processTemplate(freemarkerConfig, config, "daemon.ftl");
            TarArchiveEntry initScript = new TarArchiveEntry("etc/init.d/" + project.getArtifactId());
            initScript.setSize(daemonData.length);
            initScript.setMode(040755);
            tar.putArchiveEntry(initScript);
            tar.write(daemonData);
            tar.closeArchiveEntry();
        }
        String packageBaseDir = "home/" + unixUserId + "/" + project.getArtifactId() + "/";
        if (fileSets != null && !fileSets.isEmpty()) {
            writeDirectory(tar, packageBaseDir);

            Collections.sort(fileSets, MappingPathComparator.INSTANCE);
            for (Fileset curPath : fileSets) {
                curPath.setTarget(packageBaseDir + curPath.getTarget());
                addRecursively(config, tar, curPath);
            }
        }

    } catch (Exception e) {
        throw new MojoExecutionException("unable to create data tar", e);
    } finally {
        IOUtils.closeQuietly(tar);
    }
}

From source file:com.st.maven.debian.DebianPackageMojo.java

private void fillControlTar(Config config, ArFileOutputStream output) throws MojoExecutionException {
    TarArchiveOutputStream tar = null;
    try {
        tar = new TarArchiveOutputStream(new GZIPOutputStream(new ArWrapper(output)));
        tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        TarArchiveEntry rootDir = new TarArchiveEntry("./");
        tar.putArchiveEntry(rootDir);
        tar.closeArchiveEntry();

        byte[] controlData = processTemplate(freemarkerConfig, config, "control.ftl");
        TarArchiveEntry controlEntry = new TarArchiveEntry("./control");
        controlEntry.setSize(controlData.length);
        tar.putArchiveEntry(controlEntry);
        tar.write(controlData);
        tar.closeArchiveEntry();

        byte[] preinstBaseData = processTemplate("preinst", freemarkerConfig, config,
                combine("preinst.ftl", BASE_DIR + File.separator + "preinst", false));
        long size = preinstBaseData.length;
        TarArchiveEntry preinstEntry = new TarArchiveEntry("./preinst");
        preinstEntry.setSize(size);
        preinstEntry.setMode(0755);
        tar.putArchiveEntry(preinstEntry);
        tar.write(preinstBaseData);
        tar.closeArchiveEntry();

        byte[] postinstBaseData = processTemplate("postinst", freemarkerConfig, config,
                combine("postinst.ftl", BASE_DIR + File.separator + "postinst", true));
        size = postinstBaseData.length;
        TarArchiveEntry postinstEntry = new TarArchiveEntry("./postinst");
        postinstEntry.setSize(size);
        postinstEntry.setMode(0755);
        tar.putArchiveEntry(postinstEntry);
        tar.write(postinstBaseData);
        tar.closeArchiveEntry();

        byte[] prermBaseData = processTemplate("prerm", freemarkerConfig, config,
                combine("prerm.ftl", BASE_DIR + File.separator + "prerm", false));
        size = prermBaseData.length;
        TarArchiveEntry prermEntry = new TarArchiveEntry("./prerm");
        prermEntry.setSize(size);
        prermEntry.setMode(0755);
        tar.putArchiveEntry(prermEntry);
        tar.write(prermBaseData);
        tar.closeArchiveEntry();

        byte[] postrmBaseData = processTemplate("postrm", freemarkerConfig, config,
                combine("postrm.ftl", BASE_DIR + File.separator + "postrm", false));
        size = postrmBaseData.length;
        TarArchiveEntry postrmEntry = new TarArchiveEntry("./postrm");
        postrmEntry.setSize(size);
        postrmEntry.setMode(0755);
        tar.putArchiveEntry(postrmEntry);
        tar.write(postrmBaseData);
        tar.closeArchiveEntry();

    } catch (Exception e) {
        throw new MojoExecutionException("unable to create control tar", e);
    } finally {
        if (tar != null) {
            try {
                tar.close();
            } catch (IOException e) {
                getLog().error("unable to finish tar", e);
            }
        }
    }
}

From source file:io.anserini.index.IndexUtils.java

public void dumpRawDocuments(String reqDocidsPath, boolean prependDocid)
        throws IOException, NotStoredException {
    LOG.info("Start dump raw documents" + (prependDocid ? " with Docid prepended" : "."));

    InputStream in = getReadFileStream(reqDocidsPath);
    BufferedReader bRdr = new BufferedReader(new InputStreamReader(in));
    FileOutputStream fOut = new FileOutputStream(new File(reqDocidsPath + ".output.tar.gz"));
    BufferedOutputStream bOut = new BufferedOutputStream(fOut);
    GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut);
    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);

    String docid;
    int counter = 0;
    while ((docid = bRdr.readLine()) != null) {
        counter += 1;
        Document d = reader.document(convertDocidToLuceneDocid(docid));
        IndexableField doc = d.getField(LuceneDocumentGenerator.FIELD_RAW);
        if (doc == null) {
            throw new NotStoredException("Raw documents not stored!");
        }
        TarArchiveEntry tarEntry = new TarArchiveEntry(new File(docid));

        byte[] bytesOut = doc.stringValue().getBytes(StandardCharsets.UTF_8);
        tarEntry.setSize(
                bytesOut.length + (prependDocid ? String.format("<DOCNO>%s</DOCNO>\n", docid).length() : 0));
        tOut.putArchiveEntry(tarEntry);
        if (prependDocid) {
            tOut.write(String.format("<DOCNO>%s</DOCNO>\n", docid).getBytes());
        }
        tOut.write(bytesOut);
        tOut.closeArchiveEntry();

        if (counter % 100000 == 0) {
            LOG.info(counter + " files have been dumped.");
        }
    }
    tOut.close();
    LOG.info(String.format("Raw documents are output to: %s", reqDocidsPath + ".output.tar.gz"));
}

From source file:com.st.maven.debian.DebianPackageMojo.java

private void addRecursively(Config config, TarArchiveOutputStream tar, Fileset fileset)
        throws MojoExecutionException {
    File sourceFile = new File(fileset.getSource());
    String targetFilename = fileset.getTarget();
    // skip well-known ignore directories
    if (ignore.contains(sourceFile.getName()) || sourceFile.getName().endsWith(".rrd")
            || sourceFile.getName().endsWith(".log")) {
        return;
    }
    FileInputStream fis = null;
    try {
        if (!sourceFile.isDirectory()) {
            TarArchiveEntry curEntry = new TarArchiveEntry(targetFilename);
            if (fileset.isFilter()) {
                byte[] bytes = processTemplate(freemarkerConfig, config, fileset.getSource());
                curEntry.setSize(bytes.length);
                tar.putArchiveEntry(curEntry);
                tar.write(bytes);
            } else {
                curEntry.setSize(sourceFile.length());
                tar.putArchiveEntry(curEntry);
                fis = new FileInputStream(sourceFile);
                IOUtils.copy(fis, tar);
            }
            tar.closeArchiveEntry();
        } else if (sourceFile.isDirectory()) {
            targetFilename += "/";
            if (!dirsAdded.contains(targetFilename)) {
                dirsAdded.add(targetFilename);
                writeDirectory(tar, targetFilename);
            }
        }
    } catch (Exception e) {
        throw new MojoExecutionException("unable to write", e);
    } finally {
        IOUtils.closeQuietly(fis);
    }

    if (sourceFile.isDirectory()) {
        File[] subFiles = sourceFile.listFiles();
        for (File curSubFile : subFiles) {
            Fileset curSubFileset = new Fileset(fileset.getSource() + "/" + curSubFile.getName(),
                    fileset.getTarget() + "/" + curSubFile.getName(), fileset.isFilter());
            addRecursively(config, tar, curSubFileset);
        }
    }
}

From source file:io.anserini.index.IndexUtils.java

public void dumpDocumentVectors(String reqDocidsPath, DocVectorWeight weight) throws IOException {
    String outFileName = weight == null ? reqDocidsPath + ".docvector.tar.gz"
            : reqDocidsPath + ".docvector." + weight + ".tar.gz";
    LOG.info("Start dump document vectors with weight " + weight);

    InputStream in = getReadFileStream(reqDocidsPath);
    BufferedReader bRdr = new BufferedReader(new InputStreamReader(in));
    FileOutputStream fOut = new FileOutputStream(new File(outFileName));
    BufferedOutputStream bOut = new BufferedOutputStream(fOut);
    GzipCompressorOutputStream gzOut = new GzipCompressorOutputStream(bOut);
    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);

    Map<Term, Integer> docFreqMap = new HashMap<>();

    int numNonEmptyDocs = reader.getDocCount(LuceneDocumentGenerator.FIELD_BODY);

    String docid;
    int counter = 0;
    while ((docid = bRdr.readLine()) != null) {
        counter++;

        // get term frequency
        Terms terms = reader.getTermVector(convertDocidToLuceneDocid(docid),
                LuceneDocumentGenerator.FIELD_BODY);
        if (terms == null) {
            // We do not throw exception here because there are some
            //  collections in which part of documents don't have document vectors
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        TermsEnum te = terms.iterator();
        if (te == null) {
            LOG.warn("Document vector not stored for doc " + docid);
            continue;
        }

        Term term;
        long freq;

        // iterate every term and write and store in Map
        Map<String, String> docVectors = new HashMap<>();
        while ((te.next()) != null) {
            term = new Term(LuceneDocumentGenerator.FIELD_BODY, te.term());
            freq = te.totalTermFreq();

            switch (weight) {
            case NONE:
                docVectors.put(term.bytes().utf8ToString(), String.valueOf(freq));
                break;

            case TF_IDF:
                int docFreq;
                if (docFreqMap.containsKey(term)) {
                    docFreq = docFreqMap.get(term);
                } else {
                    try {
                        docFreq = reader.docFreq(term);
                    } catch (Exception e) {
                        LOG.error("Cannot find term " + term.toString() + " in indexing file.");
                        continue;
                    }
                    docFreqMap.put(term, docFreq);
                }
                float tfIdf = (float) (freq * Math.log(numNonEmptyDocs * 1.0 / docFreq));
                docVectors.put(term.bytes().utf8ToString(), String.format("%.6f", tfIdf));
                break;
            }
        }

        // Count size and write
        byte[] bytesOut = docVectors.entrySet().stream().map(e -> e.getKey() + " " + e.getValue())
                .collect(joining("\n")).getBytes(StandardCharsets.UTF_8);

        TarArchiveEntry tarEntry = new TarArchiveEntry(new File(docid));
        tarEntry.setSize(bytesOut.length + String.format("<DOCNO>%s</DOCNO>\n", docid).length());
        tOut.putArchiveEntry(tarEntry);
        tOut.write(String.format("<DOCNO>%s</DOCNO>\n", docid).getBytes());
        tOut.write(bytesOut);
        tOut.closeArchiveEntry();

        if (counter % 100000 == 0) {
            LOG.info(counter + " files have been dumped.");
        }
    }
    tOut.close();
    LOG.info("Document Vectors are output to: " + outFileName);
}

From source file:org.apache.camel.processor.aggregate.TarAggregationStrategy.java

@Override
public void onCompletion(Exchange exchange) {
    List<Exchange> list = exchange.getProperty(Exchange.GROUPED_EXCHANGE, List.class);
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        TarArchiveOutputStream tout = new TarArchiveOutputStream(bout);
        for (Exchange item : list) {
            String name = item.getProperty(TAR_ENTRY_NAME,
                    item.getProperty(Exchange.FILE_NAME, item.getExchangeId(), String.class), String.class);
            byte[] body = item.getIn().getBody(byte[].class);
            TarArchiveEntry entry = new TarArchiveEntry(name);
            entry.setSize(body.length);
            tout.putArchiveEntry(entry);
            tout.write(body);
            tout.closeArchiveEntry();
        }
        tout.close();
        exchange.getIn().setBody(bout.toByteArray());
        exchange.removeProperty(Exchange.GROUPED_EXCHANGE);
    } catch (Exception e) {
        throw new RuntimeException("Unable to tar exchanges!", e);
    }
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

static LocalResource createTarFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis)
        throws IOException, URISyntaxException {
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);

    File archiveFile = new File(p.toUri().getPath() + ".tar");
    archiveFile.createNewFile();
    TarArchiveOutputStream out = new TarArchiveOutputStream(new FileOutputStream(archiveFile));
    TarArchiveEntry entry = new TarArchiveEntry(p.getName());
    entry.setSize(bytes.length);
    out.putArchiveEntry(entry);
    out.write(bytes);
    out.closeArchiveEntry();
    out.close();

    LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
    ret.setResource(URL.fromPath(new Path(p.toString() + ".tar")));
    ret.setSize(len);
    ret.setType(LocalResourceType.ARCHIVE);
    ret.setVisibility(vis);
    ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar")).getModificationTime());
    return ret;
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

static LocalResource createTgzFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis)
        throws IOException, URISyntaxException {
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);

    File gzipFile = new File(p.toUri().getPath() + ".tar.gz");
    gzipFile.createNewFile();
    TarArchiveOutputStream out = new TarArchiveOutputStream(
            new GZIPOutputStream(new FileOutputStream(gzipFile)));
    TarArchiveEntry entry = new TarArchiveEntry(p.getName());
    entry.setSize(bytes.length);
    out.putArchiveEntry(entry);
    out.write(bytes);
    out.closeArchiveEntry();
    out.close();

    LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
    ret.setResource(URL.fromPath(new Path(p.toString() + ".tar.gz")));
    ret.setSize(len);
    ret.setType(LocalResourceType.ARCHIVE);
    ret.setVisibility(vis);
    ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar.gz")).getModificationTime());
    return ret;
}