Example usage for the org.apache.commons.compress.archivers.tar TarArchiveOutputStream(OutputStream) constructor

List of usage examples for the org.apache.commons.compress.archivers.tar TarArchiveOutputStream(OutputStream) constructor

Introduction

On this page you can find example usage for the org.apache.commons.compress.archivers.tar TarArchiveOutputStream(OutputStream) constructor.

Prototype

public TarArchiveOutputStream(OutputStream os) 

Source Link

Document

Constructor for TarArchiveOutputStream.
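
The constructor simply wraps whatever OutputStream it is given, so the archive can be written to a file, an in-memory buffer, or a compressor stream, as the examples below show. As a minimal, self-contained sketch (the file and class names here are made up for illustration), writing a single entry looks like this:

import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public class TarWriteSketch {
    public static void main(String[] args) throws Exception {
        byte[] content = "hello".getBytes(StandardCharsets.UTF_8);
        // The constructor accepts any OutputStream; here it wraps a FileOutputStream.
        try (TarArchiveOutputStream tar = new TarArchiveOutputStream(new FileOutputStream("example.tar"))) {
            TarArchiveEntry entry = new TarArchiveEntry("hello.txt"); // name of the entry inside the archive
            entry.setSize(content.length);                            // size must be set before putting the entry
            tar.putArchiveEntry(entry);
            tar.write(content);
            tar.closeArchiveEntry();
            tar.finish();                                             // writes the trailing tar padding records
        }
    }
}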

Usage

From source file:org.apache.camel.processor.aggregate.TarAggregationStrategy.java

@Override
public void onCompletion(Exchange exchange) {
    List<Exchange> list = exchange.getProperty(Exchange.GROUPED_EXCHANGE, List.class);
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        TarArchiveOutputStream tout = new TarArchiveOutputStream(bout);
        for (Exchange item : list) {
            String name = item.getProperty(TAR_ENTRY_NAME,
                    item.getProperty(Exchange.FILE_NAME, item.getExchangeId(), String.class), String.class);
            byte[] body = item.getIn().getBody(byte[].class);
            TarArchiveEntry entry = new TarArchiveEntry(name);
            entry.setSize(body.length);
            tout.putArchiveEntry(entry);
            tout.write(body);
            tout.closeArchiveEntry();
        }
        tout.close();
        exchange.getIn().setBody(bout.toByteArray());
        exchange.removeProperty(Exchange.GROUPED_EXCHANGE);
    } catch (Exception e) {
        throw new RuntimeException("Unable to tar exchanges!", e);
    }
}

From source file:org.apache.camel.processor.aggregate.tarfile.TarAggregationStrategy.java

private static void addFileToTar(File source, File file, String fileName) throws IOException, ArchiveException {
    File tmpTar = File.createTempFile(source.getName(), null);
    tmpTar.delete();
    if (!source.renameTo(tmpTar)) {
        throw new IOException("Could not make temp file (" + source.getName() + ")");
    }

    TarArchiveInputStream tin = (TarArchiveInputStream) new ArchiveStreamFactory()
            .createArchiveInputStream(ArchiveStreamFactory.TAR, new FileInputStream(tmpTar));
    TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(source));
    tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);

    InputStream in = new FileInputStream(file);

    // copy the existing entries    
    ArchiveEntry nextEntry;
    while ((nextEntry = tin.getNextEntry()) != null) {
        tos.putArchiveEntry(nextEntry);
        IOUtils.copy(tin, tos);
        tos.closeArchiveEntry();
    }

    // Add the new entry
    TarArchiveEntry entry = new TarArchiveEntry(fileName == null ? file.getName() : fileName);
    entry.setSize(file.length());
    tos.putArchiveEntry(entry);
    IOUtils.copy(in, tos);
    tos.closeArchiveEntry();

    IOHelper.close(in);
    IOHelper.close(tin);
    IOHelper.close(tos);
}

From source file:org.apache.camel.processor.aggregate.tarfile.TarAggregationStrategy.java

private static void addEntryToTar(File source, String entryName, byte[] buffer, int length)
        throws IOException, ArchiveException {
    File tmpTar = File.createTempFile(source.getName(), null);
    tmpTar.delete();
    if (!source.renameTo(tmpTar)) {
        throw new IOException("Cannot create temp file: " + source.getName());
    }
    TarArchiveInputStream tin = (TarArchiveInputStream) new ArchiveStreamFactory()
            .createArchiveInputStream(ArchiveStreamFactory.TAR, new FileInputStream(tmpTar));
    TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(source));
    tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);

    // copy the existing entries    
    ArchiveEntry nextEntry;
    while ((nextEntry = tin.getNextEntry()) != null) {
        tos.putArchiveEntry(nextEntry);
        IOUtils.copy(tin, tos);
        tos.closeArchiveEntry();
    }

    // Create new entry
    TarArchiveEntry entry = new TarArchiveEntry(entryName);
    entry.setSize(length);
    tos.putArchiveEntry(entry);
    tos.write(buffer, 0, length);
    tos.closeArchiveEntry();

    IOHelper.close(tin);
    IOHelper.close(tos);
}

From source file:org.apache.hadoop.hive.common.CompressionUtils.java

/**
 * Archive all the files in the inputFiles into outputFile
 *
 * @param inputFiles
 * @param outputFile
 * @throws IOException
 */
public static void tar(String parentDir, String[] inputFiles, String outputFile) throws IOException {

    FileOutputStream out = null;
    try {
        out = new FileOutputStream(new File(parentDir, outputFile));
        TarArchiveOutputStream tOut = new TarArchiveOutputStream(
                new GzipCompressorOutputStream(new BufferedOutputStream(out)));

        for (int i = 0; i < inputFiles.length; i++) {
            File f = new File(parentDir, inputFiles[i]);
            TarArchiveEntry tarEntry = new TarArchiveEntry(f, f.getName());
            tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            tOut.putArchiveEntry(tarEntry);
            FileInputStream input = new FileInputStream(f);
            try {
                IOUtils.copy(input, tOut); // copy with 8K buffer, not close
            } finally {
                input.close();
            }
            tOut.closeArchiveEntry();
        }
        tOut.close(); // finishes inside
    } finally {
        // TarArchiveOutputStream seemed not to close files properly in error situation
        org.apache.hadoop.io.IOUtils.closeStream(out);
    }
}
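
A quick usage sketch of this helper (the directory and file names are hypothetical): it writes parentDir/outputFile as a gzip-compressed tar containing the listed files.

// Hypothetical paths; the input files must already exist under parentDir.
String parentDir = "/tmp/reports";
String[] inputFiles = { "part-00000", "part-00001" };
CompressionUtils.tar(parentDir, inputFiles, "reports.tar.gz");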

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

static LocalResource createTarFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis)
        throws IOException, URISyntaxException {
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);

    File archiveFile = new File(p.toUri().getPath() + ".tar");
    archiveFile.createNewFile();
    TarArchiveOutputStream out = new TarArchiveOutputStream(new FileOutputStream(archiveFile));
    TarArchiveEntry entry = new TarArchiveEntry(p.getName());
    entry.setSize(bytes.length);
    out.putArchiveEntry(entry);
    out.write(bytes);
    out.closeArchiveEntry();
    out.close();

    LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
    ret.setResource(URL.fromPath(new Path(p.toString() + ".tar")));
    ret.setSize(len);
    ret.setType(LocalResourceType.ARCHIVE);
    ret.setVisibility(vis);
    ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar")).getModificationTime());
    return ret;
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

static LocalResource createTgzFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis)
        throws IOException, URISyntaxException {
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);

    File gzipFile = new File(p.toUri().getPath() + ".tar.gz");
    gzipFile.createNewFile();
    TarArchiveOutputStream out = new TarArchiveOutputStream(
            new GZIPOutputStream(new FileOutputStream(gzipFile)));
    TarArchiveEntry entry = new TarArchiveEntry(p.getName());
    entry.setSize(bytes.length);
    out.putArchiveEntry(entry);
    out.write(bytes);
    out.closeArchiveEntry();
    out.close();

    LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
    ret.setResource(URL.fromPath(new Path(p.toString() + ".tar.gz")));
    ret.setSize(len);
    ret.setType(LocalResourceType.ARCHIVE);
    ret.setVisibility(vis);
    ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar.gz")).getModificationTime());
    return ret;
}

From source file:org.apache.karaf.tooling.ArchiveMojo.java

public File archive(File source, File dest, Artifact artifact) throws //ArchiverException,
IOException {
    String serverName = null;
    if (targetFile != null) {
        serverName = targetFile.getName();
    } else {
        serverName = artifact.getArtifactId() + "-" + artifact.getVersion();
    }
    dest = new File(dest, serverName + "." + artifact.getType());

    String prefix = "";
    if (usePathPrefix) {
        prefix = pathPrefix.trim();
        if (prefix.length() > 0 && !prefix.endsWith("/")) {
            prefix += "/";
        }
    }

    if ("tar.gz".equals(artifact.getType())) {
        try (OutputStream fOut = Files.newOutputStream(dest.toPath());
                OutputStream bOut = new BufferedOutputStream(fOut);
                OutputStream gzOut = new GzipCompressorOutputStream(bOut);
                TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);
                DirectoryStream<Path> children = Files.newDirectoryStream(source.toPath())

        ) {
            tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
            for (Path child : children) {
                addFileToTarGz(tOut, child, prefix);
            }
        }
    } else if ("zip".equals(artifact.getType())) {
        try (OutputStream fOut = Files.newOutputStream(dest.toPath());
                OutputStream bOut = new BufferedOutputStream(fOut);
                ZipArchiveOutputStream tOut = new ZipArchiveOutputStream(bOut);
                DirectoryStream<Path> children = Files.newDirectoryStream(source.toPath())

        ) {
            for (Path child : children) {
                addFileToZip(tOut, child, prefix);
            }
        }
    } else {
        throw new IllegalArgumentException("Unknown target type: " + artifact.getType());
    }

    return dest;
}

From source file:org.apache.nifi.cluster.flow.impl.DataFlowDaoImpl.java

private void writeDataFlow(final File file, final ClusterDataFlow clusterDataFlow,
        final ClusterMetadata clusterMetadata) throws IOException, JAXBException {

    try (final OutputStream fos = new FileOutputStream(file);
            final TarArchiveOutputStream tarOut = new TarArchiveOutputStream(new BufferedOutputStream(fos))) {

        final DataFlow dataFlow = clusterDataFlow.getDataFlow();
        if (dataFlow == null) {
            writeTarEntry(tarOut, FLOW_XML_FILENAME, getEmptyFlowBytes());
            writeTarEntry(tarOut, TEMPLATES_FILENAME, new byte[0]);
            writeTarEntry(tarOut, SNIPPETS_FILENAME, new byte[0]);
        } else {
            writeTarEntry(tarOut, FLOW_XML_FILENAME, dataFlow.getFlow());
            writeTarEntry(tarOut, TEMPLATES_FILENAME, dataFlow.getTemplates());
            writeTarEntry(tarOut, SNIPPETS_FILENAME, dataFlow.getSnippets());
        }
        writeTarEntry(tarOut, CONTROLLER_SERVICES_FILENAME, clusterDataFlow.getControllerServices());
        writeTarEntry(tarOut, REPORTING_TASKS_FILENAME, clusterDataFlow.getReportingTasks());

        final ByteArrayOutputStream baos = new ByteArrayOutputStream(256);
        writeClusterMetadata(clusterMetadata, baos);
        final byte[] clusterInfoBytes = baos.toByteArray();

        writeTarEntry(tarOut, CLUSTER_INFO_FILENAME, clusterInfoBytes);
    }
}

From source file:org.apache.nifi.util.FlowFilePackagerV1.java

@Override
public void packageFlowFile(final InputStream in, final OutputStream out, final Map<String, String> attributes,
        final long fileSize) throws IOException {
    try (final TarArchiveOutputStream tout = new TarArchiveOutputStream(out)) {
        writeAttributesEntry(attributes, tout);
        writeContentEntry(tout, in, fileSize);
        tout.finish();
        tout.flush();
        tout.close();
    }
}

From source file:org.apache.nutch.tools.CommonCrawlDataDumper.java

private void constructNewStream(File outputDir) throws IOException {
    String archiveName = new SimpleDateFormat("yyyyMMddhhmm'.tar.gz'").format(new Date());
    LOG.info("Creating a new gzip archive: " + archiveName);
    fileOutput = new FileOutputStream(new File(outputDir + File.separator + archiveName));
    bufOutput = new BufferedOutputStream(fileOutput);
    gzipOutput = new GzipCompressorOutputStream(bufOutput);
    tarOutput = new TarArchiveOutputStream(gzipOutput);
    tarOutput.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
}