Example usage for org.apache.commons.compress.archivers.tar TarArchiveInputStream TarArchiveInputStream

List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveInputStream TarArchiveInputStream

Introduction

On this page you can find example usage for the org.apache.commons.compress.archivers.tar TarArchiveInputStream constructor.

Prototype

public TarArchiveInputStream(InputStream is) 

Document

Constructor for TarArchiveInputStream.
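
A minimal sketch of typical usage (the file name "sample.tar.gz" is a placeholder, not one of the files used in the examples below): wrap the raw stream in a GzipCompressorInputStream when the tarball is gzip-compressed, pass it to the TarArchiveInputStream constructor, and iterate over the entries.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;

public class TarListExample {
    public static void main(String[] args) throws IOException {
        try (InputStream fileIn = new BufferedInputStream(new FileInputStream("sample.tar.gz"));
                TarArchiveInputStream tarIn = new TarArchiveInputStream(new GzipCompressorInputStream(fileIn))) {
            TarArchiveEntry entry;
            while ((entry = tarIn.getNextTarEntry()) != null) {
                // List each entry; reading entry data would use tarIn.read(...)
                // before moving on to the next entry.
                System.out.printf("%s (%d bytes)%n", entry.getName(), entry.getSize());
            }
        }
    }
}

For a plain (uncompressed) .tar file, the GzipCompressorInputStream wrapper is simply omitted, as in several of the examples below.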

Usage

From source file:com.redhat.red.offliner.ftest.SinglePlaintextDownloadOfTarballFTest.java

@Test
public void testGenericTarballDownload() throws Exception {
    // Generate some test content
    String path = contentGenerator.newArtifactPath("tar.gz");
    Map<String, byte[]> entries = new HashMap<>();
    entries.put(contentGenerator.newArtifactPath("jar"), contentGenerator.newBinaryContent(2400));
    entries.put(contentGenerator.newArtifactPath("jar"), contentGenerator.newBinaryContent(2400));

    final File tgz = makeTarball(entries);

    System.out.println("tar content array has length: " + tgz.length());

    // We only need one repo server.
    ExpectationServer server = new ExpectationServer();
    server.start();

    String url = server.formatUrl(path);

    // Register the generated content by writing it to the path within the repo server's dir structure.
    // This way when the path is requested it can be downloaded instead of returning a 404.
    server.expect("GET", url, (req, resp) -> {
        //            Content-Length: 47175
        //            Content-Type: application/x-gzip
        resp.setHeader("Content-Encoding", "x-gzip");
        resp.setHeader("Content-Type", "application/x-gzip");

        byte[] raw = FileUtils.readFileToByteArray(tgz);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        GzipCompressorOutputStream gzout = new GzipCompressorOutputStream(baos);
        gzout.write(raw);
        gzout.finish();

        byte[] content = baos.toByteArray();

        resp.setHeader("Content-Length", Long.toString(content.length));
        OutputStream respStream = resp.getOutputStream();
        respStream.write(content);
        respStream.flush();

        System.out.println("Wrote content with length: " + content.length);
    });

    final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager();
    ccm.setMaxTotal(1);

    final HttpClientBuilder builder = HttpClients.custom().setConnectionManager(ccm);
    CloseableHttpClient client = builder.build();

    HttpGet get = new HttpGet(url);
    //        get.setHeader( "Accept-Encoding", "gzip,deflate" );

    Boolean result = client.execute(get, (response) -> {
        Arrays.stream(response.getAllHeaders()).forEach((h) -> System.out.println("Header:: " + h));

        Header contentEncoding = response.getEntity().getContentEncoding();
        if (contentEncoding == null) {
            contentEncoding = response.getFirstHeader("Content-Encoding");
        }

        System.out.printf("Got content encoding: %s\n",
                contentEncoding == null ? "None" : contentEncoding.getValue());

        byte[] content = IOUtils.toByteArray(response.getEntity().getContent());

        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(
                new GzipCompressorInputStream(new ByteArrayInputStream(content)))) {
            TarArchiveEntry entry = null;
            while ((entry = tarIn.getNextTarEntry()) != null) {
                System.out.printf("Got tar entry: %s\n", entry.getName());
                byte[] entryData = new byte[(int) entry.getSize()];
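                // Note: a single read() call is not guaranteed to fill entryData;
                // fully consuming an entry generally requires a read loop (or a
                // helper such as IOUtils.readFully).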
                int read = tarIn.read(entryData, 0, entryData.length);
            }
        }

        return false;
    });
}

From source file:net.sf.sveditor.core.tests.utils.BundleUtils.java

public void unpackBundleTgzToFS(String bundle_path, File fs_path) {
    URL url = fBundle.getEntry(bundle_path);
    TestCase.assertNotNull(url);

    if (!fs_path.isDirectory()) {
        TestCase.assertTrue(fs_path.mkdirs());
    }
    InputStream in = null;
    GzipCompressorInputStream gz_stream = null;
    TarArchiveInputStream tar_stream = null;

    try {
        in = url.openStream();
    } catch (IOException e) {
        TestCase.fail("Failed to open data file " + bundle_path + " : " + e.getMessage());
    }

    try {
        gz_stream = new GzipCompressorInputStream(in);
    } catch (IOException e) {
        TestCase.fail("Failed to uncompress data file " + bundle_path + " : " + e.getMessage());
    }

    tar_stream = new TarArchiveInputStream(gz_stream);

    try {
        byte tmp[] = new byte[4 * 1024];
        int cnt;

        ArchiveEntry te;

        while ((te = tar_stream.getNextEntry()) != null) {
            // System.out.println("Entry: \"" + ze.getName() + "\"");
            File entry_f = new File(fs_path, te.getName());
            if (te.getName().endsWith("/")) {
                // Directory
                continue;
            }
            if (!entry_f.getParentFile().exists()) {
                TestCase.assertTrue(entry_f.getParentFile().mkdirs());
            }
            FileOutputStream fos = new FileOutputStream(entry_f);
            BufferedOutputStream bos = new BufferedOutputStream(fos, tmp.length);

            while ((cnt = tar_stream.read(tmp, 0, tmp.length)) > 0) {
                bos.write(tmp, 0, cnt);
            }
            bos.flush();
            bos.close();
            fos.close();

            //            tar_stream.closeEntry();
        }
        tar_stream.close();
    } catch (IOException e) {
        e.printStackTrace();
        TestCase.fail("Failed to unpack tar file: " + e.getMessage());
    }
}

From source file:com.blackducksoftware.integration.hub.docker.tar.DockerTarParser.java

public List<File> extractLayerTars(final File dockerTar) throws IOException {
    final File tarExtractionDirectory = getTarExtractionDirectory();
    final List<File> untaredFiles = new ArrayList<>();
    final File outputDir = new File(tarExtractionDirectory, dockerTar.getName());
    final TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(
            new FileInputStream(dockerTar));
    try {
        TarArchiveEntry tarArchiveEntry = null;
        while (null != (tarArchiveEntry = tarArchiveInputStream.getNextTarEntry())) {
            final File outputFile = new File(outputDir, tarArchiveEntry.getName());
            if (tarArchiveEntry.isFile()) {
                if (!outputFile.getParentFile().exists()) {
                    outputFile.getParentFile().mkdirs();
                }
                final OutputStream outputFileStream = new FileOutputStream(outputFile);
                try {
                    IOUtils.copy(tarArchiveInputStream, outputFileStream);
                    if (tarArchiveEntry.getName().contains(DOCKER_LAYER_TAR_FILENAME)) {
                        untaredFiles.add(outputFile);
                    }
                } finally {
                    outputFileStream.close();
                }
            }
        }
    } finally {
        IOUtils.closeQuietly(tarArchiveInputStream);
    }
    return untaredFiles;
}

From source file:com.facebook.buck.util.unarchive.Untar.java

private TarArchiveInputStream getArchiveInputStream(Path tarFile) throws IOException, CompressorException {
    BufferedInputStream inputStream = new BufferedInputStream(Files.newInputStream(tarFile));
    if (compressorType.isPresent()) {
        return new TarArchiveInputStream(
                new CompressorStreamFactory().createCompressorInputStream(compressorType.get(), inputStream));
    } else {
        return new TarArchiveInputStream(inputStream);
    }
}

From source file:com.mobilesorcery.sdk.builder.linux.PackageParser.java

/**
 * Extracts a package template and parses and replaces variables
 * in filenames and files.
 *
 * @param o Output directory
 * @param i Input file
 *
 * @throws Exception If recursion is too deep, a variable isn't defined or
 *                   malformed meta data
 * @throws IOException Error reading inputstream
 * @throws ParseException Malformed JSON
 * @throws FileNotFoundException Could not open input file
 */
public void doProcessTarGZip(File o, File i)
        throws Exception, IOException, ParseException, FileNotFoundException {
    FileInputStream fis = new FileInputStream(i);
    GZIPInputStream gis = new GZIPInputStream(fis);
    TarArchiveInputStream tis = new TarArchiveInputStream(gis);

    // Remove any old data if any
    if (o.exists() == true)
        o.delete();

    // Find and parse meta data, this should always be the
    // first file, but it can't be assumed
    while (true) {
        ArchiveEntry e = tis.getNextEntry();
        if (e == null)
            break;

        if (e.getName().equals(".meta/.meta") == false)
            continue;

        doParseMeta(tis);
        break;
    }

    // Reset input
    tis.close();
    gis.close();
    fis.close();
    fis = new FileInputStream(i);
    gis = new GZIPInputStream(fis);
    tis = new TarArchiveInputStream(gis);

    // Process and extract files
    while (true) {
        File f;
        ArchiveEntry e = tis.getNextEntry();

        if (e == null)
            break;

        // Check if it's a script that we need to load and parse
        if (e.getName().contains(".meta") == true) {
            if (m_scriptMap.containsKey(e.getName()) == true) {
                String name = m_scriptMap.get(e.getName());
                String script = m_varResolver.doParseStream(tis);
                m_scriptMap.put(name, script);
                m_scriptMap.remove(e.getName());
            }
            continue;
        }

        // Store its permissions
        String n = m_varResolver.doResolveString(e.getName());
        m_filemodeMap.put(n, ((TarArchiveEntry) e).getMode());

        // Directory ?
        f = new File(o, n);
        if (e.isDirectory() == true) {
            if (f.exists() == false)
                f.mkdirs();
            continue;
        }

        // It's a file
        if (m_parseSet.contains(e.getName()) == true)
            m_varResolver.doParseCopyStream(f, tis);
        else
            BuilderUtil.getInstance().copyInputStreamToFile(f, tis, e.getSize());
    }
}

From source file:heigit.ors.routing.graphhopper.extensions.reader.borders.CountryBordersReader.java

/**
 * Method to read the geometries from a GeoJSON file that represent the boundaries of different countries. Ideally
 * it should be written using many small objects split into hierarchies.
 *
 * If the file is in .tar.gz format, it will decompress it and then store the resulting data to be read into the
 * JSON object.
 *
 * @return      A (Geo)JSON object representing the contents of the file
 */
private JSONObject readBordersData() throws IOException {
    String data = "";

    InputStream is = null;
    BufferedReader buf = null;
    try {
        is = new FileInputStream(BORDER_FILE);

        if (BORDER_FILE.endsWith(".tar.gz")) {
            // We are working with a compressed file
            TarArchiveInputStream tis = new TarArchiveInputStream(
                    new GzipCompressorInputStream(new BufferedInputStream(is)));

            TarArchiveEntry entry;
            StringBuilder sb = new StringBuilder();

            while ((entry = tis.getNextTarEntry()) != null) {
                if (!entry.isDirectory()) {
                    byte[] bytes = new byte[(int) entry.getSize()];
                    tis.read(bytes);
                    String str = new String(bytes);
                    sb.append(str);
                }
            }
            data = sb.toString();
        } else {
            // Assume a normal file so read line by line

            buf = new BufferedReader(new InputStreamReader(is));

            String line = "";
            StringBuilder sb = new StringBuilder();

            while ((line = buf.readLine()) != null) {
                sb.append(line);
            }

            data = sb.toString();
        }
    } catch (IOException ioe) {
        LOGGER.warn("Cannot access borders file!");
        throw ioe;
    } finally {
        try {
            if (is != null)
                is.close();
            if (buf != null)
                buf.close();
        } catch (IOException ioe) {
            LOGGER.warn("Error closing file reader buffers!");
        } catch (NullPointerException npe) {
            // This can happen if the file itself wasn't available
            throw new IOException("Borders file " + BORDER_FILE + " not found!");
        }
    }

    JSONObject json = new JSONObject(data);

    return json;
}

From source file:in.neoandroid.neoupdate.neoUpdate.java

private String getMetaFromNPK() {
    try {
        GZIPInputStream npkFile = new GZIPInputStream(new FileInputStream(baseUrl));
        //FileInputStream npkFile = new FileInputStream(baseUrl);
        TarArchiveInputStream input = new TarArchiveInputStream(npkFile);
        TarArchiveEntry ae;
        while ((ae = input.getNextTarEntry()) != null) {
            if (ae.isDirectory())
                Log.e("[neoUpdate]", "Dir: " + ae.getName());
            else
                Log.e("[neoUpdate]", "File: " + ae.getName());
            if (ae.getName().equalsIgnoreCase("neoupdate.json")) {
                byte buff[] = new byte[(int) ae.getSize()];
                input.read(buff);
                input.close();
                return new String(buff);
            }
        }
        input.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}

From source file:com.puppetlabs.geppetto.forge.util.TarUtils.java

/**
 * Unpack the content read from <i>source</i> into <i>targetFolder</i>. If the
 * <i>skipTopFolder</i> is set, then don't assume that the archive contains one
 * single folder and unpack the content of that folder, not including the folder
 * itself.
 * 
 * @param source
 *            The input source. Must be in <i>TAR</i> format.
 * @param targetFolder
 *            The destination folder for the unpack. Not used when a <tt>fileCatcher</tt> is provided
 * @param skipTopFolder
 *            Set to <code>true</code> to unpack beneath the top folder
 *            of the archive. The archive must consist of one single folder and nothing else
 *            in order for this to work.
 * @param fileCatcher
 *            Used when specific files should be picked from the archive without writing them to disk. Can be
 *            <tt>null</tt>.
 * @throws IOException
 */
public static void unpack(InputStream source, File targetFolder, boolean skipTopFolder, FileCatcher fileCatcher)
        throws IOException {
    String topFolderName = null;
    Map<File, Map<Integer, List<String>>> chmodMap = new HashMap<File, Map<Integer, List<String>>>();
    TarArchiveInputStream in = new TarArchiveInputStream(source);
    try {
        TarArchiveEntry te = in.getNextTarEntry();
        if (te == null) {
            throw new IOException("No entry in the tar file");
        }
        do {
            if (te.isGlobalPaxHeader())
                continue;

            String name = te.getName();
            if (skipTopFolder) {
                int firstSlash = name.indexOf('/');
                if (firstSlash < 0)
                    throw new IOException("Archive doesn't contain one single folder");

                String tfName = name.substring(0, firstSlash);
                if (topFolderName == null)
                    topFolderName = tfName;
                else if (!tfName.equals(topFolderName))
                    throw new IOException("Archive doesn't contain one single folder");
                name = name.substring(firstSlash + 1);
            }
            if (name.length() == 0)
                continue;

            String linkName = te.getLinkName();
            if (linkName != null) {
                if (linkName.trim().equals(""))
                    linkName = null;
            }

            if (fileCatcher != null) {
                if (linkName == null && !te.isDirectory() && fileCatcher.accept(name)) {
                    if (fileCatcher.catchData(name, in))
                        // We're done here
                        return;
                }
                continue;
            }

            File outFile = new File(targetFolder, name);
            if (linkName != null) {
                if (!OsUtil.link(targetFolder, name, te.getLinkName()))
                    throw new IOException("Archive contains links but they are not supported on this platform");
            } else {
                if (te.isDirectory()) {
                    outFile.mkdirs();
                } else {
                    outFile.getParentFile().mkdirs();
                    OutputStream target = new FileOutputStream(outFile);
                    StreamUtil.copy(in, target);
                    target.close();
                    outFile.setLastModified(te.getModTime().getTime());
                }
                registerChmodFile(chmodMap, targetFolder, Integer.valueOf(te.getMode()), name);
            }
        } while ((te = in.getNextTarEntry()) != null);
    } finally {
        StreamUtil.close(in);
    }
    chmod(chmodMap);
}

From source file:com.vividsolutions.jump.io.CompressedFile.java

/**
 * Utility file open function - handles compressed and un-compressed files.
 *
 * @param filePath
 *          name of the file to search for.
 * @param compressedEntry
 *          name of the compressed file.
 * 
 *          <p>
 *          If compressedEntry = null, opens a FileInputStream on filePath
 *          </p>
 * 
 *          <p>
 *          If filePath ends in ".zip" - opens the compressed Zip and
 *          looks for the file called compressedEntry
 *          </p>
 * 
 *          <p>
 *          If filePath ends in ".gz" - opens the compressed .gz file.
 *          </p>
 */
public static InputStream openFile(String filePath, String compressedEntry) throws IOException {

    System.out.println(filePath + " extract " + compressedEntry);

    if (isTar(filePath)) {
        InputStream is = new BufferedInputStream(new FileInputStream(filePath));
        if (filePath.toLowerCase().endsWith("gz"))
            is = new GzipCompressorInputStream(is, true);
        else if (filePath.matches("(?i).*bz2?"))
            is = new BZip2CompressorInputStream(is, true);
        else if (filePath.matches("(?i).*xz"))
            is = new XZCompressorInputStream(is, true);

        TarArchiveInputStream tis = new TarArchiveInputStream(is);
        if (compressedEntry == null)
            return is;

        TarArchiveEntry entry;
        while ((entry = tis.getNextTarEntry()) != null) {
            if (entry.getName().equals(compressedEntry))
                return tis;
        }

        throw createArchiveFNFE(filePath, compressedEntry);
    }

    else if (compressedEntry == null && isGZip(filePath)) {
        // gz compressed file -- easy
        InputStream is = new BufferedInputStream(new FileInputStream(filePath));
        return new GzipCompressorInputStream(is, true);
    }

    else if (compressedEntry == null && isBZip(filePath)) {
        // bz compressed file -- easy
        InputStream is = new BufferedInputStream(new FileInputStream(filePath));
        return new BZip2CompressorInputStream(is, true);
        //return new org.itadaki.bzip2.BZip2InputStream(is, false);
    }

    else if (compressedEntry == null && isXZ(filePath)) {
        InputStream is = new BufferedInputStream(new FileInputStream(filePath));
        return new XZCompressorInputStream(is, true);
    }

    else if (compressedEntry != null && isZip(filePath)) {

        ZipFile zipFile = new ZipFile(filePath);
        ZipArchiveEntry zipEntry = zipFile.getEntry(compressedEntry);

        if (zipEntry != null)
            return zipFile.getInputStream(zipEntry);

        throw createArchiveFNFE(filePath, compressedEntry);
    }

    else if (compressedEntry != null && isSevenZ(filePath)) {

        SevenZFileGiveStream sevenZFile = new SevenZFileGiveStream(new File(filePath));
        SevenZArchiveEntry entry;
        while ((entry = sevenZFile.getNextEntry()) != null) {
            if (entry.getName().equals(compressedEntry))
                return sevenZFile.getCurrentEntryInputStream();
        }
        throw createArchiveFNFE(filePath, compressedEntry);
    }
    // return plain stream if no compressedEntry
    else if (compressedEntry == null) {
        return new FileInputStream(filePath);
    }

    else {
        throw new IOException("Couldn't determine compressed file type for file '" + filePath
                + "' supposedly containing '" + compressedEntry + "'.");
    }
}

From source file:deployer.TestUtils.java

public static ByteBuffer createSampleOpenShiftWebAppTarBall() throws IOException, ArchiveException {
    ByteArrayInputStream bis = new ByteArrayInputStream(createSampleAppTarBall(ArtifactType.WebApp).array());
    CompressorInputStream cis = new GzipCompressorInputStream(bis);
    ArchiveInputStream ais = new TarArchiveInputStream(cis);

    ByteArrayOutputStream bos = new ByteArrayOutputStream(bis.available() + 2048);
    CompressorOutputStream cos = new GzipCompressorOutputStream(bos);
    ArchiveOutputStream aos = new TarArchiveOutputStream(cos);

    ArchiveEntry nextEntry;
    while ((nextEntry = ais.getNextEntry()) != null) {
        aos.putArchiveEntry(nextEntry);
        IOUtils.copy(ais, aos);
        aos.closeArchiveEntry();
    }
    ais.close();
    cis.close();
    bis.close();

    TarArchiveEntry entry = new TarArchiveEntry(
            Paths.get(".openshift", CONFIG_DIRECTORY, "/standalone.xml").toFile());
    byte[] xmlData = SAMPLE_STANDALONE_DATA.getBytes();
    entry.setSize(xmlData.length);
    aos.putArchiveEntry(entry);
    IOUtils.write(xmlData, aos);
    aos.closeArchiveEntry();

    aos.finish();
    cos.close();
    bos.flush();
    return ByteBuffer.wrap(bos.toByteArray());
}