Example usage for org.apache.commons.compress.archivers ArchiveEntry getName

Introduction

On this page you can find example usage for org.apache.commons.compress.archivers ArchiveEntry getName.

Prototype

public String getName();

Document

The name of the entry in the archive.
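A minimal, self-contained sketch of the typical pattern is shown below: open an ArchiveInputStream, iterate its entries, and read each entry's name via getName(). The file name "example.zip" and the class name ListEntryNames are placeholders for illustration only; the archive format is auto-detected by ArchiveStreamFactory, so the same code handles zip, tar, ar, cpio and similar archives.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class ListEntryNames {
    public static void main(String[] args) throws Exception {
        // "example.zip" is a placeholder path. ArchiveStreamFactory needs a
        // mark-supporting stream to detect the archive format, hence the
        // BufferedInputStream wrapper.
        try (InputStream in = new BufferedInputStream(new FileInputStream("example.zip"));
                ArchiveInputStream archive = new ArchiveStreamFactory().createArchiveInputStream(in)) {
            ArchiveEntry entry;
            while ((entry = archive.getNextEntry()) != null) {
                // getName() returns the entry's path inside the archive.
                System.out.println((entry.isDirectory() ? "dir:  " : "file: ") + entry.getName());
            }
        }
    }
}

As the project examples in the Usage section show, the value returned by getName() is usually joined with a target directory (for extraction) or used as a resource name (for parsing or reporting).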

Usage

From source file:org.apache.flex.utilities.converter.retrievers.BaseRetriever.java

protected void unpack(File inputArchive, File targetDirectory) throws RetrieverException {
    if (!targetDirectory.mkdirs()) {
        throw new RetrieverException(
                "Unable to create extraction directory " + targetDirectory.getAbsolutePath());
    }

    ArchiveInputStream archiveInputStream = null;
    ArchiveEntry entry;
    try {

        final CountingInputStream inputStream = new CountingInputStream(new FileInputStream(inputArchive));

        final long inputFileSize = inputArchive.length();

        if (inputArchive.getName().endsWith(".tbz2")) {
            archiveInputStream = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
        } else {
            archiveInputStream = new ArchiveStreamFactory()
                    .createArchiveInputStream(new BufferedInputStream(inputStream));
        }

        final ProgressBar progressBar = new ProgressBar(inputFileSize);
        while ((entry = archiveInputStream.getNextEntry()) != null) {
            final File outputFile = new File(targetDirectory, entry.getName());

            // Entry is a directory.
            if (entry.isDirectory()) {
                if (!outputFile.exists()) {
                    if (!outputFile.mkdirs()) {
                        throw new RetrieverException(
                                "Could not create output directory " + outputFile.getAbsolutePath());
                    }
                }
            }

            // Entry is a file.
            else {
                final byte[] data = new byte[BUFFER_MAX];
                final FileOutputStream fos = new FileOutputStream(outputFile);
                BufferedOutputStream dest = null;
                try {
                    dest = new BufferedOutputStream(fos, BUFFER_MAX);

                    int count;
                    while ((count = archiveInputStream.read(data, 0, BUFFER_MAX)) != -1) {
                        dest.write(data, 0, count);
                        progressBar.updateProgress(inputStream.getBytesRead());
                    }
                } finally {
                    if (dest != null) {
                        dest.flush();
                        dest.close();
                    }
                }
            }

            progressBar.updateProgress(inputStream.getBytesRead());
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ArchiveException e) {
        e.printStackTrace();
    } finally {
        if (archiveInputStream != null) {
            try {
                archiveInputStream.close();
            } catch (Exception e) {
                // Ignore...
            }
        }
    }
}

From source file:org.apache.karaf.kittests.Helper.java

protected static void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}

From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java

private void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}

From source file:org.apache.karaf.tooling.RunMojo.java

private static void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}

From source file:org.apache.nifi.processors.standard.TestMergeContent.java

@Test
public void testTar() throws IOException {
    final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
    runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
    runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_TAR);

    final Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.MIME_TYPE.key(), "application/plain-text");

    attributes.put(CoreAttributes.FILENAME.key(), "AShortFileName");
    runner.enqueue("Hello".getBytes("UTF-8"), attributes);
    attributes.put(CoreAttributes.FILENAME.key(), "ALongerrrFileName");
    runner.enqueue(", ".getBytes("UTF-8"), attributes);
    attributes.put(CoreAttributes.FILENAME.key(),
            "AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName");
    runner.enqueue("World!".getBytes("UTF-8"), attributes);
    runner.run();

    runner.assertQueueEmpty();
    runner.assertTransferCount(MergeContent.REL_MERGED, 1);
    runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
    runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);

    final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
    try (final InputStream rawIn = new ByteArrayInputStream(runner.getContentAsByteArray(bundle));
            final TarArchiveInputStream in = new TarArchiveInputStream(rawIn)) {
        ArchiveEntry entry = in.getNextEntry();
        Assert.assertNotNull(entry);
        assertEquals("AShortFileName", entry.getName());
        final byte[] part1 = IOUtils.toByteArray(in);
        Assert.assertTrue(Arrays.equals("Hello".getBytes("UTF-8"), part1));

        entry = in.getNextEntry();
        assertEquals("ALongerrrFileName", entry.getName());
        final byte[] part2 = IOUtils.toByteArray(in);
        Assert.assertTrue(Arrays.equals(", ".getBytes("UTF-8"), part2));

        entry = in.getNextEntry();
        assertEquals(
                "AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName",
                entry.getName());
        final byte[] part3 = IOUtils.toByteArray(in);
        Assert.assertTrue(Arrays.equals("World!".getBytes("UTF-8"), part3));
    }
    bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/tar");
}

From source file:org.apache.rat.walker.ArchiveWalker.java

/**
 * Run a report over all files and directories in this ArchiveWalker,
 * ignoring any files/directories set to be ignored.
 *
 * @param report the defined RatReport to run on this archive walker.
 */
public void run(final RatReport report) throws RatException {

    try {
        ArchiveInputStream input;

        /* I am really sad that classes aren't first-class objects in
           Java :'( */
        try {
            input = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(file)));
        } catch (IOException e) {
            try {
                input = new TarArchiveInputStream(new BZip2CompressorInputStream(new FileInputStream(file)));
            } catch (IOException e2) {
                input = new ZipArchiveInputStream(new FileInputStream(file));
            }
        }

        ArchiveEntry entry = input.getNextEntry();
        while (entry != null) {
            File f = new File(entry.getName());
            byte[] contents = new byte[(int) entry.getSize()];
            int offset = 0;
            int length = contents.length;

            while (offset < entry.getSize()) {
                int actualRead = input.read(contents, offset, length);
                length -= actualRead;
                offset += actualRead;
            }

            if (!entry.isDirectory() && !ignored(f)) {
                report(report, contents, f);
            }

            entry = input.getNextEntry();
        }

        input.close();
    } catch (IOException e) {
        throw new RatException(e);
    }
}

From source file:org.apache.tika.parser.pkg.PackageExtractor.java

/**
 * Parses the given stream as a package of multiple underlying files.
 * The package entries are parsed using the delegate parser instance.
 * It is not an error if an entry cannot be parsed; in that case
 * just the entry name (if given) is emitted.
 *
 * @param archive package stream
 * @param xhtml content handler
 * @throws IOException if an IO error occurs
 * @throws SAXException if a SAX error occurs
 */
public void unpack(ArchiveInputStream archive, XHTMLContentHandler xhtml) throws IOException, SAXException {
    try {
        ArchiveEntry entry = archive.getNextEntry();
        while (entry != null) {
            if (!entry.isDirectory()) {
                String name = entry.getName();

                if (archive.canReadEntryData(entry)) {
                    Metadata entrydata = new Metadata();
                    if (name != null && name.length() > 0) {
                        entrydata.set(Metadata.RESOURCE_NAME_KEY, name);
                    }
                    if (extractor.shouldParseEmbedded(entrydata)) {
                        extractor.parseEmbedded(archive, xhtml, entrydata, true);
                    }
                } else if (name != null && name.length() > 0) {
                    xhtml.element("p", name);
                }
            }
            entry = archive.getNextEntry();
        }
    } finally {
        archive.close();
    }
}

From source file:org.apache.tika.parser.pkg.PackageParser.java

private void parseEntry(ArchiveInputStream archive, ArchiveEntry entry, EmbeddedDocumentExtractor extractor,
        Metadata parentMetadata, XHTMLContentHandler xhtml) throws SAXException, IOException, TikaException {
    String name = entry.getName();
    if (archive.canReadEntryData(entry)) {
        // Fetch the metadata on the entry contained in the archive
        Metadata entrydata = handleEntryMetadata(name, null, entry.getLastModifiedDate(), entry.getSize(),
                xhtml);

        // Recurse into the entry if desired
        if (extractor.shouldParseEmbedded(entrydata)) {
            // For detectors to work, we need a mark/reset supporting
            // InputStream, which ArchiveInputStream isn't, so wrap
            TemporaryResources tmp = new TemporaryResources();
            try {
                TikaInputStream tis = TikaInputStream.get(archive, tmp);
                extractor.parseEmbedded(tis, xhtml, entrydata, true);
            } finally {
                tmp.dispose();
            }
        }
    } else {
        name = (name == null) ? "" : name;
        if (entry instanceof ZipArchiveEntry) {
            boolean usesEncryption = ((ZipArchiveEntry) entry).getGeneralPurposeBit().usesEncryption();
            if (usesEncryption) {
                EmbeddedDocumentUtil.recordEmbeddedStreamException(
                        new EncryptedDocumentException("stream (" + name + ") is encrypted"), parentMetadata);
            }
        } else {
            EmbeddedDocumentUtil.recordEmbeddedStreamException(
                    new TikaException("Can't read archive stream (" + name + ")"), parentMetadata);
        }
        if (name.length() > 0) {
            xhtml.element("p", name);
        }
    }
}

From source file:org.apache.tika.server.CXFTestBase.java

protected Map<String, String> readArchiveFromStream(ArchiveInputStream zip) throws IOException {
    Map<String, String> data = new HashMap<String, String>();
    while (true) {
        ArchiveEntry entry = zip.getNextEntry();
        if (entry == null) {
            break;
        }

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        IOUtils.copy(zip, bos);
        data.put(entry.getName(), DigestUtils.md5Hex(bos.toByteArray()));
    }

    return data;
}

From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java

/**
 * Extracts the archive resource and then runs the batch-import process on it.
 */
protected void importDataArchive(final Resource resource, final ArchiveInputStream resourceStream,
        BatchImportOptions options) {

    final File tempDir = Files.createTempDir();
    try {
        ArchiveEntry archiveEntry;
        while ((archiveEntry = resourceStream.getNextEntry()) != null) {
            final File entryFile = new File(tempDir, archiveEntry.getName());
            if (archiveEntry.isDirectory()) {
                entryFile.mkdirs();
            } else {
                entryFile.getParentFile().mkdirs();

                Files.copy(new InputSupplier<InputStream>() {
                    @Override
                    public InputStream getInput() throws IOException {
                        return new CloseShieldInputStream(resourceStream);
                    }
                }, entryFile);
            }
        }

        importDataDirectory(tempDir, null, options);
    } catch (IOException e) {
        throw new RuntimeException(
                "Failed to extract data from '" + resource + "' to '" + tempDir + "' for batch import.", e);
    } finally {
        FileUtils.deleteQuietly(tempDir);
    }
}