Example usage for org.apache.commons.compress.archivers ArchiveEntry isDirectory

Introduction

On this page you can find example usage of org.apache.commons.compress.archivers.ArchiveEntry.isDirectory().

Prototype

public boolean isDirectory();

Document

Returns true if the entry refers to a directory.
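
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: open an ArchiveInputStream, iterate with getNextEntry(), and branch on isDirectory(). The archive path and class name are placeholders chosen for this sketch, not taken from any of the sources below.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class ListArchiveEntries {
    public static void main(String[] args) throws Exception {
        // "archive.zip" is a placeholder path used only for this sketch.
        try (InputStream in = new BufferedInputStream(new FileInputStream("archive.zip"));
                ArchiveInputStream archive = new ArchiveStreamFactory().createArchiveInputStream(in)) {
            ArchiveEntry entry;
            while ((entry = archive.getNextEntry()) != null) {
                // isDirectory() distinguishes directory entries from regular file entries.
                if (entry.isDirectory()) {
                    System.out.println("directory: " + entry.getName());
                } else {
                    System.out.println("file: " + entry.getName());
                }
            }
        }
    }
}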

Usage

From source file:org.apache.flex.utilities.converter.retrievers.BaseRetriever.java

protected void unpack(File inputArchive, File targetDirectory) throws RetrieverException {
    if (!targetDirectory.mkdirs()) {
        throw new RetrieverException(
                "Unable to create extraction directory " + targetDirectory.getAbsolutePath());
    }

    ArchiveInputStream archiveInputStream = null;
    ArchiveEntry entry;
    try {

        final CountingInputStream inputStream = new CountingInputStream(new FileInputStream(inputArchive));

        final long inputFileSize = inputArchive.length();

        if (inputArchive.getName().endsWith(".tbz2")) {
            archiveInputStream = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
        } else {
            archiveInputStream = new ArchiveStreamFactory()
                    .createArchiveInputStream(new BufferedInputStream(inputStream));
        }

        final ProgressBar progressBar = new ProgressBar(inputFileSize);
        while ((entry = archiveInputStream.getNextEntry()) != null) {
            final File outputFile = new File(targetDirectory, entry.getName());

            // Entry is a directory.
            if (entry.isDirectory()) {
                if (!outputFile.exists()) {
                    if (!outputFile.mkdirs()) {
                        throw new RetrieverException(
                                "Could not create output directory " + outputFile.getAbsolutePath());
                    }
                }
            }

            // Entry is a file.
            else {
                final byte[] data = new byte[BUFFER_MAX];
                final FileOutputStream fos = new FileOutputStream(outputFile);
                BufferedOutputStream dest = null;
                try {
                    dest = new BufferedOutputStream(fos, BUFFER_MAX);

                    int count;
                    while ((count = archiveInputStream.read(data, 0, BUFFER_MAX)) != -1) {
                        dest.write(data, 0, count);
                        progressBar.updateProgress(inputStream.getBytesRead());
                    }
                } finally {
                    if (dest != null) {
                        dest.flush();
                        dest.close();
                    }
                }
            }

            progressBar.updateProgress(inputStream.getBytesRead());
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ArchiveException e) {
        e.printStackTrace();
    } finally {
        if (archiveInputStream != null) {
            try {
                archiveInputStream.close();
            } catch (Exception e) {
                // Ignore...
            }
        }
    }
}

From source file:org.apache.karaf.kittests.Helper.java

protected static void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}

From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java

private void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}

From source file:org.apache.karaf.tooling.RunMojo.java

private static void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}

From source file:org.apache.rat.walker.ArchiveWalker.java

/**
 * Run a report over all files and directories in this GZIPWalker,
 * ignoring any files/directories set to be ignored.
 *
 * @param report the defined RatReport to run on this GZIP walker.
 * 
 */
public void run(final RatReport report) throws RatException {

    try {
        ArchiveInputStream input;

        /* I am really sad that classes aren't first-class objects in
           Java :'( */
        try {
            input = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(file)));
        } catch (IOException e) {
            try {
                input = new TarArchiveInputStream(new BZip2CompressorInputStream(new FileInputStream(file)));
            } catch (IOException e2) {
                input = new ZipArchiveInputStream(new FileInputStream(file));
            }
        }

        ArchiveEntry entry = input.getNextEntry();
        while (entry != null) {
            File f = new File(entry.getName());
            byte[] contents = new byte[(int) entry.getSize()];
            int offset = 0;
            int length = contents.length;

            while (offset < entry.getSize()) {
                int actualRead = input.read(contents, offset, length);
                length -= actualRead;
                offset += actualRead;
            }

            if (!entry.isDirectory() && !ignored(f)) {
                report(report, contents, f);
            }

            entry = input.getNextEntry();
        }

        input.close();
    } catch (IOException e) {
        throw new RatException(e);
    }
}

From source file:org.apache.tika.parser.pkg.PackageExtractor.java

/**
 * Parses the given stream as a package of multiple underlying files.
 * The package entries are parsed using the delegate parser instance.
 * It is not an error if an entry cannot be parsed; in that case
 * just the entry name (if given) is emitted.
 *
 * @param archive package stream
 * @param xhtml content handler
 * @throws IOException if an IO error occurs
 * @throws SAXException if a SAX error occurs
 */
public void unpack(ArchiveInputStream archive, XHTMLContentHandler xhtml) throws IOException, SAXException {
    try {
        ArchiveEntry entry = archive.getNextEntry();
        while (entry != null) {
            if (!entry.isDirectory()) {
                String name = entry.getName();

                if (archive.canReadEntryData(entry)) {
                    Metadata entrydata = new Metadata();
                    if (name != null && name.length() > 0) {
                        entrydata.set(Metadata.RESOURCE_NAME_KEY, name);
                    }
                    if (extractor.shouldParseEmbedded(entrydata)) {
                        extractor.parseEmbedded(archive, xhtml, entrydata, true);
                    }
                } else if (name != null && name.length() > 0) {
                    xhtml.element("p", name);
                }
            }
            entry = archive.getNextEntry();
        }
    } finally {
        archive.close();
    }
}

From source file:org.apache.tika.parser.pkg.PackageParser.java

public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {

    //lazily load the MediaTypeRegistry at parse time
    //only want to call getDefaultConfig() once, and can't
    //load statically because of the ForkParser
    TikaConfig config = context.get(TikaConfig.class);
    MediaTypeRegistry mediaTypeRegistry = null;
    if (config != null) {
        mediaTypeRegistry = config.getMediaTypeRegistry();
    } else {
        if (bufferedMediaTypeRegistry == null) {
            //buffer this for next time.
            synchronized (lock) {
                //now that we're locked, check again
                if (bufferedMediaTypeRegistry == null) {
                    bufferedMediaTypeRegistry = TikaConfig.getDefaultConfig().getMediaTypeRegistry();
                }
            }
        }
        mediaTypeRegistry = bufferedMediaTypeRegistry;
    }

    // Ensure that the stream supports the mark feature
    if (!stream.markSupported()) {
        stream = new BufferedInputStream(stream);
    }

    TemporaryResources tmp = new TemporaryResources();
    ArchiveInputStream ais = null;
    try {
        ArchiveStreamFactory factory = context.get(ArchiveStreamFactory.class, new ArchiveStreamFactory());
        // At the end we want to close the archive stream to release
        // any associated resources, but the underlying document stream
        // should not be closed

        ais = factory.createArchiveInputStream(new CloseShieldInputStream(stream));

    } catch (StreamingNotSupportedException sne) {
        // Most archive formats work on streams, but a few need files
        if (sne.getFormat().equals(ArchiveStreamFactory.SEVEN_Z)) {
            // Rework as a file, and wrap
            stream.reset();
            TikaInputStream tstream = TikaInputStream.get(stream, tmp);

            // Seven Zip supports passwords, was one given?
            String password = null;
            PasswordProvider provider = context.get(PasswordProvider.class);
            if (provider != null) {
                password = provider.getPassword(metadata);
            }

            SevenZFile sevenz;
            if (password == null) {
                sevenz = new SevenZFile(tstream.getFile());
            } else {
                sevenz = new SevenZFile(tstream.getFile(), password.getBytes("UnicodeLittleUnmarked"));
            }

            // Pending a fix for COMPRESS-269 / TIKA-1525, this bit is a little nasty
            ais = new SevenZWrapper(sevenz);
        } else {
            tmp.close();
            throw new TikaException("Unknown non-streaming format " + sne.getFormat(), sne);
        }
    } catch (ArchiveException e) {
        tmp.close();
        throw new TikaException("Unable to unpack document stream", e);
    }

    updateMediaType(ais, mediaTypeRegistry, metadata);
    // Use the delegate parser to parse the contained document
    EmbeddedDocumentExtractor extractor = EmbeddedDocumentUtil.getEmbeddedDocumentExtractor(context);

    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();

    try {
        ArchiveEntry entry = ais.getNextEntry();
        while (entry != null) {
            if (!entry.isDirectory()) {
                parseEntry(ais, entry, extractor, metadata, xhtml);
            }
            entry = ais.getNextEntry();
        }
    } catch (UnsupportedZipFeatureException zfe) {
        // If it's an encrypted document of unknown password, report as such
        if (zfe.getFeature() == Feature.ENCRYPTION) {
            throw new EncryptedDocumentException(zfe);
        }
        // Otherwise throw the exception
        throw new TikaException("UnsupportedZipFeature", zfe);
    } catch (PasswordRequiredException pre) {
        throw new EncryptedDocumentException(pre);
    } finally {
        ais.close();
        tmp.close();
    }

    xhtml.endDocument();
}

From source file:org.apache.tika.parser.pkg.SimulationDetector.java

private static MediaType detectSimulation(TikaInputStream tis) {
    try {
        CompressorInputStream cis = new CompressorStreamFactory().createCompressorInputStream(tis);
        BufferedInputStream bis = new BufferedInputStream(cis);
        ArchiveInputStream input = new ArchiveStreamFactory().createArchiveInputStream(bis);
        ArchiveEntry entry = null;
        do {
            entry = input.getNextEntry();
            //input.mark(MAGIC.length()+10);
            if (entry != null && !entry.isDirectory()) {
                byte[] content = new byte[MAGIC.length()];
                if (entry.getSize() > content.length) {
                    int offset = 0;
                    int length = content.length;
                    while (length > 0) {
                        int n = input.read(content, offset, length);
                        offset += n;
                        length -= n;
                    }
                    String s = new String(content, "ASCII");
                    if (MAGIC.equals(s))
                        return new MediaType("application", "enzosimulation");
                }
            }
            //input.reset();
        } while (entry != null);

        return null;
    } catch (Exception e) {
        return null;
    }
}

From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java

/**
 * Extracts the archive resource and then runs the batch-import process on it.
 */
protected void importDataArchive(final Resource resource, final ArchiveInputStream resourceStream,
        BatchImportOptions options) {

    final File tempDir = Files.createTempDir();
    try {
        ArchiveEntry archiveEntry;
        while ((archiveEntry = resourceStream.getNextEntry()) != null) {
            final File entryFile = new File(tempDir, archiveEntry.getName());
            if (archiveEntry.isDirectory()) {
                entryFile.mkdirs();
            } else {
                entryFile.getParentFile().mkdirs();

                Files.copy(new InputSupplier<InputStream>() {
                    @Override
                    public InputStream getInput() throws IOException {
                        return new CloseShieldInputStream(resourceStream);
                    }
                }, entryFile);
            }
        }

        importDataDirectory(tempDir, null, options);
    } catch (IOException e) {
        throw new RuntimeException(
                "Failed to extract data from '" + resource + "' to '" + tempDir + "' for batch import.", e);
    } finally {
        FileUtils.deleteQuietly(tempDir);
    }
}

From source file:org.arquillian.spacelift.task.archive.UncompressTool.java

@Override
protected File process(File input) throws Exception {
    ArchiveEntry entry = null;

    /** Read entries using the getNextEntry method **/

    ArchiveInputStream compressedInputStream = compressedInputStream(new FileInputStream(input));

    while ((entry = compressedInputStream.getNextEntry()) != null) {

        File file = new File(this.dest, remapEntryName(entry.getName()));

        if (entry.isDirectory()) {
            file.mkdirs();
        } else {

            if (!file.getParentFile().exists()) {
                file.getParentFile().mkdirs();
            }

            int count;
            byte data[] = new byte[BUFFER];

            FileOutputStream fos = new FileOutputStream(file);
            BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
            while ((count = compressedInputStream.read(data, 0, BUFFER)) != -1) {
                dest.write(data, 0, count);
            }
            dest.close();

            int permissionsMode = permissionsMode(entry);
            if (permissionsMode != 0) {
                FilePermission filePermission = PermissionsUtil.toFilePermission(permissionsMode);
                PermissionsUtil.applyPermission(file, filePermission);
            }
        }
    }

    compressedInputStream.close();

    return this.dest;
}