Example usage for org.apache.commons.compress.archivers.ArchiveEntry.getName()

Introduction

This page collects example usages of org.apache.commons.compress.archivers.ArchiveEntry.getName().

Prototype

public String getName();

Document

The name of the entry in the archive.
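For quick orientation, here is a minimal, self-contained sketch (not drawn from any of the projects listed below) of the typical pattern: open an ArchiveInputStream via ArchiveStreamFactory, iterate the entries, and read each entry's path inside the archive with getName(). The file name example.zip is a placeholder.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class ListArchiveEntries {
    public static void main(String[] args) throws IOException, ArchiveException {
        // "example.zip" is a placeholder; ArchiveStreamFactory auto-detects the
        // format (zip, tar, ar, cpio, ...) as long as the stream supports mark().
        try (InputStream in = new BufferedInputStream(Files.newInputStream(Paths.get("example.zip")));
                ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(in)) {
            ArchiveEntry entry;
            while ((entry = ais.getNextEntry()) != null) {
                // getName() returns the entry's path inside the archive;
                // directory entries conventionally end with '/'.
                System.out.println(entry.getName() + (entry.isDirectory() ? " (dir)" : ""));
            }
        }
    }
}

The real-world examples that follow use this same loop, relying on getName() to decide where each entry should be extracted or whether it should be skipped.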

Usage

From source file:hudson.plugins.report.jck.parsers.JtregReportParser.java

@Override
public Suite parsePath(Path path) {
    List<Test> testsList = new ArrayList<>();
    try (ArchiveInputStream in = streamPath(path)) {
        XMLInputFactory inputFactory = XMLInputFactory.newInstance();
        ArchiveEntry entry;
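        // Walk the archive, parsing every jtreg result file (*.jtr.xml) into Test objects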
        while ((entry = in.getNextEntry()) != null) {
            String entryName = entry.getName();
            if (entryName == null || !entryName.endsWith(".jtr.xml")) {
                continue;
            }
            try {
                XMLStreamReader reader = inputFactory.createXMLStreamReader(new CloseShieldInputStream(in),
                        "UTF-8");
                testsList.addAll(parseTestsuites(reader));
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (testsList.isEmpty()) {
        return null;
    }
    List<String> testNames = testsList.stream().sequential().map(t -> t.getName()).sorted()
            .collect(Collectors.toList());
    List<Test> testProblems = testsList.stream().sequential()
            .filter(t -> t.getStatus() == TestStatus.ERROR || t.getStatus() == TestStatus.FAILED).sorted()
            .collect(Collectors.toList());
    ReportFull fullReport = new ReportFull(
            (int) testsList.stream().sequential().filter(t -> t.getStatus() == TestStatus.PASSED).count(),
            (int) testsList.stream().sequential().filter(t -> t.getStatus() == TestStatus.NOT_RUN).count(),
            (int) testsList.stream().sequential().filter(t -> t.getStatus() == TestStatus.FAILED).count(),
            (int) testsList.stream().sequential().filter(t -> t.getStatus() == TestStatus.ERROR).count(),
            testsList.size(), testProblems, testNames);
    return new Suite(suiteName(path), fullReport);
}

From source file:net.sf.sveditor.core.tests.utils.BundleUtils.java

public void unpackBundleTarToFS(String bundle_path, File fs_path) {
    URL url = fBundle.getEntry(bundle_path);
    TestCase.assertNotNull(url);

    if (!fs_path.isDirectory()) {
        TestCase.assertTrue(fs_path.mkdirs());
    }
    InputStream in = null;
    TarArchiveInputStream tar_stream = null;

    try {
        in = url.openStream();
    } catch (IOException e) {
        TestCase.fail("Failed to open data file " + bundle_path + " : " + e.getMessage());
    }

    tar_stream = new TarArchiveInputStream(in);

    try {
        byte tmp[] = new byte[4 * 1024];
        int cnt;

        ArchiveEntry te;

        while ((te = tar_stream.getNextEntry()) != null) {
            // System.out.println("Entry: \"" + ze.getName() + "\"");
            File entry_f = new File(fs_path, te.getName());
            if (te.getName().endsWith("/")) {
                // Directory
                continue;
            }
            if (!entry_f.getParentFile().exists()) {
                TestCase.assertTrue(entry_f.getParentFile().mkdirs());
            }
            FileOutputStream fos = new FileOutputStream(entry_f);
            BufferedOutputStream bos = new BufferedOutputStream(fos, tmp.length);

            while ((cnt = tar_stream.read(tmp, 0, tmp.length)) > 0) {
                bos.write(tmp, 0, cnt);
            }
            bos.flush();
            bos.close();
            fos.close();

            //            tar_stream.closeEntry();
        }
        tar_stream.close();
    } catch (IOException e) {
        e.printStackTrace();
        TestCase.fail("Failed to unpack tar file: " + e.getMessage());
    }
}

From source file:net.sf.sveditor.core.tests.utils.BundleUtils.java

public void unpackBundleTgzToFS(String bundle_path, File fs_path) {
    URL url = fBundle.getEntry(bundle_path);
    TestCase.assertNotNull(url);

    if (!fs_path.isDirectory()) {
        TestCase.assertTrue(fs_path.mkdirs());
    }
    InputStream in = null;
    GzipCompressorInputStream gz_stream = null;
    TarArchiveInputStream tar_stream = null;

    try {
        in = url.openStream();
    } catch (IOException e) {
        TestCase.fail("Failed to open data file " + bundle_path + " : " + e.getMessage());
    }

    try {
        gz_stream = new GzipCompressorInputStream(in);
    } catch (IOException e) {
        TestCase.fail("Failed to uncompress data file " + bundle_path + " : " + e.getMessage());
    }

    tar_stream = new TarArchiveInputStream(gz_stream);

    try {
        byte tmp[] = new byte[4 * 1024];
        int cnt;

        ArchiveEntry te;

        while ((te = tar_stream.getNextEntry()) != null) {
            // System.out.println("Entry: \"" + ze.getName() + "\"");
            File entry_f = new File(fs_path, te.getName());
            if (te.getName().endsWith("/")) {
                // Directory
                continue;
            }
            if (!entry_f.getParentFile().exists()) {
                TestCase.assertTrue(entry_f.getParentFile().mkdirs());
            }
            FileOutputStream fos = new FileOutputStream(entry_f);
            BufferedOutputStream bos = new BufferedOutputStream(fos, tmp.length);

            while ((cnt = tar_stream.read(tmp, 0, tmp.length)) > 0) {
                bos.write(tmp, 0, cnt);
            }
            bos.flush();
            bos.close();
            fos.close();

            //            tar_stream.closeEntry();
        }
        tar_stream.close();
    } catch (IOException e) {
        e.printStackTrace();
        TestCase.fail("Failed to unpack tar file: " + e.getMessage());
    }
}

From source file:com.qwazr.tools.ArchiverTool.java

public void extract(File sourceFile, File destDir) throws IOException, ArchiveException {
    final InputStream is = new BufferedInputStream(new FileInputStream(sourceFile));
    try {
        ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(is);
        try {
            ArchiveEntry entry;
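            // Recreate directory entries and copy regular files; Unix symlink entries in zip archives are skipped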
            while ((entry = in.getNextEntry()) != null) {
                if (!in.canReadEntryData(entry))
                    continue;
                if (entry.isDirectory()) {
                    new File(destDir, entry.getName()).mkdir();
                    continue;
                }
                if (entry instanceof ZipArchiveEntry)
                    if (((ZipArchiveEntry) entry).isUnixSymlink())
                        continue;
                File destFile = new File(destDir, entry.getName());
                File parentDir = destFile.getParentFile();
                if (!parentDir.exists())
                    parentDir.mkdirs();
                IOUtils.copy(in, destFile);
            }
        } catch (IOException e) {
            throw new IOException("Unable to extract the archive: " + sourceFile.getPath(), e);
        } finally {
            IOUtils.closeQuietly(in);
        }
    } catch (ArchiveException e) {
        throw new ArchiveException("Unable to extract the archive: " + sourceFile.getPath(), e);
    } finally {
        IOUtils.closeQuietly(is);
    }
}

From source file:com.qwazr.library.archiver.ArchiverTool.java

public void extract(final Path sourceFile, final Path destDir) throws IOException, ArchiveException {
    try (final InputStream is = new BufferedInputStream(Files.newInputStream(sourceFile))) {
        try (final ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(is)) {
            ArchiveEntry entry;
            while ((entry = in.getNextEntry()) != null) {
                if (!in.canReadEntryData(entry))
                    continue;
                if (entry.isDirectory()) {
                    final Path newDir = destDir.resolve(entry.getName());
                    if (!Files.exists(newDir))
                        Files.createDirectory(newDir);
                    continue;
                }
                if (entry instanceof ZipArchiveEntry)
                    if (((ZipArchiveEntry) entry).isUnixSymlink())
                        continue;
                final Path destFile = destDir.resolve(entry.getName());
                final Path parentDir = destFile.getParent();
                if (!Files.exists(parentDir))
                    Files.createDirectories(parentDir);
                final long entryLastModified = entry.getLastModifiedDate().getTime();
                if (Files.exists(destFile) && Files.isRegularFile(destFile)
                        && Files.getLastModifiedTime(destFile).toMillis() == entryLastModified
                        && entry.getSize() == Files.size(destFile))
                    continue;
                IOUtils.copy(in, destFile);
                Files.setLastModifiedTime(destFile, FileTime.fromMillis(entryLastModified));
            }
        } catch (IOException e) {
            throw new IOException("Unable to extract the archive: " + sourceFile.toAbsolutePath(), e);
        }
    } catch (ArchiveException e) {
        throw new ArchiveException("Unable to extract the archive: " + sourceFile.toAbsolutePath(), e);
    }
}

From source file:io.fabric8.spi.process.AbstractProcessHandler.java

@Override
public final ManagedProcess create(AgentRegistration agentReg, ProcessOptions options,
        ProcessIdentity identity) {

    File targetDir = options.getTargetPath().toAbsolutePath().toFile();
    IllegalStateAssertion.assertTrue(targetDir.isDirectory() || targetDir.mkdirs(),
            "Cannot create target dir: " + targetDir);

    File homeDir = null;
    for (MavenCoordinates artefact : options.getMavenCoordinates()) {
        Resource resource = mavenRepository.findMavenResource(artefact);
        IllegalStateAssertion.assertNotNull(resource, "Cannot find maven resource: " + artefact);

        ResourceContent content = resource.adapt(ResourceContent.class);
        IllegalStateAssertion.assertNotNull(content, "Cannot obtain resource content for: " + artefact);

        try {
            ArchiveInputStream ais;
            if ("tar.gz".equals(artefact.getType())) {
                InputStream inputStream = content.getContent();
                ais = new TarArchiveInputStream(new GZIPInputStream(inputStream));
            } else {
                InputStream inputStream = content.getContent();
                ais = new ArchiveStreamFactory().createArchiveInputStream(artefact.getType(), inputStream);
            }
            ArchiveEntry entry = null;
            boolean needContainerHome = homeDir == null;
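            // Unpack the artefact; the top-level directory (directly under the target dir)
            // containing the first extracted file becomes the container home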
            while ((entry = ais.getNextEntry()) != null) {
                File targetFile;
                if (needContainerHome) {
                    targetFile = new File(targetDir, entry.getName());
                } else {
                    targetFile = new File(homeDir, entry.getName());
                }
                if (!entry.isDirectory()) {
                    File parentDir = targetFile.getParentFile();
                    IllegalStateAssertion.assertTrue(parentDir.exists() || parentDir.mkdirs(),
                            "Cannot create target directory: " + parentDir);

                    FileOutputStream fos = new FileOutputStream(targetFile);
                    copyStream(ais, fos);
                    fos.close();

                    if (needContainerHome && homeDir == null) {
                        File currentDir = parentDir;
                        while (!currentDir.getParentFile().equals(targetDir)) {
                            currentDir = currentDir.getParentFile();
                        }
                        homeDir = currentDir;
                    }
                }
            }
            ais.close();
        } catch (RuntimeException rte) {
            throw rte;
        } catch (Exception ex) {
            throw new IllegalStateException("Cannot extract artefact: " + artefact, ex);
        }
    }

    managedProcess = new DefaultManagedProcess(identity, options, homeDir.toPath(), State.CREATED);
    managedProcess.addAttribute(ManagedProcess.ATTRIBUTE_KEY_AGENT_REGISTRATION, agentReg);
    managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_AGENT_JMX_SERVER_URL,
            agentReg.getJmxServerUrl());
    managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_AGENT_JMX_USERNAME,
            agentReg.getJmxUsername());
    managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_AGENT_JMX_PASSWORD,
            agentReg.getJmxPassword());
    try {
        doConfigure(managedProcess);
    } catch (Exception ex) {
        throw new LifecycleException("Cannot configure container", ex);
    }
    return new ImmutableManagedProcess(managedProcess);
}

From source file:cc.arduino.utils.ArchiveExtractor.java

public void extract(File archiveFile, File destFolder, int stripPath, boolean overwrite)
        throws IOException, InterruptedException {

    // Folders timestamps must be set at the end of archive extraction
    // (because creating a file in a folder alters the folder's timestamp)
    Map<File, Long> foldersTimestamps = new HashMap<>();

    ArchiveInputStream in = null;
    try {

        // Create an ArchiveInputStream with the correct archiving algorithm
        if (archiveFile.getName().endsWith("tar.bz2")) {
            in = new TarArchiveInputStream(new BZip2CompressorInputStream(new FileInputStream(archiveFile)));
        } else if (archiveFile.getName().endsWith("zip")) {
            in = new ZipArchiveInputStream(new FileInputStream(archiveFile));
        } else if (archiveFile.getName().endsWith("tar.gz")) {
            in = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(archiveFile)));
        } else if (archiveFile.getName().endsWith("tar")) {
            in = new TarArchiveInputStream(new FileInputStream(archiveFile));
        } else {
            throw new IOException("Archive format not supported.");
        }

        String pathPrefix = "";

        Map<File, File> hardLinks = new HashMap<>();
        Map<File, Integer> hardLinksMode = new HashMap<>();
        Map<File, String> symLinks = new HashMap<>();
        Map<File, Long> symLinksModifiedTimes = new HashMap<>();

        // Cycle through all the archive entries
        while (true) {
            ArchiveEntry entry = in.getNextEntry();
            if (entry == null) {
                break;
            }

            // Extract entry info
            long size = entry.getSize();
            String name = entry.getName();
            boolean isDirectory = entry.isDirectory();
            boolean isLink = false;
            boolean isSymLink = false;
            String linkName = null;
            Integer mode = null;
            long modifiedTime = entry.getLastModifiedDate().getTime();

            {
                // Skip MacOSX metadata
                // http://superuser.com/questions/61185/why-do-i-get-files-like-foo-in-my-tarball-on-os-x
                int slash = name.lastIndexOf('/');
                if (slash == -1) {
                    if (name.startsWith("._")) {
                        continue;
                    }
                } else {
                    if (name.substring(slash + 1).startsWith("._")) {
                        continue;
                    }
                }
            }

            // Skip git metadata
            // http://www.unix.com/unix-for-dummies-questions-and-answers/124958-file-pax_global_header-means-what.html
            if (name.contains("pax_global_header")) {
                continue;
            }

            if (entry instanceof TarArchiveEntry) {
                TarArchiveEntry tarEntry = (TarArchiveEntry) entry;
                mode = tarEntry.getMode();
                isLink = tarEntry.isLink();
                isSymLink = tarEntry.isSymbolicLink();
                linkName = tarEntry.getLinkName();
            }

            // On the first archive entry, if requested, detect the common path
            // prefix to be stripped from filenames
            if (stripPath > 0 && pathPrefix.isEmpty()) {
                int slash = 0;
                while (stripPath > 0) {
                    slash = name.indexOf("/", slash);
                    if (slash == -1) {
                        throw new IOException("Invalid archive: it must contain a single root folder");
                    }
                    slash++;
                    stripPath--;
                }
                pathPrefix = name.substring(0, slash);
            }

            // Strip the common path prefix when requested
            if (!name.startsWith(pathPrefix)) {
                throw new IOException("Invalid archive: it must contain a single root folder while file " + name
                        + " is outside " + pathPrefix);
            }
            name = name.substring(pathPrefix.length());
            if (name.isEmpty()) {
                continue;
            }
            File outputFile = new File(destFolder, name);

            File outputLinkedFile = null;
            if (isLink) {
                if (!linkName.startsWith(pathPrefix)) {
                    throw new IOException("Invalid archive: it must contain a single root folder while file "
                            + linkName + " is outside " + pathPrefix);
                }
                linkName = linkName.substring(pathPrefix.length());
                outputLinkedFile = new File(destFolder, linkName);
            }
            if (isSymLink) {
                // Symbolic links are referenced with relative paths
                outputLinkedFile = new File(linkName);
                if (outputLinkedFile.isAbsolute()) {
                    System.err.println(I18n.format(tr("Warning: file {0} links to an absolute path {1}"),
                            outputFile, outputLinkedFile));
                    System.err.println();
                }
            }

            // Safety check
            if (isDirectory) {
                if (outputFile.isFile() && !overwrite) {
                    throw new IOException(
                            "Can't create folder " + outputFile + ", a file with the same name exists!");
                }
            } else {
                // - isLink
                // - isSymLink
                // - anything else
                if (outputFile.exists() && !overwrite) {
                    throw new IOException("Can't extract file " + outputFile + ", file already exists!");
                }
            }

            // Extract the entry
            if (isDirectory) {
                if (!outputFile.exists() && !outputFile.mkdirs()) {
                    throw new IOException("Could not create folder: " + outputFile);
                }
                foldersTimestamps.put(outputFile, modifiedTime);
            } else if (isLink) {
                hardLinks.put(outputFile, outputLinkedFile);
                hardLinksMode.put(outputFile, mode);
            } else if (isSymLink) {
                symLinks.put(outputFile, linkName);
                symLinksModifiedTimes.put(outputFile, modifiedTime);
            } else {
                // Create the containing folder if not exists
                if (!outputFile.getParentFile().isDirectory()) {
                    outputFile.getParentFile().mkdirs();
                }
                copyStreamToFile(in, size, outputFile);
                outputFile.setLastModified(modifiedTime);
            }

            // Set file/folder permission
            if (mode != null && !isSymLink && outputFile.exists()) {
                platform.chmod(outputFile, mode);
            }
        }

        for (Map.Entry<File, File> entry : hardLinks.entrySet()) {
            if (entry.getKey().exists() && overwrite) {
                entry.getKey().delete();
            }
            platform.link(entry.getValue(), entry.getKey());
            Integer mode = hardLinksMode.get(entry.getKey());
            if (mode != null) {
                platform.chmod(entry.getKey(), mode);
            }
        }

        for (Map.Entry<File, String> entry : symLinks.entrySet()) {
            if (entry.getKey().exists() && overwrite) {
                entry.getKey().delete();
            }
            platform.symlink(entry.getValue(), entry.getKey());
            entry.getKey().setLastModified(symLinksModifiedTimes.get(entry.getKey()));
        }

    } finally {
        IOUtils.closeQuietly(in);
    }

    // Set folders timestamps
    for (File folder : foldersTimestamps.keySet()) {
        folder.setLastModified(foldersTimestamps.get(folder));
    }
}

From source file:eu.ensure.packproc.ip.PackageProcessor.java

/**
 * Generic entry to the information package-processor.
 * <p>
 * Will route to more specific actions based on the plugin-specific configuration.
 * <p>
 * @param name - name of entity (information package)
 * @param inputStream - input stream onto information package
 * @param outputStream - [optionally] output stream onto (new) information package
 * @param context - a context for this processor
 * @throws IOException - if file I/O fails
 * @throws ArchiveException - if information package has unknown packaging format
 * @throws ProcessorException - if processing of information package fails
 * @throws ClassNotFoundException - if action not found
 */
public void process(String name, InputStream inputStream, OutputStream outputStream, ProcessorContext context)
        throws IOException, ArchiveException, ProcessorException, ClassNotFoundException {

    BasicProcessorContext basicContext = context.push(new BasicProcessorContext(name));
    boolean isMutableCall = null != outputStream;

    ArchiveInputStream archiveInputStream = null;
    PackageOutputStream archiveOutputStream = null;
    try {
        // Package readers and writers
        archiveInputStream = factory.createArchiveInputStream(new BufferedInputStream(inputStream));

        if (isMutableCall) {
            archiveOutputStream = PackageOutputStream.createOutputStreamFrom(archiveInputStream, outputStream);
        }

        // Iterate through objects in the input package
        ArchiveEntry archiveEntry = null;

        with_next_entry: while ((archiveEntry = archiveInputStream.getNextEntry()) != null) {

            String entryName = archiveEntry.getName();
            if (archiveEntry.isDirectory()) {
                entryName += "/";
            }

            if (log.isInfoEnabled()) {
                log.info("");
                String info = "### " + entryName;
                long size = archiveEntry.getSize();
                info += " (~" + Number.asHumanApproximate(size) + " or " + size + " bytes)";
                log.info(info);
            }

            // TODO: Triggers for "/" will have to be processed manually here!

            MultiDigestInputStream entryInputStream = null;
            try {
                PackageEntry structureEntry = new PackageEntry(archiveEntry);
                entryInputStream = new MultiDigestInputStream(archiveInputStream); // As it happens to be!

                // Directories are not processed per se
                Iterator<Action> ait = actions.iterator();
                while (ait.hasNext()) {
                    Action action = ait.next();

                    if (action.match(structureEntry.getName())) {
                        if (log.isDebugEnabled()) {
                            log.debug(me() + ":process container");
                        }
                        Processor processor = action.getProcessor();
                        if (processor instanceof ContainerStructureProcessor) {
                            if (action.getMethod().equalsIgnoreCase("process")) {
                                //-----------------------------------------------------------------------------
                                // Since we are referring to a structure (processor), we are probably just
                                // going to process an embedded TAR-file (or the like). We create a
                                // temporary file and recursively feed it to the processor manager...
                                //-----------------------------------------------------------------------------
                                File subInputFile = extractEntry(structureEntry, entryInputStream);
                                File subOutputFile = null;
                                if (isMutableCall) {
                                    subOutputFile = File.createTempFile("temporary-processed", ".package");
                                }
                                try {
                                    InputStream subInputStream = null;
                                    OutputStream subOutputStream = null;
                                    try {
                                        subInputStream = new BufferedInputStream(
                                                new FileInputStream(subInputFile));
                                        if (isMutableCall) {
                                            subOutputStream = new BufferedOutputStream(
                                                    new FileOutputStream(subOutputFile));
                                        }

                                        // Run it through the processor manager which knows what to do with it
                                        manager.applyOnContainerWithStructure(action.getProcessor(),
                                                action.getMethod(), structureEntry.getName(), subInputStream,
                                                subOutputStream, basicContext);
                                    } finally {
                                        if (null != subInputStream)
                                            subInputStream.close();
                                        if (null != subOutputStream)
                                            subOutputStream.close();
                                    }

                                    if (isMutableCall) {
                                        // Add the temporary file to the output stream instead of the original
                                        addEntry(subOutputFile, structureEntry, archiveOutputStream);
                                    }
                                } finally {
                                    if (null != subInputFile && subInputFile.exists())
                                        subInputFile.delete();
                                    if (null != subOutputFile && subOutputFile.exists())
                                        subOutputFile.delete();
                                }
                                continue with_next_entry; // since we operated on a unique entry

                            } else {
                                // Unknown operation on a container file
                                throw new ProcessorException(
                                        "Unknown action on container: " + action.getMethod());
                            }
                        } else if (processor instanceof FileProcessor) {
                            //---------------------------------------------------------------------------------
                            // Since we are referring to a file processor, we will just pass the entry with its
                            // input stream back to the processor manager that will know what to do with it.
                            //---------------------------------------------------------------------------------
                            manager.applyOnEntry(action.getProcessor(), action.getMethod(), structureEntry,
                                    entryInputStream, archiveOutputStream, basicContext);
                            continue with_next_entry; // since we operated on a unique entry
                        }
                    }
                }

                if (isMutableCall && !addedEntries.contains(structureEntry.getName())) {
                    // We may safely copy file
                    copyEntry(structureEntry, entryInputStream, archiveOutputStream);
                }
            } finally {
                /*
                 * Don't close the entryInputStream! It is just a reference to the archiveInputStream
                 * which we want to continue operating upon.
                 */

                if (!archiveEntry.isDirectory()) {
                    // Collect bitstream information - this is where we associate _actual_ values,
                    // i.e. calculated checksums and calculated byte lengths.
                    Map<String, String> bitstreamInfo = new HashMap<String, String>();

                    // OBSERVE: The following might not be completely valid in all circumstances,
                    // as InputStream.getSize() only returns the number of bytes that you can read
                    // and not necessarily the number of bytes in the stream. But in this case,
                    // I believe it to be valid...
                    if (entryInputStream.getSize() > 0) {
                        bitstreamInfo.put("size", "" + entryInputStream.getSize());

                        Map<String, byte[]> digests = entryInputStream.getDigests();
                        for (String key : digests.keySet()) {
                            byte[] digest = digests.get(key);

                            if (digest.length == 8) {
                                ByteBuffer buf = ByteBuffer.wrap(digest);
                                String value = "" + buf.getLong();
                                bitstreamInfo.put(key, value);
                            } else {
                                StringBuffer hexString = new StringBuffer();
                                for (int i = 0; i < digest.length; i++) {
                                    hexString.append(Integer.toHexString(0xFF & digest[i]));
                                }
                                String value = hexString.toString();
                                bitstreamInfo.put(key, value);
                            }
                        }

                        // Create a package-relative path...
                        File top = new File("/");
                        File contentStream = top; // starting point relative to top

                        // ...and reassemble
                        int start = entryName.startsWith("/") ? 0 : 1; /* skip [example1]/content/... */

                        String[] parts = entryName.split("/");
                        for (int i = start; i < parts.length; i++) {
                            contentStream = new File(contentStream, parts[i]);
                        }
                        bitstreamInfo.put("fileName", parts[parts.length - 1]);

                        String path = contentStream.getPath().replace("\\", "/"); // in case we're on Windoze
                        context.associate("CALCULATED", path, path, bitstreamInfo);
                    }
                }
            }
        }
    } finally {
        if (null != archiveOutputStream)
            archiveOutputStream.close();
        if (null != archiveInputStream)
            archiveInputStream.close();

        context.pop();
    }
}

From source file:de.flapdoodle.embed.process.extract.AbstractExtractor.java

@Override
public ExtractedFileSet extract(DownloadConfig runtime, File source, FilesToExtract toExtract)
        throws IOException {
    Builder builder = ExtractedFileSet.builder(toExtract.baseDir())
            .baseDirIsGenerated(toExtract.baseDirIsGenerated());

    ProgressListener progressListener = runtime.getProgressListener();
    String progressLabel = "Extract " + source;
    progressListener.start(progressLabel);

    ArchiveWrapper archive = archiveStreamWithExceptionHint(source);

    try {
        ArchiveEntry entry;
        while ((entry = archive.getNextEntry()) != null) {
            IExtractionMatch match = toExtract.find(new CommonsArchiveEntryAdapter(entry));
            if (match != null) {
                if (archive.canReadEntryData(entry)) {
                    long size = entry.getSize();
                    FileType type = match.type();
                    File file = match.write(archive.asStream(entry), size);
                    if (type == FileType.Executable) {
                        builder.executable(file);
                    } else {
                        builder.addLibraryFiles(file);
                    }
                    //                  destination.setExecutable(true);
                    progressListener.info(progressLabel, "extract " + entry.getName());
                }
                if (toExtract.nothingLeft()) {
                    progressListener.info(progressLabel, "nothing left");
                    break;
                }
            }
        }

    } finally {
        archive.close();
    }

    progressListener.done(progressLabel);

    return builder.build();
}

From source file:freenet.client.ArchiveManager.java

private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStream data, String element,
        ArchiveExtractCallback callback, MutableBoolean gotElement, boolean throwAtExit, ClientContext context)
        throws ArchiveFailureException, ArchiveRestartException {
    if (logMINOR)
        Logger.minor(this, "Handling a TAR Archive");
    TarArchiveInputStream tarIS = null;
    try {
        tarIS = new TarArchiveInputStream(data);

        // MINOR: Assumes the first entry in the tarball is a directory.
        ArchiveEntry entry;

        byte[] buf = new byte[32768];
        HashSet<String> names = new HashSet<String>();
        boolean gotMetadata = false;

        outerTAR: while (true) {
            try {
                entry = tarIS.getNextEntry();
            } catch (IllegalArgumentException e) {
                // Annoyingly, it can throw this on some corruptions...
                throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
            }
            if (entry == null)
                break;
            if (entry.isDirectory())
                continue;
            String name = stripLeadingSlashes(entry.getName());
            if (names.contains(name)) {
                Logger.error(this, "Duplicate key " + name + " in archive " + key);
                continue;
            }
            long size = entry.getSize();
            if (name.equals(".metadata"))
                gotMetadata = true;
            if (size > maxArchivedFileSize && !name.equals(element)) {
                addErrorElement(
                        ctx, key, name, "File too big: " + size
                                + " greater than current archived file size limit " + maxArchivedFileSize,
                        true);
            } else {
                // Read the element
                long realLen = 0;
                Bucket output = tempBucketFactory.makeBucket(size);
                OutputStream out = output.getOutputStream();

                try {
                    int readBytes;
                    while ((readBytes = tarIS.read(buf)) > 0) {
                        out.write(buf, 0, readBytes);
                        realLen += readBytes;
                        if (realLen > maxArchivedFileSize) {
                            addErrorElement(ctx, key, name, "File too big: " + realLen
                                    + " greater than current archived file size limit " + maxArchivedFileSize,
                                    true);
                            out.close();
                            out = null;
                            output.free();
                            continue outerTAR;
                        }
                    }

                } finally {
                    if (out != null)
                        out.close();
                }
                if (size <= maxArchivedFileSize) {
                    addStoreElement(ctx, key, name, output, gotElement, element, callback, context);
                    names.add(name);
                    trimStoredData();
                } else {
                    // We are here because they asked for this file.
                    callback.gotBucket(output, context);
                    gotElement.value = true;
                    addErrorElement(
                            ctx, key, name, "File too big: " + size
                                    + " greater than current archived file size limit " + maxArchivedFileSize,
                            true);
                }
            }
        }

        // If no metadata, generate some
        if (!gotMetadata) {
            generateMetadata(ctx, key, names, gotElement, element, callback, context);
            trimStoredData();
        }
        if (throwAtExit)
            throw new ArchiveRestartException("Archive changed on re-fetch");

        if ((!gotElement.value) && element != null)
            callback.notInArchive(context);

    } catch (IOException e) {
        throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
    } finally {
        Closer.close(tarIS);
    }
}