Example usage for org.apache.commons.compress.archivers ArchiveEntry getLastModifiedDate

Introduction

This page collects example usages of org.apache.commons.compress.archivers.ArchiveEntry#getLastModifiedDate() drawn from open-source projects.

Prototype

public Date getLastModifiedDate();

Document

The last modified date of the entry.
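
Before the project examples below, here is a minimal, self-contained sketch of the method in use (the archive path sample.zip and the class name are placeholders, not taken from any of those projects): it lets ArchiveStreamFactory detect the archive format and prints the java.util.Date returned by getLastModifiedDate() for every entry.

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class LastModifiedDateExample {
    public static void main(String[] args) throws IOException, ArchiveException {
        File archive = new File("sample.zip"); // placeholder path

        // ArchiveStreamFactory needs a mark/reset-capable stream to detect the
        // archive format, hence the BufferedInputStream wrapper.
        try (InputStream in = new BufferedInputStream(new FileInputStream(archive));
                ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(in)) {
            ArchiveEntry entry;
            while ((entry = ais.getNextEntry()) != null) {
                // getLastModifiedDate() exposes the entry's modification time as a java.util.Date
                Date lastModified = entry.getLastModifiedDate();
                System.out.println(entry.getName() + " last modified " + lastModified);
            }
        }
    }
}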

Usage

From source file:org.apache.ant.compress.taskdefs.ExpandBase.java
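
Apache Ant Compress Antlib: the expand task walks an ArchiveInputStream, skips entries it cannot read, and passes each entry's getLastModifiedDate() to extractFile() so extracted files keep their original timestamps.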

private void expandArchiveStream(String name, ArchiveInputStream is, File dir) throws IOException {
    FileNameMapper mapper = getMapper();
    log("Expanding: " + name + " into " + dir, Project.MSG_INFO);
    boolean empty = true;
    ArchiveEntry ent = null;
    while ((ent = is.getNextEntry()) != null) {
        if (skipUnreadable && !is.canReadEntryData(ent)) {
            log(Messages.skippedIsUnreadable(ent));
            continue;
        }
        empty = false;
        log("extracting " + ent.getName(), Project.MSG_DEBUG);
        extractFile(FileUtils.getFileUtils(), null, dir, is, ent.getName(), ent.getLastModifiedDate(),
                ent.isDirectory(), mapper);
    }
    if (empty && getFailOnEmptyArchive()) {
        throw new BuildException("archive '" + name + "' is empty");
    }
    log("expand complete", Project.MSG_VERBOSE);
}

From source file:org.apache.tika.parser.pkg.PackageParser.java
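
Apache Tika: PackageParser records each readable entry's name, getLastModifiedDate(), and size as metadata before handing the entry to the EmbeddedDocumentExtractor; unreadable (for example encrypted) entries are reported as exceptions instead.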

private void parseEntry(ArchiveInputStream archive, ArchiveEntry entry, EmbeddedDocumentExtractor extractor,
        Metadata parentMetadata, XHTMLContentHandler xhtml) throws SAXException, IOException, TikaException {
    String name = entry.getName();
    if (archive.canReadEntryData(entry)) {
        // Fetch the metadata on the entry contained in the archive
        Metadata entrydata = handleEntryMetadata(name, null, entry.getLastModifiedDate(), entry.getSize(),
                xhtml);

        // Recurse into the entry if desired
        if (extractor.shouldParseEmbedded(entrydata)) {
            // For detectors to work, we need a mark/reset supporting
            // InputStream, which ArchiveInputStream isn't, so wrap
            TemporaryResources tmp = new TemporaryResources();
            try {
                TikaInputStream tis = TikaInputStream.get(archive, tmp);
                extractor.parseEmbedded(tis, xhtml, entrydata, true);
            } finally {
                tmp.dispose();
            }
        }
    } else {
        name = (name == null) ? "" : name;
        if (entry instanceof ZipArchiveEntry) {
            boolean usesEncryption = ((ZipArchiveEntry) entry).getGeneralPurposeBit().usesEncryption();
            if (usesEncryption) {
                EmbeddedDocumentUtil.recordEmbeddedStreamException(
                        new EncryptedDocumentException("stream (" + name + ") is encrypted"), parentMetadata);
            }
        } else {
            EmbeddedDocumentUtil.recordEmbeddedStreamException(
                    new TikaException("Can't read archive stream (" + name + ")"), parentMetadata);
        }
        if (name.length() > 0) {
            xhtml.element("p", name);
        }
    }
}

From source file:org.artifactory.model.xstream.fs.ArchiveEntryImpl.java
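
Artifactory: ArchiveEntryImpl copies an entry's path, name, size, directory flag, and getLastModifiedDate().getTime() into its own serializable representation.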

public ArchiveEntryImpl(@Nonnull ArchiveEntry... entries) {
    if (entries.length == 0) {
        throw new IllegalArgumentException("Cannot create ZipEntryInfo without a ZipEntry!");
    }

    ArchiveEntry entry = entries[entries.length - 1];

    if (entries.length > 1) {
        StringBuilder fullPath = new StringBuilder();
        for (int i = 0; i < entries.length; i++) {
            fullPath.append(entries[i].getName());
            if (i != entries.length - 1) {
                fullPath.append(RepoPath.ARCHIVE_SEP).append('/');
            }
        }
        this.path = fullPath.toString();
    } else {
        this.path = entry.getName();
    }
    this.name = PathUtils.getFileName(entry.getName());
    this.time = entry.getLastModifiedDate().getTime();
    this.size = entry.getSize();
    this.directory = entry.isDirectory();
}

From source file:org.artifactory.util.ZipUtils.java
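
Artifactory: ZipUtils iterates an archive stream and passes each validated entry name together with its getLastModifiedDate() to extractFile().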

/**
 * Extracts the given archive file into the given directory
 *
 * @param sourceArchive        Archive to extract
 * @param destinationDirectory Directory to extract archive to
 */
private static void extractFiles(File sourceArchive, File destinationDirectory) {
    ArchiveInputStream archiveInputStream = null;
    try {
        archiveInputStream = createArchiveInputStream(sourceArchive);
        ArchiveEntry zipEntry;
        while ((zipEntry = archiveInputStream.getNextEntry()) != null) {
            //Validate entry name before extracting
            String validatedEntryName = validateEntryName(zipEntry.getName());

            if (StringUtils.isNotBlank(validatedEntryName)) {
                extractFile(sourceArchive, destinationDirectory, archiveInputStream, validatedEntryName,
                        zipEntry.getLastModifiedDate(), zipEntry.isDirectory());
            }
        }

    } catch (IOException ioe) {
        throw new RuntimeException("Error while extracting " + sourceArchive.getPath(), ioe);
    } finally {
        IOUtils.closeQuietly(archiveInputStream);
    }
}

From source file:org.fabrician.maven.plugins.CompressUtils.java
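
Fabrician Maven plugins: CompressUtils clones an ArchiveEntry for a new output stream and, for tar output, carries the modification time over with setModTime(entry.getLastModifiedDate()).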

private static ArchiveEntry createArchiveEntry(ArchiveEntry entry, OutputStream out, String alternateBaseDir)
        throws IOException {
    String substitutedName = substituteAlternateBaseDir(entry, alternateBaseDir);
    if (out instanceof TarArchiveOutputStream) {
        TarArchiveEntry newEntry = new TarArchiveEntry(substitutedName);
        newEntry.setSize(entry.getSize());
        newEntry.setModTime(entry.getLastModifiedDate());

        if (entry instanceof TarArchiveEntry) {
            TarArchiveEntry old = (TarArchiveEntry) entry;
            newEntry.setSize(old.getSize());
            newEntry.setIds(old.getUserId(), old.getGroupId());
            newEntry.setNames(old.getUserName(), old.getGroupName());
        }
        return newEntry;
    } else if (entry instanceof ZipArchiveEntry) {
        ZipArchiveEntry old = (ZipArchiveEntry) entry;
        ZipArchiveEntry zip = new ZipArchiveEntry(substitutedName);
        zip.setInternalAttributes(old.getInternalAttributes());
        zip.setExternalAttributes(old.getExternalAttributes());
        zip.setExtraFields(old.getExtraFields(true));
        return zip;
    } else {
        return new ZipArchiveEntry(substitutedName);
    }
}

From source file:org.robovm.gradle.tasks.AbstractRoboVMTask.java
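
RoboVM Gradle plugin: extractTarGz() unpacks a .tar.gz archive and restores each file's timestamp with setLastModified(entry.getLastModifiedDate().getTime()), along with any execute permission recorded in the tar entry.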

private static void extractTarGz(File archive, File destDir) throws IOException {
    TarArchiveInputStream in = null;
    try {
        in = new TarArchiveInputStream(new GZIPInputStream(new FileInputStream(archive)));
        ArchiveEntry entry = null;
        while ((entry = in.getNextEntry()) != null) {
            File f = new File(destDir, entry.getName());
            if (entry.isDirectory()) {
                f.mkdirs();
            } else {
                f.getParentFile().mkdirs();
                OutputStream out = null;
                try {
                    out = new FileOutputStream(f);
                    IOUtils.copy(in, out);
                } finally {
                    IOUtils.closeQuietly(out);
                }
            }
            f.setLastModified(entry.getLastModifiedDate().getTime());
            if (entry instanceof TarArchiveEntry) {
                int mode = ((TarArchiveEntry) entry).getMode();
                if ((mode & 00100) > 0) {
                    // Preserve execute permissions
                    f.setExecutable(true, (mode & 00001) == 0);
                }
            }
        }
    } finally {
        IOUtils.closeQuietly(in);
    }
}

From source file:org.robovm.maven.resolver.Archiver.java
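
RoboVM Maven resolver: Archiver.unarchive() does much the same for downloaded distributions, restoring each extracted file's timestamp from getLastModifiedDate().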

public static void unarchive(Logger logger, File archive, File destDir) throws IOException {
    TarArchiveInputStream in = null;
    try {
        in = new TarArchiveInputStream(new GZIPInputStream(new FileInputStream(archive)));
        ArchiveEntry entry = null;
        while ((entry = in.getNextEntry()) != null) {
            File f = new File(destDir, entry.getName());
            if (entry.isDirectory()) {
                f.mkdirs();
            } else {
                logger.debug(f.getAbsolutePath());
                f.getParentFile().mkdirs();
                OutputStream out = null;
                try {
                    out = new FileOutputStream(f);
                    IOUtils.copy(in, out);
                } finally {
                    IOUtils.closeQuietly(out);
                }
            }
            f.setLastModified(entry.getLastModifiedDate().getTime());
            if (entry instanceof TarArchiveEntry) {
                int mode = ((TarArchiveEntry) entry).getMode();
                if ((mode & 00100) > 0) {
                    // Preserve execute permissions
                    f.setExecutable(true, (mode & 00001) == 0);
                }
            }
        }
    } finally {
        IOUtils.closeQuietly(in);
    }
}

From source file:org.voyanttools.trombone.input.expand.ArchiveExpander.java
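
Voyant Tools (Trombone): ArchiveExpander records getLastModifiedDate().getTime() as the modification date in the metadata of every document expanded from an archive.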

/**
 * Get a list of stored document sources from the specified archive stream
 * (that corresponds to the specified parent stored document source).
 *
 * @param archiveInputStream the full archive input stream
 * @param parentStoredDocumentSource the parent stored document source
 * @return a list of stored document sources in this archive
 * @throws IOException thrown when an IO exception occurs during unarchiving
 */
private List<StoredDocumentSource> getExpandedDocumentSources(ArchiveInputStream archiveInputStream,
        StoredDocumentSource parentStoredDocumentSource) throws IOException {

    List<StoredDocumentSource> expandedDocumentSources = new ArrayList<StoredDocumentSource>();

    ArchiveEntry archiveEntry = archiveInputStream.getNextEntry();
    String parentId = parentStoredDocumentSource.getId();
    DocumentMetadata parentMetadata = parentStoredDocumentSource.getMetadata();
    while (archiveEntry != null) {

        if (archiveEntry.isDirectory() == false) {
            final String filename = archiveEntry.getName();
            final File file = new File(filename);

            // skip directories and skippable files
            if (DocumentFormat.isSkippable(file) == false) {
                DocumentMetadata childMetadata = parentMetadata.asParent(parentStoredDocumentSource.getId(),
                        DocumentMetadata.ParentType.EXPANSION);
                childMetadata.setLocation(file.toString());
                childMetadata.setModified(archiveEntry.getLastModifiedDate().getTime());
                childMetadata.setSource(Source.STREAM);
                childMetadata.setTitle(file.getName().replaceFirst("\\.\\w+$", ""));
                String id = DigestUtils.md5Hex(parentId + filename);
                InputSource inputSource = new InputStreamInputSource(id, childMetadata,
                        new CloseShieldInputStream(archiveInputStream));
                StoredDocumentSource storedDocumentSource = storedDocumentSourceStorage
                        .getStoredDocumentSource(inputSource);
                expandedDocumentSources
                        .addAll(this.expander.getExpandedStoredDocumentSources(storedDocumentSource)); // expand this recursively
            }
        }
        archiveEntry = archiveInputStream.getNextEntry();
    }

    return expandedDocumentSources;
}