Example usage for org.apache.commons.compress.archivers.zip ZipArchiveEntry getTime

Introduction

This page collects usage examples for org.apache.commons.compress.archivers.zip ZipArchiveEntry.getTime().

Prototype

public long getTime() 

Document

Returns the last modification time of the entry.
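
The returned value is expressed in milliseconds since the epoch; if the archive records no modification time for the entry, getTime() returns -1.

As a quick orientation before the project examples in the Usage section below, here is a minimal, self-contained sketch (the file name example.zip is only a placeholder) that enumerates an archive with Commons Compress and prints each entry's last modification time:

import java.io.File;
import java.util.Date;
import java.util.Enumeration;

import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipFile;

public class GetTimeExample {
    public static void main(String[] args) throws Exception {
        // "example.zip" is a placeholder; point this at any existing zip archive.
        try (ZipFile zip = new ZipFile(new File("example.zip"))) {
            Enumeration<ZipArchiveEntry> entries = zip.getEntries();
            while (entries.hasMoreElements()) {
                ZipArchiveEntry entry = entries.nextElement();
                long time = entry.getTime();
                // getTime() returns -1 when no modification time is recorded.
                System.out.println(entry.getName() + " -> "
                        + (time == -1 ? "unknown" : new Date(time)));
            }
        }
    }
}

One of the examples below (Proquest2N3BagJob) guards against the -1 case by falling back to the timestamp of the zip file itself.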

Usage

From source file:com.google.dart.tools.update.core.internal.UpdateUtils.java

/**
 * Unzip a zip file, notifying the given monitor along the way.
 */
public static void unzip(File zipFile, File destination, String taskName, IProgressMonitor monitor)
        throws IOException {

    ZipFile zip = new ZipFile(zipFile);

    //TODO (pquitslund): add real progress units
    if (monitor != null) {
        monitor.beginTask(taskName, 1);
    }

    Enumeration<ZipArchiveEntry> e = zip.getEntries();
    while (e.hasMoreElements()) {
        ZipArchiveEntry entry = e.nextElement();
        File file = new File(destination, entry.getName());
        if (entry.isDirectory()) {
            file.mkdirs();
        } else {
            InputStream is = zip.getInputStream(entry);

            File parent = file.getParentFile();
            if (parent != null && !parent.exists()) {
                parent.mkdirs();
            }

            FileOutputStream os = new FileOutputStream(file);
            try {
                IOUtils.copy(is, os);
            } finally {
                os.close();
                is.close();
            }
            file.setLastModified(entry.getTime());

            int mode = entry.getUnixMode();

            if ((mode & EXEC_MASK) != 0) {
                file.setExecutable(true);
            }

        }
    }
    zip.close();

    //TODO (pquitslund): fix progress units
    if (monitor != null) {
        monitor.worked(1);
        monitor.done();
    }

}

From source file:net.sf.regain.crawler.preparator.ZipPreparator.java

/**
 * Prepares the document for indexing
 *
 * @param rawDocument the document
 * @throws RegainException if preparation goes wrong
 */
@Override
public void prepare(RawDocument rawDocument) throws RegainException {
    ArchiveInputStream ain = null;
    ZipInputStream zipInputStream = new ZipInputStream(rawDocument.getContentAsStream());

    PreparatorFactory preparatorFactory = new PreparatorFactory(new DummyCrawlerConfig());
    try {
        ain = new ArchiveStreamFactory().createArchiveInputStream("zip", rawDocument.getContentAsStream());
        ZipArchiveEntry entry;
        while ((entry = (ZipArchiveEntry) ain.getNextEntry()) != null) {
            String s = String.format("Entry: %s len %d added %TD", entry.getName(), entry.getSize(),
                    new Date(entry.getTime()));
            System.out.println(s);

            Preparator preparator = null;
            ByteArrayOutputStream byteArrayOutputStream = null;

            RawDocument rawZipDocument = new RawDocument(null, null, null, null);
            rawZipDocument.setUrl(new File(entry.getName()).toURI().toString());

            try {
                byteArrayOutputStream = new ByteArrayOutputStream();
                // read the current entry's bytes from the archive stream
                IOUtils.copy(ain, byteArrayOutputStream);
                rawZipDocument.setContent(byteArrayOutputStream.toByteArray());
                preparator = preparatorFactory.get(rawZipDocument);

            } finally {
                IOUtils.closeQuietly(byteArrayOutputStream);
            }

            if (preparator != null) {
                preparator.prepare(rawZipDocument);
                // concatenates contents
                setCleanedContent(new StringBuilder().append(getCleanedContent()).append("\n")
                        .append(preparator.getCleanedContent()).toString());
                setTitle(getTitle() + " " + preparator.getTitle());
                setSummary(getSummary() + " " + preparator.getSummary());
                setCleanedMetaData(getCleanedMetaData() + " " + preparator.getCleanedMetaData());
                setHeadlines(getHeadlines() + " " + preparator.getHeadlines());
                preparator.cleanUp();
            }
        }

    } catch (IOException | ArchiveException e) {
        e.printStackTrace();
    } finally {
        //IOUtils.closeQuietly(zipInputStream);
        IOUtils.closeQuietly(ain);
    }

}

From source file:edu.unc.lib.deposit.normalize.Proquest2N3BagJob.java

private void normalizePackage(File packageDir, Model model, Bag depositBag) {

    // Generate a uuid for the main object
    PID primaryPID = new PID("uuid:" + UUID.randomUUID());
    Resource primaryResource;

    // Identify the important files from the deposit
    File dataFile = null, contentFile = null, attachmentDir = null;

    File[] files = packageDir.listFiles();
    for (File file : files) {
        if (file.isDirectory()) {
            attachmentDir = file;
        } else if (file.getName().endsWith(DATA_SUFFIX)) {
            dataFile = file;
        } else {
            contentFile = file;
        }
    }

    long lastModified = -1;
    File zipFile = new File(packageDir.getAbsolutePath() + ".zip");
    try (ZipFile zip = new ZipFile(zipFile)) {
        ZipArchiveEntry entry = zip.getEntry(contentFile.getName());
        if (entry != null) {
            lastModified = entry.getTime();
        }
    } catch (IOException e) {
        log.error("Failed to read zip file located at {}.zip", packageDir.getAbsolutePath(), e);
    }

    if (lastModified == -1) {
        lastModified = zipFile.lastModified();
    }

    DateTime modified = new DateTime(lastModified, DateTimeZone.UTC);

    // Deserialize the data document
    SAXBuilder builder = new SAXBuilder();
    Element dataRoot = null;
    Document mods = null;
    try {

        Document dataDocument = builder.build(dataFile);
        dataRoot = dataDocument.getRootElement();

        // Transform the data into MODS and store it to its final resting place
        mods = extractMods(primaryPID, dataRoot, modified);
    } catch (TransformerException e) {
        failJob(e, Type.NORMALIZATION, "Failed to transform metadata to MODS");
    } catch (Exception e) {
        failJob(e, Type.NORMALIZATION, "Unable to deserialize the metadata file");
    }

    // Detect if there are any attachments
    List<?> attachmentElements = dataRoot.getChild("DISS_content").getChildren("DISS_attachment");

    if (attachmentElements == null || attachmentElements.size() == 0) {

        // Simple object with the content as its source data
        primaryResource = populateSimple(model, primaryPID, contentFile);
    } else {
        String title = mods.getRootElement().getChild("titleInfo", MODS_V3_NS).getChildText("title",
                MODS_V3_NS);

        // Has attachments, so it is an aggregate
        primaryResource = populateAggregate(model, primaryPID, attachmentElements, attachmentDir, contentFile,
                title);
    }

    // Store primary resource as child of the deposit
    depositBag.add(primaryResource);

    // Add the data file as a metadata datastream of the primary object
    setSourceMetadata(model, primaryResource, dataFile);

    // Capture other metadata, like embargoes
    setEmbargoUntil(model, primaryResource, dataRoot);

    // Creation date for the content file
    model.add(primaryResource, cdrprop(model, dateCreated), modified.toString(), XSDDatatype.XSDdateTime);
}

From source file:de.crowdcode.movmvn.core.Unzipper.java

/**
 * Unzip a file.
 * 
 * @param archiveFile
 *            to be unzipped
 * @param outPath
 *            the place to put the result
 * @throws IOException
 *             exception
 * @throws ZipException
 *             exception
 */
public void unzipFileToDir(final File archiveFile, final File outPath) throws IOException, ZipException {
    ZipFile zipFile = new ZipFile(archiveFile);
    Enumeration<ZipArchiveEntry> e = zipFile.getEntries();
    while (e.hasMoreElements()) {
        ZipArchiveEntry entry = e.nextElement();
        File file = new File(outPath, entry.getName());
        if (entry.isDirectory()) {
            FileUtils.forceMkdir(file);
        } else {
            InputStream is = zipFile.getInputStream(entry);
            FileOutputStream os = FileUtils.openOutputStream(file);
            try {
                IOUtils.copy(is, os);
            } finally {
                os.close();
                is.close();
            }
            file.setLastModified(entry.getTime());
        }
    }
    zipFile.close();
}

From source file:com.naryx.tagfusion.expression.function.file.ZipList.java

private cfQueryResultData performZiplist(cfSession session, File zipfile, String charset) throws IOException {
    ZipFile zFile = null;
    try {
        cfQueryResultData filesQuery = new cfQueryResultData(new String[] { "name", "type", "compressedsize",
                "size", "compressedpercent", "datelastmodified", "comment" }, "CFZIP");
        zFile = new ZipFile(zipfile, charset);

        List<Map<String, cfData>> allResultRows = new ArrayList<Map<String, cfData>>();
        Map<String, cfData> resultRow;
        Enumeration<? extends ZipArchiveEntry> files = zFile.getEntries();
        ZipArchiveEntry nextEntry = null;
        long size;
        double compressed;

        while (files.hasMoreElements()) {
            nextEntry = (ZipArchiveEntry) files.nextElement();
            resultRow = new FastMap<String, cfData>(8);
            resultRow.put("name", new cfStringData(nextEntry.getName()));
            resultRow.put("comment", new cfStringData(nextEntry.getComment()));
            resultRow.put("datelastmodified", new cfDateData(nextEntry.getTime()));

            if (nextEntry.isDirectory()) {
                resultRow.put("compressedsize", new cfNumberData(0));
                resultRow.put("size", new cfNumberData(0));
                resultRow.put("type", new cfStringData("Dir"));
                resultRow.put("compressedpercent", new cfNumberData(0));

            } else {
                size = nextEntry.getSize();
                resultRow.put("compressedsize",
                        new cfStringData(String.valueOf(nextEntry.getCompressedSize())));
                resultRow.put("size", new cfStringData(String.valueOf(size)));
                resultRow.put("type", new cfStringData("File"));
                if (size != 0) {
                    compressed = ((float) nextEntry.getCompressedSize() / (float) size);
                    resultRow.put("compressedpercent",
                            new cfStringData(String.valueOf(100 - (int) (compressed * 100))));
                } else {
                    resultRow.put("compressedpercent", new cfStringData("0"));
                }
            }

            allResultRows.add(resultRow);
        }
        filesQuery.populateQuery(allResultRows);
        return filesQuery;
    } finally {
        if (zFile != null) {
            try {
                zFile.close();
            } catch (IOException ignored) {
            }
        }
    }
}

From source file:com.facebook.buck.util.unarchive.Unzip.java

private void writeZipContents(ZipFile zip, ZipArchiveEntry entry, ProjectFilesystem filesystem, Path target)
        throws IOException {
    // Write file
    try (InputStream is = zip.getInputStream(entry)) {
        if (entry.isUnixSymlink()) {
            filesystem.createSymLink(target,
                    filesystem.getPath(new String(ByteStreams.toByteArray(is), Charsets.UTF_8)),
                    /* force */ true);
        } else {
            try (OutputStream out = filesystem.newFileOutputStream(target)) {
                ByteStreams.copy(is, out);
            }
        }
    }

    Path filePath = filesystem.resolve(target);
    File file = filePath.toFile();

    // restore mtime for the file
    file.setLastModified(entry.getTime());

    // TODO(simons): Implement what the comment below says we should do.
    //
    // Sets the file permissions of the output file given the information in {@code entry}'s
    // extra data field. According to the docs at
    // http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip/proginfo/extra.fld there
    // are two extensions that might support file permissions: Acorn and ASi UNIX. We shall
    // assume that inputs are not from an Acorn SparkFS. The relevant section from the docs:
    //
    // <pre>
    //    The following is the layout of the ASi extra block for Unix.  The
    //    local-header and central-header versions are identical.
    //    (Last Revision 19960916)
    //
    //    Value         Size        Description
    //    -----         ----        -----------
    //   (Unix3) 0x756e        Short       tag for this extra block type ("nu")
    //   TSize         Short       total data size for this block
    //   CRC           Long        CRC-32 of the remaining data
    //   Mode          Short       file permissions
    //   SizDev        Long        symlink'd size OR major/minor dev num
    //   UID           Short       user ID
    //   GID           Short       group ID
    //   (var.)        variable    symbolic link filename
    //
    //   Mode is the standard Unix st_mode field from struct stat, containing
    //   user/group/other permissions, setuid/setgid and symlink info, etc.
    // </pre>
    //
    // From the stat man page, we see that the following mask values are defined for the file
    // permissions component of the st_mode field:
    //
    // <pre>
    //   S_ISUID   0004000   set-user-ID bit
    //   S_ISGID   0002000   set-group-ID bit (see below)
    //   S_ISVTX   0001000   sticky bit (see below)
    //
    //   S_IRWXU     00700   mask for file owner permissions
    //
    //   S_IRUSR     00400   owner has read permission
    //   S_IWUSR     00200   owner has write permission
    //   S_IXUSR     00100   owner has execute permission
    //
    //   S_IRWXG     00070   mask for group permissions
    //   S_IRGRP     00040   group has read permission
    //   S_IWGRP     00020   group has write permission
    //   S_IXGRP     00010   group has execute permission
    //
    //   S_IRWXO     00007   mask for permissions for others
    //   (not in group)
    //   S_IROTH     00004   others have read permission
    //   S_IWOTH     00002   others have write permission
    //   S_IXOTH     00001   others have execute permission
    // </pre>
    //
    // For the sake of our own sanity, we're going to assume that no-one is using symlinks,
    // but we'll check and throw if they are.
    //
    // Before we do anything, we should check the header ID. Pfft!
    //
    // Having jumped through all these hoops, it turns out that InfoZIP's "unzip" store the
    // values in the external file attributes of a zip entry (found in the zip's central
    // directory) assuming that the OS creating the zip was one of an enormous list that
    // includes UNIX but not Windows, it first searches for the extra fields, and if not found
    // falls through to a code path that supports MS-DOS and which stores the UNIX file
    // attributes in the upper 16 bits of the external attributes field.
    //
    // We'll support neither approach fully, but we encode whether this file was executable
    // via storing 0100 in the fields that are typically used by zip implementations to store
    // POSIX permissions. If we find it was executable, use the platform independent java
    // interface to make this unpacked file executable.

    Set<PosixFilePermission> permissions = MorePosixFilePermissions
            .fromMode(entry.getExternalAttributes() >> 16);
    if (permissions.contains(PosixFilePermission.OWNER_EXECUTE) && file.getCanonicalFile().exists()) {
        MostFiles.makeExecutable(filePath);
    }
}

From source file:com.facebook.buck.zip.Unzip.java

/**
 * Unzips a file to a destination and returns the paths of the written files.
 */
public static ImmutableList<Path> extractZipFile(Path zipFile, ProjectFilesystem filesystem, Path relativePath,
        ExistingFileMode existingFileMode) throws IOException {
    // if requested, clean before extracting
    if (existingFileMode == ExistingFileMode.OVERWRITE_AND_CLEAN_DIRECTORIES) {
        try (ZipFile zip = new ZipFile(zipFile.toFile())) {
            Enumeration<ZipArchiveEntry> entries = zip.getEntries();
            while (entries.hasMoreElements()) {
                ZipArchiveEntry entry = entries.nextElement();
                filesystem.deleteRecursivelyIfExists(relativePath.resolve(entry.getName()));
            }
        }
    }
    ImmutableList.Builder<Path> filesWritten = ImmutableList.builder();
    try (ZipFile zip = new ZipFile(zipFile.toFile())) {
        Enumeration<ZipArchiveEntry> entries = zip.getEntries();
        while (entries.hasMoreElements()) {
            ZipArchiveEntry entry = entries.nextElement();
            String fileName = entry.getName();
            Path target = relativePath.resolve(fileName);

            // TODO(bolinfest): Keep track of which directories have already been written to avoid
            // making unnecessary Files.createDirectories() calls. In practice, a single zip file will
            // have many entries in the same directory.

            if (entry.isDirectory()) {
                // Create the directory and all its parent directories
                filesystem.mkdirs(target);
            } else {
                // Create parent folder
                filesystem.createParentDirs(target);

                filesWritten.add(target);
                // Write file
                try (InputStream is = zip.getInputStream(entry)) {
                    if (entry.isUnixSymlink()) {
                        filesystem.createSymLink(target,
                                filesystem.getPath(new String(ByteStreams.toByteArray(is), Charsets.UTF_8)),
                                /* force */ true);
                    } else {
                        try (OutputStream out = filesystem.newFileOutputStream(target)) {
                            ByteStreams.copy(is, out);
                        }
                    }
                }

                // restore mtime for the file
                filesystem.resolve(target).toFile().setLastModified(entry.getTime());

                // TODO(shs96c): Implement what the comment below says we should do.
                //
                // Sets the file permissions of the output file given the information in {@code entry}'s
                // extra data field. According to the docs at
                // http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip/proginfo/extra.fld there
                // are two extensions that might support file permissions: Acorn and ASi UNIX. We shall
                // assume that inputs are not from an Acorn SparkFS. The relevant section from the docs:
                //
                // <pre>
                //    The following is the layout of the ASi extra block for Unix.  The
                //    local-header and central-header versions are identical.
                //    (Last Revision 19960916)
                //
                //    Value         Size        Description
                //    -----         ----        -----------
                //   (Unix3) 0x756e        Short       tag for this extra block type ("nu")
                //   TSize         Short       total data size for this block
                //   CRC           Long        CRC-32 of the remaining data
                //   Mode          Short       file permissions
                //   SizDev        Long        symlink'd size OR major/minor dev num
                //   UID           Short       user ID
                //   GID           Short       group ID
                //   (var.)        variable    symbolic link filename
                //
                //   Mode is the standard Unix st_mode field from struct stat, containing
                //   user/group/other permissions, setuid/setgid and symlink info, etc.
                // </pre>
                //
                // From the stat man page, we see that the following mask values are defined for the file
                // permissions component of the st_mode field:
                //
                // <pre>
                //   S_ISUID   0004000   set-user-ID bit
                //   S_ISGID   0002000   set-group-ID bit (see below)
                //   S_ISVTX   0001000   sticky bit (see below)
                //
                //   S_IRWXU     00700   mask for file owner permissions
                //
                //   S_IRUSR     00400   owner has read permission
                //   S_IWUSR     00200   owner has write permission
                //   S_IXUSR     00100   owner has execute permission
                //
                //   S_IRWXG     00070   mask for group permissions
                //   S_IRGRP     00040   group has read permission
                //   S_IWGRP     00020   group has write permission
                //   S_IXGRP     00010   group has execute permission
                //
                //   S_IRWXO     00007   mask for permissions for others
                //   (not in group)
                //   S_IROTH     00004   others have read permission
                //   S_IWOTH     00002   others have write permission
                //   S_IXOTH     00001   others have execute permission
                // </pre>
                //
                // For the sake of our own sanity, we're going to assume that no-one is using symlinks,
                // but we'll check and throw if they are.
                //
                // Before we do anything, we should check the header ID. Pfft!
                //
                // Having jumped through all these hoops, it turns out that InfoZIP's "unzip" store the
                // values in the external file attributes of a zip entry (found in the zip's central
                // directory) assuming that the OS creating the zip was one of an enormous list that
                // includes UNIX but not Windows, it first searches for the extra fields, and if not found
                // falls through to a code path that supports MS-DOS and which stores the UNIX file
                // attributes in the upper 16 bits of the external attributes field.
                //
                // We'll support neither approach fully, but we encode whether this file was executable
                // via storing 0100 in the fields that are typically used by zip implementations to store
                // POSIX permissions. If we find it was executable, use the platform independent java
                // interface to make this unpacked file executable.

                Set<PosixFilePermission> permissions = MorePosixFilePermissions
                        .fromMode(entry.getExternalAttributes() >> 16);
                if (permissions.contains(PosixFilePermission.OWNER_EXECUTE)) {
                    MoreFiles.makeExecutable(filesystem.resolve(target));
                }
            }
        }
    }
    return filesWritten.build();
}

From source file:at.spardat.xma.xdelta.JarPatcher.java

/**
 * Apply delta.
 *
 * @param patch the patch
 * @param source the source
 * @param output the output
 * @param list the list
 * @param prefix the prefix
 * @throws IOException Signals that an I/O exception has occurred.
 */
public void applyDelta(ZipFile patch, ZipFile source, ZipArchiveOutputStream output, BufferedReader list,
        String prefix) throws IOException {
    String fileName = null;
    try {
        for (fileName = (next == null ? list.readLine()
                : next); fileName != null; fileName = (next == null ? list.readLine() : next)) {
            if (next != null)
                next = null;
            if (!fileName.startsWith(prefix)) {
                next = fileName;
                return;
            }
            int crcDelim = fileName.lastIndexOf(':');
            int crcStart = fileName.lastIndexOf('|');
            long crc = Long.valueOf(fileName.substring(crcStart + 1, crcDelim), 16);
            long crcSrc = Long.valueOf(fileName.substring(crcDelim + 1), 16);
            fileName = fileName.substring(prefix.length(), crcStart);
            if ("META-INF/file.list".equalsIgnoreCase(fileName))
                continue;
            if (fileName.contains("!")) {
                String[] embeds = fileName.split("\\!");
                ZipArchiveEntry original = getEntry(source, embeds[0], crcSrc);
                File originalFile = File.createTempFile("jardelta-tmp-origin-", ".zip");
                File outputFile = File.createTempFile("jardelta-tmp-output-", ".zip");
                Exception thrown = null;
                try (FileOutputStream out = new FileOutputStream(originalFile);
                        InputStream in = source.getInputStream(original)) {
                    int read = 0;
                    while (-1 < (read = in.read(buffer))) {
                        out.write(buffer, 0, read);
                    }
                    out.flush();
                    applyDelta(patch, new ZipFile(originalFile), new ZipArchiveOutputStream(outputFile), list,
                            prefix + embeds[0] + "!");
                } catch (Exception e) {
                    thrown = e;
                    throw e;
                } finally {
                    originalFile.delete();
                    try (FileInputStream in = new FileInputStream(outputFile)) {
                        if (thrown == null) {
                            ZipArchiveEntry outEntry = copyEntry(original);
                            output.putArchiveEntry(outEntry);
                            int read = 0;
                            while (-1 < (read = in.read(buffer))) {
                                output.write(buffer, 0, read);
                            }
                            output.flush();
                            output.closeArchiveEntry();
                        }
                    } finally {
                        outputFile.delete();
                    }
                }
            } else {
                try {
                    ZipArchiveEntry patchEntry = getEntry(patch, prefix + fileName, crc);
                    if (patchEntry != null) { // new Entry
                        ZipArchiveEntry outputEntry = JarDelta.entryToNewName(patchEntry, fileName);
                        output.putArchiveEntry(outputEntry);
                        if (!patchEntry.isDirectory()) {
                            try (InputStream in = patch.getInputStream(patchEntry)) {
                                int read = 0;
                                while (-1 < (read = in.read(buffer))) {
                                    output.write(buffer, 0, read);
                                }
                            }
                        }
                        closeEntry(output, outputEntry, crc);
                    } else {
                        ZipArchiveEntry sourceEntry = getEntry(source, fileName, crcSrc);
                        if (sourceEntry == null) {
                            throw new FileNotFoundException(
                                    fileName + " not found in " + sourceName + " or " + patchName);
                        }
                        if (sourceEntry.isDirectory()) {
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(sourceEntry);
                            output.putArchiveEntry(outputEntry);
                            closeEntry(output, outputEntry, crc);
                            continue;
                        }
                        patchEntry = getPatchEntry(patch, prefix + fileName + ".gdiff", crc);
                        if (patchEntry != null) { // changed Entry
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(sourceEntry);
                            outputEntry.setTime(patchEntry.getTime());
                            output.putArchiveEntry(outputEntry);
                            byte[] sourceBytes = new byte[(int) sourceEntry.getSize()];
                            try (InputStream sourceStream = source.getInputStream(sourceEntry)) {
                                for (int erg = sourceStream
                                        .read(sourceBytes); erg < sourceBytes.length; erg += sourceStream
                                                .read(sourceBytes, erg, sourceBytes.length - erg))
                                    ;
                            }
                            InputStream patchStream = patch.getInputStream(patchEntry);
                            GDiffPatcher diffPatcher = new GDiffPatcher();
                            diffPatcher.patch(sourceBytes, patchStream, output);
                            patchStream.close();
                            outputEntry.setCrc(crc);
                            closeEntry(output, outputEntry, crc);
                        } else { // unchanged Entry
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(sourceEntry);
                            output.putArchiveEntry(outputEntry);
                            try (InputStream in = source.getInputStream(sourceEntry)) {
                                int read = 0;
                                while (-1 < (read = in.read(buffer))) {
                                    output.write(buffer, 0, read);
                                }
                            }
                            output.flush();
                            closeEntry(output, outputEntry, crc);
                        }
                    }
                } catch (PatchException pe) {
                    IOException ioe = new IOException();
                    ioe.initCause(pe);
                    throw ioe;
                }
            }
        }
    } catch (Exception e) {
        System.err.println(prefix + fileName);
        throw e;
    } finally {
        source.close();
        output.close();
    }
}

From source file:at.spardat.xma.xdelta.JarDelta.java

/**
 * Compute delta.
 *
 * @param source the source
 * @param target the target
 * @param output the output
 * @param list the list
 * @param prefix the prefix
 * @throws IOException Signals that an I/O exception has occurred.
 */
public void computeDelta(ZipFile source, ZipFile target, ZipArchiveOutputStream output, PrintWriter list,
        String prefix) throws IOException {
    try {
        for (Enumeration<ZipArchiveEntry> enumer = target.getEntries(); enumer.hasMoreElements();) {
            calculatedDelta = null;
            ZipArchiveEntry targetEntry = enumer.nextElement();
            ZipArchiveEntry sourceEntry = findBestSource(source, target, targetEntry);
            String nextEntryName = prefix + targetEntry.getName();
            if (sourceEntry != null && zipFilesPattern.matcher(sourceEntry.getName()).matches()
                    && !equal(sourceEntry, targetEntry)) {
                nextEntryName += "!";
            }
            nextEntryName += "|" + Long.toHexString(targetEntry.getCrc());
            if (sourceEntry != null) {
                nextEntryName += ":" + Long.toHexString(sourceEntry.getCrc());
            } else {
                nextEntryName += ":0";
            }
            list.println(nextEntryName);
            if (targetEntry.isDirectory()) {
                if (sourceEntry == null) {
                    ZipArchiveEntry outputEntry = entryToNewName(targetEntry, prefix + targetEntry.getName());
                    output.putArchiveEntry(outputEntry);
                    output.closeArchiveEntry();
                }
            } else {
                if (sourceEntry == null || sourceEntry.getSize() <= Delta.DEFAULT_CHUNK_SIZE
                        || targetEntry.getSize() <= Delta.DEFAULT_CHUNK_SIZE) { // new Entry od. alter Eintrag od. neuer Eintrag leer
                    ZipArchiveEntry outputEntry = entryToNewName(targetEntry, prefix + targetEntry.getName());
                    output.putArchiveEntry(outputEntry);
                    try (InputStream in = target.getInputStream(targetEntry)) {
                        int read = 0;
                        while (-1 < (read = in.read(buffer))) {
                            output.write(buffer, 0, read);
                        }
                        output.flush();
                    }
                    output.closeArchiveEntry();
                } else {
                    if (!equal(sourceEntry, targetEntry)) {
                        if (zipFilesPattern.matcher(sourceEntry.getName()).matches()) {
                            File embeddedTarget = File.createTempFile("jardelta-tmp", ".zip");
                            File embeddedSource = File.createTempFile("jardelta-tmp", ".zip");
                            try (FileOutputStream out = new FileOutputStream(embeddedSource);
                                    InputStream in = source.getInputStream(sourceEntry);
                                    FileOutputStream out2 = new FileOutputStream(embeddedTarget);
                                    InputStream in2 = target.getInputStream(targetEntry)) {
                                int read = 0;
                                while (-1 < (read = in.read(buffer))) {
                                    out.write(buffer, 0, read);
                                }
                                out.flush();
                                read = 0;
                                while (-1 < (read = in2.read(buffer))) {
                                    out2.write(buffer, 0, read);
                                }
                                out2.flush();
                                computeDelta(new ZipFile(embeddedSource), new ZipFile(embeddedTarget), output,
                                        list, prefix + sourceEntry.getName() + "!");
                            } finally {
                                embeddedSource.delete();
                                embeddedTarget.delete();
                            }
                        } else {
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(
                                    prefix + targetEntry.getName() + ".gdiff");
                            outputEntry.setTime(targetEntry.getTime());
                            outputEntry.setComment("" + targetEntry.getCrc());
                            output.putArchiveEntry(outputEntry);
                            if (calculatedDelta != null) {
                                output.write(calculatedDelta);
                                output.flush();
                            } else {
                                try (ByteArrayOutputStream outbytes = new ByteArrayOutputStream()) {
                                    Delta d = new Delta();
                                    DiffWriter diffWriter = new GDiffWriter(new DataOutputStream(outbytes));
                                    int sourceSize = (int) sourceEntry.getSize();
                                    byte[] sourceBytes = new byte[sourceSize];
                                    try (InputStream sourceStream = source.getInputStream(sourceEntry)) {
                                        for (int erg = sourceStream.read(
                                                sourceBytes); erg < sourceBytes.length; erg += sourceStream
                                                        .read(sourceBytes, erg, sourceBytes.length - erg))
                                            ;
                                    }
                                    d.compute(sourceBytes, target.getInputStream(targetEntry), diffWriter);
                                    output.write(outbytes.toByteArray());
                                }
                            }
                            output.closeArchiveEntry();
                        }
                    }
                }
            }
        }
    } finally {
        source.close();
        target.close();
    }
}

From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java

/**
 * Reads in a DigiDoc file. One of fname or isSdoc must be given.
 * @param fname signed doc filename
 * @param isSdoc opened stream with DigiDoc data
 * The user must open and close it.
 * @param errs list of errors to fill with parsing errors. If given,
 * an attempt is made to continue parsing on errors and to return them in this list.
 * If not given (null), the first error found will be thrown.
 * @return signed document object if successfully parsed
 */
private SignedDoc readSignedDocOfType(String fname, InputStream isSdoc, boolean isBdoc, List errs)
        throws DigiDocException {
    // Use an instance of ourselves as the SAX event handler
    SAXDigiDocFactory handler = this;
    m_errs = errs;
    DigiDocVerifyFactory.initProvider();
    SAXParserFactory factory = SAXParserFactory.newInstance();
    if (m_logger.isDebugEnabled())
        m_logger.debug("Start reading ddoc/bdoc " + ((fname != null) ? "from file: " + fname : "from stream")
                + " bdoc: " + isBdoc);
    if (fname == null && isSdoc == null) {
        throw new DigiDocException(DigiDocException.ERR_READ_FILE, "No input file", null);
    }
    if (fname != null) {
        File inFile = new File(fname);
        if (!inFile.canRead() || inFile.length() == 0) {
            throw new DigiDocException(DigiDocException.ERR_READ_FILE, "Empty or unreadable input file", null);
        }
    }
    ZipFile zf = null;
    ZipArchiveInputStream zis = null;
    ZipArchiveEntry ze = null;
    InputStream isEntry = null;
    File fTmp = null;
    try {
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        if (isBdoc) { // bdoc parsing
            // must be a bdoc document ?
            m_doc = new SignedDoc();
            m_doc.setVersion(SignedDoc.BDOC_VERSION_1_0);
            m_doc.setFormat(SignedDoc.FORMAT_BDOC);
            Enumeration eFiles = null;
            if (fname != null) {
                zf = new ZipFile(fname, "UTF-8");
                eFiles = zf.getEntries();
            } else if (isSdoc != null) {
                zis = new ZipArchiveInputStream(isSdoc, "UTF-8", true, true);
            }
            ArrayList lSigFnames = new ArrayList();
            ArrayList lDataFnames = new ArrayList();
            // read all entries
            boolean bHasMimetype = false, bManifest1 = false;
            int nFil = 0;
            while ((zf != null && eFiles.hasMoreElements())
                    || (zis != null && ((ze = zis.getNextZipEntry()) != null))) {
                nFil++;

                // read entry
                if (zf != null) { // ZipFile
                    ze = (ZipArchiveEntry) eFiles.nextElement();
                    isEntry = zf.getInputStream(ze);
                } else { // ZipArchiveInputStream
                    int n = 0, nTot = 0;
                    if ((ze.getName().equals(FILE_MIMETYPE) || ze.getName().equals(FILE_MANIFEST)
                            || (ze.getName().startsWith(FILE_SIGNATURES) && ze.getName().endsWith(".xml")))
                            || (nMaxBdocFilCached <= 0
                                    || (ze.getSize() < nMaxBdocFilCached && ze.getSize() >= 0))) {
                        ByteArrayOutputStream bos = new ByteArrayOutputStream();
                        byte[] data = new byte[2048];
                        while ((n = zis.read(data)) > 0) {
                            bos.write(data, 0, n);
                            nTot += n;
                        }
                        if (m_logger.isDebugEnabled())
                            m_logger.debug("Read: " + nTot + " bytes from zip");
                        data = bos.toByteArray();
                        bos = null;
                        isEntry = new ByteArrayInputStream(data);
                    } else {
                        File fCacheDir = new File(ConfigManager.instance().getStringProperty(
                                "DIGIDOC_DF_CACHE_DIR", System.getProperty("java.io.tmpdir")));
                        fTmp = File.createTempFile("bdoc-data", ".tmp", fCacheDir);
                        FileOutputStream fos = new FileOutputStream(fTmp);
                        byte[] data = new byte[2048];
                        while ((n = zis.read(data)) > 0) {
                            fos.write(data, 0, n);
                            nTot += n;
                        }
                        if (m_logger.isDebugEnabled())
                            m_logger.debug("Read: " + nTot + " bytes from zip to: " + fTmp.getAbsolutePath());
                        fos.close();
                        isEntry = new FileInputStream(fTmp);
                    }
                }
                if (m_logger.isDebugEnabled())
                    m_logger.debug("Entry: " + ze.getName() + " nlen: " + ze.getName().length() + " size: "
                            + ze.getSize() + " dir: " + ze.isDirectory() + " comp-size: "
                            + ze.getCompressedSize());
                // mimetype file
                if (ze.getName().equals(FILE_MIMETYPE)) {
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Check mimetype!");
                    checkBdocMimetype(isEntry);
                    bHasMimetype = true;
                    m_doc.setComment(ze.getComment());
                    if (nFil != 1) {
                        m_logger.error("mimetype file is " + nFil + " file but must be first");
                        handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                                "mimetype file is not first zip entry", null));
                    }
                } else if (ze.getName().equals(FILE_MANIFEST)) { // manifest.xml file
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Read manifest");
                    if (!bManifest1 && isEntry != null) {
                        bManifest1 = true;
                        BdocManifestParser mfparser = new BdocManifestParser(m_doc);
                        mfparser.readManifest(isEntry);
                    } else {
                        m_logger.error("Found multiple manifest.xml files!");
                        throw new DigiDocException(DigiDocException.ERR_MULTIPLE_MANIFEST_FILES,
                                "Found multiple manifest.xml files!", null);
                    }
                } else if (ze.getName().startsWith(FILE_SIGNATURES) && ze.getName().endsWith(".xml")) { // some signature
                    m_fileName = ze.getName();
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Reading bdoc siganture: " + m_fileName);
                    boolean bExists = false;
                    for (int j = 0; j < lSigFnames.size(); j++) {
                        String s1 = (String) lSigFnames.get(j);
                        if (s1.equals(m_fileName))
                            bExists = true;
                    }
                    if (bExists) {
                        m_logger.error("Duplicate signature filename: " + m_fileName);
                        handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                                "Duplicate signature filename: " + m_fileName, null));
                    } else
                        lSigFnames.add(m_fileName);
                    SAXParser saxParser = factory.newSAXParser();
                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    int n = 0;
                    byte[] data = new byte[2048];
                    while ((n = isEntry.read(data)) > 0)
                        bos.write(data, 0, n);
                    data = bos.toByteArray();
                    bos = null;
                    if (m_logger.isDebugEnabled())
                        m_logger.debug(
                                "Parsing bdoc: " + m_fileName + " size: " + ((data != null) ? data.length : 0));
                    saxParser.parse(new SignatureInputStream(new ByteArrayInputStream(data)), this);
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Parsed bdoc: " + m_fileName);
                    Signature sig1 = m_doc.getLastSignature();
                    m_sigComment = ze.getComment();
                    if (sig1 != null) {
                        sig1.setPath(m_fileName);
                        sig1.setComment(ze.getComment());
                    }
                } else { // probably a data file
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Read data file: " + ze.getName());
                    if (!ze.isDirectory()) {
                        boolean bExists = false;
                        for (int j = 0; j < lDataFnames.size(); j++) {
                            String s1 = (String) lDataFnames.get(j);
                            if (s1.equals(ze.getName()))
                                bExists = true;
                        }
                        if (bExists) {
                            m_logger.error("Duplicate datafile filename: " + ze.getName());
                            handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                                    "Duplicate datafile filename: " + ze.getName(), null));
                        } else
                            lDataFnames.add(ze.getName());
                        DataFile df = m_doc.findDataFileById(ze.getName());
                        if (df != null) {
                            if (ze.getSize() > 0)
                                df.setSize(ze.getSize());
                            df.setContentType(DataFile.CONTENT_BINARY);
                            df.setFileName(ze.getName());
                        } else {
                            df = new DataFile(ze.getName(), DataFile.CONTENT_BINARY, ze.getName(),
                                    "application/binary", m_doc);
                            if (m_doc.getDataFiles() == null)
                                m_doc.setDataFiles(new ArrayList());
                            m_doc.getDataFiles().add(df);
                            //m_doc.addDataFile(df); // this does some initialization work that is unnecessary here
                        }
                        // enable caching if requested
                        if (isEntry != null)
                            df.setOrCacheBodyAndCalcHashes(isEntry);
                        df.setComment(ze.getComment());
                        df.setLastModDt(new Date(ze.getTime()));
                        // fix mime type according to DataObjectFormat
                        Signature sig1 = m_doc.getLastSignature();
                        if (sig1 != null) {
                            Reference dRef = sig1.getSignedInfo().getReferenceForDataFile(df);
                            if (dRef != null) {
                                DataObjectFormat dof = sig1.getSignedInfo()
                                        .getDataObjectFormatForReference(dRef);
                                if (dof != null) {
                                    df.setMimeType(dof.getMimeType());
                                }
                            }
                        }
                    }
                }
                if (fTmp != null) {
                    fTmp.delete();
                    fTmp = null;
                }
            } // while zip entries
            if (!bHasMimetype) {
                m_logger.error("No mimetype file");
                handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                        "Not a BDOC format file! No mimetype file!", null));
            }
            // if no signatures exist then copy mime-type from manifest.xml to DataFile -s
            if (m_doc.countSignatures() == 0) {
                for (int i = 0; i < m_doc.countDataFiles(); i++) {
                    DataFile df = m_doc.getDataFile(i);
                    if (m_doc.getManifest() != null) {
                        for (int j = 0; j < m_doc.getManifest().getNumFileEntries(); j++) {
                            ManifestFileEntry mfe = m_doc.getManifest().getFileEntry(j);
                            if (mfe.getFullPath() != null && mfe.getFullPath().equals(df.getFileName())) {
                                df.setMimeType(mfe.getMediaType());
                            } // if fullpath
                        } // for
                    } // if
                } // for i
            }
        } else { // ddoc parsing
            if (m_logger.isDebugEnabled())
                m_logger.debug("Reading ddoc: " + fname + " file: " + m_fileName);
            m_fileName = fname;
            SAXParser saxParser = factory.newSAXParser();
            if (fname != null)
                saxParser.parse(new SignatureInputStream(new FileInputStream(fname)), this);
            else if (isSdoc != null)
                saxParser.parse(isSdoc, this);
        }
    } catch (org.xml.sax.SAXParseException ex) {
        m_logger.error("SAX Error: " + ex);
        handleError(ex);

    } catch (Exception ex) {
        m_logger.error("Error reading3: " + ex);
        ex.printStackTrace();
        /*if(ex instanceof DigiDocException){
           DigiDocException dex = (DigiDocException)ex;
           m_logger.error("Dex: " + ex);
           if(dex.getNestedException() != null) {
              dex.getNestedException().printStackTrace();
              m_logger.error("Trace: "); 
           }
        }*/
        handleError(ex);
    } finally { // cleanup
        try {
            if (isEntry != null) {
                isEntry.close();
                isEntry = null;
            }
            if (zis != null)
                zis.close();
            if (zf != null)
                zf.close();
            if (fTmp != null) {
                fTmp.delete();
                fTmp = null;
            }
        } catch (Exception ex) {
            m_logger.error("Error closing streams and files: " + ex);
        }
    }
    // compare Manifest and DataFiles
    boolean bErrList = (errs != null);
    if (errs == null)
        errs = new ArrayList();
    boolean bOk = DigiDocVerifyFactory.verifyManifestEntries(m_doc, errs);
    if (m_doc == null) {
        m_logger.error("Error reading4: doc == null");
        handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                "This document is not in ddoc or bdoc format", null));
    }
    if (!bErrList && errs.size() > 0) { // if error list was not used then we have to throw exception. So we will throw the first one since we can only do it once
        DigiDocException ex = (DigiDocException) errs.get(0);
        throw ex;
    }
    return m_doc;
}