Example usage for org.apache.commons.compress.archivers.zip ZipArchiveOutputStream close

List of usage examples for org.apache.commons.compress.archivers.zip ZipArchiveOutputStream close

Introduction

On this page you can find example usage for org.apache.commons.compress.archivers.zip ZipArchiveOutputStream close.

Prototype

public void close() throws IOException 

Source Link

Document

Closes this output stream and releases any system resources associated with the stream.

Usage

From source file:org.pepstock.jem.util.ZipUtil.java

/** 
  * Creates a zip output stream  at the specified path with the contents of the specified directory. 
  * /*from ww w  .j  a  va2s  . c  o m*/
  * @param folder folder to zip 
  * @param zipOutputStream output stream. Usually a bytearray 
  * @throws IOException if any error occurs 
  */
public static void createZip(File folder, OutputStream zipOutputStream) throws IOException {
    BufferedOutputStream bufferedOutputStream = null;
    ZipArchiveOutputStream zipArchiveOutputStream = null;
    try {
        bufferedOutputStream = new BufferedOutputStream(zipOutputStream);
        zipArchiveOutputStream = new ZipArchiveOutputStream(bufferedOutputStream);
        addFileToZip(zipArchiveOutputStream, folder);
    } finally {
        if (zipArchiveOutputStream != null) {
            zipArchiveOutputStream.finish();
            zipArchiveOutputStream.close();
        }
        if (bufferedOutputStream != null) {
            bufferedOutputStream.close();
        }
        if (zipOutputStream != null) {
            zipOutputStream.close();
        }
    }

}

From source file:org.sakaiproject.archive.impl.SiteZipper.java

/**
 * Zips a site archive. The zip is stored back in the storage directory,
 * together with a SHA-1 hash of the finished zip file.
 *
 * @param siteId site that has already been archived
 * @param m_storagePath path to where the archives are
 * @return true once the zip and its hash file have been written
 * @throws IOException if the archive cannot be zipped or hashed
 */
public boolean zipArchive(String siteId, String m_storagePath) throws IOException {

    //get path to archive dir for this site
    //suffix of -archive is hardcoded as per archive service
    String archivePath = m_storagePath + siteId + "-archive";

    //timestamp becomes part of both the zip and the hash file names
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
    String timestamp = dateFormat.format(Calendar.getInstance().getTime());

    //create path to compressed archive
    String compressedArchivePath = m_storagePath + siteId + "-" + timestamp + ".zip";
    File zipFile = new File(compressedArchivePath);

    if (!zipFile.exists()) {
        log.info("Creating zip file: " + compressedArchivePath);
        zipFile.createNewFile();
    }

    // Write the zip. try-with-resources closes the streams in reverse order
    // and avoids the NullPointerException the old finally block could throw
    // (masking the real failure) when a constructor failed before every
    // stream variable was assigned.
    try (FileOutputStream fOut = new FileOutputStream(zipFile);
            BufferedOutputStream bOut = new BufferedOutputStream(fOut);
            ZipArchiveOutputStream zOut = new ZipArchiveOutputStream(bOut)) {
        addFileToZip(zOut, archivePath, ""); //add the directory which will then add all files recursively
        zOut.finish();
    }

    // Hash AFTER the zip stream has been fully flushed and closed. The
    // original code hashed the file while the zip stream was still open,
    // so the digest could be computed over an incomplete file.
    String hashPath = m_storagePath + siteId + "-" + timestamp + ".sha1";
    log.info("Creating hash: " + hashPath);
    try (FileInputStream zip = new FileInputStream(compressedArchivePath)) {
        String hash = DigestUtils.sha1Hex(zip);
        FileUtils.writeStringToFile(new File(hashPath), hash);
    }

    return true;
}

From source file:org.sead.nds.repository.BagGenerator.java

/**
 * Generates a BagIt-style bag for the current publication request and streams
 * it as a zip to {@code outputStream}.
 *
 * Builds the bag structure (data dir, pid-mapping.txt, hash manifest,
 * bagit.txt, oremap.jsonld.txt, bag-info.txt) from the ORE map returned by
 * {@code RO}, writes the zip, then validates that every indexed resource was
 * included. Mutates several instance fields ({@code pubRequest}, {@code dirs},
 * {@code bagID}, {@code resourceIndex}, {@code resourceUsed}, ...).
 *
 * @param outputStream destination for the generated zip; closed by this method
 * @return true when the bag was written; false if a valid bag name could not
 *         be derived from the aggregation identifier
 * @throws Exception propagated from request/map retrieval, file creation or
 *         zip writing
 */
public boolean generateBag(OutputStream outputStream) throws Exception {
    log.info("Generating: Bag to the Future!");
    pubRequest = RO.getPublicationRequest();
    RO.sendStatus(C3PRPubRequestFacade.PENDING_STAGE, Repository.getID() + " is now processing this request");

    // scratch file backing the scatter-gather zip writer
    File tmp = File.createTempFile("sead-scatter-dirs", "tmp");
    dirs = ScatterZipOutputStream.fileBased(tmp);

    JSONObject oremap = RO.getOREMap();
    JSONObject aggregation = oremap.getJSONObject("describes");

    // Transfer statistics to oremap for preservation - note that the #
    // files, totalsize are checked after the zip is written
    // so any error will be recorded in the zip, but caught in the log.
    // Other elements are not currently checked.
    JSONObject aggStats = ((JSONObject) pubRequest.get("Aggregation Statistics"));
    aggregation.put("Aggregation Statistics", aggStats);

    if (((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).has("License")) {
        license = ((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).getString("License");

    }
    // Accept license preference and add it as the license on the
    // aggregation
    aggregation.put("License", license);

    if (((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).has("Purpose")) {
        purpose = ((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).getString("Purpose");

    }
    // Accept the purpose and add it to the map and aggregation (both are
    // for this purpose)
    aggregation.put("Purpose", purpose);
    oremap.put("Purpose", purpose);

    // check whether Access Rights set, if so, add it to aggregation
    if (((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).has("Access Rights")) {
        String accessRights = ((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES))
                .getString("Access Rights");
        aggregation.put("Access Rights", accessRights);
    }

    bagID = aggregation.getString("Identifier");
    String bagName = bagID;
    try {
        // Create valid filename from identifier and extend path with
        // two levels of hash-based subdirs to help distribute files
        bagName = getValidName(bagName);
    } catch (Exception e) {
        log.error("Couldn't create valid filename: " + e.getLocalizedMessage());
        return false;
    }
    // Create data dir in bag, also creates parent bagName dir
    String currentPath = bagName + "/data/";
    createDir(currentPath);

    aggregates = aggregation.getJSONArray("aggregates");

    if (aggregates != null) {
        // Add container and data entries
        // Setup global index of the aggregation and all aggregated
        // resources by Identifier
        resourceIndex = indexResources(bagID, aggregates);
        // Setup global list of succeed(true), fail(false), notused
        // (null) flags
        resourceUsed = new Boolean[aggregates.length() + 1];
        // Process current container (the aggregation itself) and its
        // children
        processContainer(aggregation, currentPath);
    }
    // Create manifest files
    // pid-mapping.txt - a DataOne recommendation to connect ids and
    // in-bag path/names
    StringBuffer pidStringBuffer = new StringBuffer();
    boolean first = true;
    for (Entry<String, String> pidEntry : pidMap.entrySet()) {
        if (!first) {
            pidStringBuffer.append("\n");
        } else {
            first = false;
        }
        pidStringBuffer.append(pidEntry.getKey() + " " + pidEntry.getValue());
    }
    createFileFromString(bagName + "/pid-mapping.txt", pidStringBuffer.toString());
    // Hash manifest - a hash manifest is required
    // by Bagit spec
    StringBuffer sha1StringBuffer = new StringBuffer();
    first = true;
    for (Entry<String, String> sha1Entry : sha1Map.entrySet()) {
        if (!first) {
            sha1StringBuffer.append("\n");
        } else {
            first = false;
        }
        sha1StringBuffer.append(sha1Entry.getValue() + " " + sha1Entry.getKey());
    }
    if (!(hashtype == null)) {
        String manifestName = bagName + "/manifest-";
        if (hashtype.equals("SHA1 Hash")) {
            manifestName = manifestName + "sha1.txt";
        } else if (hashtype.equals("SHA512 Hash")) {
            manifestName = manifestName + "sha512.txt";
        } else {
            // NOTE(review): an unsupported hash type still writes the file
            // under the bare "manifest-" name below — presumably intentional
            // best-effort; confirm
            log.warn("Unsupported Hash type: " + hashtype);
        }
        createFileFromString(manifestName, sha1StringBuffer.toString());
    } else {
        log.warn("No Hash values sent - Bag File does not meet BagIT specification requirement");
    }
    // bagit.txt - Required by spec
    createFileFromString(bagName + "/bagit.txt", "BagIt-Version: 0.97\nTag-File-Character-Encoding: UTF-8");

    if (oremap.getJSONObject("describes").has("Creator")) {
        aggregation.put("Creator",
                RO.expandPeople(RO.normalizeValues(oremap.getJSONObject("describes").get("Creator"))));
    }
    if (oremap.getJSONObject("describes").has("Contact")) {
        aggregation.put("Contact",
                RO.expandPeople(RO.normalizeValues(oremap.getJSONObject("describes").get("Contact"))));
    }

    // Generate DOI:
    oremap.getJSONObject("describes").put(PubRequestFacade.EXTERNAL_IDENTIFIER,
            Repository.createDOIForRO(bagID, RO));

    oremap.getJSONObject("describes").put("Publication Date",
            new SimpleDateFormat("yyyy-MM-dd").format(Calendar.getInstance().getTime()));

    Object context = oremap.get("@context");
    // FixMe - should test that these labels don't have a different
    // definition (currently we're just checking to see if they are
    // already defined)
    addIfNeeded(context, "License", "http://purl.org/dc/terms/license");
    addIfNeeded(context, "Purpose", "http://sead-data.net/vocab/publishing#Purpose");
    addIfNeeded(context, "Access Rights", "http://purl.org/dc/terms/accessRights");
    addIfNeeded(context, PubRequestFacade.EXTERNAL_IDENTIFIER, "http://purl.org/dc/terms/identifier");
    addIfNeeded(context, "Publication Date", "http://purl.org/dc/terms/issued");

    // Aggregation Statistics
    // For keys in Agg Stats:
    for (String key : ((Set<String>) aggStats.keySet())) {
        addIfNeeded(context, key, getURIForKey(pubRequest.get("@context"), key));
    }

    oremap.put("@id", linkRewriter.rewriteOREMapLink(oremap.getString("@id"), bagID));
    aggregation.put("@id", linkRewriter.rewriteAggregationLink(aggregation.getString("@id"), bagID));
    // Serialize oremap itself (pretty printed) - SEAD recommendation
    // (DataOne distributes metadata files within the bag
    // FixMe - add missing hash values if needed and update context
    // (read and cache files or read twice?)
    createFileFromString(bagName + "/oremap.jsonld.txt", oremap.toString(2));

    // Add a bag-info file
    createFileFromString(bagName + "/bag-info.txt", generateInfoFile(pubRequest, oremap));

    log.info("Creating bag: " + bagName);

    ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(outputStream);

    // Add all the waiting contents - dirs created first, then data
    // files
    // are retrieved via URLs in parallel (defaults to one thread per
    // processor)
    // directly to the zip file
    // NOTE(review): if writeTo throws, zipArchiveOutputStream is never
    // closed — consider try-with-resources; left as-is here
    log.debug("Starting write");
    writeTo(zipArchiveOutputStream);
    log.info("Zipfile Written");
    // Finish
    zipArchiveOutputStream.close();
    log.debug("Closed");

    // Validate oremap - all entries are part of the collection
    for (int i = 0; i < resourceUsed.length; i++) {
        Boolean b = resourceUsed[i];
        if (b == null) {
            RO.sendStatus("Problem", pidMap.get(resourceIndex.get(i)) + " was not used");
        } else if (!b) {
            RO.sendStatus("Problem", pidMap.get(resourceIndex.get(i)) + " was not included successfully");
        } else {
            // Successfully included - now check for hash value and
            // generate if needed
            if (i > 0) { // Not root container
                if (!sha1Map.containsKey(pidMap.get(resourceIndex.get(i)))) {

                    if (!RO.childIsContainer(i - 1))
                        log.warn("Missing sha1 hash for: " + resourceIndex.get(i));
                    // FixMe - actually generate it before adding the
                    // oremap
                    // to the zip
                }
            }
        }

    }
    return true;

}

From source file:org.springframework.boot.gradle.tasks.bundling.BootZipCopyAction.java

/**
 * Processes the copy stream into the output zip, writing the optional launch
 * script and loader classes first. The zip stream is always closed; a failure
 * while opening/preparing it closes the file handle before reporting.
 */
@Override
public WorkResult execute(CopyActionProcessingStream stream) {
    ZipArchiveOutputStream zipStream;
    Spec<FileTreeElement> loaderEntries;
    FileOutputStream fileStream = null;
    try {
        fileStream = new FileOutputStream(this.output);
        writeLaunchScriptIfNecessary(fileStream);
        zipStream = new ZipArchiveOutputStream(fileStream);
        if (this.encoding != null) {
            zipStream.setEncoding(this.encoding);
        }
        loaderEntries = writeLoaderClassesIfNecessary(zipStream);
    } catch (IOException ex) {
        // Close the partially-opened file so the handle is not leaked
        // before reporting the failure (the original code leaked it here).
        if (fileStream != null) {
            try {
                fileStream.close();
            } catch (IOException ignored) {
                // best effort; the original failure is the one to report
            }
        }
        throw new GradleException("Failed to create " + this.output, ex);
    }
    try {
        stream.process(new ZipStreamAction(zipStream, this.output, this.preserveFileTimestamps,
                this.requiresUnpack, createExclusionSpec(loaderEntries), this.compressionResolver));
    } finally {
        try {
            zipStream.close();
        } catch (IOException ex) {
            // Continue - entries were processed; a close failure is non-fatal
        }
    }
    return () -> true;
}

From source file:org.structr.web.function.CreateArchiveFunction.java

/**
 * Builds a zip archive from the file/folder/collection given in
 * {@code sources[1]}, stores it as a new File node named
 * {@code sources[0] + ".zip"} and returns that node.
 *
 * @param ctx action context (used for locale/usage reporting)
 * @param caller calling entity, for error logging
 * @param sources [0] archive name, [1] File/Folder/Collection to archive,
 *            [2] optional node type for the created archive
 * @return the created archive node, a usage string on bad parameters, or
 *         null when an IOException occurred
 */
@Override
public Object apply(ActionContext ctx, Object caller, Object[] sources) throws FrameworkException {

    // Check the length FIRST: the original condition dereferenced sources[1]
    // before testing sources.length < 2, throwing ArrayIndexOutOfBoundsException
    // instead of reporting a parameter error for short argument lists.
    if (sources.length < 2 || !(sources[1] instanceof File || sources[1] instanceof Folder
            || sources[1] instanceof Collection)) {

        logParameterError(caller, sources, ctx.isJavaScriptContext());

        return usage(ctx.isJavaScriptContext());
    }

    final ConfigurationProvider config = StructrApp.getConfiguration();

    try {

        java.io.File newArchive = java.io.File.createTempFile(sources[0].toString(), "zip");

        // try-with-resources closes the archive stream on every path; the
        // original leaked it on the early usage() returns and on exceptions.
        try (ZipArchiveOutputStream zaps = new ZipArchiveOutputStream(newArchive)) {

            zaps.setEncoding("UTF8");
            zaps.setUseLanguageEncodingFlag(true);
            zaps.setCreateUnicodeExtraFields(ZipArchiveOutputStream.UnicodeExtraFieldPolicy.ALWAYS);
            zaps.setFallbackToUTF8(true);

            if (sources[1] instanceof File) {

                File file = (File) sources[1];
                addFileToZipArchive(file.getProperty(AbstractFile.name), file, zaps);

            } else if (sources[1] instanceof Folder) {

                Folder folder = (Folder) sources[1];
                addFilesToArchive(folder.getProperty(Folder.name) + "/", folder.getFiles(), zaps);
                addFoldersToArchive(folder.getProperty(Folder.name) + "/", folder.getFolders(), zaps);

            } else if (sources[1] instanceof Collection) {

                for (Object fileOrFolder : (Collection) sources[1]) {

                    if (fileOrFolder instanceof File) {

                        File file = (File) fileOrFolder;
                        addFileToZipArchive(file.getProperty(AbstractFile.name), file, zaps);
                    } else if (fileOrFolder instanceof Folder) {

                        Folder folder = (Folder) fileOrFolder;
                        addFilesToArchive(folder.getProperty(Folder.name) + "/", folder.getFiles(), zaps);
                        addFoldersToArchive(folder.getProperty(Folder.name) + "/", folder.getFolders(), zaps);
                    } else {

                        logParameterError(caller, sources, ctx.isJavaScriptContext());
                        return usage(ctx.isJavaScriptContext());
                    }
                }
            } else {

                logParameterError(caller, sources, ctx.isJavaScriptContext());
                return usage(ctx.isJavaScriptContext());
            }
        }

        // optional third argument selects the node type of the archive
        Class archiveClass = null;

        if (sources.length > 2) {

            archiveClass = config.getNodeEntityClass(sources[2].toString());

        }

        if (archiveClass == null) {

            archiveClass = org.structr.web.entity.File.class;
        }

        try (final FileInputStream fis = new FileInputStream(newArchive)) {
            return FileHelper.createFile(ctx.getSecurityContext(), fis, "application/zip", archiveClass,
                    sources[0].toString() + ".zip");
        }

    } catch (IOException e) {

        logException(caller, e, sources);
    }
    return null;
}

From source file:org.trustedanalytics.servicebroker.gearpump.service.file.FileHelper.java

/**
 * Wraps the given content in a single-entry zip archive held in memory.
 *
 * @param zipFileTestContent bytes to store under FILE_NAME inside the zip
 * @return the complete zip archive as a byte array
 * @throws IOException if writing the archive fails
 */
public static byte[] prepareZipFile(byte[] zipFileTestContent) throws IOException {
    // ByteArrayOutputStream.close() is a no-op, so only the zip stream needs
    // closing. try-with-resources replaces the old finally block, which could
    // throw a NullPointerException (masking the real failure) when
    // construction failed before zipOutput was assigned.
    ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
    try (ZipArchiveOutputStream zipOutput = new ZipArchiveOutputStream(byteOutput)) {
        ZipArchiveEntry entry = new ZipArchiveEntry(FILE_NAME);
        // the entry size must be declared before the data is written
        entry.setSize(zipFileTestContent.length);
        addArchiveEntry(zipOutput, entry, zipFileTestContent);
    }

    return byteOutput.toByteArray();
}

From source file:org.waarp.common.tar.ZipUtility.java

/**
 * Create a new Zip from a root directory
 * /*from w w w  .j  av  a 2  s  . c  o  m*/
 * @param directory
 *            the base directory
 * @param filename
 *            the output filename
 * @param absolute
 *            store absolute filepath (from directory) or only filename
 * @return True if OK
 */
public static boolean createZipFromDirectory(String directory, String filename, boolean absolute) {
    File rootDir = new File(directory);
    File saveFile = new File(filename);
    // recursive call
    ZipArchiveOutputStream zaos;
    try {
        zaos = new ZipArchiveOutputStream(new FileOutputStream(saveFile));
    } catch (FileNotFoundException e) {
        return false;
    }
    try {
        recurseFiles(rootDir, rootDir, zaos, absolute);
    } catch (IOException e2) {
        try {
            zaos.close();
        } catch (IOException e) {
            // ignore
        }
        return false;
    }
    try {
        zaos.finish();
    } catch (IOException e1) {
        // ignore
    }
    try {
        zaos.flush();
    } catch (IOException e) {
        // ignore
    }
    try {
        zaos.close();
    } catch (IOException e) {
        // ignore
    }
    return true;
}

From source file:org.waarp.common.tar.ZipUtility.java

/**
 * Create a new Zip from an array of Files (only name of files will be used)
 * /*  w w w  . j a v  a2s  .com*/
 * @param files
 *            array of files to add
 * @param filename
 *            the output filename
 * @return True if OK
 */
public static boolean createZipFromFiles(File[] files, String filename) {
    File saveFile = new File(filename);
    ZipArchiveOutputStream zaos;
    try {
        zaos = new ZipArchiveOutputStream(new FileOutputStream(saveFile));
    } catch (FileNotFoundException e) {
        return false;
    }
    for (File file : files) {
        try {
            addFile(file, zaos);
        } catch (IOException e) {
            try {
                zaos.close();
            } catch (IOException e1) {
                // ignore
            }
            return false;
        }
    }
    try {
        zaos.finish();
    } catch (IOException e1) {
        // ignore
    }
    try {
        zaos.flush();
    } catch (IOException e) {
        // ignore
    }
    try {
        zaos.close();
    } catch (IOException e) {
        // ignore
    }
    return true;
}

From source file:org.wso2.carbon.connector.util.FileCompressUtil.java

/**
 * Compress the files based on the archive type (.tar.gz or .zip is appended
 * to the given target file's canonical path).
 *
 * @param files collection of File objects to add to the archive
 * @param file base name of the archive to create
 * @param archiveType which archive format to produce
 * @throws IOException if creating or writing the archive fails
 */
public void compressFiles(Collection files, File file, ArchiveType archiveType) throws IOException {
    log.info("Compressing " + files.size() + " to " + file.getAbsoluteFile());
    switch (archiveType) {
    case TAR_GZIP:
        // try-with-resources closes the tar stream (and the wrapped gzip,
        // buffered and file streams) even when adding an entry throws;
        // the original code leaked all of them on exception
        try (TarArchiveOutputStream taos = new TarArchiveOutputStream(new GZIPOutputStream(
                new BufferedOutputStream(new FileOutputStream(new File(file.getCanonicalPath() + ".tar" + ".gz")))))) {
            // TAR has an 8 gig file limit by default, this gets around that
            taos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
            // TAR originally didn't support long file names, so enable the
            // support for it
            taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            // Get to putting all the files in the compressed output file
            for (Object element : files) {
                addFilesToCompression(taos, (File) element, ".", ArchiveType.TAR_GZIP);
            }
        }
        break;
    case ZIP:
        try (ZipArchiveOutputStream zaos = new ZipArchiveOutputStream(
                new BufferedOutputStream(new FileOutputStream(new File(file.getCanonicalPath() + ".zip"))))) {
            zaos.setEncoding("UTF-8");
            zaos.setCreateUnicodeExtraFields(ZipArchiveOutputStream.UnicodeExtraFieldPolicy.ALWAYS);

            // Get to putting all the files in the compressed output file
            for (Object element : files) {
                addFilesToCompression(zaos, (File) element, ".", ArchiveType.ZIP);
            }
        }
        break;
    }
}

From source file:org.zuinnote.hadoop.office.format.common.writer.msexcel.internal.EncryptedZipEntrySource.java

/**
 * Copies every entry of the supplied zip stream into a temp file, running the
 * data through {@code ciEncoder} when one is configured, then reopens the temp
 * file as {@code this.zipFile}.
 *
 * @param is zip data to (re)write; closed quietly after a successful copy
 * @throws IOException if reading, writing or reopening the zip fails
 */
public void setInputStream(InputStream is) throws IOException {
    this.tmpFile = TempFile.createTempFile("hadoopoffice-protected", ".zip");

    // try-with-resources closes both zip streams (and the underlying file
    // stream) even when the copy loop throws; the original leaked them all
    // on any exception
    try (ZipArchiveInputStream zis = new ZipArchiveInputStream(is);
            ZipArchiveOutputStream zos = new ZipArchiveOutputStream(new FileOutputStream(tmpFile))) {
        ZipArchiveEntry ze;
        while ((ze = (ZipArchiveEntry) zis.getNextEntry()) != null) {
            // rewrite zip entries to match the size of the encrypted data (with padding)
            ZipArchiveEntry zeNew = new ZipArchiveEntry(ze.getName());
            zeNew.setComment(ze.getComment());
            zeNew.setExtra(ze.getExtra());
            zeNew.setTime(ze.getTime());
            zos.putArchiveEntry(zeNew);
            // shield the shared zip stream from the close() calls below
            FilterOutputStream fos2 = new FilterOutputStream(zos) {
                // do not close underlying ZipOutputStream
                @Override
                public void close() {
                }
            };
            OutputStream nos;
            if (this.ciEncoder != null) { // encrypt if needed
                nos = new CipherOutputStream(fos2, this.ciEncoder);
            } else { // do not encrypt
                nos = fos2;
            }
            IOUtils.copy(zis, nos);
            // closing nos flushes the cipher's final block; fos2's close is a
            // no-op by design (and fos2 can never be null, so no null check)
            nos.close();
            fos2.close();
            zos.closeArchiveEntry();
        }
    }
    IOUtils.closeQuietly(is);
    this.zipFile = new ZipFile(this.tmpFile);

}