Example usage for org.apache.commons.compress.archivers.zip ZipArchiveOutputStream ZipArchiveOutputStream

Introduction

This page collects example usages of the org.apache.commons.compress.archivers.zip ZipArchiveOutputStream constructor ZipArchiveOutputStream(File).

Prototype

public ZipArchiveOutputStream(File file) throws IOException 

Document

Creates a new ZIP OutputStream writing to a File.
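
For quick reference, here is a minimal, self-contained sketch of this constructor in action. It is not taken from the sources below; the archive name and entry content are illustrative.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;

public class ZipArchiveOutputStreamSketch {
    public static void main(String[] args) throws IOException {
        // The File-based constructor lets the stream use random access on the
        // target file where possible, instead of wrapping a plain OutputStream.
        try (ZipArchiveOutputStream zipOut = new ZipArchiveOutputStream(new File("example.zip"))) {
            zipOut.putArchiveEntry(new ZipArchiveEntry("readme.txt"));
            zipOut.write("hello".getBytes(StandardCharsets.UTF_8));
            zipOut.closeArchiveEntry();
        }
    }
}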

Usage

From source file:org.sakaiproject.archive.impl.SiteZipper.java

/**
 * Zip a site archive. It is stored back in the zip directory.
 * @param siteId         site that has already been archived
 * @param m_storagePath  path to where the archives are
 * @return true if the archive was zipped successfully
 * @throws IOException
 */
public boolean zipArchive(String siteId, String m_storagePath) throws IOException {

    //get path to archive dir for this site
    //suffix of -archive is hardcoded as per archive service
    String archivePath = m_storagePath + siteId + "-archive";

    //setup timestamp
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
    String timestamp = dateFormat.format(Calendar.getInstance().getTime());

    //create path to compressed archive
    String compressedArchivePath = m_storagePath + siteId + "-" + timestamp + ".zip";
    File zipFile = new File(compressedArchivePath);

    if (!zipFile.exists()) {
        log.info("Creating zip file: " + compressedArchivePath);
        zipFile.createNewFile();
    }

    FileOutputStream fOut = null;
    FileInputStream zip = null;
    BufferedOutputStream bOut = null;
    ZipArchiveOutputStream zOut = null;

    try {
        fOut = new FileOutputStream(zipFile);
        bOut = new BufferedOutputStream(fOut);
        zOut = new ZipArchiveOutputStream(bOut);
        addFileToZip(zOut, archivePath, ""); //add the directory which will then add all files recursively

        //create a sha1 hash of the zip
        String hashPath = m_storagePath + siteId + "-" + timestamp + ".sha1";
        log.info("Creating hash: " + hashPath);
        zip = new FileInputStream(compressedArchivePath);
        String hash = DigestUtils.sha1Hex(zip);
        FileUtils.writeStringToFile(new File(hashPath), hash);
    } finally {
        // Null checks avoid an NPE (which would mask the original exception) if stream setup failed
        if (zOut != null) { zOut.finish(); zOut.close(); }
        if (bOut != null) bOut.close();
        if (fOut != null) fOut.close();
        if (zip != null) zip.close();
    }

    return true;
}
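
Since ZipArchiveOutputStream implements Closeable, the same zip-then-hash flow can also be written with try-with-resources so that the streams are closed even if zipping fails. The sketch below is not part of the Sakai source; it reuses the example's own variables and its addFileToZip helper.

    try (FileOutputStream fOut = new FileOutputStream(zipFile);
            BufferedOutputStream bOut = new BufferedOutputStream(fOut);
            ZipArchiveOutputStream zOut = new ZipArchiveOutputStream(bOut)) {
        // addFileToZip is the recursive helper used in the example above
        addFileToZip(zOut, archivePath, "");
    }
    try (FileInputStream zip = new FileInputStream(compressedArchivePath)) {
        String hash = DigestUtils.sha1Hex(zip);
        FileUtils.writeStringToFile(new File(m_storagePath + siteId + "-" + timestamp + ".sha1"), hash);
    }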

From source file:org.sead.nds.repository.BagGenerator.java

public boolean generateBag(OutputStream outputStream) throws Exception {
    log.info("Generating: Bag to the Future!");
    pubRequest = RO.getPublicationRequest();
    RO.sendStatus(C3PRPubRequestFacade.PENDING_STAGE, Repository.getID() + " is now processing this request");

    File tmp = File.createTempFile("sead-scatter-dirs", "tmp");
    dirs = ScatterZipOutputStream.fileBased(tmp);

    JSONObject oremap = RO.getOREMap();
    JSONObject aggregation = oremap.getJSONObject("describes");

    // Transfer statistics to oremap for preservation - note that the number of
    // files and total size are checked after the zip is written, so any error
    // will be recorded in the zip but caught in the log. Other elements are
    // not currently checked.
    JSONObject aggStats = ((JSONObject) pubRequest.get("Aggregation Statistics"));
    aggregation.put("Aggregation Statistics", aggStats);

    if (((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).has("License")) {
        license = ((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).getString("License");

    }
    // Accept license preference and add it as the license on the
    // aggregation
    aggregation.put("License", license);

    if (((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).has("Purpose")) {
        purpose = ((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).getString("Purpose");

    }
    // Accept the purpose and add it to the map and aggregation (both are
    // for this purpose)
    aggregation.put("Purpose", purpose);
    oremap.put("Purpose", purpose);

    // check whether Access Rights set, if so, add it to aggregation
    if (((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES)).has("Access Rights")) {
        String accessRights = ((JSONObject) pubRequest.get(PubRequestFacade.PREFERENCES))
                .getString("Access Rights");
        aggregation.put("Access Rights", accessRights);
    }

    bagID = aggregation.getString("Identifier");
    String bagName = bagID;
    try {
        // Create valid filename from identifier and extend path with
        // two levels of hash-based subdirs to help distribute files
        bagName = getValidName(bagName);
    } catch (Exception e) {
        log.error("Couldn't create valid filename: " + e.getLocalizedMessage());
        return false;
    }
    // Create data dir in bag, also creates parent bagName dir
    String currentPath = bagName + "/data/";
    createDir(currentPath);

    aggregates = aggregation.getJSONArray("aggregates");

    if (aggregates != null) {
        // Add container and data entries
        // Setup global index of the aggregation and all aggregated
        // resources by Identifier
        resourceIndex = indexResources(bagID, aggregates);
        // Setup global list of succeed(true), fail(false), notused
        // (null) flags
        resourceUsed = new Boolean[aggregates.length() + 1];
        // Process current container (the aggregation itself) and its
        // children
        processContainer(aggregation, currentPath);
    }
    // Create manifest files
    // pid-mapping.txt - a DataOne recommendation to connect ids and
    // in-bag path/names
    StringBuffer pidStringBuffer = new StringBuffer();
    boolean first = true;
    for (Entry<String, String> pidEntry : pidMap.entrySet()) {
        if (!first) {
            pidStringBuffer.append("\n");
        } else {
            first = false;
        }
        pidStringBuffer.append(pidEntry.getKey() + " " + pidEntry.getValue());
    }
    createFileFromString(bagName + "/pid-mapping.txt", pidStringBuffer.toString());
    // Hash manifest - a hash manifest is required
    // by the BagIt spec
    StringBuffer sha1StringBuffer = new StringBuffer();
    first = true;
    for (Entry<String, String> sha1Entry : sha1Map.entrySet()) {
        if (!first) {
            sha1StringBuffer.append("\n");
        } else {
            first = false;
        }
        sha1StringBuffer.append(sha1Entry.getValue() + " " + sha1Entry.getKey());
    }
    if (hashtype != null) {
        String manifestName = bagName + "/manifest-";
        if (hashtype.equals("SHA1 Hash")) {
            manifestName = manifestName + "sha1.txt";
        } else if (hashtype.equals("SHA512 Hash")) {
            manifestName = manifestName + "sha512.txt";
        } else {
            log.warn("Unsupported Hash type: " + hashtype);
        }
        createFileFromString(manifestName, sha1StringBuffer.toString());
    } else {
        log.warn("No Hash values sent - Bag File does not meet BagIT specification requirement");
    }
    // bagit.txt - Required by spec
    createFileFromString(bagName + "/bagit.txt", "BagIt-Version: 0.97\nTag-File-Character-Encoding: UTF-8");

    if (oremap.getJSONObject("describes").has("Creator")) {
        aggregation.put("Creator",
                RO.expandPeople(RO.normalizeValues(oremap.getJSONObject("describes").get("Creator"))));
    }
    if (oremap.getJSONObject("describes").has("Contact")) {
        aggregation.put("Contact",
                RO.expandPeople(RO.normalizeValues(oremap.getJSONObject("describes").get("Contact"))));
    }

    // Generate DOI:
    oremap.getJSONObject("describes").put(PubRequestFacade.EXTERNAL_IDENTIFIER,
            Repository.createDOIForRO(bagID, RO));

    oremap.getJSONObject("describes").put("Publication Date",
            new SimpleDateFormat("yyyy-MM-dd").format(Calendar.getInstance().getTime()));

    Object context = oremap.get("@context");
    // FixMe - should test that these labels don't have a different
    // definition (currently we're just checking to see if they are
    // already defined)
    addIfNeeded(context, "License", "http://purl.org/dc/terms/license");
    addIfNeeded(context, "Purpose", "http://sead-data.net/vocab/publishing#Purpose");
    addIfNeeded(context, "Access Rights", "http://purl.org/dc/terms/accessRights");
    addIfNeeded(context, PubRequestFacade.EXTERNAL_IDENTIFIER, "http://purl.org/dc/terms/identifier");
    addIfNeeded(context, "Publication Date", "http://purl.org/dc/terms/issued");

    // Aggregation Statistics
    // For keys in Agg Stats:
    for (String key : ((Set<String>) aggStats.keySet())) {
        addIfNeeded(context, key, getURIForKey(pubRequest.get("@context"), key));
    }

    oremap.put("@id", linkRewriter.rewriteOREMapLink(oremap.getString("@id"), bagID));
    aggregation.put("@id", linkRewriter.rewriteAggregationLink(aggregation.getString("@id"), bagID));
    // Serialize oremap itself (pretty printed) - SEAD recommendation
    // (DataOne distributes metadata files within the bag)
    // FixMe - add missing hash values if needed and update context
    // (read and cache files or read twice?)
    createFileFromString(bagName + "/oremap.jsonld.txt", oremap.toString(2));

    // Add a bag-info file
    createFileFromString(bagName + "/bag-info.txt", generateInfoFile(pubRequest, oremap));

    log.info("Creating bag: " + bagName);

    ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(outputStream);

    // Add all the waiting contents - dirs are created first, then data files
    // are retrieved via URLs in parallel (defaults to one thread per processor)
    // directly to the zip file
    log.debug("Starting write");
    writeTo(zipArchiveOutputStream);
    log.info("Zipfile Written");
    // Finish
    zipArchiveOutputStream.close();
    log.debug("Closed");

    // Validate oremap - all entries are part of the collection
    for (int i = 0; i < resourceUsed.length; i++) {
        Boolean b = resourceUsed[i];
        if (b == null) {
            RO.sendStatus("Problem", pidMap.get(resourceIndex.get(i)) + " was not used");
        } else if (!b) {
            RO.sendStatus("Problem", pidMap.get(resourceIndex.get(i)) + " was not included successfully");
        } else {
            // Successfully included - now check for hash value and
            // generate if needed
            if (i > 0) { // Not root container
                if (!sha1Map.containsKey(pidMap.get(resourceIndex.get(i)))) {

                    if (!RO.childIsContainer(i - 1))
                        log.warn("Missing sha1 hash for: " + resourceIndex.get(i));
                    // FixMe - actually generate it before adding the
                    // oremap to the zip
                }
            }
        }

    }
    return true;

}
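
The BagGenerator above queues entries into scatter streams and then calls writeTo(zipArchiveOutputStream) to assemble the final archive. A minimal, self-contained sketch of the Commons Compress scatter/gather pattern it builds on, using ParallelScatterZipCreator instead of the project's own ScatterZipOutputStream wiring (entry name and content are illustrative):

import java.io.ByteArrayInputStream;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;

import org.apache.commons.compress.archivers.zip.ParallelScatterZipCreator;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;

public class ScatterZipSketch {
    public static void main(String[] args) throws Exception {
        ParallelScatterZipCreator creator = new ParallelScatterZipCreator();

        // Entries queued with the creator must declare a compression method.
        ZipArchiveEntry entry = new ZipArchiveEntry("data/hello.txt");
        entry.setMethod(ZipEntry.DEFLATED);
        creator.addArchiveEntry(entry,
                () -> new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8)));

        // Entries are compressed in parallel, then written out sequentially.
        try (ZipArchiveOutputStream zipOut = new ZipArchiveOutputStream(new File("sketch.zip"))) {
            creator.writeTo(zipOut);
        }
    }
}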

From source file:org.seasr.meandre.components.tools.io.WriteArchive.java

@Override
public void executeCallBack(ComponentContext cc) throws Exception {
    componentInputCache.storeIfAvailable(cc, IN_LOCATION);
    componentInputCache.storeIfAvailable(cc, IN_FILE_NAME);
    componentInputCache.storeIfAvailable(cc, IN_DATA);

    if (archiveStream == null && componentInputCache.hasData(IN_LOCATION)) {
        Object input = componentInputCache.retrieveNext(IN_LOCATION);
        if (input instanceof StreamDelimiter)
            throw new ComponentExecutionException(
                    String.format("Stream delimiters should not arrive on port '%s'!", IN_LOCATION));

        String location = DataTypeParser.parseAsString(input)[0];
        if (appendExtension)
            location += String.format(".%s", archiveFormat);
        outputFile = getLocation(location, defaultFolder);
        File parentDir = outputFile.getParentFile();

        if (!parentDir.exists()) {
            if (parentDir.mkdirs())
                console.finer("Created directory: " + parentDir);
        } else if (!parentDir.isDirectory())
            throw new IOException(parentDir.toString() + " must be a directory!");

        if (appendTimestamp) {
            String name = outputFile.getName();
            String timestamp = new SimpleDateFormat(timestampFormat).format(new Date());

            int pos = name.lastIndexOf(".");
            if (pos < 0)
                name += "_" + timestamp;
            else
                name = String.format("%s_%s%s", name.substring(0, pos), timestamp, name.substring(pos));

            outputFile = new File(parentDir, name);
        }

        console.fine(String.format("Writing file %s", outputFile));

        if (archiveFormat.equals("zip")) {
            archiveStream = new ZipArchiveOutputStream(outputFile);
            ((ZipArchiveOutputStream) archiveStream).setLevel(Deflater.BEST_COMPRESSION);
        } else if (archiveFormat.equals("tar") || archiveFormat.equals("tgz")) {
            OutputStream fileStream = new BufferedOutputStream(new FileOutputStream(outputFile));
            if (archiveFormat.equals("tgz"))
                fileStream = new GzipCompressorOutputStream(fileStream);
            archiveStream = new TarArchiveOutputStream(fileStream);
        }
    }

    // Return if we haven't received a zip or tar location yet
    if (archiveStream == null)
        return;

    while (componentInputCache.hasDataAll(new String[] { IN_FILE_NAME, IN_DATA })) {
        Object inFileName = componentInputCache.retrieveNext(IN_FILE_NAME);
        Object inData = componentInputCache.retrieveNext(IN_DATA);

        // check for StreamInitiator
        if (inFileName instanceof StreamInitiator || inData instanceof StreamInitiator) {
            if (inFileName instanceof StreamInitiator && inData instanceof StreamInitiator) {
                StreamInitiator siFileName = (StreamInitiator) inFileName;
                StreamInitiator siData = (StreamInitiator) inData;

                if (siFileName.getStreamId() != siData.getStreamId())
                    throw new ComponentExecutionException("Unequal stream ids received!!!");

                if (siFileName.getStreamId() == streamId)
                    isStreaming = true;
                else
                    // Forward the delimiter(s)
                    cc.pushDataComponentToOutput(OUT_LOCATION, siFileName);

                continue;
            } else
                throw new ComponentExecutionException("Unbalanced StreamDelimiter received!");
        }

        // check for StreamTerminator
        if (inFileName instanceof StreamTerminator || inData instanceof StreamTerminator) {
            if (inFileName instanceof StreamTerminator && inData instanceof StreamTerminator) {
                StreamTerminator stFileName = (StreamTerminator) inFileName;
                StreamTerminator stData = (StreamTerminator) inData;

                if (stFileName.getStreamId() != stData.getStreamId())
                    throw new ComponentExecutionException("Unequal stream ids received!!!");

                if (stFileName.getStreamId() == streamId) {
                    // end of stream reached
                    closeArchiveAndPushOutput();
                    isStreaming = false;
                    break;
                } else {
                    // Forward the delimiter(s)
                    if (isStreaming)
                        console.warning(
                                "Likely streaming error - received StreamTerminator for a different stream id than the current active stream! - forwarding it");
                    cc.pushDataComponentToOutput(OUT_LOCATION, stFileName);
                    continue;
                }
            } else
                throw new ComponentExecutionException("Unbalanced StreamDelimiter received!");
        }

        byte[] entryData = null;

        if (inData instanceof byte[] || inData instanceof Bytes)
            entryData = DataTypeParser.parseAsByteArray(inData);

        else if (inData instanceof Document) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DOMUtils.writeXML((Document) inData, baos, outputProperties);
            entryData = baos.toByteArray();
        } else
            entryData = DataTypeParser.parseAsString(inData)[0].getBytes("UTF-8");

        String entryName = DataTypeParser.parseAsString(inFileName)[0];

        console.fine(String.format("Adding %s entry: %s", archiveFormat.toUpperCase(), entryName));

        ArchiveEntry entry = null;
        if (archiveFormat.equals("zip"))
            entry = new ZipArchiveEntry(entryName);

        else if (archiveFormat.equals("tar") || archiveFormat.equals("tgz")) {
            entry = new TarArchiveEntry(entryName);
            ((TarArchiveEntry) entry).setSize(entryData.length);
        }

        archiveStream.putArchiveEntry(entry);
        archiveStream.write(entryData);
        archiveStream.closeArchiveEntry();

        if (!isStreaming) {
            closeArchiveAndPushOutput();
            break;
        }
    }
}

From source file:org.sigmah.server.file.impl.BackupArchiveJob.java

/**
 * {@inheritDoc}
 */
@Override
public void run() {

    final Path tempArchiveFile = arguments.tempArchiveFile;
    final Path finalArchiveFile = arguments.finalArchiveFile;

    try (final ZipArchiveOutputStream zipOutputStream = new ZipArchiveOutputStream(
            Files.newOutputStream(tempArchiveFile))) {

        zipOutputStream.setMethod(ZipOutputStream.DEFLATED);
        zipOutputStream.setLevel(Deflater.BEST_COMPRESSION);

        final RepositoryElement repository = buildOrgUnitRepository(arguments.backup, arguments.userId);
        repository.setName("");

        zipRepository(repository, zipOutputStream, "");

        // TODO Delete existing previous organization file(s).

        // Renames temporary '.tmp' file to complete '.zip' file.
        Files.move(tempArchiveFile, finalArchiveFile, StandardCopyOption.REPLACE_EXISTING);

    } catch (final Throwable t) {

        if (LOG.isErrorEnabled()) {
            LOG.error("An error occurred during backup archive generation process.", t);
        }

        try {

            Files.deleteIfExists(tempArchiveFile);
            Files.deleteIfExists(finalArchiveFile);

        } catch (final IOException e) {
            if (LOG.isErrorEnabled()) {
                LOG.error("An error occurred while deleting archive error file.", e);
            }
        }
    }
}

From source file:org.silverpeas.core.util.ZipUtil.java

/**
 * Compress a file into a zip file.
 *
 * @param filePath the path of the file to compress
 * @param zipFilePath the path of the zip file to create
 * @return the size in bytes of the created zip file
 * @throws IOException
 */
public static long compressFile(String filePath, String zipFilePath) throws IOException {
    try (ZipArchiveOutputStream zos = new ZipArchiveOutputStream(new FileOutputStream(zipFilePath));
            InputStream in = new FileInputStream(filePath)) {
        zos.setFallbackToUTF8(true);
        zos.setCreateUnicodeExtraFields(NOT_ENCODEABLE);
        zos.setEncoding(Charsets.UTF_8.name());
        String entryName = FilenameUtils.getName(filePath);
        entryName = entryName.replace(File.separatorChar, '/');
        zos.putArchiveEntry(new ZipArchiveEntry(entryName));
        IOUtils.copy(in, zos);
        zos.closeArchiveEntry();
        return new File(zipFilePath).length();
    }
}

From source file:org.silverpeas.core.util.ZipUtil.java

/**
 * Recursively compresses a folder into a zip file.
 *
 * @param folderToZip the folder to compress
 * @param zipFile the zip file to create
 * @return the size in bytes of the generated zip file in bytes
 * @throws FileNotFoundException
 * @throws IOException
 */
public static long compressPathToZip(File folderToZip, File zipFile) throws IOException {
    try (ZipArchiveOutputStream zos = new ZipArchiveOutputStream(new FileOutputStream(zipFile))) {
        zos.setFallbackToUTF8(true);
        zos.setCreateUnicodeExtraFields(NOT_ENCODEABLE);
        zos.setEncoding(Charsets.UTF_8.name());
        Collection<File> folderContent = FileUtils.listFiles(folderToZip, null, true);
        for (File file : folderContent) {
            String entryName = file.getPath().substring(folderToZip.getParent().length() + 1);
            entryName = FilenameUtils.separatorsToUnix(entryName);
            zos.putArchiveEntry(new ZipArchiveEntry(entryName));
            try (InputStream in = new FileInputStream(file)) {
                IOUtils.copy(in, zos);
                zos.closeArchiveEntry();
            }
        }
    }
    return zipFile.length();
}

From source file:org.silverpeas.core.util.ZipUtil.java

/**
 * Creates and populates a zip file directly from an input stream.
 *
 * @param inputStream the data stream to store in the zip
 * @param filePathNameToCreate the path and name, inside the zip, of the file carrying the
 * stream's data
 * @param outfilename the path and name of the zip file to create or complete
 * @throws IOException
 */
public static void compressStreamToZip(InputStream inputStream, String filePathNameToCreate, String outfilename)
        throws IOException {
    try (ZipArchiveOutputStream zos = new ZipArchiveOutputStream(new FileOutputStream(outfilename))) {
        zos.setFallbackToUTF8(true);
        zos.setCreateUnicodeExtraFields(NOT_ENCODEABLE);
        zos.setEncoding("UTF-8");
        zos.putArchiveEntry(new ZipArchiveEntry(filePathNameToCreate));
        IOUtils.copy(inputStream, zos);
        zos.closeArchiveEntry();
    }
}
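
A short usage sketch for the three ZipUtil helpers shown above (not part of the Silverpeas source; the file paths are illustrative):

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.silverpeas.core.util.ZipUtil;

public class ZipUtilUsageSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative paths only; compressFile and compressPathToZip return
        // the size in bytes of the zip they wrote.
        long fileZipSize = ZipUtil.compressFile("/tmp/report.txt", "/tmp/report.zip");
        long folderZipSize = ZipUtil.compressPathToZip(new File("/tmp/exports"), new File("/tmp/exports.zip"));
        try (InputStream in = new FileInputStream("/tmp/report.txt")) {
            ZipUtil.compressStreamToZip(in, "docs/report.txt", "/tmp/stream.zip");
        }
        System.out.println(fileZipSize + " / " + folderZipSize + " bytes written");
    }
}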

From source file:org.sourcepit.tools.shared.resources.internal.mojo.ZipUtils.java

public static void zip(File directory, File archive, String encoding) throws IOException {
    createFileOnDemand(archive);
    final int pathOffset = getAbsolutePathLength(directory);
    final ZipArchiveOutputStream zipOut = new ZipArchiveOutputStream(archive);
    zipOut.setEncoding(encoding);
    try {
        for (File file : directory.listFiles()) {
            appendFileOrDirectory(pathOffset, zipOut, file);
        }
    } finally {
        IOUtils.closeQuietly(zipOut);
    }
}

From source file:org.springframework.boot.gradle.tasks.bundling.BootZipCopyAction.java

@Override
public WorkResult execute(CopyActionProcessingStream stream) {
    ZipArchiveOutputStream zipStream;
    Spec<FileTreeElement> loaderEntries;
    try {
        FileOutputStream fileStream = new FileOutputStream(this.output);
        writeLaunchScriptIfNecessary(fileStream);
        zipStream = new ZipArchiveOutputStream(fileStream);
        if (this.encoding != null) {
            zipStream.setEncoding(this.encoding);
        }
        loaderEntries = writeLoaderClassesIfNecessary(zipStream);
    } catch (IOException ex) {
        throw new GradleException("Failed to create " + this.output, ex);
    }
    try {
        stream.process(new ZipStreamAction(zipStream, this.output, this.preserveFileTimestamps,
                this.requiresUnpack, createExclusionSpec(loaderEntries), this.compressionResolver));
    } finally {
        try {
            zipStream.close();
        } catch (IOException ex) {
            // Continue
        }
    }
    return () -> true;
}

From source file:org.structr.web.function.CreateArchiveFunction.java

@Override
public Object apply(ActionContext ctx, Object caller, Object[] sources) throws FrameworkException {

    // Check the argument count before indexing sources[1]
    if (sources.length < 2 || !(sources[1] instanceof File || sources[1] instanceof Folder
            || sources[1] instanceof Collection)) {

        logParameterError(caller, sources, ctx.isJavaScriptContext());

        return usage(ctx.isJavaScriptContext());
    }

    final ConfigurationProvider config = StructrApp.getConfiguration();

    try {

        java.io.File newArchive = java.io.File.createTempFile(sources[0].toString(), "zip");

        ZipArchiveOutputStream zaps = new ZipArchiveOutputStream(newArchive);
        zaps.setEncoding("UTF8");
        zaps.setUseLanguageEncodingFlag(true);
        zaps.setCreateUnicodeExtraFields(ZipArchiveOutputStream.UnicodeExtraFieldPolicy.ALWAYS);
        zaps.setFallbackToUTF8(true);

        if (sources[1] instanceof File) {

            File file = (File) sources[1];
            addFileToZipArchive(file.getProperty(AbstractFile.name), file, zaps);

        } else if (sources[1] instanceof Folder) {

            Folder folder = (Folder) sources[1];
            addFilesToArchive(folder.getProperty(Folder.name) + "/", folder.getFiles(), zaps);
            addFoldersToArchive(folder.getProperty(Folder.name) + "/", folder.getFolders(), zaps);

        } else if (sources[1] instanceof Collection) {

            for (Object fileOrFolder : (Collection) sources[1]) {

                if (fileOrFolder instanceof File) {

                    File file = (File) fileOrFolder;
                    addFileToZipArchive(file.getProperty(AbstractFile.name), file, zaps);
                } else if (fileOrFolder instanceof Folder) {

                    Folder folder = (Folder) fileOrFolder;
                    addFilesToArchive(folder.getProperty(Folder.name) + "/", folder.getFiles(), zaps);
                    addFoldersToArchive(folder.getProperty(Folder.name) + "/", folder.getFolders(), zaps);
                } else {

                    logParameterError(caller, sources, ctx.isJavaScriptContext());
                    return usage(ctx.isJavaScriptContext());
                }
            }
        } else {

            logParameterError(caller, sources, ctx.isJavaScriptContext());
            return usage(ctx.isJavaScriptContext());
        }

        zaps.close();

        Class archiveClass = null;

        if (sources.length > 2) {

            archiveClass = config.getNodeEntityClass(sources[2].toString());

        }

        if (archiveClass == null) {

            archiveClass = org.structr.web.entity.File.class;
        }

        try (final FileInputStream fis = new FileInputStream(newArchive)) {
            return FileHelper.createFile(ctx.getSecurityContext(), fis, "application/zip", archiveClass,
                    sources[0].toString() + ".zip");
        }

    } catch (IOException e) {

        logException(caller, e, sources);
    }
    return null;
}