Example usage for org.apache.commons.compress.archivers ArchiveStreamFactory TAR

List of usage examples for org.apache.commons.compress.archivers ArchiveStreamFactory TAR

Introduction

In this page you can find the example usage for org.apache.commons.compress.archivers ArchiveStreamFactory TAR.

Prototype

String TAR

To view the source code for org.apache.commons.compress.archivers ArchiveStreamFactory TAR, follow the link below.

Click Source Link

Document

Constant used to identify the TAR archive format.

Usage

From source file:org.openbaton.marketplace.core.VNFPackageManagement.java

/**
 * Composes a TAR archive for the VNF package identified by {@code id}.
 *
 * <p>The archive contains three kinds of entries: a generated
 * {@code Metadata.yaml} describing the package, the VNFD serialized as
 * {@code vnfd.json}, and one {@code scripts/<name>} entry per script of the
 * package. Each entry is staged in a temporary file before being copied into
 * the archive.
 *
 * @param id id of the {@link VNFPackageMetadata} to compose
 * @return an in-memory stream holding the finished TAR archive
 * @throws IOException      if a temp file or stream operation fails
 * @throws ArchiveException if the TAR output stream cannot be created
 */
public ByteArrayOutputStream compose(String id) throws IOException, ArchiveException {

    VNFPackageMetadata vnfPackageMetadata = vnfPackageMetadataRepository.findFirstById(id);
    String vnfPackageName = vnfPackageMetadata.getName();
    VirtualNetworkFunctionDescriptor vnfd = vnfPackageMetadata.getVnfd();
    VNFPackage vnfPackage = vnfPackageMetadata.getVnfPackage();
    ImageMetadata imageMetadata = vnfPackageMetadata.getImageMetadata();
    NFVImage nfvImage = vnfPackageMetadata.getNfvImage();
    String vnfdJson = mapper.toJson(vnfd);

    // image-config section: rename minDiskSpace -> minDisk, normalize minCPU
    // to an int, and strip internal persistence fields.
    HashMap<String, Object> imageConfigJson = new ObjectMapper().readValue(mapper.toJson(nfvImage),
            HashMap.class);
    imageConfigJson.put("minDisk", imageConfigJson.get("minDiskSpace"));
    Object minCPU = imageConfigJson.get("minCPU");
    if (minCPU != null) {
        // NOTE(review): assumes minCPU serializes as a String — confirm against NFVImage
        imageConfigJson.put("minCPU", Integer.parseInt((String) minCPU));
    } else {
        imageConfigJson.put("minCPU", 0);
    }
    imageConfigJson.remove("minDiskSpace");
    imageConfigJson.remove("id");
    imageConfigJson.remove("hb_version");

    // image section: keep the link, strip internal persistence fields.
    HashMap<String, String> imageMetadataJson = new ObjectMapper().readValue(mapper.toJson(imageMetadata),
            HashMap.class);
    imageMetadataJson.put("link", imageMetadata.getLink());
    imageMetadataJson.remove("id");
    imageMetadataJson.remove("hb_version");

    ByteArrayOutputStream tarOutput = new ByteArrayOutputStream();
    ArchiveOutputStream tar = new ArchiveStreamFactory()
            .createArchiveOutputStream(ArchiveStreamFactory.TAR, tarOutput);
    try {
        // Metadata.yaml
        File metadataFile = File.createTempFile("Metadata", null);
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("name", vnfPackageName);
        data.put("description", vnfPackageMetadata.getDescription());
        data.put("provider", vnfPackageMetadata.getProvider());
        data.put("requirements", vnfPackageMetadata.getRequirements());
        data.put("shared", vnfPackageMetadata.isShared());
        data.put("image", imageMetadataJson);
        data.put("image-config", imageConfigJson);
        data.put("scripts-link", vnfPackage.getScriptsLink());
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        Yaml yaml = new Yaml(options);
        FileWriter metadataWriter = new FileWriter(metadataFile);
        try {
            yaml.dump(data, metadataWriter);
        } finally {
            // BUG FIX: the writer was never closed before the file was read,
            // so unflushed YAML could yield an empty/truncated archive entry.
            metadataWriter.close();
        }
        addTarEntry(tar, metadataFile, "Metadata.yaml");

        // vnfd.json
        File vnfdFile = File.createTempFile("vnfd", null);
        FileWriter vnfdWriter = new FileWriter(vnfdFile);
        try {
            vnfdWriter.write(vnfdJson);
        } finally {
            vnfdWriter.close();
        }
        addTarEntry(tar, vnfdFile, "vnfd.json");

        // scripts/<name> entries
        for (Script script : vnfPackage.getScripts()) {
            File scriptFile = File.createTempFile("script", null);
            FileOutputStream scriptOut = new FileOutputStream(scriptFile);
            try {
                scriptOut.write(script.getPayload());
            } finally {
                scriptOut.close();
            }
            addTarEntry(tar, scriptFile, "scripts/" + script.getName());
        }

        // write the TAR trailer records
        tar.finish();
    } finally {
        // BUG FIX: the archive stream was never closed. Closing it also
        // closes the underlying ByteArrayOutputStream (a no-op for it).
        tar.close();
    }
    return tarOutput;
}

/**
 * Copies {@code source} into {@code tar} under {@code entryName}, closing the
 * file input stream afterwards (previously leaked) and removing the staged
 * temp file once consumed.
 */
private static void addTarEntry(ArchiveOutputStream tar, File source, String entryName) throws IOException {
    TarArchiveEntry entry = new TarArchiveEntry(source, entryName);
    entry.setSize(source.length());
    tar.putArchiveEntry(entry);
    FileInputStream in = new FileInputStream(source);
    try {
        IOUtils.copy(in, tar);
    } finally {
        in.close();
    }
    // close archive entry, write padding/trailer information
    tar.closeArchiveEntry();
    if (!source.delete()) {
        source.deleteOnExit();
    }
}

From source file:org.springframework.cloud.stream.app.tensorflow.util.ModelExtractor.java

/**
 * Infers the archive format and the compressor from a file name's extension.
 *
 * @param fileName file name including its extension
 * @return a two-element tuple {archive, compressor}; either slot is null when
 * that aspect is absent, and {null, null} means the file is neither archived
 * nor compressed.
 */
private String[] detectArchiveAndCompressor(String fileName) {

    final String name = fileName.trim().toLowerCase();

    // compressed tarballs and similar combined forms first, since a plain
    // suffix lookup would otherwise misclassify them
    if (endsWithAny(name, ".tar.gz", ".tgz", ".taz")) {
        return new String[] { ArchiveStreamFactory.TAR, CompressorStreamFactory.GZIP };
    }
    if (endsWithAny(name, ".tar.bz2", ".tbz2", ".tbz")) {
        return new String[] { ArchiveStreamFactory.TAR, CompressorStreamFactory.BZIP2 };
    }
    if (name.endsWith(".cpgz")) {
        return new String[] { ArchiveStreamFactory.CPIO, CompressorStreamFactory.GZIP };
    }

    // single-aspect cases: archive-only, then compressor-only
    if (hasArchive(name)) {
        return new String[] { findArchive(name).get(), null };
    }
    if (hasCompressor(name)) {
        return new String[] { null, findCompressor(name).get() };
    }
    if (name.endsWith(".gzip")) {
        return new String[] { null, CompressorStreamFactory.GZIP };
    }
    if (endsWithAny(name, ".bz2", ".bz")) {
        return new String[] { null, CompressorStreamFactory.BZIP2 };
    }

    // neither archived nor compressed
    return new String[] { null, null };
}

/** Returns true when {@code name} ends with any of the given suffixes. */
private static boolean endsWithAny(String name, String... suffixes) {
    for (String suffix : suffixes) {
        if (name.endsWith(suffix)) {
            return true;
        }
    }
    return false;
}

From source file:org.structr.websocket.command.UnarchiveCommand.java

/**
 * Handles an "unarchive" websocket command: looks up the file referenced by
 * the message id, verifies its extension names an archive format supported by
 * {@link ArchiveStreamFactory}, and then unarchives it outside a transaction
 * (bulk operation). Errors are reported back to the client as status 400.
 */
@Override
public void processMessage(WebSocketMessage webSocketData) {

    // Formats ArchiveStreamFactory can read; the constants are lower-case
    // strings ("ar", "tar", "zip", ...).
    final Set<String> supportedByArchiveStreamFactory = new HashSet<>(
            Arrays.asList(new String[] { ArchiveStreamFactory.AR, ArchiveStreamFactory.ARJ,
                    ArchiveStreamFactory.CPIO, ArchiveStreamFactory.DUMP, ArchiveStreamFactory.JAR,
                    ArchiveStreamFactory.TAR, ArchiveStreamFactory.ZIP }));

    final SecurityContext securityContext = getWebSocket().getSecurityContext();
    final App app = StructrApp.getInstance(securityContext);

    try {

        final String id = (String) webSocketData.getId();
        final File file;

        try (final Tx tx = app.tx()) {

            file = app.get(File.class, id);

            if (file == null) {
                getWebSocket().send(
                        MessageBuilder.status().code(400).message("File not found: ".concat(id)).build(), true);
                return;
            }

            // BUG FIX: compare case-insensitively — the supported set holds
            // lower-case format names, so e.g. "backup.TAR" was rejected.
            final String fileExtension = StringUtils.substringAfterLast(file.getName(), ".").toLowerCase();
            if (!supportedByArchiveStreamFactory.contains(fileExtension)) {

                getWebSocket().send(MessageBuilder.status().code(400)
                        .message("Unsupported archive format: ".concat(fileExtension)).build(), true);
                return;
            }

            tx.success();
        }

        // no transaction here since this is a bulk command
        unarchive(securityContext, file);

    } catch (Throwable t) {

        t.printStackTrace();

        String msg = t.toString();

        try (final Tx tx = app.tx()) {

            // report the failure back to the client
            getWebSocket().send(
                    MessageBuilder.status().code(400)
                            .message("Could not unarchive file: ".concat((msg != null) ? msg : "")).build(),
                    true);

            tx.success();

        } catch (FrameworkException ignore) {
            // best effort only: reporting the error may itself fail
        }

    }
}

From source file:org.voyanttools.trombone.input.expand.ArchiveExpander.java

/**
 * Expands an archived stored document source into its contained documents.
 *
 * <p>Results are cached: if the source was expanded before, the stored
 * expansion is returned directly. Otherwise the archive format is chosen by
 * file extension for compressed tarballs (tgz/tar.gz, tbz2/tar.bz2) and
 * auto-detected by commons-compress for everything else.
 *
 * @param storedDocumentSource the archive to expand
 * @return the expanded document sources
 * @throws IOException if the archive cannot be read
 */
public List<StoredDocumentSource> getExpandedStoredDocumentSources(StoredDocumentSource storedDocumentSource)
        throws IOException {

    // first try to see if we've been here already
    String id = storedDocumentSource.getId();
    List<StoredDocumentSource> archivedStoredDocumentSources = storedDocumentSourceStorage
            .getMultipleExpandedStoredDocumentSources(id);
    if (archivedStoredDocumentSources != null && archivedStoredDocumentSources.isEmpty() == false) {
        return archivedStoredDocumentSources;
    }

    InputStream inputStream = null;
    ArchiveInputStream archiveInputStream = null;
    try {
        ArchiveStreamFactory archiveStreamFactory = new ArchiveStreamFactory();
        inputStream = storedDocumentSourceStorage
                .getStoredDocumentSourceInputStream(storedDocumentSource.getId());
        BufferedInputStream bis = new BufferedInputStream(inputStream);

        // hoisted: lower-case the location once instead of per comparison
        String filename = storedDocumentSource.getMetadata().getLocation().toLowerCase();

        if (filename.endsWith("tgz") || filename.endsWith("tar.gz")) { // decompress and then untar
            archiveInputStream = archiveStreamFactory.createArchiveInputStream(ArchiveStreamFactory.TAR,
                    new GZIPInputStream(bis));
        } else if (filename.endsWith("tbz2") || filename.endsWith("tar.bz2")) { // decompress and then untar
            archiveInputStream = archiveStreamFactory.createArchiveInputStream(ArchiveStreamFactory.TAR,
                    new BZip2CompressorInputStream(bis));
        } else {
            // let commons-compress auto-detect the archive format
            archiveInputStream = archiveStreamFactory.createArchiveInputStream(bis);
        }
        archivedStoredDocumentSources = getExpandedDocumentSources(archiveInputStream, storedDocumentSource);
        storedDocumentSourceStorage.setMultipleExpandedStoredDocumentSources(storedDocumentSource.getId(),
                archivedStoredDocumentSources);
        return archivedStoredDocumentSources;
    } catch (ArchiveException e) {
        throw new IOException("A problem was encountered reading this archive: "
                + storedDocumentSource.getMetadata().getLocation(), e);
    } finally {
        // BUG FIX: close the archive stream (which closes the decompressor,
        // buffer and raw stream transitively) — previously only the raw
        // stream was closed, leaking the wrapper streams' resources.
        if (archiveInputStream != null) {
            archiveInputStream.close();
        } else if (inputStream != null) {
            inputStream.close();
        }
    }
}