Example usage for org.apache.commons.compress.archivers ArchiveInputStream read

List of usage examples for org.apache.commons.compress.archivers ArchiveInputStream read

Introduction

On this page you can find example usages for org.apache.commons.compress.archivers ArchiveInputStream read.

Prototype

public int read(byte b[], int off, int len) throws IOException 

Source Link

Document

Reads up to len bytes of data from the input stream into an array of bytes.

Usage

From source file:de.fanero.uncompress.stream.EmptyArchiveInputStreamTest.java

/**
 * Reading into a two-byte buffer from the empty archive must immediately
 * signal end-of-stream by returning -1.
 */
@Test
public void testReadWithBuffer2() throws Exception {

    ArchiveInputStream emptyStream = EmptyArchiveInputStream.getInstance();

    byte[] readBuffer = new byte[2];
    assertThat(emptyStream.read(readBuffer, 0, readBuffer.length), is(-1));
}

From source file:com.mirth.connect.util.ArchiveUtils.java

/**
 * Extracts an archive using generic stream factories provided by commons-compress.
 *
 * @param archiveFile       the archive file to extract
 * @param destinationFolder the directory to extract entries into
 * @throws CompressException if the archive cannot be read or an entry cannot be written
 */
private static void extractGenericArchive(File archiveFile, File destinationFolder) throws CompressException {
    try {
        InputStream inputStream = new BufferedInputStream(FileUtils.openInputStream(archiveFile));

        try {
            inputStream = new CompressorStreamFactory().createCompressorInputStream(inputStream);
        } catch (CompressorException e) {
            // a compressor was not recognized in the stream, in this case we leave the inputStream as-is
        }

        ArchiveInputStream archiveInputStream = new ArchiveStreamFactory()
                .createArchiveInputStream(inputStream);
        ArchiveEntry entry;
        byte[] buffer = new byte[BUFFER_SIZE];

        try {
            while (null != (entry = archiveInputStream.getNextEntry())) {
                File outputFile = new File(
                        destinationFolder.getAbsolutePath() + IOUtils.DIR_SEPARATOR + entry.getName());

                if (entry.isDirectory()) {
                    FileUtils.forceMkdir(outputFile);
                } else {
                    FileOutputStream outputStream = null;

                    try {
                        outputStream = FileUtils.openOutputStream(outputFile);
                        int bytesRead;

                        // BUG FIX: the original advanced cumulative inputOffset/outputOffset
                        // values and used them as buffer offsets, overrunning the shared
                        // buffer after the first BUFFER_SIZE bytes. Each read fills the
                        // buffer from offset 0 and each write drains it from offset 0.
                        while ((bytesRead = archiveInputStream.read(buffer, 0, BUFFER_SIZE)) > 0) {
                            outputStream.write(buffer, 0, bytesRead);
                        }
                    } finally {
                        IOUtils.closeQuietly(outputStream);
                    }
                }
            }
        } finally {
            IOUtils.closeQuietly(archiveInputStream);
        }
    } catch (Exception e) {
        throw new CompressException(e);
    }
}

From source file:io.github.blindio.prospero.core.browserdrivers.phantomjs.AbstractUnarchiver.java

/**
 * Extracts every entry of the given archive stream into the destination
 * directory returned by {@code getDestDirectory()}, then closes the stream.
 *
 * @param arcInStream the archive stream to read entries from
 * @throws IOException if an entry cannot be read or written
 */
protected void extract(ArchiveInputStream arcInStream) throws IOException {
    ArchiveEntry entry;

    /* Read the archive entries using the getNextEntry method. */
    while ((entry = arcInStream.getNextEntry()) != null) {

        System.out.println("Extracting: " + entry.getName());

        if (entry.isDirectory()) {
            // Directory entry: just create the directory tree.
            File f = new File(getDestDirectory() + SystemUtils.FILE_SEPARATOR + entry.getName());
            f.mkdirs();
        } else {
            // File entry: stream the decompressed bytes to disk.
            int count;
            byte[] data = new byte[BUFFER];

            FileOutputStream fos = new FileOutputStream(
                    getDestDirectory() + SystemUtils.FILE_SEPARATOR + entry.getName());
            BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
            try {
                while ((count = arcInStream.read(data, 0, BUFFER)) != -1) {
                    dest.write(data, 0, count);
                }
            } finally {
                // BUG FIX: the original leaked the output stream (and its file
                // handle) when a read/write failed mid-entry; always close it.
                dest.close();
            }
        }
    }

    /* Close the input stream. */
    arcInStream.close();
    System.out.println("untar completed successfully!!");
}

From source file:org.apache.flex.utilities.converter.retrievers.BaseRetriever.java

/**
 * Unpacks the given archive into the target directory, showing a progress bar
 * driven by the number of raw bytes consumed from the input file.
 *
 * @param inputArchive    the archive file to unpack (.tbz2 handled explicitly,
 *                        everything else auto-detected by commons-compress)
 * @param targetDirectory the directory to extract into (created if missing)
 * @throws RetrieverException if a directory cannot be created or the archive
 *                            cannot be read or written
 */
protected void unpack(File inputArchive, File targetDirectory) throws RetrieverException {
    // BUG FIX: mkdirs() returns false when the directory already exists, which
    // previously aborted unpacking into a pre-existing target directory.
    if (!targetDirectory.exists() && !targetDirectory.mkdirs()) {
        throw new RetrieverException(
                "Unable to create extraction directory " + targetDirectory.getAbsolutePath());
    }

    ArchiveInputStream archiveInputStream = null;
    ArchiveEntry entry;
    try {

        // The counting stream lets us drive the progress bar from raw bytes read.
        final CountingInputStream inputStream = new CountingInputStream(new FileInputStream(inputArchive));

        final long inputFileSize = inputArchive.length();

        if (inputArchive.getName().endsWith(".tbz2")) {
            // .tbz2 is not auto-detected by ArchiveStreamFactory; wire it up manually.
            archiveInputStream = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
        } else {
            archiveInputStream = new ArchiveStreamFactory()
                    .createArchiveInputStream(new BufferedInputStream(inputStream));
        }

        final ProgressBar progressBar = new ProgressBar(inputFileSize);
        while ((entry = archiveInputStream.getNextEntry()) != null) {
            final File outputFile = new File(targetDirectory, entry.getName());

            // Entry is a directory.
            if (entry.isDirectory()) {
                if (!outputFile.exists()) {
                    if (!outputFile.mkdirs()) {
                        throw new RetrieverException(
                                "Could not create output directory " + outputFile.getAbsolutePath());
                    }
                }
            }

            // Entry is a file.
            else {
                // BUG FIX: some archives omit explicit directory entries; make
                // sure the parent directory exists before opening the file.
                final File parentDir = outputFile.getParentFile();
                if (parentDir != null && !parentDir.exists() && !parentDir.mkdirs()) {
                    throw new RetrieverException(
                            "Could not create output directory " + parentDir.getAbsolutePath());
                }

                final byte[] data = new byte[BUFFER_MAX];
                final FileOutputStream fos = new FileOutputStream(outputFile);
                BufferedOutputStream dest = null;
                try {
                    dest = new BufferedOutputStream(fos, BUFFER_MAX);

                    int count;
                    while ((count = archiveInputStream.read(data, 0, BUFFER_MAX)) != -1) {
                        dest.write(data, 0, count);
                        progressBar.updateProgress(inputStream.getBytesRead());
                    }
                } finally {
                    if (dest != null) {
                        dest.flush();
                        dest.close();
                    }
                }
            }

            progressBar.updateProgress(inputStream.getBytesRead());
        }
    } catch (IOException e) {
        // BUG FIX: failures were previously printed and swallowed, leaving a
        // silently incomplete extraction; propagate as the declared exception.
        throw new RetrieverException("Error unpacking " + inputArchive.getAbsolutePath(), e);
    } catch (ArchiveException e) {
        throw new RetrieverException("Error unpacking " + inputArchive.getAbsolutePath(), e);
    } finally {
        if (archiveInputStream != null) {
            try {
                archiveInputStream.close();
            } catch (Exception e) {
                // Ignore close failures; the extraction outcome is already decided.
            }
        }
    }
}

From source file:org.apache.rat.walker.ArchiveWalker.java

/**
 * Run a report over all files and directories in this GZIPWalker,
 * ignoring any files/directories set to be ignored.
 * /*from   w ww  . j  a  va  2s . co  m*/
 * @param report the defined RatReport to run on this GZIP walker.
 * 
 */
public void run(final RatReport report) throws RatException {

    try {
        ArchiveInputStream input;

        /* I am really sad that classes aren't first-class objects in
           Java :'( */
        try {
            input = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(file)));
        } catch (IOException e) {
            try {
                input = new TarArchiveInputStream(new BZip2CompressorInputStream(new FileInputStream(file)));
            } catch (IOException e2) {
                input = new ZipArchiveInputStream(new FileInputStream(file));
            }
        }

        ArchiveEntry entry = input.getNextEntry();
        while (entry != null) {
            File f = new File(entry.getName());
            byte[] contents = new byte[(int) entry.getSize()];
            int offset = 0;
            int length = contents.length;

            while (offset < entry.getSize()) {
                int actualRead = input.read(contents, offset, length);
                length -= actualRead;
                offset += actualRead;
            }

            if (!entry.isDirectory() && !ignored(f)) {
                report(report, contents, f);
            }

            entry = input.getNextEntry();
        }

        input.close();
    } catch (IOException e) {
        throw new RatException(e);
    }
}

From source file:org.apache.tika.parser.pkg.SimulationDetector.java

/**
 * Scans the entries of a (possibly compressed) archive for a file whose first
 * {@code MAGIC.length()} bytes match the MAGIC marker.
 *
 * @param tis the input stream to inspect
 * @return the simulation media type when the marker is found, otherwise null
 *         (including on any parse/read failure)
 */
private static MediaType detectSimulation(TikaInputStream tis) {
    try {
        CompressorInputStream cis = new CompressorStreamFactory().createCompressorInputStream(tis);
        BufferedInputStream bis = new BufferedInputStream(cis);
        ArchiveInputStream input = new ArchiveStreamFactory().createArchiveInputStream(bis);
        ArchiveEntry entry;
        // BUG FIX: the original do-while dereferenced the entry before its null
        // check, so the terminating getNextEntry() == null always ended the
        // loop with a NullPointerException masked by the catch-all below.
        while ((entry = input.getNextEntry()) != null) {
            if (!entry.isDirectory()) {
                byte[] content = new byte[MAGIC.length()];
                if (entry.getSize() > content.length) {
                    int offset = 0;
                    int length = content.length;
                    while (length > 0) {
                        int n = input.read(content, offset, length);
                        // BUG FIX: guard against premature end of stream, which
                        // made the original spin forever with n == -1.
                        if (n == -1) {
                            break;
                        }
                        offset += n;
                        length -= n;
                    }
                    String s = new String(content, "ASCII");
                    if (MAGIC.equals(s))
                        return new MediaType("application", "enzosimulation");
                }
            }
        }

        return null;
    } catch (Exception e) {
        // Best-effort detection: any failure simply means "not a simulation".
        return null;
    }
}

From source file:org.arquillian.spacelift.task.archive.UncompressTool.java

/**
 * Uncompresses the given archive file into {@code this.dest}, remapping entry
 * names and applying any recorded entry permissions.
 *
 * @param input the archive file to uncompress
 * @return the destination directory
 * @throws Exception if the archive cannot be read or an entry cannot be written
 */
@Override
protected File process(File input) throws Exception {
    ArchiveEntry entry;

    /* Read entries using the getNextEntry method. */
    ArchiveInputStream compressedInputStream = compressedInputStream(new FileInputStream(input));

    while ((entry = compressedInputStream.getNextEntry()) != null) {

        File file = new File(this.dest, remapEntryName(entry.getName()));

        if (entry.isDirectory()) {
            file.mkdirs();
        } else {

            // Archives may omit directory entries; create parents as needed.
            if (!file.getParentFile().exists()) {
                file.getParentFile().mkdirs();
            }

            int count;
            byte[] data = new byte[BUFFER];

            FileOutputStream fos = new FileOutputStream(file);
            BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
            try {
                while ((count = compressedInputStream.read(data, 0, BUFFER)) != -1) {
                    dest.write(data, 0, count);
                }
            } finally {
                // BUG FIX: the original leaked the output stream when a
                // read/write failed mid-entry; always close it.
                dest.close();
            }

            int permissionsMode = permissionsMode(entry);
            if (permissionsMode != 0) {
                FilePermission filePermission = PermissionsUtil.toFilePermission(permissionsMode);
                PermissionsUtil.applyPermission(file, filePermission);
            }
        }
    }

    compressedInputStream.close();

    return this.dest;
}

From source file:org.arquillian.spacelift.tool.basic.UncompressTool.java

/**
 * Uncompresses the given archive file into {@code this.dest} and applies any
 * recorded entry permissions.
 *
 * @param input the archive file to uncompress
 * @return the destination directory
 * @throws Exception if the archive cannot be read or an entry cannot be written
 */
@Override
protected File process(File input) throws Exception {
    ArchiveEntry entry;

    /* Read entries using the getNextEntry method. */
    ArchiveInputStream compressedInputStream = compressedInputStream(new FileInputStream(input));

    while ((entry = compressedInputStream.getNextEntry()) != null) {

        File file = new File(this.dest, entry.getName());

        if (entry.isDirectory()) {
            file.mkdirs();
        } else {

            // Archives may omit directory entries; create parents as needed.
            if (!file.getParentFile().exists()) {
                file.getParentFile().mkdirs();
            }

            int count;
            byte[] data = new byte[BUFFER];

            FileOutputStream fos = new FileOutputStream(file);
            BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
            try {
                while ((count = compressedInputStream.read(data, 0, BUFFER)) != -1) {
                    dest.write(data, 0, count);
                }
            } finally {
                // BUG FIX: the original leaked the output stream when a
                // read/write failed mid-entry; always close it.
                dest.close();
            }

            int permissionsMode = permissionsMode(entry);
            if (permissionsMode != 0) {
                FilePermission filePermission = PermissionsUtil.toFilePermission(permissionsMode);
                PermissionsUtil.applyPermission(file, filePermission);
            }
        }
    }

    compressedInputStream.close();

    return this.dest;
}

From source file:org.moe.cli.utils.ArchiveUtils.java

/**
 * Extracts a tar archive stream into the given destination directory,
 * recreating directories, regular files and symbolic links.
 *
 * @param archStrim   the tar archive stream to read entries from
 * @param destination the directory to extract into
 * @throws IOException if an entry cannot be read or written
 */
public static void untarArchive(ArchiveInputStream archStrim, File destination) throws IOException {
    TarArchiveEntry entry;

    while ((entry = (TarArchiveEntry) archStrim.getNextEntry()) != null) {
        if (entry.isDirectory()) {
            String dest = entry.getName();
            File destFolder = new File(destination, dest);
            if (!destFolder.exists()) {
                destFolder.mkdirs();
            }
        } else {
            int count;
            byte[] data = new byte[2048];
            File d = new File(destination, entry.getName());

            // Archives may omit directory entries; create parents as needed.
            if (!d.getParentFile().exists()) {
                d.getParentFile().mkdirs();
            }

            if (entry.isSymbolicLink()) {
                // Recreate the link target relative to the entry's parent directory.
                String link = entry.getLinkName();

                String entryName = entry.getName();
                int parentIdx = entryName.lastIndexOf("/");

                String newLink = entryName.substring(0, parentIdx) + "/" + link;
                File destFile = new File(destination, newLink);
                File linkFile = new File(destination, entryName);

                Files.createSymbolicLink(Paths.get(linkFile.getPath()), Paths.get(destFile.getPath()));

            } else {
                FileOutputStream fos = new FileOutputStream(d);
                BufferedOutputStream dest = new BufferedOutputStream(fos, 2048);
                try {
                    while ((count = archStrim.read(data, 0, 2048)) != -1) {
                        dest.write(data, 0, count);
                    }
                } finally {
                    // BUG FIX: the original leaked the output stream when a
                    // read/write failed mid-entry; always close it.
                    dest.close();
                }
            }
        }
    }
}

From source file:org.openbaton.marketplace.core.VNFPackageManagement.java

/**
 * On-boards a VNF package from an uploaded tar archive: validates
 * Metadata.yaml, collects the image configuration, the VNFD json and the
 * scripts, persists the package and dispatches it for upload.
 *
 * @param fileName  the original file name of the uploaded package
 * @param pack      the raw bytes of the tar archive
 * @param imageLink whether an image link was supplied out-of-band
 * @return the persisted package metadata
 */
public VNFPackageMetadata add(String fileName, byte[] pack, boolean imageLink)
        throws IOException, VimException, NotFoundException, SQLException, PluginException,
        AlreadyExistingException, PackageIntegrityException, FailedToUploadException {
    try {
        updateVims();
    } catch (InterruptedException e) {
        // BUG FIX: restore the interrupt flag so callers can still observe it.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (SDKException e) {
        e.printStackTrace();
    }

    VNFPackage vnfPackage = new VNFPackage();
    vnfPackage.setScripts(new HashSet<Script>());
    Map<String, Object> metadata = null;
    VirtualNetworkFunctionDescriptor virtualNetworkFunctionDescriptor = null;
    byte[] imageFile = null;
    NFVImage image = new NFVImage();
    ImageMetadata imageMetadata = new ImageMetadata();

    InputStream tarStream;
    ArchiveInputStream myTarFile;
    try {
        tarStream = new ByteArrayInputStream(pack);
        myTarFile = new ArchiveStreamFactory().createArchiveInputStream("tar", tarStream);
    } catch (ArchiveException e) {
        e.printStackTrace();
        throw new IOException();
    }
    TarArchiveEntry entry;
    Map<String, Object> imageDetails = new HashMap<>();
    while ((entry = (TarArchiveEntry) myTarFile.getNextEntry()) != null) {
        /* Get the name of the file */
        if (entry.isFile() && !entry.getName().startsWith("./._")) {
            log.debug("file inside tar: " + entry.getName());
            byte[] content = new byte[(int) entry.getSize()];
            // BUG FIX: a single read() may return fewer bytes than requested;
            // loop until the entry content is fully buffered.
            int bytesBuffered = 0;
            while (bytesBuffered < content.length) {
                int n = myTarFile.read(content, bytesBuffered, content.length - bytesBuffered);
                if (n == -1) {
                    break;
                }
                bytesBuffered += n;
            }
            if (entry.getName().equals("Metadata.yaml")) {
                YamlJsonParser yaml = new YamlJsonParser();
                log.info(new String(content));
                metadata = yaml.parseMap(new String(content));
                //Get configuration for NFVImage
                String[] REQUIRED_PACKAGE_KEYS = new String[] { "name", "description", "provider", "image",
                        "shared" };
                for (String requiredKey : REQUIRED_PACKAGE_KEYS) {
                    if (!metadata.containsKey(requiredKey)) {
                        throw new PackageIntegrityException(
                                "Not found " + requiredKey + " of VNFPackage in Metadata.yaml");
                    }
                    if (metadata.get(requiredKey) == null) {
                        throw new PackageIntegrityException(
                                "Not defined " + requiredKey + " of VNFPackage in Metadata.yaml");
                    }
                }
                vnfPackage.setName((String) metadata.get("name"));

                if (vnfPackageMetadataRepository
                        .findByNameAndUsername(vnfPackage.getName(), userManagement.getCurrentUser())
                        .size() != 0) {
                    throw new AlreadyExistingException("Package with name " + vnfPackage.getName()
                            + " already exists, please " + "change the name");
                }

                if (metadata.containsKey("scripts-link")) {
                    vnfPackage.setScriptsLink((String) metadata.get("scripts-link"));
                }
                if (metadata.containsKey("image")) {
                    imageDetails = (Map<String, Object>) metadata.get("image");
                    String[] REQUIRED_IMAGE_DETAILS = new String[] { "upload" };
                    log.debug("image: " + imageDetails);
                    for (String requiredKey : REQUIRED_IMAGE_DETAILS) {
                        if (!imageDetails.containsKey(requiredKey)) {
                            throw new PackageIntegrityException(
                                    "Not found key: " + requiredKey + " of image in Metadata.yaml");
                        }
                        if (imageDetails.get(requiredKey) == null) {
                            throw new PackageIntegrityException(
                                    "Not defined value of key: " + requiredKey + " of image in Metadata.yaml");
                        }
                    }
                    imageMetadata.setUsername(userManagement.getCurrentUser());
                    // BUG FIX: the validated (and later consumed) key is "upload";
                    // the original read the absent key "option" and stored null.
                    imageMetadata.setUpload((String) imageDetails.get("upload"));
                    if (imageDetails.containsKey("ids")) {
                        imageMetadata.setIds((List<String>) imageDetails.get("ids"));
                    } else {
                        imageMetadata.setIds(new ArrayList<String>());
                    }
                    if (imageDetails.containsKey("names")) {
                        imageMetadata.setNames((List<String>) imageDetails.get("names"));
                    } else {
                        imageMetadata.setNames(new ArrayList<String>());
                    }
                    if (imageDetails.containsKey("link")) {
                        imageMetadata.setLink((String) imageDetails.get("link"));
                    } else {
                        imageMetadata.setLink(null);
                    }

                    //If upload==true -> create a new Image
                    if (imageDetails.get("upload").equals("true")
                            || imageDetails.get("upload").equals("check")) {
                        vnfPackage.setImageLink((String) imageDetails.get("link"));
                        if (metadata.containsKey("image-config")) {
                            log.debug("image-config: " + metadata.get("image-config"));
                            Map<String, Object> imageConfig = (Map<String, Object>) metadata
                                    .get("image-config");
                            //Check if all required keys are available
                            String[] REQUIRED_IMAGE_CONFIG = new String[] { "name", "diskFormat",
                                    "containerFormat", "minCPU", "minDisk", "minRam", "isPublic" };
                            for (String requiredKey : REQUIRED_IMAGE_CONFIG) {
                                if (!imageConfig.containsKey(requiredKey)) {
                                    throw new PackageIntegrityException("Not found key: " + requiredKey
                                            + " of image-config in Metadata.yaml");
                                }
                                if (imageConfig.get(requiredKey) == null) {
                                    throw new PackageIntegrityException("Not defined value of key: "
                                            + requiredKey + " of image-config in Metadata.yaml");
                                }
                            }
                            image.setName((String) imageConfig.get("name"));
                            image.setDiskFormat(((String) imageConfig.get("diskFormat")).toUpperCase());
                            image.setContainerFormat(
                                    ((String) imageConfig.get("containerFormat")).toUpperCase());
                            image.setMinCPU(Integer.toString((Integer) imageConfig.get("minCPU")));
                            image.setMinDiskSpace((Integer) imageConfig.get("minDisk"));
                            image.setMinRam((Integer) imageConfig.get("minRam"));
                            // BUG FIX: the original parsed the "minRam" integer as the
                            // boolean (always false); "isPublic" is the validated key
                            // for this field. String.valueOf tolerates Boolean or
                            // String values from the yaml parser.
                            image.setIsPublic(
                                    Boolean.parseBoolean(String.valueOf(imageConfig.get("isPublic"))));
                        } else {
                            throw new PackageIntegrityException(
                                    "The image-config is not defined. Please define it to upload a new image");
                        }
                    }
                } else {
                    throw new PackageIntegrityException(
                            "The image details are not defined. Please define it to use the right image");
                }
            } else if (!entry.getName().startsWith("scripts/") && entry.getName().endsWith(".json")) {
                //this must be the vnfd
                //and has to be onboarded in the catalogue
                String json = new String(content);
                log.trace("Content of json is: " + json);
                try {
                    virtualNetworkFunctionDescriptor = mapper.fromJson(json,
                            VirtualNetworkFunctionDescriptor.class);
                    //remove the images
                    for (VirtualDeploymentUnit vdu : virtualNetworkFunctionDescriptor.getVdu()) {
                        vdu.setVm_image(new HashSet<String>());
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                log.trace("Created VNFD: " + virtualNetworkFunctionDescriptor);
            } else if (entry.getName().endsWith(".img")) {
                //this must be the image
                //and has to be upladed to the RIGHT vim
                imageFile = content;
                log.debug("imageFile is: " + entry.getName());
                throw new VimException(
                        "Uploading an image file from the VNFPackage is not supported at this moment. Please use the image link"
                                + ".");
            } else if (entry.getName().startsWith("scripts/")) {
                Script script = new Script();
                script.setName(entry.getName().substring(8));
                script.setPayload(content);
                vnfPackage.getScripts().add(script);
            }
        }
    }
    if (metadata == null) {
        throw new PackageIntegrityException("Not found Metadata.yaml");
    }
    if (vnfPackage.getScriptsLink() != null) {
        if (vnfPackage.getScripts().size() > 0) {
            log.debug(
                    "VNFPackageManagement: Remove scripts got by scripts/ because the scripts-link is defined");
            vnfPackage.setScripts(new HashSet<Script>());
        }
    }
    List<String> vimInstances = new ArrayList<>();
    if (imageDetails.get("upload").equals("check")) {
        if (!imageLink) {
            if (vnfPackage.getImageLink() == null && imageFile == null) {
                throw new PackageIntegrityException(
                        "VNFPackageManagement: For option upload=check you must define an image. Neither the image link is "
                                + "defined nor the image file is available. Please define at least one if you want to upload a new image");
            }
        }
    }

    if (imageDetails.get("upload").equals("true")) {
        log.debug("VNFPackageManagement: Uploading a new Image");
        if (vnfPackage.getImageLink() == null && imageFile == null) {
            throw new PackageIntegrityException(
                    "VNFPackageManagement: Neither the image link is defined nor the image file is available. Please define "
                            + "at least one if you want to upload a new image");
        }
    } else {
        if (!imageDetails.containsKey("ids") && !imageDetails.containsKey("names")) {
            throw new PackageIntegrityException(
                    "VNFPackageManagement: Upload option 'false' or 'check' requires at least a list of ids or names to find "
                            + "the right image.");
        }
    }
    vnfPackage.setImage(image);
    myTarFile.close();
    // BUG FIX: a package without a VNFD json previously died with a
    // NullPointerException below; fail with a meaningful error before
    // persisting anything.
    if (virtualNetworkFunctionDescriptor == null) {
        throw new PackageIntegrityException("Not found VNFD json in the package");
    }
    vnfPackage = vnfPackageRepository.save(vnfPackage);
    virtualNetworkFunctionDescriptor.setVnfPackageLocation(vnfPackage.getId());

    VNFPackageMetadata vnfPackageMetadata = new VNFPackageMetadata();
    vnfPackageMetadata.setName(vnfPackage.getName());
    vnfPackageMetadata.setUsername(userManagement.getCurrentUser());
    vnfPackageMetadata.setVnfd(virtualNetworkFunctionDescriptor);
    vnfPackageMetadata.setVnfPackage(vnfPackage);
    vnfPackageMetadata.setNfvImage(image);
    vnfPackageMetadata.setImageMetadata(imageMetadata);
    vnfPackageMetadata.setVnfPackageFileName(fileName);
    vnfPackageMetadata.setVnfPackageFile(pack);
    // Truncate the (required, validated non-null) description to 100 chars.
    String description = (String) metadata.get("description");
    if (description.length() > 100) {
        description = description.substring(0, 100);
    }
    vnfPackageMetadata.setDescription(description);
    vnfPackageMetadata.setProvider((String) metadata.get("provider"));
    vnfPackageMetadata.setRequirements((Map) metadata.get("requirements"));
    vnfPackageMetadata.setShared((boolean) metadata.get("shared"));
    vnfPackageMetadata.setMd5sum(DigestUtils.md5DigestAsHex(pack));
    try {
        this.dispatch(vnfPackageMetadata);
    } catch (FailedToUploadException e) {
        // Roll back the persisted package if the upload dispatch fails.
        vnfPackageRepository.delete(vnfPackage.getId());
        throw e;
    }
    vnfPackageMetadataRepository.save(vnfPackageMetadata);

    //        vnfdRepository.save(virtualNetworkFunctionDescriptor);
    log.debug("Persisted " + vnfPackageMetadata);
    //        log.trace("Onboarded VNFPackage (" + virtualNetworkFunctionDescriptor.getVnfPackageLocation() + ")
    // successfully");

    return vnfPackageMetadata;
}