Example usage for org.apache.commons.compress.archivers.tar TarArchiveEntry getSize

List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveEntry getSize

Introduction

On this page you can find example usage for org.apache.commons.compress.archivers.tar TarArchiveEntry getSize.

Prototype

public long getSize() 

Document

Get this entry's file size.

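Before the project examples, here is a minimal sketch of the typical pattern (illustrative only, not taken from any of the projects below; the class name is made up): getSize() sizes a buffer for the current entry while iterating a tar stream.

import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.utils.IOUtils;

public class GetSizeExample {

    /** Reads every regular file in the tar stream into memory, sized by getSize(). */
    public static void readEntries(InputStream in) throws IOException {
        try (TarArchiveInputStream tar = new TarArchiveInputStream(in)) {
            TarArchiveEntry entry;
            while ((entry = tar.getNextTarEntry()) != null) {
                if (!entry.isFile()) {
                    continue;
                }
                // getSize() reports the entry size in bytes; the int cast assumes entries under 2 GB
                byte[] content = new byte[(int) entry.getSize()];
                // readFully loops until the buffer is filled (a single read() may return fewer bytes)
                IOUtils.readFully(tar, content);
                System.out.println(entry.getName() + ": " + content.length + " bytes");
            }
        }
    }
}
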
Usage

From source file: org.openbaton.marketplace.core.VNFPackageManagement.java

public VNFPackageMetadata add(String fileName, byte[] pack, boolean imageLink)
        throws IOException, VimException, NotFoundException, SQLException, PluginException,
        AlreadyExistingException, PackageIntegrityException, FailedToUploadException {
    try {
        updateVims();
    } catch (InterruptedException e) {
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } catch (SDKException e) {
        e.printStackTrace();
    }

    VNFPackage vnfPackage = new VNFPackage();
    vnfPackage.setScripts(new HashSet<Script>());
    Map<String, Object> metadata = null;
    VirtualNetworkFunctionDescriptor virtualNetworkFunctionDescriptor = null;
    byte[] imageFile = null;
    NFVImage image = new NFVImage();
    ImageMetadata imageMetadata = new ImageMetadata();

    InputStream tarStream;
    ArchiveInputStream myTarFile;
    try {
        tarStream = new ByteArrayInputStream(pack);
        myTarFile = new ArchiveStreamFactory().createArchiveInputStream("tar", tarStream);
    } catch (ArchiveException e) {
        e.printStackTrace();
        throw new IOException();
    }
    TarArchiveEntry entry;
    Map<String, Object> imageDetails = new HashMap<>();
    while ((entry = (TarArchiveEntry) myTarFile.getNextEntry()) != null) {
        /* Get the name of the file */
        if (entry.isFile() && !entry.getName().startsWith("./._")) {
            log.debug("file inside tar: " + entry.getName());
            byte[] content = new byte[(int) entry.getSize()];
            myTarFile.read(content, 0, content.length);
            if (entry.getName().equals("Metadata.yaml")) {
                YamlJsonParser yaml = new YamlJsonParser();
                log.info(new String(content));
                metadata = yaml.parseMap(new String(content));
                //Get configuration for NFVImage
                String[] REQUIRED_PACKAGE_KEYS = new String[] { "name", "description", "provider", "image",
                        "shared" };
                for (String requiredKey : REQUIRED_PACKAGE_KEYS) {
                    if (!metadata.containsKey(requiredKey)) {
                        throw new PackageIntegrityException(
                                "Not found " + requiredKey + " of VNFPackage in Metadata.yaml");
                    }
                    if (metadata.get(requiredKey) == null) {
                        throw new PackageIntegrityException(
                                "Not defined " + requiredKey + " of VNFPackage in Metadata.yaml");
                    }
                }
                vnfPackage.setName((String) metadata.get("name"));

                if (vnfPackageMetadataRepository
                        .findByNameAndUsername(vnfPackage.getName(), userManagement.getCurrentUser())
                        .size() != 0) {
                    throw new AlreadyExistingException("Package with name " + vnfPackage.getName()
                            + " already exists, please " + "change the name");
                }

                if (metadata.containsKey("scripts-link")) {
                    vnfPackage.setScriptsLink((String) metadata.get("scripts-link"));
                }
                if (metadata.containsKey("image")) {
                    imageDetails = (Map<String, Object>) metadata.get("image");
                    String[] REQUIRED_IMAGE_DETAILS = new String[] { "upload" };
                    log.debug("image: " + imageDetails);
                    for (String requiredKey : REQUIRED_IMAGE_DETAILS) {
                        if (!imageDetails.containsKey(requiredKey)) {
                            throw new PackageIntegrityException(
                                    "Not found key: " + requiredKey + " of image in Metadata.yaml");
                        }
                        if (imageDetails.get(requiredKey) == null) {
                            throw new PackageIntegrityException(
                                    "Not defined value of key: " + requiredKey + " of image in Metadata.yaml");
                        }
                    }
                    imageMetadata.setUsername(userManagement.getCurrentUser());
                    imageMetadata.setUpload((String) imageDetails.get("option"));
                    if (imageDetails.containsKey("ids")) {
                        imageMetadata.setIds((List<String>) imageDetails.get("ids"));
                    } else {
                        imageMetadata.setIds(new ArrayList<String>());
                    }
                    if (imageDetails.containsKey("names")) {
                        imageMetadata.setNames((List<String>) imageDetails.get("names"));
                    } else {
                        imageMetadata.setNames(new ArrayList<String>());
                    }
                    if (imageDetails.containsKey("link")) {
                        imageMetadata.setLink((String) imageDetails.get("link"));
                    } else {
                        imageMetadata.setLink(null);
                    }

                    //If upload==true -> create a new Image
                    if (imageDetails.get("upload").equals("true")
                            || imageDetails.get("upload").equals("check")) {
                        vnfPackage.setImageLink((String) imageDetails.get("link"));
                        if (metadata.containsKey("image-config")) {
                            log.debug("image-config: " + metadata.get("image-config"));
                            Map<String, Object> imageConfig = (Map<String, Object>) metadata
                                    .get("image-config");
                            //Check if all required keys are available
                            String[] REQUIRED_IMAGE_CONFIG = new String[] { "name", "diskFormat",
                                    "containerFormat", "minCPU", "minDisk", "minRam", "isPublic" };
                            for (String requiredKey : REQUIRED_IMAGE_CONFIG) {
                                if (!imageConfig.containsKey(requiredKey)) {
                                    throw new PackageIntegrityException("Not found key: " + requiredKey
                                            + " of image-config in Metadata.yaml");
                                }
                                if (imageConfig.get(requiredKey) == null) {
                                    throw new PackageIntegrityException("Not defined value of key: "
                                            + requiredKey + " of image-config in Metadata.yaml");
                                }
                            }
                            image.setName((String) imageConfig.get("name"));
                            image.setDiskFormat(((String) imageConfig.get("diskFormat")).toUpperCase());
                            image.setContainerFormat(
                                    ((String) imageConfig.get("containerFormat")).toUpperCase());
                            image.setMinCPU(Integer.toString((Integer) imageConfig.get("minCPU")));
                            image.setMinDiskSpace((Integer) imageConfig.get("minDisk"));
                            image.setMinRam((Integer) imageConfig.get("minRam"));
                            // the isPublic flag comes from the "isPublic" key validated above
                            image.setIsPublic(
                                    Boolean.parseBoolean(String.valueOf(imageConfig.get("isPublic"))));
                        } else {
                            throw new PackageIntegrityException(
                                    "The image-config is not defined. Please define it to upload a new image");
                        }
                    }
                } else {
                    throw new PackageIntegrityException(
                            "The image details are not defined. Please define it to use the right image");
                }
            } else if (!entry.getName().startsWith("scripts/") && entry.getName().endsWith(".json")) {
                //this must be the vnfd
                //and has to be onboarded in the catalogue
                String json = new String(content);
                log.trace("Content of json is: " + json);
                try {
                    virtualNetworkFunctionDescriptor = mapper.fromJson(json,
                            VirtualNetworkFunctionDescriptor.class);
                    //remove the images
                    for (VirtualDeploymentUnit vdu : virtualNetworkFunctionDescriptor.getVdu()) {
                        vdu.setVm_image(new HashSet<String>());
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                log.trace("Created VNFD: " + virtualNetworkFunctionDescriptor);
            } else if (entry.getName().endsWith(".img")) {
                //this must be the image
                //and has to be uploaded to the RIGHT vim
                imageFile = content;
                log.debug("imageFile is: " + entry.getName());
                throw new VimException(
                        "Uploading an image file from the VNFPackage is not supported at this moment. Please use the image link"
                                + ".");
            } else if (entry.getName().startsWith("scripts/")) {
                Script script = new Script();
                script.setName(entry.getName().substring(8));
                script.setPayload(content);
                vnfPackage.getScripts().add(script);
            }
        }
    }
    if (metadata == null) {
        throw new PackageIntegrityException("Not found Metadata.yaml");
    }
    if (vnfPackage.getScriptsLink() != null) {
        if (vnfPackage.getScripts().size() > 0) {
            log.debug(
                    "VNFPackageManagement: Remove scripts got by scripts/ because the scripts-link is defined");
            vnfPackage.setScripts(new HashSet<Script>());
        }
    }
    List<String> vimInstances = new ArrayList<>();
    if (imageDetails.get("upload").equals("check")) {
        if (!imageLink) {
            if (vnfPackage.getImageLink() == null && imageFile == null) {
                throw new PackageIntegrityException(
                        "VNFPackageManagement: For option upload=check you must define an image. Neither the image link is "
                                + "defined nor the image file is available. Please define at least one if you want to upload a new image");
            }
        }
    }

    if (imageDetails.get("upload").equals("true")) {
        log.debug("VNFPackageManagement: Uploading a new Image");
        if (vnfPackage.getImageLink() == null && imageFile == null) {
            throw new PackageIntegrityException(
                    "VNFPackageManagement: Neither the image link is defined nor the image file is available. Please define "
                            + "at least one if you want to upload a new image");
        }
    } else {
        if (!imageDetails.containsKey("ids") && !imageDetails.containsKey("names")) {
            throw new PackageIntegrityException(
                    "VNFPackageManagement: Upload option 'false' or 'check' requires at least a list of ids or names to find "
                            + "the right image.");
        }
    }
    vnfPackage.setImage(image);
    myTarFile.close();
    vnfPackage = vnfPackageRepository.save(vnfPackage);
    virtualNetworkFunctionDescriptor.setVnfPackageLocation(vnfPackage.getId());

    VNFPackageMetadata vnfPackageMetadata = new VNFPackageMetadata();
    vnfPackageMetadata.setName(vnfPackage.getName());
    vnfPackageMetadata.setUsername(userManagement.getCurrentUser());
    vnfPackageMetadata.setVnfd(virtualNetworkFunctionDescriptor);
    vnfPackageMetadata.setVnfPackage(vnfPackage);
    vnfPackageMetadata.setNfvImage(image);
    vnfPackageMetadata.setImageMetadata(imageMetadata);
    vnfPackageMetadata.setVnfPackageFileName(fileName);
    vnfPackageMetadata.setVnfPackageFile(pack);
    String description = (String) metadata.get("description");
    if (description.length() > 100) {
        description = description.substring(0, 100);
    }
    vnfPackageMetadata.setDescription(description);
    vnfPackageMetadata.setProvider((String) metadata.get("provider"));
    vnfPackageMetadata.setRequirements((Map) metadata.get("requirements"));
    vnfPackageMetadata.setShared((boolean) metadata.get("shared"));
    vnfPackageMetadata.setMd5sum(DigestUtils.md5DigestAsHex(pack));
    try {
        this.dispatch(vnfPackageMetadata);
    } catch (FailedToUploadException e) {
        vnfPackageRepository.delete(vnfPackage.getId());
        throw e;
    }
    vnfPackageMetadataRepository.save(vnfPackageMetadata);

    //        vnfdRepository.save(virtualNetworkFunctionDescriptor);
    log.debug("Persisted " + vnfPackageMetadata);
    //        log.trace("Onboarded VNFPackage (" + virtualNetworkFunctionDescriptor.getVnfPackageLocation() + ")
    // successfully");

    return vnfPackageMetadata;
}
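
A note on the read above: InputStream.read(byte[], int, int) is not guaranteed to fill the buffer in a single call, so an entry buffer sized with getSize() may come back only partially filled. A defensive variant (a sketch, not part of the OpenBaton code; the helper class is made up) can use Commons Compress's IOUtils to loop until the buffer is full:

import java.io.IOException;

import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.utils.IOUtils;

final class TarEntryReader {

    /** Reads the current entry completely; the buffer length comes from entry.getSize(). */
    static byte[] readCurrentEntry(ArchiveInputStream in, TarArchiveEntry entry) throws IOException {
        byte[] content = new byte[(int) entry.getSize()]; // assumes the entry fits into an int-sized array
        int read = IOUtils.readFully(in, content);        // loops internally until content is full or EOF
        if (read != content.length) {
            throw new IOException("Truncated tar entry: " + entry.getName());
        }
        return content;
    }
}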

From source file: org.openbaton.nfvo.core.api.VNFPackageManagement.java

@Override
public VirtualNetworkFunctionDescriptor onboard(byte[] pack, String projectId)
        throws IOException, VimException, NotFoundException, PluginException, IncompatibleVNFPackage,
        AlreadyExistingException, NetworkServiceIntegrityException {
    log.info("Onboarding VNF Package...");
    VNFPackage vnfPackage = new VNFPackage();
    vnfPackage.setScripts(new HashSet<Script>());
    Map<String, Object> metadata = null;
    VirtualNetworkFunctionDescriptor virtualNetworkFunctionDescriptor = null;
    byte[] imageFile = null;
    NFVImage image = new NFVImage();

    InputStream tarStream;
    ArchiveInputStream myTarFile;
    try {
        tarStream = new ByteArrayInputStream(pack);
        myTarFile = new ArchiveStreamFactory().createArchiveInputStream("tar", tarStream);
    } catch (ArchiveException e) {
        e.printStackTrace();
        throw new IOException();
    }
    TarArchiveEntry entry;
    Map<String, Object> imageDetails = new HashMap<>();
    while ((entry = (TarArchiveEntry) myTarFile.getNextEntry()) != null) {
        /* Get the name of the file */
        if (entry.isFile() && !entry.getName().startsWith("./._")) {
            log.debug("file inside tar: " + entry.getName());
            byte[] content = new byte[(int) entry.getSize()];
            myTarFile.read(content, 0, content.length);
            if (entry.getName().equals("Metadata.yaml")) {
                YamlJsonParser yaml = new YamlJsonParser();
                metadata = yaml.parseMap(new String(content));
                imageDetails = handleMetadata(metadata, vnfPackage, imageDetails, image);

            } else if (!entry.getName().startsWith("scripts/") && entry.getName().endsWith(".json")) {
                //this must be the vnfd
                //and has to be onboarded in the catalogue
                String json = new String(content);
                log.trace("Content of json is: " + json);
                try {
                    virtualNetworkFunctionDescriptor = mapper.fromJson(json,
                            VirtualNetworkFunctionDescriptor.class);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                int i = 1;
                for (VirtualDeploymentUnit vdu : virtualNetworkFunctionDescriptor.getVdu()) {
                    if (vdu.getName() == null) {
                        vdu.setName(virtualNetworkFunctionDescriptor.getName() + "-" + i);
                        i++;
                    }
                }
                for (VirtualDeploymentUnit vdu : virtualNetworkFunctionDescriptor.getVdu()) {
                    log.debug("vdu name: " + vdu.getName());
                }
                log.debug("Created VNFD: " + virtualNetworkFunctionDescriptor.getName());
                log.trace("Created VNFD: " + virtualNetworkFunctionDescriptor);
                nsdUtils.fetchVimInstances(virtualNetworkFunctionDescriptor, projectId);
            } else if (entry.getName().endsWith(".img")) {
                //this must be the image
                //and has to be uploaded to the RIGHT vim
                imageFile = content;
                log.debug("imageFile is: " + entry.getName());
                throw new VimException(
                        "Uploading an image file from the VNFPackage is not supported at this moment. Please use the image link"
                                + ".");
            } else if (entry.getName().startsWith("scripts/")) {
                Script script = new Script();
                script.setName(entry.getName().substring(8));
                script.setPayload(content);
                vnfPackage.getScripts().add(script);
            }
        }
    }

    handleImage(vnfPackage, imageFile, virtualNetworkFunctionDescriptor, metadata, image, imageDetails,
            projectId);

    vnfPackage.setImage(image);
    myTarFile.close();
    virtualNetworkFunctionDescriptor.setProjectId(projectId);
    vnfPackage.setProjectId(projectId);
    for (VirtualNetworkFunctionDescriptor vnfd : vnfdRepository.findByProjectId(projectId)) {
        if (vnfd.getVendor().equals(virtualNetworkFunctionDescriptor.getVendor())
                && vnfd.getName().equals(virtualNetworkFunctionDescriptor.getName())
                && vnfd.getVersion().equals(virtualNetworkFunctionDescriptor.getVersion())) {
            throw new AlreadyExistingException("A VNF with this vendor, name and version is already existing");
        }
    }

    nsdUtils.checkIntegrity(virtualNetworkFunctionDescriptor);

    vnfPackageRepository.save(vnfPackage);
    virtualNetworkFunctionDescriptor.setVnfPackageLocation(vnfPackage.getId());
    virtualNetworkFunctionDescriptor = vnfdRepository.save(virtualNetworkFunctionDescriptor);
    log.trace("Persisted " + virtualNetworkFunctionDescriptor);
    log.trace("Onboarded VNFPackage (" + virtualNetworkFunctionDescriptor.getVnfPackageLocation()
            + ") successfully");
    return virtualNetworkFunctionDescriptor;
}

From source file: org.openmrs.module.openconceptlab.client.OclClient.java

@SuppressWarnings("resource")
public OclResponse ungzipAndUntarResponse(InputStream response, Date date) throws IOException {
    GZIPInputStream gzipIn = new GZIPInputStream(response);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(gzipIn);
    boolean foundEntry = false;
    try {
        TarArchiveEntry entry = tarIn.getNextTarEntry();
        while (entry != null) {
            if (entry.getName().equals("export.json")) {
                foundEntry = true;
                return new OclResponse(tarIn, entry.getSize(), date);
            }
            entry = tarIn.getNextTarEntry();
        }

        tarIn.close();
    } finally {
        if (!foundEntry) {
            IOUtils.closeQuietly(tarIn);
        }
    }
    throw new IOException("Unsupported format of response. Expected tar.gz archive with export.json.");
}

From source file: org.queeg.hadoop.tar.TarExtractor.java

public void extract(ByteSource source) throws IOException {
    TarArchiveInputStream archiveInputStream = new TarArchiveInputStream(source.openStream());

    TarArchiveEntry entry;
    while ((entry = archiveInputStream.getNextTarEntry()) != null) {
        if (entry.isFile()) {
            BoundedInputStream entryInputStream = new BoundedInputStream(archiveInputStream, entry.getSize());
            ByteSink sink = new PathByteSink(conf, new Path(destination, entry.getName()));
            sink.writeFrom(entryInputStream);
        } else if (entry.isDirectory()) {
            ByteStreams.skipFully(archiveInputStream, entry.getSize());
            fs.mkdirs(new Path(destination, entry.getName()));
        }
    }

    archiveInputStream.close();
}

From source file: org.renjin.cran.ProjectBuilder.java

private void unpackSources(File sourceArchive) throws IOException {
    FileInputStream in = new FileInputStream(sourceArchive);
    GZIPInputStream gzipIn = new GZIPInputStream(in);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(gzipIn);

    TarArchiveEntry entry;
    while ((entry = tarIn.getNextTarEntry()) != null) {
        if (entry.getName().endsWith(".Rd")) {

        } else if (entry.getName().startsWith(pkg + "/src/") && entry.getSize() != 0) {

        } else if (entry.getName().startsWith(pkg + "/R/") && entry.getSize() != 0) {
            extractTo(entry, tarIn, rSourcesDir);

        } else if (entry.getName().equals(pkg + "/DESCRIPTION")) {
            extractTo(entry, tarIn, baseDir);

        } else if (entry.getName().equals(pkg + "/NAMESPACE")) {
            extractTo(entry, tarIn, baseDir);

        } else if (entry.getName().startsWith(pkg + "/tests/") && entry.getSize() != 0) {
            extractTo(entry, tarIn, rTestsDir);

        } else if (entry.getName().startsWith(pkg + "/data/") && entry.getSize() != 0) {
            extractTo(entry, tarIn, resourcesDir);
            addDataset(entry);
        }
    }
}

From source file: org.rsna.ctp.stdstages.ArchiveImportService.java

private void expandTAR(File tar, File dir) {
    try {
        TarArchiveInputStream tais = new TarArchiveInputStream(new FileInputStream(tar));
        TarArchiveEntry tae;
        while ((tae = tais.getNextTarEntry()) != null) {
            if (!tae.isDirectory()) {
                FileOutputStream fos = new FileOutputStream(new File(dir, tae.getName()));
                byte[] buf = new byte[4096];
                long count = tae.getSize();
                while (count > 0) {
                    int n = tais.read(buf, 0, buf.length);
                    fos.write(buf, 0, n);
                    count -= n;
                }
                fos.flush();
                fos.close();
            }
        }
        tais.close();
    } catch (Exception ex) {
        logger.warn("Unable to expand: \"" + tar + "\"", ex);
    }
}

From source file: org.savantbuild.io.tar.TarBuilderTest.java

private static void assertTarFileEquals(Path tarFile, String entry, Path original) throws IOException {
    InputStream is = Files.newInputStream(tarFile);
    if (tarFile.toString().endsWith(".gz")) {
        is = new GZIPInputStream(is);
    }

    try (TarArchiveInputStream tis = new TarArchiveInputStream(is)) {
        TarArchiveEntry tarArchiveEntry = tis.getNextTarEntry();
        while (tarArchiveEntry != null && !tarArchiveEntry.getName().equals(entry)) {
            tarArchiveEntry = tis.getNextTarEntry();
        }

        if (tarArchiveEntry == null) {
            fail("Tar [" + tarFile + "] is missing entry [" + entry + "]");
        }

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] buf = new byte[1024];
        int length;
        while ((length = tis.read(buf)) != -1) {
            baos.write(buf, 0, length);
        }

        assertEquals(Files.readAllBytes(original), baos.toByteArray());
        assertEquals(tarArchiveEntry.getSize(), Files.size(original));
        assertEquals(tarArchiveEntry.getUserName(), Files.getOwner(original).getName());
        assertEquals(tarArchiveEntry.getGroupName(),
                Files.readAttributes(original, PosixFileAttributes.class).group().getName());
    }
}

From source file: org.savantbuild.io.tar.TarTools.java

/**
 * Untars a TAR file. This also handles tar.gz files by checking the file extension. If the file extension ends in .gz
 * it will read the tarball through a GZIPInputStream.
 *
 * @param file     The TAR file.
 * @param to       The directory to untar to.
 * @param useGroup Determines if the group name in the archive is used.
 * @param useOwner Determines if the owner name in the archive is used.
 * @throws IOException If the untar fails.
 */
public static void untar(Path file, Path to, boolean useGroup, boolean useOwner) throws IOException {
    if (Files.notExists(to)) {
        Files.createDirectories(to);
    }

    InputStream is = Files.newInputStream(file);
    if (file.toString().endsWith(".gz")) {
        is = new GZIPInputStream(is);
    }

    try (TarArchiveInputStream tis = new TarArchiveInputStream(is)) {
        TarArchiveEntry entry;
        while ((entry = tis.getNextTarEntry()) != null) {
            Path entryPath = to.resolve(entry.getName());
            if (entry.isDirectory()) {
                // Skip directory entries that don't add any value
                if (entry.getMode() == 0 && entry.getGroupName() == null && entry.getUserName() == null) {
                    continue;
                }

                if (Files.notExists(entryPath)) {
                    Files.createDirectories(entryPath);
                }

                if (entry.getMode() != 0) {
                    Set<PosixFilePermission> permissions = FileTools.toPosixPermissions(entry.getMode());
                    Files.setPosixFilePermissions(entryPath, permissions);
                }

                if (useGroup && entry.getGroupName() != null && !entry.getGroupName().trim().isEmpty()) {
                    GroupPrincipal group = FileSystems.getDefault().getUserPrincipalLookupService()
                            .lookupPrincipalByGroupName(entry.getGroupName());
                    Files.getFileAttributeView(entryPath, PosixFileAttributeView.class).setGroup(group);
                }

                if (useOwner && entry.getUserName() != null && !entry.getUserName().trim().isEmpty()) {
                    UserPrincipal user = FileSystems.getDefault().getUserPrincipalLookupService()
                            .lookupPrincipalByName(entry.getUserName());
                    Files.getFileAttributeView(entryPath, PosixFileAttributeView.class).setOwner(user);
                }
            } else {
                if (Files.notExists(entryPath.getParent())) {
                    Files.createDirectories(entryPath.getParent());
                }

                if (Files.isRegularFile(entryPath)) {
                    if (Files.size(entryPath) == entry.getSize()) {
                        continue;
                    } else {
                        Files.delete(entryPath);
                    }
                }

                Files.createFile(entryPath);

                try (OutputStream os = Files.newOutputStream(entryPath)) {
                    byte[] ba = new byte[1024];
                    int read;
                    while ((read = tis.read(ba)) != -1) {
                        if (read > 0) {
                            os.write(ba, 0, read);
                        }
                    }
                }

                if (entry.getMode() != 0) {
                    Set<PosixFilePermission> permissions = FileTools.toPosixPermissions(entry.getMode());
                    Files.setPosixFilePermissions(entryPath, permissions);
                }

                if (useGroup && entry.getGroupName() != null && !entry.getGroupName().trim().isEmpty()) {
                    GroupPrincipal group = FileSystems.getDefault().getUserPrincipalLookupService()
                            .lookupPrincipalByGroupName(entry.getGroupName());
                    Files.getFileAttributeView(entryPath, PosixFileAttributeView.class).setGroup(group);
                }

                if (useOwner && entry.getUserName() != null && !entry.getUserName().trim().isEmpty()) {
                    UserPrincipal user = FileSystems.getDefault().getUserPrincipalLookupService()
                            .lookupPrincipalByName(entry.getUserName());
                    Files.getFileAttributeView(entryPath, PosixFileAttributeView.class).setOwner(user);
                }
            }
        }
    }
}
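
As a usage note for the method above (a sketch; the paths are hypothetical and TarTools is assumed to be on the classpath), getSize() is what lets untar skip files that are already extracted with the same length:

import java.nio.file.Path;
import java.nio.file.Paths;

import org.savantbuild.io.tar.TarTools;

public class UntarExample {
    public static void main(String[] args) throws Exception {
        Path archive = Paths.get("build/dist/example.tar.gz"); // hypothetical archive
        Path target = Paths.get("build/extracted");
        TarTools.untar(archive, target, false, false); // don't apply owner/group names from the archive
    }
}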

From source file: org.sead.sda.LandingPage.java

protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    if (request.getParameter("tag") != null || request.getRequestURI().contains("/sda/list")) {
        String tag = "";

        if (request.getParameter("tag") != null) {
            tag = request.getParameter("tag");
        } else {
            tag = request.getRequestURI().split("/sda/list=")[1];
        }

        // here we check whether the BagIt zip file for this RO exists in SDA
        SFTP sftp = new SFTP();
        String bagName = getBagNameFromId(tag);
        if (sftp.doesFileExist(Constants.sdaPath + bagName + "/" + bagName + ".zip")) {
            System.out.println("Bag Exists in SDA...");
            request.setAttribute("bagExists", "true");
        }
        sftp.disConnectSessionAndChannel();

        request.setAttribute("obTag", tag);
        request.setAttribute("landingPageUrl", Constants.landingPage);

        String keyList_cp = "@id|status|message|preferences";

        String keyList_ore = "keyword|contact|creator|publication date|title|abstract|license|is version of|similarto|title|describes|@context|aggregates|has part|identifier|label|size";
        //

        keyMapList = new HashMap<String, String>();

        Shimcalls shim = new Shimcalls();
        // Fix: accessing RO from c3pr here is wrong. we have to access the ore map in the
        // published package and read properties from that.
        JSONObject cp = shim.getResearchObject(tag);

        if (cp.isEmpty()) {
            RequestDispatcher dispatcher = request.getRequestDispatcher("/ro.jsp");
            request.setAttribute("roExists", "false");
            dispatcher.forward(request, response);
            return;
        }

        request.setAttribute("roExists", "true");
        SeadMon.addLog(MonConstants.Components.LANDING_PAGE, tag, MonConstants.EventType.ACCESS);

        keyMap(cp, keyList_cp);

        shim.getObjectID(cp, "@id");
        String oreUrl = shim.getID();
        JSONObject oreFile = shim.getResearchObjectORE(oreUrl);
        keyMap(oreFile, keyList_ore);

        JSONObject describes = (JSONObject) oreFile.get(keyMapList.get("describes"));
        Map<String, List<String>> roProperties = new HashMap<String, List<String>>();
        Map<String, String> downloadList = new HashMap<String, String>();
        Map<String, String> linkedHashMap = new LinkedHashMap<String, String>();
        Map<String, String> linkedHashMapTemp = new LinkedHashMap<String, String>();
        Map<String, String> newDownloadList = new LinkedHashMap<String, String>();

        // extract properties from ORE
        JSONArray status = (JSONArray) cp.get(keyMapList.get("Status".toLowerCase()));
        String doi = "No DOI Found"; // handle this as an exception
        String pubDate = null;
        for (Object st : status) {
            JSONObject jsonStatus = (JSONObject) st;
            String stage = (String) jsonStatus.get("stage");
            if ("Success".equals(stage)) {
                doi = (String) jsonStatus.get("message");
                pubDate = (String) jsonStatus.get("date");
            }
        }
        roProperties.put("DOI", Arrays.asList(doi));
        roProperties.put("Publication Date", Arrays.asList(pubDate));
        roProperties.put("Full Metadata",
                Arrays.asList(Constants.landingPage + "/metadata/" + tag + "/oremap"));
        addROProperty("Creator", describes, roProperties);
        //            addROProperty("Publication Date", describes, roProperties);
        addROProperty("Title", describes, roProperties);
        addROProperty("Abstract", describes, roProperties);
        addROProperty("Contact", describes, roProperties);
        addROProperty("Keyword", describes, roProperties);

        JSONObject preferences = (JSONObject) cp.get(keyMapList.get("Preferences".toLowerCase()));

        //addROProperty_License("License", preferences, cp, roProperties);
        addROProperty("License", preferences, roProperties);

        // check access rights
        if (isRORestricted(preferences)) {
            request.setAttribute("accessRestricted", "true");
            List<String> rights = new ArrayList<String>();
            rights.add("Restricted");
            roProperties.put("Access Rights", rights);
        }

        //Map<String, String> properties = new HashMap<String, String>();
        //String Label = properties.get("Label");

        // extract Live Data Links from ORE
        String liveCopy = null;
        if (describes.get(keyMapList.get("Is Version Of".toLowerCase())) != null) {
            String versionOf = describes.get(keyMapList.get("Is Version Of".toLowerCase())).toString();
            if (versionOf.startsWith("http")) {
                liveCopy = versionOf;
            } else if (describes.get(keyMapList.get("similarTo".toLowerCase())) != null) {
                String similar = describes.get(keyMapList.get("similarTo".toLowerCase())).toString();
                similar = similar.substring(0, similar.indexOf("/resteasy") + 1);
                liveCopy = similar + "#collection?uri=" + versionOf;
            }
        }
        if (liveCopy != null) {
            List<String> liveCopyList = new ArrayList<String>();
            if (shim.validUrl(liveCopy)) {
                liveCopyList.add(liveCopy);
            } else {
                liveCopyList.add("Not Available");
            }
            roProperties.put("Live Data Links", liveCopyList);
        }

        // set properties as an attribute
        request.setAttribute("roProperties", roProperties);

        // String title = describes.get(keyMapList.get("Title".toLowerCase())).toString();

        // extract file names from tar archive in SDA
        String requestURI = request.getRequestURI();

        if (requestURI.contains("/sda/list")) {
            int c = 0;
            String[] requestURIsda = requestURI.split("/");
            for (String item : requestURIsda) {
                if (item.equals("sda")) {
                    c++;
                }
            }
            if (c % 2 != 0) {

                //extract RO hierarchy
                try {
                    NewOREmap oreMap = new NewOREmap(oreFile, keyMapList);
                    downloadList = oreMap.getHierarchy();

                    Set<String> nameList = downloadList.keySet();

                    for (String name : nameList) {
                        String[] name_split = name.split("/");
                        String size = null;
                        if (downloadList.get(name) != null) {
                            int bytes = Integer.parseInt(downloadList.get(name));

                            int kb = bytes / 1024;
                            int mb = kb / 1024;
                            int gb = mb / 1024;
                            if (bytes <= 1024) {
                                size = bytes + " Bytes";
                            } else if (kb <= 1024) {
                                size = kb + " KB";
                            } else if (mb <= 1024) {
                                size = mb + " MB";
                            } else {
                                size = gb + " GB";
                            }
                        }

                        String temp = null;
                        if (name_split.length <= 2 && size != null) {

                            temp = "<span style='padding-left:" + 30 * (name_split.length - 2) + "px'>"
                                    + name_split[name_split.length - 1] + "</span>";
                            linkedHashMap.put(name, temp);
                        } else {

                            temp = "<span style='padding-left:" + 30 * (name_split.length - 2) + "px'>" + "|__"
                                    + name_split[name_split.length - 1] + "</span>";
                            linkedHashMapTemp.put(name, temp);
                        }

                        newDownloadList.put(name, size);

                    }

                    for (String key : linkedHashMapTemp.keySet()) {
                        linkedHashMap.put(key, linkedHashMapTemp.get(key));
                    }
                } catch (Exception e) {
                    System.err.println("Landing Page OREmap error: inaccurate keys");
                }

            }

            // set download list as an attribute
            // set linkedHashMap as an attribute
        }
        request.setAttribute("downloadList", newDownloadList);
        request.setAttribute("linkedHashMap", linkedHashMap);

        // forward the user to get_id UI
        RequestDispatcher dispatcher = request.getRequestDispatcher("/ro.jsp");
        dispatcher.forward(request, response);

    } else if (!request.getRequestURI().contains("bootstrap")) {

        // collection title is the last part of the request URI
        String requestURI = request.getRequestURI();
        String newURL = requestURI.substring(requestURI.lastIndexOf("sda/") + 4);
        String title = null;
        String filename = null;

        if (!newURL.contains("/")) {
            title = newURL;
        } else {
            title = newURL.split("/")[0];
            filename = newURL.substring(newURL.indexOf("/") + 1);
        }
        title = URLDecoder.decode(title, "UTF-8");
        newURL = URLDecoder.decode(newURL, "UTF-8");

        // don't allow downloads for restricted ROs
        // Fix: use ORE from package
        Shimcalls shim = new Shimcalls();
        JSONObject ro = shim.getResearchObject(title);

        String keyList_cp = "@id|status|message|preferences";
        keyMapList = new HashMap<String, String>();
        keyMap(ro, keyList_cp);

        if (isRORestricted((JSONObject) ro.get(keyMapList.get("Preferences".toLowerCase())))) {
            return;
        }

        SFTP sftp = new SFTP();
        String bgName = getBagNameFromId(title);
        String target = Constants.sdaPath + bgName + "/" + bgName + ".zip";
        if (!sftp.doesFileExist(target)) {
            target = Constants.sdaPath + title + "/" + title + ".tar";
        }

        System.out.println("title " + title);
        System.out.println("filename " + filename);

        if (!title.equals("*")) {
            InputStream inStream = sftp.downloadFile(target);

            String mimeType = "application/octet-stream";
            response.setContentType(mimeType);

            String headerKey = "Content-Disposition";

            String headerValue = null;
            if (filename != null) {
                if (filename.contains("/")) {
                    filename = filename.substring(filename.lastIndexOf("/") + 1);
                }
                headerValue = String.format("attachment; filename=\"%s\"", filename);
            } else {
                headerValue = String.format("attachment; filename=\"%s\"",
                        target.substring(target.lastIndexOf("/") + 1));
            }
            response.setHeader(headerKey, headerValue);

            OutputStream outStream = response.getOutputStream();
            if (newURL.equals(title)) {
                //download tar file
                SeadMon.addLog(MonConstants.Components.LANDING_PAGE, title, MonConstants.EventType.DOWNLOAD);
                System.out.println("SDA download path: " + target);
                byte[] buffer = new byte[4096];
                int bytesRead;

                while ((bytesRead = inStream.read(buffer)) != -1) {
                    outStream.write(buffer, 0, bytesRead);
                }
            } else {
                //download individual files
                if (target.contains(".tar")) {
                    System.out.println("SDA download path: " + Constants.sdaPath + newURL);
                    TarArchiveInputStream myTarFile = new TarArchiveInputStream(inStream);

                    TarArchiveEntry entry = null;
                    String individualFiles;
                    int offset;

                    while ((entry = myTarFile.getNextTarEntry()) != null) {
                        individualFiles = entry.getName();

                        if (individualFiles.equals(newURL)) {
                            byte[] content = new byte[(int) entry.getSize()];
                            offset = 0;
                            myTarFile.read(content, offset, content.length - offset);
                            outStream.write(content);
                        }
                    }
                    myTarFile.close();
                } else {
                    System.out.println("SDA download path: " + Constants.sdaPath + bgName + "/" + bgName
                            + ".zip/" + bgName + "/" + newURL.substring(newURL.indexOf("/") + 1));
                    BufferedInputStream bin = new BufferedInputStream(inStream);
                    ZipInputStream myZipFile = new ZipInputStream(bin);

                    ZipEntry ze = null;
                    while ((ze = myZipFile.getNextEntry()) != null) {
                        if (ze.getName().equals(bgName + "/" + newURL.substring(newURL.indexOf("/") + 1))) {
                            byte[] buffer = new byte[4096];
                            int len;
                            while ((len = myZipFile.read(buffer)) != -1) {
                                outStream.write(buffer, 0, len);
                            }
                            break;
                        }
                    }
                }
            }
            inStream.close();
            outStream.close();
        }

        sftp.disConnectSessionAndChannel();
    }

}
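
The servlet above buffers each requested file in memory as a byte array of entry.getSize() bytes before writing it to the response. An alternative sketch (not part of the SEAD code; the helper class is made up for illustration) streams the entry directly, since a TarArchiveInputStream stops reading at the current entry boundary; getSize() could instead be used to set a Content-Length header:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.utils.IOUtils;

final class TarEntryStreamer {

    /** Streams the named entry to out without allocating entry.getSize() bytes up front. */
    static boolean streamEntry(InputStream rawTarStream, String entryName, OutputStream out) throws IOException {
        try (TarArchiveInputStream tar = new TarArchiveInputStream(rawTarStream)) {
            TarArchiveEntry entry;
            while ((entry = tar.getNextTarEntry()) != null) {
                if (entry.getName().equals(entryName)) {
                    // reads stop at the entry boundary, so copying the remaining bytes is safe
                    IOUtils.copy(tar, out);
                    return true;
                }
            }
        }
        return false;
    }
}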

From source file: org.vafer.jdeb.DataBuilder.java

/**
 * Build the data archive of the deb from the provided DataProducers
 *
 * @param producers
 * @param output
 * @param checksums
 * @param compression the compression method used for the data file
 * @return
 * @throws java.security.NoSuchAlgorithmException
 * @throws java.io.IOException
 * @throws org.apache.commons.compress.compressors.CompressorException
 */
BigInteger buildData(Collection<DataProducer> producers, File output, final StringBuilder checksums,
        Compression compression) throws NoSuchAlgorithmException, IOException, CompressorException {

    final File dir = output.getParentFile();
    if (dir != null && (!dir.exists() || !dir.isDirectory())) {
        throw new IOException("Cannot write data file at '" + output.getAbsolutePath() + "'");
    }

    final TarArchiveOutputStream tarOutputStream = new TarArchiveOutputStream(
            compression.toCompressedOutputStream(new FileOutputStream(output)));
    tarOutputStream.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);

    final MessageDigest digest = MessageDigest.getInstance("MD5");

    final Total dataSize = new Total();

    final List<String> addedDirectories = new ArrayList<String>();
    final DataConsumer receiver = new DataConsumer() {
        public void onEachDir(String dirname, String linkname, String user, int uid, String group, int gid,
                int mode, long size) throws IOException {
            dirname = fixPath(dirname);

            createParentDirectories(dirname, user, uid, group, gid);

            // The directory passed in explicitly by the caller also gets the passed-in mode.  (Unlike
            // the parent directories for now.  See related comments at "int mode =" in
            // createParentDirectories, including about a possible bug.)
            createDirectory(dirname, user, uid, group, gid, mode, 0);

            console.info("dir: " + dirname);
        }

        public void onEachFile(InputStream inputStream, String filename, String linkname, String user, int uid,
                String group, int gid, int mode, long size) throws IOException {
            filename = fixPath(filename);

            createParentDirectories(filename, user, uid, group, gid);

            final TarArchiveEntry entry = new TarArchiveEntry(filename, true);

            entry.setUserName(user);
            entry.setUserId(uid);
            entry.setGroupName(group);
            entry.setGroupId(gid);
            entry.setMode(mode);
            entry.setSize(size);

            tarOutputStream.putArchiveEntry(entry);

            dataSize.add(size);
            digest.reset();

            Utils.copy(inputStream, new DigestOutputStream(tarOutputStream, digest));

            final String md5 = Utils.toHex(digest.digest());

            tarOutputStream.closeArchiveEntry();

            console.info("file:" + entry.getName() + " size:" + entry.getSize() + " mode:" + entry.getMode()
                    + " linkname:" + entry.getLinkName() + " username:" + entry.getUserName() + " userid:"
                    + entry.getUserId() + " groupname:" + entry.getGroupName() + " groupid:"
                    + entry.getGroupId() + " modtime:" + entry.getModTime() + " md5: " + md5);

            // append to file md5 list
            checksums.append(md5).append(" ").append(entry.getName()).append('\n');
        }

        public void onEachLink(String path, String linkName, boolean symlink, String user, int uid,
                String group, int gid, int mode) throws IOException {
            path = fixPath(path);

            createParentDirectories(path, user, uid, group, gid);

            final TarArchiveEntry entry = new TarArchiveEntry(path,
                    symlink ? TarArchiveEntry.LF_SYMLINK : TarArchiveEntry.LF_LINK);
            entry.setLinkName(linkName);

            entry.setUserName(user);
            entry.setUserId(uid);
            entry.setGroupName(group);
            entry.setGroupId(gid);
            entry.setMode(mode);

            tarOutputStream.putArchiveEntry(entry);
            tarOutputStream.closeArchiveEntry();

            console.info("link:" + entry.getName() + " mode:" + entry.getMode() + " linkname:"
                    + entry.getLinkName() + " username:" + entry.getUserName() + " userid:" + entry.getUserId()
                    + " groupname:" + entry.getGroupName() + " groupid:" + entry.getGroupId());
        }

        private void createDirectory(String directory, String user, int uid, String group, int gid, int mode,
                long size) throws IOException {
            // All dirs should end with "/" when created, or the test DebAndTaskTestCase.testTarFileSet() thinks it's a file
            // and so thinks it has the wrong permission.
            // This consistency also helps when checking if a directory already exists in addedDirectories.

            if (!directory.endsWith("/")) {
                directory += "/";
            }

            if (!addedDirectories.contains(directory)) {
                TarArchiveEntry entry = new TarArchiveEntry(directory, true);
                entry.setUserName(user);
                entry.setUserId(uid);
                entry.setGroupName(group);
                entry.setGroupId(gid);
                entry.setMode(mode);
                entry.setSize(size);

                tarOutputStream.putArchiveEntry(entry);
                tarOutputStream.closeArchiveEntry();
                addedDirectories.add(directory); // so addedDirectories consistently have "/" for finding duplicates.
            }
        }

        private void createParentDirectories(String filename, String user, int uid, String group, int gid)
                throws IOException {
            String dirname = fixPath(new File(filename).getParent());

            // Debian packages must have parent directories created
            // before sub-directories or files can be installed.
            // For example, if an entry of ./usr/lib/foo/bar existed
            // in a .deb package, but the ./usr/lib/foo directory didn't
            // exist, the package installation would fail.  The .deb must
            // then have an entry for ./usr/lib/foo and then ./usr/lib/foo/bar

            if (dirname == null) {
                return;
            }

            // The loop below will create entries for all parent directories
            // to ensure that .deb packages will install correctly.
            String[] pathParts = dirname.split("/");
            String parentDir = "./";
            for (int i = 1; i < pathParts.length; i++) {
                parentDir += pathParts[i] + "/";
                // Make it so the dirs can be traversed by users.
                // We could instead try something more granular, like setting the directory
                // permission to 'rx' for each of the 3 user/group/other read permissions
                // found on the file being added (ie, only if "other" has read
                // permission on the main node, then add o+rx permission on all the containing
                // directories, same w/ user & group), and then also we'd have to
                // check the parentDirs collection of those already added to
                // see if those permissions need to be similarly updated.  (Note, it hasn't
                // been demonstrated, but there might be a bug if a user specifically
                // requests a directory with certain permissions,
                // that has already been auto-created because it was a parent, and if so, go set
                // the user-requested mode on that directory instead of this automatic one.)
                // But for now, keeping it simple by making every dir a+rx.   Examples are:
                // drw-r----- fs/fs   # what you get with setMode(mode)
                // drwxr-xr-x fs/fs   # Usable. Too loose?
                int mode = TarArchiveEntry.DEFAULT_DIR_MODE;

                createDirectory(parentDir, user, uid, group, gid, mode, 0);
            }
        }
    };

    try {
        for (DataProducer data : producers) {
            data.produce(receiver);
        }
    } finally {
        tarOutputStream.close();
    }

    console.info("Total size: " + dataSize);

    return dataSize.count;
}
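
The jdeb example above is the writing side of the same API: setSize(long) must be called before putArchiveEntry so that the value later reported by getSize() is recorded in the entry header. A small, self-contained round-trip sketch (illustrative only, not from any of the projects above):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

public class SizeRoundTrip {
    public static void main(String[] args) throws IOException {
        byte[] payload = "hello tar".getBytes(StandardCharsets.UTF_8);

        // write a single entry; the size must be set before putArchiveEntry
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (TarArchiveOutputStream out = new TarArchiveOutputStream(bytes)) {
            TarArchiveEntry entry = new TarArchiveEntry("hello.txt");
            entry.setSize(payload.length);
            out.putArchiveEntry(entry);
            out.write(payload);
            out.closeArchiveEntry();
        }

        // read it back; getSize() returns the size recorded in the header
        try (TarArchiveInputStream in = new TarArchiveInputStream(
                new ByteArrayInputStream(bytes.toByteArray()))) {
            TarArchiveEntry entry = in.getNextTarEntry();
            System.out.println(entry.getName() + " -> " + entry.getSize() + " bytes");
        }
    }
}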