Example usage for com.amazonaws.services.s3.model S3Object close


Introduction

On this page you can find example usage for com.amazonaws.services.s3.model.S3Object.close().

Prototype

@Override
public void close() throws IOException 

Document

Releases any underlying system resources.
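
S3Object implements Closeable and holds an open HTTP connection until it is closed, so close() (or exhausting and closing the wrapped content stream) should always be called once the content has been read. A minimal sketch of the usual pattern, assuming a hypothetical bucket and key and the default client configuration:

import java.io.IOException;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.util.IOUtils;

public class S3ObjectCloseExample {
    public static void main(String[] args) throws IOException {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        // try-with-resources invokes close(), releasing the underlying HTTP connection;
        // "example-bucket" and "example-key" are placeholders
        try (S3Object obj = s3.getObject("example-bucket", "example-key")) {
            byte[] data = IOUtils.toByteArray(obj.getObjectContent());
            System.out.println("read " + data.length + " bytes");
        }
    }
}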

Usage

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License:Open Source License
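
Here downloadFile stages the object in a temporary file, verifies an optional md5sum from the user metadata, then decrypts and decompresses the staged copy according to the metadata flags; in the simple-S3 path the S3Object is closed as soon as its content has been copied to the staging file.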

@Override
public void downloadFile(String nm, File to, String pp) throws IOException {
    this.s3clientLock.readLock().lock();
    try {
        while (nm.startsWith(File.separator))
            nm = nm.substring(1);
        String rnd = RandomGUID.getGuid();
        File p = new File(this.staged_sync_location, rnd);
        File z = new File(this.staged_sync_location, rnd + ".uz");
        File e = new File(this.staged_sync_location, rnd + ".de");
        while (z.exists()) {
            rnd = RandomGUID.getGuid();
            p = new File(this.staged_sync_location, rnd);
            z = new File(this.staged_sync_location, rnd + ".uz");
            e = new File(this.staged_sync_location, rnd + ".de");
        }
        if (nm.startsWith(File.separator))
            nm = nm.substring(1);
        String haName = EncyptUtils.encString(nm, Main.chunkStoreEncryptionEnabled);
        Map<String, String> mp = null;
        byte[] shash = null;
        try {
            if (this.simpleS3) {
                S3Object obj = null;
                SDFSLogger.getLog().debug("downloading " + pp + "/" + haName);
                obj = s3Service.getObject(this.name, pp + "/" + haName);
                BufferedInputStream in = new BufferedInputStream(obj.getObjectContent());
                BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(p));
                IOUtils.copy(in, out);
                out.flush();
                out.close();
                in.close();
                ObjectMetadata omd = s3Service.getObjectMetadata(name, pp + "/" + haName);
                mp = this.getUserMetaData(omd);
                SDFSLogger.getLog().debug("mp sz=" + mp.size());
                try {
                    if (obj != null)
                        obj.close();
                } catch (Exception e1) {
                    // ignore failures while releasing the connection
                }
            } else {
                SDFSLogger.getLog().debug("downloading " + pp + "/" + haName);
                this.multiPartDownload(pp + "/" + haName, p);
                ObjectMetadata omd = s3Service.getObjectMetadata(name, pp + "/" + haName);
                mp = this.getUserMetaData(omd);
                if (md5sum && mp.containsKey("md5sum")) {
                    shash = BaseEncoding.base64().decode(omd.getUserMetaDataOf("md5sum"));
                }
            }
            if (shash != null && !FileUtils.fileValid(p, shash))
                throw new IOException("file " + p.getPath() + " is corrupt");
            boolean encrypt = false;
            boolean lz4compress = false;
            if (mp.containsKey("encrypt")) {
                encrypt = Boolean.parseBoolean(mp.get("encrypt"));
            }
            if (mp.containsKey("lz4compress")) {
                lz4compress = Boolean.parseBoolean(mp.get("lz4compress"));
            }
            byte[] ivb = null;
            if (mp.containsKey("ivspec")) {
                ivb = BaseEncoding.base64().decode(mp.get("ivspec"));
            }
            SDFSLogger.getLog().debug("compress=" + lz4compress + " " + mp.get("lz4compress"));

            if (mp.containsKey("symlink")) {
                if (OSValidator.isWindows())
                    throw new IOException("unable to restore symlinks to windows");
                else {
                    String spth = EncyptUtils.decString(mp.get("symlink"), encrypt);
                    Path srcP = Paths.get(spth);
                    Path dstP = Paths.get(to.getPath());
                    Files.createSymbolicLink(dstP, srcP);
                }
            } else if (mp.containsKey("directory")) {
                to.mkdirs();
                FileUtils.setFileMetaData(to, mp, encrypt);
                p.delete();
            } else {
                if (encrypt) {
                    if (ivb != null) {
                        EncryptUtils.decryptFile(p, e, new IvParameterSpec(ivb));
                    } else {
                        EncryptUtils.decryptFile(p, e);
                    }
                    p.delete();
                    p = e;
                }
                if (lz4compress) {
                    CompressionUtils.decompressFile(p, z);
                    p.delete();
                    p = z;
                }

                File parent = to.getParentFile();
                if (!parent.exists())
                    parent.mkdirs();
                BufferedInputStream is = new BufferedInputStream(new FileInputStream(p));
                BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(to));
                IOUtils.copy(is, os);
                os.flush();
                os.close();
                is.close();
                FileUtils.setFileMetaData(to, mp, encrypt);
                SDFSLogger.getLog().debug("updated " + to + " sz=" + to.length());
            }

        } catch (Exception e1) {
            throw new IOException(e1);
        } finally {

            p.delete();
            z.delete();
            e.delete();
        }
    } finally {
        this.s3clientLock.readLock().unlock();
    }
}
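
Closing the object here releases the pooled HTTP connection once the staging copy is complete; wrapping the getObject call in try-with-resources would give the same guarantee even if the copy failed.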

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License:Open Source License
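
getHashMap downloads a serialized key map, parses it into key/value pairs, and closes the S3Object in the finally block so the connection is released even if parsing fails.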

@Override
public Map<String, Long> getHashMap(long id) throws IOException {

    String haName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
    S3Object kobj = null;
    this.s3clientLock.readLock().lock();
    try {
        kobj = s3Service.getObject(this.name, "keys/" + haName);
        String[] ks = this.getStrings(kobj);
        HashMap<String, Long> m = new HashMap<String, Long>(ks.length + 1);
        for (String k : ks) {
            String[] kv = k.split(":");
            m.put(kv[0], Long.parseLong(kv[1]));
        }

        return m;
    } finally {
        this.s3clientLock.readLock().unlock();
        try {
            kobj.close();
        } catch (Exception e) {
            // ignore failures while releasing the connection
        }
    }

}

From source file:org.plos.repo.service.S3StoreService.java

License:Open Source License
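
objectExists probes for an object by its checksum key and closes it immediately so the pooled connection is not leaked by a mere existence check.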

@Override
public boolean objectExists(RepoObject repoObject) {
    try {
        S3Object obj = s3Client.getObject(repoObject.getBucketName(), repoObject.getChecksum());

        if (obj == null) {
            return false;
        }

        obj.close();
        return true;
    } catch (Exception e) {
        return false;
    }
}
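
Fetching the whole object just to test existence opens a content stream that must be closed and still begins a download; where the SDK version provides it, AmazonS3.doesObjectExist(bucket, key) performs a metadata-only check and avoids holding a connection at all.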

From source file:oulib.aws.s3.S3TiffMetadataProcessorThread.java
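
This worker thread copies TIFF metadata from each source object to its counterpart in the target bucket, closing both S3Objects once the copy has finished.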

@Override
public void run() {

    String sourceBucketName = bookInfo.getBucketSourceName();
    String targetBucketName = bookInfo.getBucketTargetName();
    String bookName = bookInfo.getBookName();

    String bucketFolder = S3Util.S3_TIFF_METADATA_PROCESS_OUTPUT + File.separator + sourceBucketName;
    File bucketFolderFile = new File(bucketFolder);
    if (!bucketFolderFile.exists() || !bucketFolderFile.isDirectory()) {
        bucketFolderFile.mkdirs();
    }

    String bookPath = bucketFolder + File.separator + bookName;
    File bookFile = new File(bookPath);
    if (!bookFile.exists()) {
        bookFile.mkdir();
    }

    try {

        // Every book has a folder in the target bucket:
        Map targetBucketKeyMap = S3Util.getBucketObjectKeyMap(targetBucketName, bookName, s3client);
        if (!S3Util.folderExitsts(bookName, targetBucketKeyMap)) {
            S3Util.createFolder(targetBucketName, bookName, s3client);
        }

        for (String key : tiffList) {
            if (key.contains(".tif") && matchS3ObjKeyWithFilter(key, filter)
                    && targetBucketKeyMap.containsKey(key)
                    && !targetBucketKeyMap.containsKey(key.split(".tif")[0] + "-copied.tif")) {
                S3Object objectSource = s3client.getObject(new GetObjectRequest(sourceBucketName, key));
                S3Object objectTarget = s3client.getObject(new GetObjectRequest(targetBucketName, key));
                output += ("Start to copy metadata from the source object " + sourceBucketName + "/" + key
                        + " to target object " + targetBucketName + "/" + key + "\n");
                System.out.println("Start to copy metadata from the source object " + sourceBucketName + "/"
                        + key + " to target object " + targetBucketName + "/" + key + "\n");
                S3Util.copyS3ObjectTiffMetadata(s3client, objectSource, objectTarget);
                System.out.println("Finished copy metadata for the object with key=" + key + "\n");
                output += "Finished copy metadata for the object with key=" + key + "\n";
                try {
                    objectSource.close();
                    objectTarget.close();
                } catch (IOException ex) {
                    Logger.getLogger(S3TiffProcessorThread.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        System.out.println(Thread.currentThread().getName() + "'s job is done!");

    } catch (AmazonServiceException ase) {
        output += "Caught an AmazonServiceException, which means your request made it to Amazon S3, but was rejected with an error response for some reason.\n";
        output += "Error Message:    " + ase.getMessage();
        output += "HTTP Status Code: " + ase.getStatusCode();
        output += "AWS Error Code:   " + ase.getErrorCode();
        output += "Error Type:       " + ase.getErrorType();
        output += "Request ID:       " + ase.getRequestId();
        System.out.println(
                "Caught an AmazonServiceException, which means your request made it to Amazon S3, but was rejected with an error response for some reason.\n");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        output += "Caught an AmazonClientException, which means the client encountered an internal error while trying to communicate with S3, \nsuch as not being able to access the network.\n";
        output += "Error Message: " + ace.getMessage();
        System.out.println(
                "Caught an AmazonClientException, which means the client encountered an internal error while trying to communicate with S3, \nsuch as not being able to access the network.\n");
        System.out.println("Error Message: " + ace.getMessage());
    } finally {
        outputToFile(bookPath + File.separator + filter + ".txt");
    }
}

From source file:oulib.aws.s3.S3TiffProcessorThread.java
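
This variant generates a downsized TIFF derivative for each matching source object and closes the S3Object once the derivative has been written.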

@Override
public void run() {

    String sourceBucketName = bookInfo.getBucketSourceName();
    String targetBucketName = bookInfo.getBucketTargetName();
    String bookName = bookInfo.getBookName();

    String bucketFolder = S3Util.S3_SMALL_DERIVATIVE_OUTPUT + File.separator + sourceBucketName;
    File bucketFolderFile = new File(bucketFolder);
    if (!bucketFolderFile.exists() || !bucketFolderFile.isDirectory()) {
        bucketFolderFile.mkdirs();
    }

    String bookPath = bucketFolder + File.separator + bookName;
    File bookFile = new File(bookPath);
    if (!bookFile.exists()) {
        bookFile.mkdir();
    }

    try {

        // Every book has a folder in the target bucket:
        Map targetBucketKeyMap = S3Util.getBucketObjectKeyMap(targetBucketName, bookName, s3client);
        if (!S3Util.folderExitsts(bookName, targetBucketKeyMap)) {
            S3Util.createFolder(targetBucketName, bookName, s3client);
        }

        for (String key : tiffList) {
            if (key.contains(".tif") && matchS3ObjKeyWithFilter(key, filter)
                    && !targetBucketKeyMap.containsKey(key + ".tif")) {
                S3Object object = s3client.getObject(new GetObjectRequest(sourceBucketName, key));
                output += ("Start to generate smaller tif image for the object " + key + "\n");
                System.out.println("Start to generate smaller tif image for the object " + key + "\n");
                S3Util.generateSmallTiffWithTargetSize(s3client, object, targetBucketName,
                        bookInfo.getCompressionSize());
                System.out.println("Finished to generate smaller tif image for the object " + key + "\n");
                output += "Finished to generate smaller tif image for the object " + key + "\n";
                try {
                    object.close();
                } catch (IOException ex) {
                    Logger.getLogger(S3TiffProcessorThread.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        //                output += "Next Continuation Token : " + result.getNextContinuationToken();
        System.out.println(Thread.currentThread().getName() + "'s job is done!");
        //                req.setContinuationToken(result.getNextContinuationToken());
        //            } while(result.isTruncated() == true ); 

    } catch (AmazonServiceException ase) {
        output += "Caught an AmazonServiceException, which means your request made it to Amazon S3, but was rejected with an error response for some reason.\n";
        output += "Error Message:    " + ase.getMessage();
        output += "HTTP Status Code: " + ase.getStatusCode();
        output += "AWS Error Code:   " + ase.getErrorCode();
        output += "Error Type:       " + ase.getErrorType();
        output += "Request ID:       " + ase.getRequestId();
        System.out.println(
                "Caught an AmazonServiceException, which means your request made it to Amazon S3, but was rejected with an error response for some reason.\n");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        output += "Caught an AmazonClientException, which means the client encountered an internal error while trying to communicate with S3, \nsuch as not being able to access the network.\n";
        output += "Error Message: " + ace.getMessage();
        System.out.println(
                "Caught an AmazonClientException, which means the client encountered an internal error while trying to communicate with S3, \nsuch as not being able to access the network.\n");
        System.out.println("Error Message: " + ace.getMessage());
    } finally {
        outputToFile(bookPath + File.separator + filter + ".txt");
    }
}

From source file:raymond.mockftpserver.S3BucketFileSystem.java

License:Apache License
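
The mock FTP file system materializes an S3 object as a FileEntry, closing the S3Object in a finally block once the content has been copied.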

@Override
public FileSystemEntry getEntry(String path) {
    if (isFile(path)) {
        S3Object obj = s3.getObject(new GetObjectRequest(bucket, path));
        if (obj != null) {
            FileEntry file = new FileEntry(path);
            try {
                try {
                    IOUtils.copy(obj.getObjectContent(), file.createOutputStream(false));
                } finally {
                    obj.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            return file;
        }
    }
    return null;
}