Usage examples for com.amazonaws.services.s3.transfer.Upload.waitForCompletion()
public void waitForCompletion() throws AmazonClientException, AmazonServiceException, InterruptedException;
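Both examples on this page follow the same pattern: obtain an Upload handle from TransferManager.upload(...), then block on waitForCompletion() until the transfer finishes or fails. Below is a minimal, self-contained sketch of that pattern; the bucket name, key, and local file path are placeholders, and the TransferManagerBuilder / shutdownNow(false) lifecycle handling is one reasonable choice, not something taken from the examples that follow.

import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;

import java.io.File;

public class WaitForCompletionExample {
    public static void main(String[] args) throws InterruptedException {
        // TransferManagerBuilder is the non-deprecated way to create a TransferManager in SDK v1
        TransferManager tm = TransferManagerBuilder.standard()
                .withS3Client(AmazonS3ClientBuilder.defaultClient())
                .build();
        try {
            // "my-bucket", "my/key" and the local path are placeholders
            Upload upload = tm.upload("my-bucket", "my/key", new File("/tmp/data.bin"));
            // Blocks the calling thread until the transfer completes;
            // failures surface as AmazonClientException/AmazonServiceException,
            // and InterruptedException is thrown if the waiting thread is interrupted.
            upload.waitForCompletion();
        } finally {
            // Release the TransferManager's thread pool; false keeps the underlying S3 client usable
            tm.shutdownNow(false);
        }
    }
}

waitForCompletion() rethrows transfer failures as AmazonClientException (or its AmazonServiceException subclass) and propagates InterruptedException, which is why both examples below either wrap or declare those exceptions.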
From source file: org.springframework.integration.aws.s3.core.DefaultAmazonS3Operations.java
License: Apache License
/**
 * The implementation puts the given {@link File} instance to the provided bucket against
 * the given key.
 *
 * @param bucketName The bucket on S3 where this object is to be put
 * @param key The key against which this Object is to be stored in S3
 * @param file resource to be uploaded to S3
 * @param objectACL the Object's Access controls for the object to be uploaded
 * @param userMetadata The user's metadata to be associated with the object uploaded
 * @param stringContentMD5 The MD5 sum of the contents of the file to be uploaded
 */
@Override
public void doPut(String bucketName, String key, File file, AmazonS3ObjectACL objectACL,
        Map<String, String> userMetadata, String stringContentMD5) {

    ObjectMetadata metadata = new ObjectMetadata();
    PutObjectRequest request = new PutObjectRequest(bucketName, key, file);
    request.withMetadata(metadata);

    if (stringContentMD5 != null) {
        metadata.setContentMD5(stringContentMD5);
    }

    if (userMetadata != null) {
        metadata.setUserMetadata(userMetadata);
    }

    Upload upload;
    try {
        upload = transferManager.upload(request);
    } catch (Exception e) {
        throw new AmazonS3OperationException(credentials.getAccessKey(), bucketName, key,
                "Encountered Exception while invoking upload on multipart/single thread file, "
                        + "see nested exceptions for more details", e);
    }

    // Wait till the upload completes; the call to putObject is synchronous
    try {
        if (logger.isInfoEnabled()) {
            logger.info("Waiting for Upload to complete");
        }
        upload.waitForCompletion();
        if (logger.isInfoEnabled()) {
            logger.info("Upload completed");
        }
    } catch (Exception e) {
        throw new AmazonS3OperationException(credentials.getAccessKey(), bucketName, key,
                "Encountered Exception while uploading the multipart/single thread file, "
                        + "see nested exceptions for more details", e);
    }

    // Now that the object is present on S3, set the access control list on it.
    // Note that it is not possible to set the object ACL with the put object request,
    // hence the two operations cannot be atomic: the object may be uploaded and the
    // ACL not set due to some failure.
    if (objectACL != null) {
        if (logger.isInfoEnabled()) {
            logger.info("Setting Access control list for key " + key);
        }
        try {
            client.setObjectAcl(bucketName, key, getAccessControlList(bucketName, key, objectACL));
        } catch (Exception e) {
            throw new AmazonS3OperationException(credentials.getAccessKey(), bucketName, key,
                    "Encountered Exception while setting the Object ACL for key " + key
                            + ", see nested exceptions for more details", e);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Successfully set the object ACL");
        }
    }
}
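One detail worth noting in the example above: waitForCompletion() can throw InterruptedException, and the broad catch (Exception e) wraps it without restoring the thread's interrupt status. The sketch below is a hypothetical helper, not part of DefaultAmazonS3Operations, showing one way to wait while preserving the interrupt flag; the exception type and messages are illustrative.

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.transfer.Upload;

final class UploadWaiter {
    // Hypothetical helper: waits for an Upload while keeping the interrupt flag
    // intact for callers further up the stack.
    static void await(Upload upload) {
        try {
            upload.waitForCompletion();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            throw new IllegalStateException("Interrupted while waiting for S3 upload", e);
        } catch (AmazonClientException e) {
            // AmazonServiceException extends AmazonClientException, so service errors land here too
            throw new IllegalStateException("S3 upload failed", e);
        }
    }
}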
From source file: org.xmlsh.aws.gradle.s3.AmazonS3ProgressiveFileUploadTask.java
License: BSD License
@TaskAction
public void upload() throws InterruptedException {
    // to enable conventionMappings feature
    String bucketName = getBucketName();
    String key = getKey();
    File file = getFile();

    if (bucketName == null)
        throw new GradleException("bucketName is not specified");
    if (key == null)
        throw new GradleException("key is not specified");
    if (file == null)
        throw new GradleException("file is not specified");
    if (file.isFile() == false)
        throw new GradleException("file must be regular file");

    AmazonS3PluginExtension ext = getProject().getExtensions().getByType(AmazonS3PluginExtension.class);
    AmazonS3 s3 = ext.getClient();

    TransferManager s3mgr = new TransferManager(s3);
    getLogger().info("Uploading... s3://{}/{}", bucketName, key);

    Upload upload = s3mgr.upload(
            new PutObjectRequest(getBucketName(), getKey(), getFile()).withMetadata(getObjectMetadata()));
    upload.addProgressListener(new ProgressListener() {
        public void progressChanged(ProgressEvent event) {
            getLogger().info("  {}% uploaded", upload.getProgress().getPercentTransferred());
        }
    });
    upload.waitForCompletion();

    setResourceUrl(((AmazonS3Client) s3).getResourceUrl(bucketName, key));
    getLogger().info("Upload completed: {}", getResourceUrl());
}
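The anonymous listener above logs the transferred percentage on every progress event, which can be very chatty for large uploads. The sketch below is a hypothetical alternative, not part of the Gradle plugin, that reacts only to the transfer lifecycle events; the class name and messages are illustrative.

import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressEventType;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.transfer.Upload;

// Hypothetical listener: logs only lifecycle events instead of every byte-transfer notification.
final class TransferLifecycleListener implements ProgressListener {
    private final Upload upload;

    TransferLifecycleListener(Upload upload) {
        this.upload = upload;
    }

    @Override
    public void progressChanged(ProgressEvent event) {
        ProgressEventType type = event.getEventType();
        if (type == ProgressEventType.TRANSFER_STARTED_EVENT) {
            System.out.println("Upload started");
        } else if (type == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
            System.out.printf("Upload completed (%.1f%%)%n", upload.getProgress().getPercentTransferred());
        } else if (type == ProgressEventType.TRANSFER_FAILED_EVENT) {
            System.out.println("Upload failed");
        }
    }
}

It would be attached the same way as in the task above, e.g. upload.addProgressListener(new TransferLifecycleListener(upload)) before calling upload.waitForCompletion().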