List of usage examples for com.amazonaws.services.s3.transfer Upload waitForCompletion
public void waitForCompletion() throws AmazonClientException, AmazonServiceException, InterruptedException;
From source file:be.ugent.intec.halvade.uploader.AWSUploader.java
License:Open Source License
public void Upload(String key, InputStream input, long size) throws InterruptedException { ObjectMetadata meta = new ObjectMetadata(); if (SSE)/*w w w .ja va 2 s. co m*/ meta.setServerSideEncryption(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); meta.setContentLength(size); Upload upload = tm.upload(existingBucketName, key, input, meta); try { // Or you can block and wait for the upload to finish upload.waitForCompletion(); Logger.DEBUG("Upload complete."); } catch (AmazonClientException amazonClientException) { Logger.DEBUG("Unable to upload file, upload was aborted."); Logger.EXCEPTION(amazonClientException); } }
From source file:be.ugent.intec.halvade.uploader.CopyOfAWSUploader.java
License:Open Source License
public void Upload(String key, InputStream input, long size) throws InterruptedException { ObjectMetadata meta = new ObjectMetadata(); meta.setServerSideEncryption(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); meta.setContentLength(size);//from www . ja v a2s . c o m Upload upload = tm.upload(existingBucketName, key, input, meta); try { // Or you can block and wait for the upload to finish upload.waitForCompletion(); Logger.DEBUG("Upload complete."); } catch (AmazonClientException amazonClientException) { Logger.DEBUG("Unable to upload file, upload was aborted."); Logger.EXCEPTION(amazonClientException); } }
From source file:br.puc_rio.ele.lvc.interimage.core.datamanager.AWSSource.java
License:Apache License
public void put(String from, String to, Resource resource) { try {//from w ww .jav a 2s .c om File file = new File(from); PutObjectRequest putObjectRequest = new PutObjectRequest(_bucket, to, file); if (resource instanceof SplittableResource) { SplittableResource rsrc = (SplittableResource)resource; if (rsrc.getType() == SplittableResource.IMAGE) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } } else if (resource instanceof DefaultResource) { DefaultResource rsrc = (DefaultResource)resource; if (rsrc.getType() == DefaultResource.TILE) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } else if (rsrc.getType() == DefaultResource.FUZZY_SET) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } else if (rsrc.getType() == DefaultResource.SHAPE) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } } Upload upload = _manager.upload(putObjectRequest); upload.waitForCompletion(); System.out.println("AWSSource: Uploaded file - " + to); } catch (Exception e) { System.err.println("Source put failed: " + e.getMessage()); } }
From source file:br.puc_rio.ele.lvc.interimage.core.datamanager.AWSSource.java
License:Apache License
public void put(String from, String to, Resource resource) { try {//from w ww . j a v a 2s. c om File file = new File(from); PutObjectRequest putObjectRequest = new PutObjectRequest(_bucket, to, file); if (resource instanceof SplittableResource) { SplittableResource rsrc = (SplittableResource) resource; if (rsrc.getType() == SplittableResource.IMAGE) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } } else if (resource instanceof DefaultResource) { DefaultResource rsrc = (DefaultResource) resource; if (rsrc.getType() == DefaultResource.TILE) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } else if (rsrc.getType() == DefaultResource.FUZZY_SET) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } else if (rsrc.getType() == DefaultResource.SHAPE) { putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead); // public for all } } Upload upload = _manager.upload(putObjectRequest); upload.waitForCompletion(); System.out.println("AWSSource: Uploaded file - " + to); } catch (Exception e) { System.err.println("Source put failed: " + e.getMessage()); } }
From source file:cloudExplorer.Put.java
License:Open Source License
public void run() { try {/* w w w. j a va 2 s . com*/ AWSCredentials credentials = new BasicAWSCredentials(access_key, secret_key); AmazonS3 s3Client = new AmazonS3Client(credentials, new ClientConfiguration().withSignerOverride("S3SignerType")); s3Client.setEndpoint(endpoint); TransferManager tx = new TransferManager(s3Client); File file = new File(what); PutObjectRequest putRequest; if (!rrs) { putRequest = new PutObjectRequest(bucket, ObjectKey, file); } else { putRequest = new PutObjectRequest(bucket, ObjectKey, file) .withStorageClass(StorageClass.ReducedRedundancy); } MimetypesFileTypeMap mimeTypesMap = new MimetypesFileTypeMap(); String mimeType = mimeTypesMap.getContentType(file); mimeType = mimeTypesMap.getContentType(file); ObjectMetadata objectMetadata = new ObjectMetadata(); if (encrypt) { objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); } if ((ObjectKey.contains(".html")) || ObjectKey.contains(".txt")) { objectMetadata.setContentType("text/html"); } else { objectMetadata.setContentType(mimeType); } long t1 = System.currentTimeMillis(); putRequest.setMetadata(objectMetadata); Upload myUpload = tx.upload(putRequest); myUpload.waitForCompletion(); tx.shutdownNow(); long t2 = System.currentTimeMillis(); long diff = t2 - t1; if (!mainFrame.perf) { if (terminal) { System.out.print("\nUploaded object: " + ObjectKey + " in " + diff / 1000 + " second(s).\n"); } else { mainFrame.jTextArea1 .append("\nUploaded object: " + ObjectKey + " in " + diff / 1000 + " second(s)."); } } } catch (AmazonServiceException ase) { if (NewJFrame.gui) { mainFrame.jTextArea1.append("\n\nError Message: " + ase.getMessage()); mainFrame.jTextArea1.append("\nHTTP Status Code: " + ase.getStatusCode()); mainFrame.jTextArea1.append("\nAWS Error Code: " + ase.getErrorCode()); mainFrame.jTextArea1.append("\nError Type: " + ase.getErrorType()); mainFrame.jTextArea1.append("\nRequest ID: " + ase.getRequestId()); calibrate(); } else { System.out.print("\n\nError 
Message: " + ase.getMessage()); System.out.print("\nHTTP Status Code: " + ase.getStatusCode()); System.out.print("\nAWS Error Code: " + ase.getErrorCode()); System.out.print("\nError Type: " + ase.getErrorType()); System.out.print("\nRequest ID: " + ase.getRequestId()); } } catch (Exception put) { } calibrate(); }
From source file:com.att.aro.core.cloud.aws.AwsRepository.java
License:Apache License
@Override public TransferState put(File file) { try {//from w w w. ja v a 2 s . co m PutObjectRequest req = new PutObjectRequest(bucketName, file.getName(), file); Upload myUpload = transferMgr.upload(req); myUpload.waitForCompletion(); transferMgr.shutdownNow(); return myUpload.getState(); } catch (AmazonServiceException ase) { LOGGER.error("Error Message: " + ase.getMessage()); } catch (Exception exception) { LOGGER.error(exception.getMessage(), exception); } return null; }
From source file:com.gendevs.bedrock.appengine.service.storage.StorageProvider.java
License:Apache License
/**
 * Uploads a profile photo to S3 with a public-read ACL and returns the object
 * key it was stored under. The key is derived from the organisation id, the
 * entity id and the entity {@code type} ("app" or "user"); any other type
 * leaves the file name {@code null}. Upload failures are printed and the
 * (possibly never-uploaded) key is returned regardless.
 */
private String uploadFile(InputStream imageInputStream, String contentType, String organisationId,
        String id, String type) {
    initalizeS3();
    String fileName = null;
    switch (type) {
    case "app":
        fileName = String.format(StorageConstants.APP_PROFILE_PHOTOS, organisationId, id);
        break;
    case "user":
        fileName = String.format(StorageConstants.USER_PROFILE_PHOTOS, organisationId, id);
        break;
    }
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentType(contentType);
    PutObjectRequest request = new PutObjectRequest(bucketName, fileName, imageInputStream, metadata);
    request.setCannedAcl(CannedAccessControlList.PublicRead);
    Upload upload = manager.upload(request);
    try {
        upload.waitForCompletion();
    } catch (AmazonClientException | InterruptedException e) {
        // Same handling as before for every failure kind: print and fall
        // through (AmazonServiceException is a subclass of AmazonClientException).
        e.printStackTrace();
    }
    return fileName;
}
From source file:com.github.abhinavmishra14.aws.s3.service.impl.AwsS3IamServiceImpl.java
License:Open Source License
@Override public boolean uploadObjectAndListenProgress(final String bucketName, final String fileName, final InputStream inputStream, final CannedAccessControlList cannedAcl) throws AmazonClientException, AmazonServiceException, IOException { LOGGER.info(//from w ww. j a va 2 s. c om "uploadObjectAndListenProgress invoked, bucketName: {} , fileName: {} and cannedAccessControlList: {}", bucketName, fileName, cannedAcl); File tempFile = null; PutObjectRequest putObjectRequest = null; Upload upload = null; try { // Create temporary file from stream to avoid 'out of memory' exception tempFile = AWSUtil.createTempFileFromStream(inputStream); putObjectRequest = new PutObjectRequest(bucketName, fileName, tempFile).withCannedAcl(cannedAcl); final TransferManager transferMgr = new TransferManager(s3client); upload = transferMgr.upload(putObjectRequest); // You can poll your transfer's status to check its progress if (upload.isDone()) { LOGGER.info("Start: {} , State: {} and Progress (%): {}", upload.getDescription(), upload.getState(), upload.getProgress().getPercentTransferred()); } // Add progressListener to listen asynchronous notifications about your transfer's progress // Uncomment below code snippet during development /*upload.addProgressListener(new ProgressListener() { public void progressChanged(ProgressEvent event) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Transferred bytes: " + (long) event.getBytesTransferred()); } } });*/ try { //Block the current thread and wait for completion //If the transfer fails AmazonClientException will be thrown upload.waitForCompletion(); } catch (AmazonClientException | InterruptedException excp) { LOGGER.error("Exception occured while waiting for transfer: ", excp); } } finally { AWSUtil.deleteTempFile(tempFile); // Delete the temporary file once uploaded } LOGGER.info("End: {} , State: {} , Progress (%): {}", upload.getDescription(), upload.getState(), upload.getProgress().getPercentTransferred()); return upload.isDone(); }
From source file:com.github.abhinavmishra14.aws.s3.service.impl.AwsS3IamServiceImpl.java
License:Open Source License
@Override public boolean uploadObjectAndListenProgress(final String bucketName, final String fileName, final InputStream inputStream, final boolean isPublicAccessible) throws AmazonClientException, AmazonServiceException, IOException { LOGGER.info(/*w w w.j a v a2 s .co m*/ "uploadObjectAndListenProgress invoked, bucketName: {} , fileName: {} and isPublicAccessible: {}", bucketName, fileName, isPublicAccessible); File tempFile = null; PutObjectRequest putObjectRequest = null; Upload upload = null; try { // Create temporary file from stream to avoid 'out of memory' exception tempFile = AWSUtil.createTempFileFromStream(inputStream); putObjectRequest = new PutObjectRequest(bucketName, fileName, tempFile); if (isPublicAccessible) { putObjectRequest.setCannedAcl(CannedAccessControlList.PublicRead); } final TransferManager transferMgr = new TransferManager(s3client); upload = transferMgr.upload(putObjectRequest); // You can poll your transfer's status to check its progress if (upload.isDone()) { LOGGER.info("Start: {} , State: {} and Progress (%): {}", upload.getDescription(), upload.getState(), upload.getProgress().getPercentTransferred()); } // Add progressListener to listen asynchronous notifications about your transfer's progress // Uncomment below code snippet during development /*upload.addProgressListener(new ProgressListener() { public void progressChanged(ProgressEvent event) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Transferred bytes: " + (long) event.getBytesTransferred()); } } });*/ try { //Block the current thread and wait for completion //If the transfer fails AmazonClientException will be thrown upload.waitForCompletion(); } catch (AmazonClientException | InterruptedException excp) { LOGGER.error("Exception occured while waiting for transfer: ", excp); } } finally { AWSUtil.deleteTempFile(tempFile); // Delete the temporary file once uploaded } LOGGER.info("End: {} , State: {} , Progress (%): {}", upload.getDescription(), upload.getState(), 
upload.getProgress().getPercentTransferred()); return upload.isDone(); }
From source file:com.github.kaklakariada.aws.sam.task.S3UploadTask.java
License:Open Source License
private void transferFileToS3(final String key) { final long fileSizeMb = file.length() / (1024 * 1024); getLogger().lifecycle("Uploading {} MB from file {} to {}...", fileSizeMb, file, getS3Url()); final TransferManager transferManager = createTransferManager(); final Instant start = Instant.now(); final Upload upload = transferManager.upload(config.getDeploymentBucket(), key, file); try {//from w ww . j av a 2 s. c o m upload.waitForCompletion(); final Duration uploadDuration = Duration.between(start, Instant.now()); getLogger().lifecycle("Uploaded {} to {} in {}", file, getS3Url(), uploadDuration); } catch (final InterruptedException e) { Thread.currentThread().interrupt(); throw new AssertionError("Upload interrupted", e); } }