List of usage examples for com.amazonaws.services.s3.transfer.TransferManager.shutdownNow()
public void shutdownNow()
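The listings below are taken from open-source projects. As a quick orientation first, here is a minimal sketch (not taken from any of them) of the usual pattern: build the TransferManager, wait for the transfer, and call shutdownNow() in a finally block so the manager's internal thread pool is released even if the transfer fails. The bucket name, key, and file path are placeholders.

    import java.io.File;

    import com.amazonaws.services.s3.transfer.TransferManager;
    import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
    import com.amazonaws.services.s3.transfer.Upload;

    public class ShutdownNowSketch {

        public static void main(String[] args) throws InterruptedException {
            // Placeholder bucket, key, and file used only for illustration.
            String bucketName = "example-bucket";
            String keyName = "example/key.bin";
            File file = new File("/tmp/example.bin");

            // Build a TransferManager with default settings (it creates and owns its own S3 client).
            TransferManager transferManager = TransferManagerBuilder.standard().build();
            try {
                // Start the upload and block until it finishes.
                Upload upload = transferManager.upload(bucketName, keyName, file);
                upload.waitForCompletion();
            } finally {
                // Forcefully release the TransferManager's threads; without this the JVM may delay on exit.
                transferManager.shutdownNow();
            }
        }
    }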
From source file: org.finra.herd.dao.impl.S3DaoImpl.java
License: Apache License
/**
 * Performs a file/directory transfer.
 *
 * @param params the parameters.
 * @param transferer a transferer that knows how to perform the transfer.
 *
 * @return the results.
 * @throws InterruptedException if a problem is encountered.
 */
private S3FileTransferResultsDto performTransfer(final S3FileTransferRequestParamsDto params, Transferer transferer)
    throws InterruptedException {
    // Create a transfer manager.
    TransferManager transferManager = getTransferManager(params);

    try {
        // Start a stop watch to keep track of how long the transfer takes.
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();

        // Perform the transfer.
        Transfer transfer = transferer.performTransfer(transferManager);
        TransferProgress transferProgress = transfer.getProgress();

        logTransferProgress(transferProgress);

        long stepCount = 0;

        // Loop until the transfer is complete.
        do {
            Thread.sleep(sleepIntervalsMillis);
            stepCount++;

            // Log progress status every 30 seconds and when transfer is complete.
            if (transfer.isDone() || stepCount % 300 == 0) {
                logTransferProgress(transferProgress);
            }
        } while (!transfer.isDone());

        // Stop the stop watch and create a results object.
        stopWatch.stop();

        // If the transfer failed, throw the underlying AWS exception if we can determine one. Otherwise, throw our own exception.
        TransferState transferState = transfer.getState();
        if (transferState == TransferState.Failed) {
            // The waitForException method should return the underlying AWS exception since the state is "Failed". It should not block since the
            // transfer is already "done" per previous code checking "isDone".
            AmazonClientException amazonClientException = transfer.waitForException();

            // If the returned exception is null, we weren't able to get the underlying AWS exception so just throw our own exception.
            // This is unlikely since the transfer failed, but it's better to handle the possibility just in case.
            if (amazonClientException == null) {
                throw new IllegalStateException(
                    "The transfer operation \"" + transfer.getDescription() + "\" failed for an unknown reason.");
            }

            // Throw the underlying Amazon exception.
            throw amazonClientException;
        }
        // Ensure the transfer completed. If not, throw an exception.
        else if (transferState != TransferState.Completed) {
            throw new IllegalStateException("The transfer operation \"" + transfer.getDescription()
                + "\" did not complete successfully. Current state: \"" + transferState + "\".");
        }

        // TransferProgress.getBytesTransferred() is not populated for S3 Copy objects.
        if (!(transfer instanceof Copy)) {
            // Sanity check for the number of bytes transferred.
            Assert.isTrue(transferProgress.getBytesTransferred() >= transferProgress.getTotalBytesToTransfer(),
                String.format("Actual number of bytes transferred is less than expected (actual: %d bytes; expected: %d bytes).",
                    transferProgress.getBytesTransferred(), transferProgress.getTotalBytesToTransfer()));
        }

        // Create the results object and populate it with the standard data.
        S3FileTransferResultsDto results = new S3FileTransferResultsDto();
        results.setDurationMillis(stopWatch.getTime());
        results.setTotalBytesTransferred(transfer.getProgress().getBytesTransferred());
        results.setTotalFilesTransferred(1L);

        if (transfer instanceof MultipleFileUpload) {
            // For upload directory, we need to calculate the total number of files transferred differently.
            results.setTotalFilesTransferred((long) ((MultipleFileUpload) transfer).getSubTransfers().size());
        }
        else if (transfer instanceof MultipleFileDownload) {
            // For download directory, we need to calculate the total number of files differently.
            results.setTotalFilesTransferred((long) listDirectory(params).size());
        }

        // Return the results.
        return results;
    }
    finally {
        // Shutdown the transfer manager to release resources. If this isn't done, the JVM may delay upon exiting.
        transferManager.shutdownNow();
    }
}
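The finally block above is the point of this listing: shutdownNow() is what releases the TransferManager's threads. Note that the no-argument shutdownNow() also shuts down the underlying Amazon S3 client. When that client is shared with other code, the shutdownNow(boolean) overload can release only the transfer threads. Below is a minimal sketch under that assumption; the method name and parameters are illustrative, and the client is assumed to be created and owned elsewhere.

    import java.io.File;

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.transfer.TransferManager;
    import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
    import com.amazonaws.services.s3.transfer.Upload;

    public class SharedClientShutdownSketch {

        /**
         * Uploads a single file with a TransferManager built around a shared S3 client,
         * then shuts down only the transfer threads so the caller can keep using the client.
         */
        public static void uploadWithSharedClient(AmazonS3 sharedS3Client, String bucketName, String keyName, File file)
            throws InterruptedException {
            TransferManager transferManager = TransferManagerBuilder.standard().withS3Client(sharedS3Client).build();
            try {
                Upload upload = transferManager.upload(bucketName, keyName, file);
                upload.waitForCompletion();
            } finally {
                // Passing false keeps the shared AmazonS3 client open; shutdownNow() would close it too.
                transferManager.shutdownNow(false);
            }
        }
    }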
From source file: org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
License: Open Source License
private void multiPartUpload(PutObjectRequest req)
    throws AmazonServiceException, AmazonClientException, InterruptedException {
    TransferManager tx = null;
    try {
        if (awsCredentials != null)
            tx = new TransferManager(awsCredentials);
        else
            tx = new TransferManager(new InstanceProfileCredentialsProvider());
        Upload myUpload = tx.upload(req);
        myUpload.waitForCompletion();
    } finally {
        if (tx != null)
            tx.shutdownNow();
    }
}
From source file: org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
License: Open Source License
private void multiPartDownload(String keyName, File f)
    throws AmazonServiceException, AmazonClientException, InterruptedException {
    TransferManager tx = null;
    try {
        if (awsCredentials != null)
            tx = new TransferManager(awsCredentials);
        else
            tx = new TransferManager(new InstanceProfileCredentialsProvider());
        Download myDownload = tx.download(this.name, keyName, f);
        myDownload.waitForCompletion();
    } finally {
        if (tx != null)
            tx.shutdownNow();
    }
}
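Both BatchAwsS3ChunkStore methods build a fresh TransferManager from the one-argument constructors, which later 1.x releases of the AWS SDK for Java deprecate in favor of TransferManagerBuilder. A hedged sketch of the equivalent builder-based construction follows; the factory method name is illustrative, and region resolution is left to the SDK's default chain.

    import com.amazonaws.auth.AWSCredentials;
    import com.amazonaws.auth.AWSStaticCredentialsProvider;
    import com.amazonaws.auth.InstanceProfileCredentialsProvider;
    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3ClientBuilder;
    import com.amazonaws.services.s3.transfer.TransferManager;
    import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

    public class TransferManagerFactorySketch {

        /** Builds a TransferManager the non-deprecated way, mirroring the credential logic above. */
        public static TransferManager newTransferManager(AWSCredentials awsCredentials) {
            AmazonS3 s3 = AmazonS3ClientBuilder.standard()
                .withCredentials(awsCredentials != null
                    ? new AWSStaticCredentialsProvider(awsCredentials)
                    : InstanceProfileCredentialsProvider.getInstance())
                .build();
            // The TransferManager owns this client, so shutdownNow() may safely close it as well.
            return TransferManagerBuilder.standard().withS3Client(s3).build();
        }
    }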
From source file: org.ow2.proactive.scheduler.examples.S3ConnectorDownloader.java
License: Open Source License
/**
 * Download a list of files from S3. <br>
 * Requires a bucket name. <br>
 * Requires a key prefix. <br>
 *
 * @param bucketName
 * @param keyPrefix
 * @param dirPath
 * @param pause
 * @param s3Client
 */
private void downloadDir(String bucketName, String keyPrefix, String dirPath, boolean pause, AmazonS3 s3Client) {
    getOut().println("downloading to directory: " + dirPath + (pause ? " (pause)" : ""));
    TransferManager transferManager = TransferManagerBuilder.standard().withS3Client(s3Client).build();
    try {
        MultipleFileDownload xfer = transferManager.downloadDirectory(bucketName, keyPrefix, new File(dirPath));
        // loop with Transfer.isDone()
        SchedulerExamplesUtils.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        SchedulerExamplesUtils.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        getErr().println(e.getMessage());
        System.exit(1);
    } finally {
        transferManager.shutdownNow();
    }
}
From source file: org.ow2.proactive.scheduler.examples.S3ConnectorDownloader.java
License: Open Source License
/**
 * Download a file from S3. <br>
 * Requires a bucket name. <br>
 * Requires a key prefix. <br>
 *
 * @param bucketName
 * @param keyName
 * @param filePath
 * @param pause
 * @param s3Client
 */
private void downloadFile(String bucketName, String keyName, String filePath, boolean pause, AmazonS3 s3Client) {
    getOut().println("Downloading to file: " + filePath + (pause ? " (pause)" : ""));
    File f = new File(filePath);
    TransferManager xferMgr = TransferManagerBuilder.standard().withS3Client(s3Client).build();
    try {
        Download xfer = xferMgr.download(bucketName, keyName, f);
        // loop with Transfer.isDone()
        SchedulerExamplesUtils.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        SchedulerExamplesUtils.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        getErr().println(e.getMessage());
        System.exit(1);
    } finally {
        xferMgr.shutdownNow();
    }
}
From source file: org.ow2.proactive.scheduler.examples.S3ConnectorUploader.java
License: Open Source License
/**
 * Upload a local directory to S3. <br>
 * Requires a bucket name. <br>
 * If recursive is set to true, upload all subdirectories recursively.
 *
 * @param dirPath local directory to upload
 * @param bucketName
 * @param keyPrefix
 * @param recursive
 * @param pause
 * @param s3Client
 */
private void uploadDir(String dirPath, String bucketName, String keyPrefix, boolean recursive, boolean pause,
    AmazonS3 s3Client) {
    getOut().println(
        "directory: " + dirPath + (recursive ? " (recursive)" : "") + (pause ? " (" + PAUSE + ")" : ""));
    File folder = new File(dirPath);
    String keyName = (keyPrefix != null) ? Paths.get(keyPrefix, folder.getName()).toString() : folder.getName();
    TransferManager transferManager = TransferManagerBuilder.standard().withS3Client(s3Client).build();
    try {
        MultipleFileUpload uploader = transferManager.uploadDirectory(bucketName, keyName, folder, recursive);
        // loop with Transfer.isDone()
        SchedulerExamplesUtils.showTransferProgress(uploader);
        // or block with Transfer.waitForCompletion()
        SchedulerExamplesUtils.waitForCompletion(uploader);
    } catch (AmazonServiceException e) {
        getErr().println(e.getErrorMessage());
        System.exit(1);
    }
    transferManager.shutdownNow();
}
From source file: org.ow2.proactive.scheduler.examples.S3ConnectorUploader.java
License: Open Source License
/**
 * Upload a local file to S3. <br>
 * Requires a bucket name. <br>
 *
 * @param filePath
 * @param bucketName
 * @param keyPrefix
 * @param pause
 * @param s3Client
 */
private void uploadFile(String filePath, String bucketName, String keyPrefix, boolean pause, AmazonS3 s3Client) {
    getOut().println("file: " + filePath + (pause ? " (" + PAUSE + ")" : ""));
    File file = new File(filePath);
    String keyName = (keyPrefix != null) ? Paths.get(keyPrefix, file.getName()).toString() : file.getName();
    TransferManager transferManager = TransferManagerBuilder.standard().withS3Client(s3Client).build();
    try {
        Upload uploader = transferManager.upload(bucketName, keyName, file);
        // loop with Transfer.isDone()
        SchedulerExamplesUtils.showTransferProgress(uploader);
        // or block with Transfer.waitForCompletion()
        SchedulerExamplesUtils.waitForCompletion(uploader);
    } catch (AmazonServiceException e) {
        getErr().println(e.getErrorMessage());
        System.exit(1);
    }
    transferManager.shutdownNow();
}
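The SchedulerExamplesUtils helpers referenced in the four ProActive examples are part of that project and are not shown in this listing. As a rough idea of what such helpers typically do (a sketch under that assumption, not the project's actual code), a polling loop over Transfer.isDone() and a blocking wait might look like this:

    import com.amazonaws.AmazonClientException;
    import com.amazonaws.services.s3.transfer.Transfer;
    import com.amazonaws.services.s3.transfer.TransferProgress;

    public class TransferProgressSketch {

        /** Polls the transfer roughly once per second and prints the percentage completed. */
        public static void showTransferProgress(Transfer transfer) throws InterruptedException {
            while (!transfer.isDone()) {
                TransferProgress progress = transfer.getProgress();
                System.out.printf("Transferred: %.0f%%%n", progress.getPercentTransferred());
                Thread.sleep(1000);
            }
        }

        /** Blocks until the transfer finishes and reports any failure. */
        public static void waitForCompletion(Transfer transfer) throws InterruptedException {
            try {
                transfer.waitForCompletion();
            } catch (AmazonClientException e) {
                System.err.println("Transfer failed: " + e.getMessage());
            }
        }
    }

Whatever form the helpers take, the calls to shutdownNow() at the end of each example are what release the TransferManager's transfer threads once the work is done.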