List of usage examples for com.amazonaws.services.s3.transfer Upload waitForCompletion
public void waitForCompletion() throws AmazonClientException, AmazonServiceException, InterruptedException;
From source file:com.github.rholder.esthree.command.Put.java
License:Apache License
@Override public Integer call() throws Exception { TransferManager t = new TransferManager(amazonS3Client); ObjectMetadata objectMetadata = new ObjectMetadata(); objectMetadata.setUserMetadata(metadata); Upload u = t.upload(new PutObjectRequest(bucket, key, inputFile).withMetadata(objectMetadata)); // TODO this listener spews out garbage >100% on a retry, add a test to verify if (progressListener != null) { progressListener.withTransferProgress(new TransferProgressWrapper(u.getProgress())); u.addProgressListener(progressListener); }/*from ww w. j a v a 2 s . c o m*/ try { u.waitForCompletion(); } finally { t.shutdownNow(); } return 0; }
From source file:com.hpe.caf.worker.datastore.s3.S3DataStore.java
License:Apache License
private String store(InputStream inputStream, String partialReference, Long length) throws DataStoreException { try {// ww w . j a va 2 s . co m String fullReference = partialReference + UUID.randomUUID().toString(); ObjectMetadata objectMetadata = new ObjectMetadata(); if (length != null) { objectMetadata.setContentLength(length); } TransferManager transferManager = new TransferManager(amazonS3Client); Upload upload = transferManager.upload(bucketName, fullReference, inputStream, objectMetadata); upload.waitForCompletion(); // amazonS3Client.putObject(bucketName, fullReference, inputStream, objectMetadata); transferManager.shutdownNow(false); return fullReference; } catch (Exception ex) { errors.incrementAndGet(); throw new DataStoreException("Could not store input stream.", ex); } }
From source file:com.liferay.portal.store.s3.S3Store.java
License:Open Source License
protected void putObject(long companyId, long repositoryId, String fileName, String versionLabel, File file) throws PortalException { Upload upload = null; try {// www.j a va2 s . com String key = _s3KeyTransformer.getFileVersionKey(companyId, repositoryId, fileName, versionLabel); PutObjectRequest putObjectRequest = new PutObjectRequest(_bucketName, key, file); putObjectRequest.withStorageClass(_storageClass); upload = _transferManager.upload(putObjectRequest); upload.waitForCompletion(); } catch (AmazonClientException ace) { throw transform(ace); } catch (InterruptedException ie) { upload.abort(); Thread thread = Thread.currentThread(); thread.interrupt(); } }
From source file:com.netflix.hollow.example.producer.infrastructure.S3Publisher.java
License:Apache License
private void uploadFile(File file, String s3ObjectName, ObjectMetadata metadata) { try (InputStream is = new BufferedInputStream(new FileInputStream(file))) { Upload upload = s3TransferManager.upload(bucketName, s3ObjectName, is, metadata); upload.waitForCompletion(); } catch (Exception e) { throw new RuntimeException(e); }// w ww . java 2s. co m }
From source file:com.netflix.hollow.example.producer.infrastructure.S3Publisher.java
License:Apache License
/** * Write a list of all of the state versions to S3. * @param newVersion/*from w ww . j ava 2 s . c om*/ */ private synchronized void updateSnapshotIndex(Long newVersion) { /// insert the new version into the list int idx = Collections.binarySearch(snapshotIndex, newVersion); int insertionPoint = Math.abs(idx) - 1; snapshotIndex.add(insertionPoint, newVersion); /// build a binary representation of the list -- gap encoded variable-length integers byte[] idxBytes = buidGapEncodedVarIntSnapshotIndex(); /// indicate the Content-Length ObjectMetadata metadata = new ObjectMetadata(); metadata.setHeader("Content-Length", (long) idxBytes.length); /// upload the new file content. try (InputStream is = new ByteArrayInputStream(idxBytes)) { Upload upload = s3TransferManager.upload(bucketName, getSnapshotIndexObjectName(blobNamespace), is, metadata); upload.waitForCompletion(); } catch (Exception e) { throw new RuntimeException(e); } }
From source file:com.nike.cerberus.operation.gateway.PublishLambdaOperation.java
License:Apache License
@Override public void run(final PublishLambdaCommand command) { final URL artifactUrl = command.getArtifactUrl(); final BaseOutputs outputParameters = configStore.getBaseStackOutputs(); final String configBucketName = outputParameters.getConfigBucketName(); if (StringUtils.isBlank(configBucketName)) { final String errorMessage = "The specified environment isn't configured properly!"; logger.error(errorMessage);//from ww w . jav a 2 s .c om throw new IllegalStateException(errorMessage); } initClient(configBucketName); final File filePath = downloadArtifact(artifactUrl); try { final Upload upload = transferManager.upload(configBucketName, command.getLambdaName().getBucketKey(), filePath); logger.info("Uploading lambda artifact."); upload.waitForCompletion(); logger.info("Uploading complete."); } catch (InterruptedException e) { logger.error("Interrupted while waiting for upload to complete!", e); } finally { transferManager.shutdownNow(false); } }
From source file:com.projectlaver.service.ListingService.java
License:Open Source License
/** * Public methods// w w w . j av a2s . co m */ @Transactional(readOnly = false) public Listing create(Listing listing) throws Exception { // set the expiration for one year from now if unset if (listing.getExpires() == null) { listing.setExpires(this.addDays(new Date(), 365)); } // merge the user (reattach to DB) User user = listing.getSeller(); User mergedUser = this.em.merge(user); listing.setSeller(mergedUser); if (listing.getImageAsFile() != null) { // upload the image preview to the public S3 bucket AWSCredentials myCredentials = new BasicAWSCredentials(this.s3accessKey, this.s3secretKey); TransferManager tx = new TransferManager(myCredentials); Upload myUpload = tx.upload(this.s3publicBucketName, listing.getImageFilename(), listing.getImageAsFile()); myUpload.waitForCompletion(); } if (listing.getContentFiles() != null && listing.getContentFiles().size() > 0) { Set<ContentFile> contentFiles = listing.getContentFiles(); AWSCredentials myCredentials = new BasicAWSCredentials(this.s3accessKey, this.s3secretKey); TransferManager tx = new TransferManager(myCredentials); for (ContentFile file : contentFiles) { // upload the digital content to the private S3 bucket Upload myUpload = tx.upload(this.s3privateBucketName, file.getContentFilename(), file.getDigitalContentAsFile()); myUpload.waitForCompletion(); } } return this.listingRepository.save(listing); }
From source file:com.projectlaver.util.VoucherService.java
License:Open Source License
/**
 * Uploads the pdf file to the private S3 bucket.
 *
 * <p>Fix over the original: the {@code TransferManager} was never shut down, leaking its
 * worker thread pool on every call; it is now released in a {@code finally} block.
 * Interrupt status is also restored when the wait is interrupted.
 *
 * @param pdfFilename destination object key in the private bucket
 * @param pdf         local temp file containing the generated PDF
 * @throws RuntimeException if the upload fails
 */
void savePdf(String pdfFilename, File pdf) {
    AWSCredentials myCredentials = new BasicAWSCredentials(this.s3accessKey, this.s3secretKey);
    TransferManager tx = new TransferManager(myCredentials);
    try {
        Upload myUpload = tx.upload(this.s3privateBucketName, pdfFilename, pdf);
        myUpload.waitForCompletion();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        this.logger.error(String.format(
                "Exception while trying to upload to S3 the PDF using the temp file with path: %s", pdf.getPath()),
                e);
        throw new RuntimeException("Unable to upload the PDF file to S3.", e);
    } catch (Exception e) {
        this.logger.error(String.format(
                "Exception while trying to upload to S3 the PDF using the temp file with path: %s", pdf.getPath()),
                e);
        throw new RuntimeException("Unable to upload the PDF file to S3.", e);
    } finally {
        // release the TransferManager's thread pool (also shuts down its internal client)
        tx.shutdownNow();
    }
}
From source file:com.shelfmap.simplequery.domain.impl.DefaultBlobReference.java
License:Apache License
/**
 * Writes the given object's content to the blob and blocks until the transfer finishes.
 *
 * <p>Service-level and client-level AWS failures are translated into
 * {@link BlobOutputException} with the cause attached. An interruption while waiting is
 * logged and the thread's interrupt status restored; the method then returns normally.
 *
 * @param object   the content to store
 * @param metadata metadata to attach to the stored blob
 * @throws BlobOutputException if the AWS request or response fails
 */
@Override
public void setContent(T object, ObjectMetadata metadata) throws BlobOutputException {
    Upload pendingUpload = setContentAsync(object, metadata);

    try {
        pendingUpload.waitForCompletion();
    } catch (AmazonServiceException serviceFailure) {
        // AmazonServiceException must precede AmazonClientException (it's a subclass)
        throw new BlobOutputException(
                "AWS returned an error response, or client can not understand the response.", serviceFailure);
    } catch (AmazonClientException clientFailure) {
        throw new BlobOutputException("Client could not send request, or could not receive the response.",
                clientFailure);
    } catch (InterruptedException interruption) {
        LOGGER.warn("Thread was interrupted. Program will continue but something wrong might be occured.",
                interruption);
        Thread.currentThread().interrupt();
    }
}
From source file:com.vitembp.services.interfaces.AmazonSimpleStorageService.java
License:Open Source License
/** * Uploads a file to S3 storage and sets its ACL to allow public access. * @param toUpload The file to uploadPublic. * @param destination The destination in the bucket to store the file. * @throws java.io.IOException If an I/O exception occurs while uploading * the file./*from w w w .j a va 2 s . co m*/ */ public void uploadPublic(File toUpload, String destination) throws IOException { try { // create a request that makes the object public PutObjectRequest req = new PutObjectRequest(this.bucketName, destination, toUpload); req.setCannedAcl(CannedAccessControlList.PublicRead); // uploadPublic the file and wait for completion Upload xfer = this.transferManager.upload(req); xfer.waitForCompletion(); } catch (AmazonServiceException ex) { LOGGER.error("Exception uploading " + toUpload.toString(), ex); throw new IOException("Exception uploading " + toUpload.toString(), ex); } catch (AmazonClientException | InterruptedException ex) { LOGGER.error("Exception uploading " + toUpload.toString(), ex); throw new IOException("Exception uploading " + toUpload.toString(), ex); } }