List of usage examples for the com.amazonaws.services.s3.model.GetObjectRequest constructor
public GetObjectRequest(String bucketName, String key)
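Each example below is taken from an open-source project. As a quick orientation, here is a minimal, self-contained sketch of the same pattern; the bucket name, key, and client setup are placeholders, not taken from any of the examples:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.util.IOUtils;

public class GetObjectRequestSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder bucket and key; replace with values from your own account.
        String bucketName = "example-bucket";
        String key = "path/to/object.txt";

        AmazonS3 s3 = AmazonS3ClientBuilder.standard().build();

        GetObjectRequest request = new GetObjectRequest(bucketName, key);
        // getObject() streams the content; closing the S3Object releases the underlying HTTP connection.
        try (S3Object object = s3.getObject(request)) {
            String body = IOUtils.toString(object.getObjectContent());
            System.out.println(body);
        }
    }
}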
From source file:com.nextdoor.bender.operations.geo.GeoIpOperationFactory.java
License:Apache License
@Override
public void setConf(AbstractConfig config) {
    this.config = (GeoIpOperationConfig) config;
    AmazonS3Client client = this.s3Factory.newInstance();

    AmazonS3URI uri = new AmazonS3URI(this.config.getGeoLiteDb());
    GetObjectRequest req = new GetObjectRequest(uri.getBucket(), uri.getKey());
    S3Object obj = client.getObject(req);

    try {
        this.databaseReader = new DatabaseReader.Builder(obj.getObjectContent()).withCache(new CHMCache())
                .build();
    } catch (IOException e) {
        throw new ConfigurationException("Unable to read " + this.config.getGeoLiteDb(), e);
    }
}
From source file:com.nike.cerberus.config.CmsEnvPropertiesLoader.java
License:Apache License
private String getObject(String path) {
    final GetObjectRequest request = new GetObjectRequest(bucketName, path);
    try {
        S3Object s3Object = s3Client.getObject(request);
        InputStream object = s3Object.getObjectContent();
        return IOUtils.toString(object, Charset.defaultCharset());
    } catch (AmazonServiceException ase) {
        if (StringUtils.equalsIgnoreCase(ase.getErrorCode(), "NoSuchKey")) {
            final String errorMessage = String.format("The S3 object doesn't exist. Bucket: %s, Key: %s",
                    bucketName, request.getKey());
            logger.debug(errorMessage);
            throw new IllegalStateException(errorMessage);
        } else {
            logger.error("Unexpected error communicating with AWS.", ase);
            throw ase;
        }
    } catch (IOException e) {
        String errorMessage = String.format(
                "Unable to read contents of S3 object. Bucket: %s, Key: %s, Expected Encoding: %s",
                bucketName, request.getKey(), Charset.defaultCharset());
        logger.error(errorMessage);
        throw new IllegalStateException(errorMessage, e);
    }
}
From source file:com.nike.cerberus.service.S3StoreService.java
License:Apache License
public Optional<String> get(String path) {
    GetObjectRequest request = new GetObjectRequest(s3Bucket, getFullPath(path));
    try {
        S3Object s3Object = s3Client.getObject(request);
        InputStream object = s3Object.getObjectContent();
        return Optional.of(IOUtils.toString(object, ConfigConstants.DEFAULT_ENCODING));
    } catch (AmazonServiceException ase) {
        if (StringUtils.equalsIgnoreCase(ase.getErrorCode(), "NoSuchKey")) {
            logger.debug(String.format("The S3 object doesn't exist. Bucket: %s, Key: %s", s3Bucket,
                    request.getKey()));
            return Optional.empty();
        } else {
            logger.error("Unexpected error communicating with AWS.", ase);
            throw ase;
        }
    } catch (IOException e) {
        String errorMessage = String.format(
                "Unable to read contents of S3 object. Bucket: %s, Key: %s, Expected Encoding: %s",
                s3Bucket, request.getKey(), ConfigConstants.DEFAULT_ENCODING);
        logger.error(errorMessage);
        throw new UnexpectedDataEncodingException(errorMessage, e);
    }
}
From source file:com.qubole.presto.kinesis.s3config.S3TableConfigClient.java
License:Apache License
/**
 * Connect to S3 directory to look for new or updated table definitions and then
 * update the map.
 */
protected void updateTablesFromS3() {
    long now = System.currentTimeMillis();

    List<S3ObjectSummary> objectList = this.getObjectSummaries();
    AmazonS3Client s3client = this.clientManager.getS3Client();
    AmazonS3URI directoryURI = new AmazonS3URI(this.bucketUrl);

    for (S3ObjectSummary objInfo : objectList) {
        if (!this.internalMap.containsKey(objInfo.getKey())
                || objInfo.getLastModified().getTime() >= this.lastCheck) {
            // New or updated file, so we must read from AWS
            try {
                if (objInfo.getKey().endsWith("/")) {
                    continue;
                }

                log.info("Getting : %s - %s", objInfo.getBucketName(), objInfo.getKey());
                S3Object object = s3client
                        .getObject(new GetObjectRequest(objInfo.getBucketName(), objInfo.getKey()));

                StringBuilder resultStr = new StringBuilder("");
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(object.getObjectContent()))) {
                    boolean hasMore = true;
                    while (hasMore) {
                        String line = reader.readLine();
                        if (line != null) {
                            resultStr.append(line);
                        } else {
                            hasMore = false;
                        }
                    }

                    KinesisStreamDescription table = streamDescriptionCodec.fromJson(resultStr.toString());

                    internalMap.put(objInfo.getKey(), table);
                    log.info("Put table description into the map from %s", objInfo.getKey());
                } catch (IOException iox) {
                    log.error("Problem reading input stream from object.", iox);
                }
            } catch (AmazonServiceException ase) {
                StringBuilder sb = new StringBuilder();
                sb.append("Caught an AmazonServiceException, which means your request made it ");
                sb.append("to Amazon S3, but was rejected with an error response for some reason.\n");
                sb.append("Error Message: " + ase.getMessage());
                sb.append("HTTP Status Code: " + ase.getStatusCode());
                sb.append("AWS Error Code: " + ase.getErrorCode());
                sb.append("Error Type: " + ase.getErrorType());
                sb.append("Request ID: " + ase.getRequestId());
                log.error(sb.toString(), ase);
            } catch (AmazonClientException ace) {
                StringBuilder sb = new StringBuilder();
                sb.append("Caught an AmazonClientException, " + "which means the client encountered "
                        + "an internal error while trying to communicate" + " with S3, "
                        + "such as not being able to access the network.");
                sb.append("Error Message: " + ace.getMessage());
                log.error(sb.toString(), ace);
            }
        }
    } // end loop through object descriptions

    log.info("Completed updating table definitions from S3.");
    this.lastCheck = now;
    return;
}
From source file:com.rathravane.clerk.impl.s3.S3IamDb.java
License:Apache License
/**
 * Load an object to a stream.
 * @param key
 * @param os
 * @returns true if found, false if not found
 * @throws IamSvcException
 * @throws IamBadRequestException
 */
private boolean loadTo(String key, OutputStream os) throws IamSvcException {
    S3Object object = null;
    try {
        object = fDb.getObject(new GetObjectRequest(fBucketId, key));
        final InputStream is = object.getObjectContent();
        // s3 objects must be closed or will leak an HTTP connection
        rrStreamTools.copyStream(is, os);
        return true;
    } catch (AmazonServiceException x) {
        if (404 == x.getStatusCode())
            return false;
        throw new IamSvcException(x);
    } catch (AmazonClientException x) {
        throw new IamSvcException(x);
    } catch (IOException x) {
        throw new IamSvcException(x);
    } finally {
        if (object != null) {
            try {
                object.close();
            } catch (IOException e) {
                throw new IamSvcException(e);
            }
        }
    }
}
From source file:com.shelfmap.simplequery.domain.impl.DefaultBlobReference.java
License:Apache License
@Override
public T getContent() throws BlobRestoreException {
    // TODO: to work around strange behavior in Amazon S3, I download all the data into a file and create an InputStream from it.
    // If I process the stream obtained from s3.getObject().getObjectContent() directly,
    // the remote socket of the S3 object is sometimes closed mid-processing.
    // The same problem shows up in Google searches, but with no satisfactory answer.
    File temp = null;
    InputStream resourceStream = null;
    try {
        String bucket = resourceInfo.getBucketName();
        String key = resourceInfo.getKey();
        String version = resourceInfo.getVersionId();
        AmazonS3 s3 = getContext().getS3();
        GetObjectRequest request = version.isEmpty() ? new GetObjectRequest(bucket, key)
                : new GetObjectRequest(bucket, key, version);
        temp = File.createTempFile("simplequery-", ".tmp");
        s3.getObject(request, temp);
        resourceStream = new FileInputStream(temp);
        T content = getContentConverter().restoreObject(getObjectMetadata(), resourceStream);
        return content;
    } catch (IOException ex) {
        throw new BlobRestoreException(ex);
    } finally {
        IO.close(resourceStream, this);
        if (temp != null)
            temp.delete();
    }
}
From source file:com.shelfmap.simplequery.domain.impl.DefaultBlobReference.java
License:Apache License
private S3Object getS3ObjectRemote() {
    String bucket = resourceInfo.getBucketName();
    String key = resourceInfo.getKey();
    String version = resourceInfo.getVersionId();
    AmazonS3 s3 = getContext().getS3();
    GetObjectRequest request = version.isEmpty() ? new GetObjectRequest(bucket, key)
            : new GetObjectRequest(bucket, key, version);
    return s3.getObject(request);
}
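The two DefaultBlobReference examples above switch to the three-argument constructor when a version ID is present. As a minimal sketch of that versioned variant (the bucket, key, version ID, and target file are placeholders, not taken from the project), the request can also be handed to the getObject overload that writes straight to a local file:

import java.io.File;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;

public class VersionedDownloadSketch {
    public static void main(String[] args) {
        // Placeholders: a bucket with versioning enabled, a key, and one of its version IDs.
        String bucket = "example-versioned-bucket";
        String key = "reports/2021.csv";
        String versionId = "EXAMPLE-VERSION-ID";

        AmazonS3 s3 = AmazonS3ClientBuilder.standard().build();

        // The three-argument constructor pins the request to a specific object version.
        GetObjectRequest request = new GetObjectRequest(bucket, key, versionId);

        // This overload streams the object directly into the destination file and returns its metadata,
        // so there is no S3ObjectInputStream left open for the caller to close.
        ObjectMetadata metadata = s3.getObject(request, new File("/tmp/2021.csv"));
        System.out.println("Downloaded " + metadata.getContentLength() + " bytes");
    }
}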
From source file:com.sjsu.faceit.example.S3Sample.java
License:Open Source License
public static void main(String[] args) throws IOException {
    /*
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this sample.
     * http://aws.amazon.com/security-credentials
     */
    System.out.println(new File(".").getAbsolutePath());
    AmazonS3 s3 = new AmazonS3Client(
            new PropertiesCredentials(S3Sample.class.getResourceAsStream("AwsCredentials.properties")));

    String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload an InputStream directly if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you to set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, "abc/" + key, new File("/Users/prayag/Desktop/2.jpg")));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possible since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, "abc/" + key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + " " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        // System.out.println("Deleting an object\n");
        // s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        // System.out.println("Deleting bucket " + bucketName + "\n");
        // s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
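The sample's comment notes that GetObjectRequest also supports conditional downloads and byte-range downloads, but does not show them. A minimal sketch of those options (the bucket, key, byte range, and time window below are illustrative placeholders, not part of the sample):

import java.util.Date;
import java.util.concurrent.TimeUnit;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;

public class ConditionalGetSketch {
    public static void main(String[] args) throws Exception {
        AmazonS3 s3 = AmazonS3ClientBuilder.standard().build();

        // Placeholder bucket and key; fetch only the first kilobyte (the byte range is inclusive).
        GetObjectRequest request = new GetObjectRequest("example-bucket", "logs/app.log")
                .withRange(0, 1023);

        // Only download the object if it changed within the last 24 hours.
        request.setModifiedSinceConstraint(new Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1)));

        S3Object object = s3.getObject(request);
        if (object == null) {
            // A null return means a constraint (here, modified-since) was not satisfied.
            System.out.println("Object not modified in the last 24 hours; nothing downloaded.");
            return;
        }
        try {
            System.out.println("Fetched " + object.getObjectMetadata().getContentLength() + " bytes");
        } finally {
            object.close(); // release the underlying HTTP connection
        }
    }
}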
From source file:com.smoketurner.pipeline.application.core.AmazonS3Downloader.java
License:Apache License
/**
 * Retrieves a file from S3
 *
 * @param record
 *            S3 event notification record to download
 * @return S3 object
 * @throws AmazonS3ConstraintException
 *             if the etag constraints weren't met
 * @throws AmazonS3ZeroSizeException
 *             if the file size of the object is zero
 */
public S3Object fetch(@Nonnull final S3EventNotificationRecord record)
        throws AmazonS3ConstraintException, AmazonS3ZeroSizeException {
    final AmazonS3Object object = converter.convert(Objects.requireNonNull(record));

    final GetObjectRequest request = new GetObjectRequest(object.getBucketName(), object.getKey());
    object.getVersionId().ifPresent(request::setVersionId);
    object.getETag().ifPresent(etag -> request.setMatchingETagConstraints(Collections.singletonList(etag)));

    LOGGER.debug("Fetching key: {}/{}", object.getBucketName(), object.getKey());

    final S3Object download;
    try {
        download = s3.getObject(request);
    } catch (AmazonServiceException e) {
        LOGGER.error("Service error while fetching object from S3", e);
        throw e;
    } catch (AmazonClientException e) {
        LOGGER.error("Client error while fetching object from S3", e);
        throw e;
    }

    if (download == null) {
        LOGGER.error("eTag from object did not match for key: {}/{}", object.getBucketName(), object.getKey());
        throw new AmazonS3ConstraintException(object.getKey());
    }

    final long contentLength = download.getObjectMetadata().getContentLength();
    if (contentLength < 1) {
        try {
            download.close();
        } catch (IOException e) {
            LOGGER.error(String.format("Failed to close S3 stream for key: %s/%s", download.getBucketName(),
                    download.getKey()), e);
        }
        LOGGER.debug("Object size is zero for key: {}/{}", download.getBucketName(), download.getKey());
        throw new AmazonS3ZeroSizeException(object.getKey());
    }

    LOGGER.debug("Streaming key ({} bytes): {}/{}", contentLength, download.getBucketName(),
            download.getKey());
    return download;
}
From source file:com.snapdeal.scm.core.service.impl.S3StorageServiceImpl.java
License:Open Source License
@Override
public S3Object getObjectFromS3(String key) {
    try {
        AmazonS3URI uri = new AmazonS3URI(key);
        GetObjectRequest rangeObjectRequest = new GetObjectRequest(uri.getBucket(), uri.getKey());
        S3Object s3object = s3Client.getObject(rangeObjectRequest);
        return s3object;
    } catch (AmazonServiceException ase) {
        LOG.info("Caught an AmazonServiceException, which" + " means your request made it "
                + "to Amazon S3, but was rejected with an error response" + " for some reason.");
        LOG.info("Error Message: " + ase.getMessage());
        LOG.info("HTTP Status Code: " + ase.getStatusCode());
        LOG.info("AWS Error Code: " + ase.getErrorCode());
        LOG.info("Error Type: " + ase.getErrorType());
        LOG.info("Request ID: " + ase.getRequestId());
        throw ase;
    } catch (AmazonClientException ace) {
        LOG.info("Caught an AmazonClientException, which means" + " the client encountered "
                + "an internal error while trying to " + "communicate with S3, "
                + "such as not being able to access the network.");
        LOG.info("Error Message: " + ace.getMessage());
        throw ace;
    }
}