Example usage for com.amazonaws.services.s3.model GetObjectRequest GetObjectRequest

List of usage examples for com.amazonaws.services.s3.model GetObjectRequest GetObjectRequest

Introduction

On this page you can find example usage of com.amazonaws.services.s3.model GetObjectRequest GetObjectRequest.

Prototype

public GetObjectRequest(String bucketName, String key) 

Source Link

Document

Constructs a new GetObjectRequest with all the required parameters.

Usage

From source file:com.ibm.stocator.fs.cos.COSInputStream.java

License:Apache License

/**
 * Opens (or reopens) the wrapped object stream at the specified target
 * position, requesting at most {@code length} bytes via a ranged GET.
 * Any previously open stream is closed first so the new request cleanly
 * replaces it.
 *
 * @param reason reason for reopen (used only for logging/diagnostics)
 * @param targetPos target position to start reading from
 * @param length length requested
 * @throws IOException on any failure to open the object
 */
private synchronized void reopen(String reason, long targetPos, long length) throws IOException {

    if (wrappedStream != null) {
        // Release the old stream (and its HTTP connection) before opening
        // a new range; the reason string aids debugging of stream churn.
        closeStream("reopen(" + reason + ")", contentRangeFinish, false);
    }

    // Upper bound of the ranged request, derived from the input policy,
    // readahead setting and total object length.
    contentRangeFinish = calculateRequestLimit(inputPolicy, targetPos, length, contentLength, readahead);
    LOG.debug("reopen({}) for {} range[{}-{}], length={}," + " streamPosition={}, nextReadPosition={}", uri,
            reason, targetPos, contentRangeFinish, length, pos, nextReadPos);

    try {
        // HTTP Range headers are inclusive, hence the -1 on the finish offset.
        GetObjectRequest request = new GetObjectRequest(bucket, key).withRange(targetPos,
                contentRangeFinish - 1);
        wrappedStream = client.getObject(request).getObjectContent();
        contentRangeStart = targetPos;
        if (wrappedStream == null) {
            throw new IOException("Null IO stream from reopen of (" + reason + ") " + uri);
        }
    } catch (AmazonClientException e) {
        // Translate SDK exceptions into IOExceptions carrying positional context.
        throw COSUtils.translateException("Reopen at position " + targetPos, uri, e);
    }

    pos = targetPos;
}

From source file:com.igeekinc.indelible.indeliblefs.uniblock.casstore.s3.S3CASStore.java

License:Open Source License

@Override
public CASIDDataDescriptor retrieveSegment(CASIdentifier segmentID) throws IOException {
    // The store ID doubles as the bucket name and the segment identifier
    // as the object key.
    GetObjectRequest request = new GetObjectRequest(storeID.toString(), segmentID.toString());
    S3Object fetched = s3Client.getObject(request);
    // Wrap the fetched object in a data descriptor; propagate null when
    // nothing was returned.
    return (fetched == null) ? null : new S3DataDescriptor(segmentID, fetched);
}

From source file:com.images3.data.impl.ImageContentAccessImplS3.java

License:Apache License

@Override
public File selectImageContent(ImageIdentity id, AmazonS3Bucket bucket) {
    // Serve from the local file cache when the content is already on disk.
    File cached = new File(generateFilePath(id));
    if (cached.exists()) {
        return cached;
    }
    AmazonS3 client = clients.getClient(bucket);
    try {
        // Download directly into the cache file location.
        client.getObject(new GetObjectRequest(bucket.getName(), generateS3ObjectKey(id)), cached);
    } catch (AmazonS3Exception e) {
        if (e.getStatusCode() == 404) {
            // Map a missing S3 object onto the domain's not-found exception.
            throw new NoSuchEntityFoundException("ImageContent", generateS3ObjectKey(id),
                    "No such image content found.");
        }
        throw new RuntimeException(e);
    }
    return cached;
}

From source file:com.imos.sample.S3SampleCheck.java

License:Open Source License

public static void main(String[] args) throws IOException {

    /*/*from w  w  w .jav a 2 s.  c  o  m*/
     * The ProfileCredentialsProvider will return your [default]
     * credential profile by reading from the credentials file located at
     * (/home/alok/.aws/credentials).
     */
    AWSCredentials credentials = null;
    try {
        credentials = new ProfileCredentialsProvider("default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (/home/alok/.aws/credentials), and is in valid format.", e);
    }

    AmazonS3 s3 = new AmazonS3Client(credentials);
    //        Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    Region usWest2 = Region.getRegion(Regions.AP_SOUTHEAST_1);
    s3.setRegion(usWest2);

    String bucketName = "alok-test";
    String key = "sample.json";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        //            System.out.println("Creating bucket " + bucketName + "\n");
        //            s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        //            System.out.println("Listing buckets");
        //            for (Bucket bucket : s3.listBuckets()) {
        //                System.out.println(" - " + bucket.getName());
        //            }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload directly an InputStream if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        //s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));
        s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possibly since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        //            S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        S3Object object = s3.getObject(new GetObjectRequest("alok-test", key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket.  Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest()
                //                    .withBucketName(bucketName)
                .withBucketName("alok-test"));
        //                    .withPrefix("My"));
        objectListing.getObjectSummaries().forEach((objectSummary) -> {
            System.out.println(
                    " - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
        });
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        //            System.out.println("Deleting an object\n");
        //            s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        //            System.out.println("Deleting bucket " + bucketName + "\n");
        //            s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file:com.imos.sample.SampleS3.java

/**
 * Minimal S3 download sample: fetches an object and guarantees its content
 * stream is closed even if processing throws.
 *
 * NOTE(review): the object key is the empty string "" — presumably a
 * placeholder; an empty key is not a valid S3 object name, so this request
 * will fail against a real bucket.  Confirm the intended key before use.
 *
 * @param args unused
 * @throws IOException if reading or closing the content stream fails
 */
public static void main(String[] args) throws IOException {
    AmazonS3 s3Client = new AmazonS3Client(new ProfileCredentialsProvider());
    S3Object object = s3Client.getObject(new GetObjectRequest("inv.adminconsole.test", ""));
    // try-with-resources releases the HTTP connection backing the stream
    // even when processing throws (the original leaked it on error).
    try (InputStream objectData = object.getObjectContent()) {
        // Process the objectData stream.
    }
}

From source file:com.intuit.s3encrypt.S3Encrypt.java

License:Open Source License

/**
 * Fetches an object and returns the value of one user-metadata entry.
 *
 * Fix: the returned {@link S3Object} holds an open HTTP connection to S3;
 * the original never closed it, leaking a connection per call.  The object
 * is now closed in a finally block once the metadata has been read.
 *
 * @param cmd command line (currently unused; kept for interface stability)
 * @param s3 the encryption client to fetch with
 * @param bucket bucket containing the object
 * @param filename object key to inspect
 * @param keyname user-metadata key whose value is returned
 * @return the user-metadata value, or null if the key is absent
 */
private static String inspectS3Object(CommandLine cmd, AmazonS3EncryptionClient s3, String bucket,
        String filename, String keyname) {
    System.out.println("Supposed to inspect the BUCKET = " + bucket + " OBJECT = " + filename);
    S3Object s3object = s3.getObject(new GetObjectRequest(bucket, filename));
    try {
        return s3object.getObjectMetadata().getUserMetadata().get(keyname);
    } finally {
        try {
            s3object.close();
        } catch (Exception ignored) {
            // Best-effort close: the metadata has already been read, so a
            // failure to release the connection should not mask the result.
        }
    }
}

From source file:com.jeet.s3.AmazonS3ClientWrapper.java

License:Open Source License

// Best-effort fetch of an object's content stream from the configured
// bucket: any failure is printed and null is returned to the caller.
public InputStream getObjectStream(String path) {
    try {
        GetObjectRequest request = new GetObjectRequest(Constants.BUCKET_NAME, path);
        return s3Client.getObject(request).getObjectContent();
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
}

From source file:com.jeet.s3.AmazonS3ClientWrapper.java

License:Open Source License

/**
 * Downloads the object at {@code path} into a uniquely-named temporary
 * .jpg file and returns it, or returns {@code null} on any failure
 * (best-effort contract preserved from the original).
 *
 * Fix: the original built the temp path with a hard-coded "\\" separator,
 * which only works on Windows; File(parent, child) applies the platform's
 * separator.
 *
 * @param path S3 object key within the configured bucket
 * @return the downloaded temporary file, or null if the download failed
 */
public File getObjectFile(String path) {
    try {
        File f = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID() + ".jpg");
        s3Client.getObject(new GetObjectRequest(Constants.BUCKET_NAME, path), f);
        return f;
    } catch (Exception ex) {
        // Deliberately swallowed: callers treat null as "not available".
        // NOTE(review): consider logging instead of discarding the exception.
    }
    return null;
}

From source file:com.jfixby.scarabei.red.aws.test.S3Sample.java

License:Open Source License

/**
 * End-to-end Amazon S3 walkthrough: create a uniquely-named bucket,
 * upload an object, download and print it, list the bucket, then delete
 * the object and the bucket.  Credentials come from the [default]
 * profile in the shared AWS credentials file.
 *
 * @param args unused
 * @throws IOException if the sample file cannot be created or read
 */
public static void main(final String[] args) throws IOException {

    /*
     * The ProfileCredentialsProvider will return your [default] credential profile by reading from the credentials file located
     * at (C:\\Users\\JCode\\.aws\\credentials).
     */
    AWSCredentials credentials = null;
    try {
        credentials = new ProfileCredentialsProvider("default").getCredentials();
    } catch (final Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (C:\\Users\\%USERNAME%\\.aws\\credentials), and is in valid format.", e);
    }

    final AmazonS3 s3 = new AmazonS3Client(credentials);
    final Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    // Random suffix keeps the bucket name globally unique across runs.
    final String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
    final String key = "MyObjectKey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique, so once a bucket name has been taken by any user,
         * you can't create another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to keep your data closer to your applications or
         * users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        s3.createBucket(bucketName);

        /*
         * List the buckets in your account
         */
        System.out.println("Listing buckets");
        for (final Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to S3, or upload directly an InputStream if you know
         * the length of the data in the stream. You can also specify your own metadata when uploading to S3, which allows you
         * set a variety of options like content-type and content-encoding, plus additional metadata specific to your
         * applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        s3.putObject(new PutObjectRequest(bucketName, key, createSampleFile()));

        /*
         * Download an object - When you download an object, you get all of the object's metadata and a stream from which to read
         * the contents. It's important to read the contents of the stream as quickly as possibly since the data is streamed
         * directly from Amazon S3 and your network connection will remain open until you read all the data or close the input
         * stream.
         *
         * GetObjectRequest also supports several other options, including conditional downloading of objects based on
         * modification times, ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        final S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for listing the objects in your bucket. Keep in mind
         * that buckets with many objects might truncate their results when listing their objects, so be sure to check if the
         * returned object listing is truncated, and use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        final ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (final S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket, there is no way to undelete an object, so use
         * caution when deleting objects.
         */
        System.out.println("Deleting an object\n");
        s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be deleted, so remember to delete any objects from
         * your buckets before you try to delete them.
         */
        System.out.println("Deleting bucket " + bucketName + "\n");
        s3.deleteBucket(bucketName);
    } catch (final AmazonServiceException ase) {
        // The request reached S3 but was rejected with an error response.
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (final AmazonClientException ace) {
        // The client failed before reaching S3 (e.g. no network access).
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file:com.johnstok.blobs.s3.S3ByteStore.java

License:Open Source License

/** {@inheritDoc} */
@Override
public void read(final UUID id, final OutputStream out) throws ByteStoreException {
    Objects.requireNonNull(id);
    Objects.requireNonNull(out);
    // The object's UUID doubles as the S3 key within the store's bucket.
    final GetObjectRequest request = new GetObjectRequest(_bucket, id.toString());
    // try-with-resources releases the S3 HTTP connection once copied.
    try (final S3Object object = _s3Client.getObject(request)) {
        copy(object.getObjectContent(), out);
    } catch (AmazonClientException | IOException e) {
        // Wrap both SDK and stream failures in the store's checked exception.
        throw new ByteStoreException(e);
    }
}