Example usage for com.amazonaws.services.s3.model AmazonS3Exception toString

List of usage examples for com.amazonaws.services.s3.model AmazonS3Exception toString

Introduction

On this page you can find example usage for com.amazonaws.services.s3.model AmazonS3Exception toString.

Prototype

@Override
public String toString() 

Source Link

Document

Extends the implementation from AmazonServiceException to include additional information on S3's extended request ID.

Usage

From source file:com.netflix.genie.web.services.impl.S3FileTransferImpl.java

License:Apache License

/**
 * {@inheritDoc}/* ww w.  ja  v a2 s . c  o  m*/
 */
@Override
public void getFile(@NotBlank(message = "Source file path cannot be empty.") final String srcRemotePath,
        @NotBlank(message = "Destination local path cannot be empty") final String dstLocalPath)
        throws GenieException {
    final long start = System.nanoTime();
    final Set<Tag> tags = Sets.newHashSet();
    try {
        log.debug("Called with src path {} and destination path {}", srcRemotePath, dstLocalPath);

        final AmazonS3URI s3Uri = getS3Uri(srcRemotePath);
        try {
            this.s3ClientFactory.getClient(s3Uri)
                    .getObject(new GetObjectRequest(s3Uri.getBucket(), s3Uri.getKey()), new File(dstLocalPath));
        } catch (final AmazonS3Exception ase) {
            log.error("Error fetching file {} from s3 due to exception {}", srcRemotePath, ase.toString());
            throw new GenieServerException("Error downloading file from s3. Filename: " + srcRemotePath, ase);
        }
        MetricsUtils.addSuccessTags(tags);
    } catch (Throwable t) {
        MetricsUtils.addFailureTagsWithException(tags, t);
        throw t;
    } finally {
        this.registry.timer(DOWNLOAD_TIMER_NAME, tags).record(System.nanoTime() - start, TimeUnit.NANOSECONDS);
    }
}

From source file:com.netflix.genie.web.services.impl.S3FileTransferImpl.java

License:Apache License

/**
 * {@inheritDoc}/*from   w ww .jav a 2 s .  co  m*/
 */
@Override
public void putFile(@NotBlank(message = "Source local path cannot be empty.") final String srcLocalPath,
        @NotBlank(message = "Destination remote path cannot be empty") final String dstRemotePath)
        throws GenieException {
    final long start = System.nanoTime();
    final Set<Tag> tags = Sets.newHashSet();
    try {
        log.debug("Called with src path {} and destination path {}", srcLocalPath, dstRemotePath);

        final AmazonS3URI s3Uri = getS3Uri(dstRemotePath);
        try {
            this.s3ClientFactory.getClient(s3Uri).putObject(s3Uri.getBucket(), s3Uri.getKey(),
                    new File(srcLocalPath));
        } catch (final AmazonS3Exception ase) {
            log.error("Error posting file {} to s3 due to exception {}", dstRemotePath, ase.toString());
            throw new GenieServerException("Error uploading file to s3. Filename: " + dstRemotePath, ase);
        }
        MetricsUtils.addSuccessTags(tags);
    } catch (Throwable t) {
        MetricsUtils.addFailureTagsWithException(tags, t);
        throw t;
    } finally {
        this.registry.timer(UPLOAD_TIMER_NAME, tags).record(System.nanoTime() - start, TimeUnit.NANOSECONDS);
    }
}

From source file:com.netflix.spinnaker.kork.secrets.engines.S3SecretEngine.java

License:Apache License

/**
 * Downloads the secret payload from S3 and returns it as a stream.
 * The caller is responsible for closing the returned stream (it holds the
 * underlying HTTP connection open until closed).
 *
 * @param encryptedSecret descriptor carrying region, bucket and object-key params
 * @return the S3 object content stream
 * @throws IOException declared by the base contract; S3 failures surface as SecretException
 */
@Override
protected InputStream downloadRemoteFile(EncryptedSecret encryptedSecret) throws IOException {
    String region = encryptedSecret.getParams().get(STORAGE_REGION);
    String bucket = encryptedSecret.getParams().get(STORAGE_BUCKET);
    String objName = encryptedSecret.getParams().get(STORAGE_FILE_URI);

    AmazonS3ClientBuilder s3ClientBuilder = AmazonS3ClientBuilder.standard().withRegion(region);

    AmazonS3 s3Client = s3ClientBuilder.build();

    try {
        if (!s3Client.doesBucketExistV2(bucket)) {
            throw new SecretException(
                    String.format("S3 Bucket does not exist. Bucket: %s, Region: %s", bucket, region));
        }

        S3Object s3Object = s3Client.getObject(bucket, objName);

        return s3Object.getObjectContent();
    } catch (AmazonS3Exception ex) {
        StringBuilder sb = new StringBuilder("Error reading contents of S3 -- ");
        if (403 == ex.getStatusCode()) {
            sb.append(String.format(
                    "Unauthorized access. Check connectivity and permissions to the bucket. -- Bucket: %s, Object: %s, Region: %s.\n"
                            + "Error: %s ",
                    bucket, objName, region, ex.toString()));
        } else if (404 == ex.getStatusCode()) {
            sb.append(String.format(
                    "Not found. Does secret file exist? -- Bucket: %s, Object: %s, Region: %s.\nError: %s",
                    bucket, objName, region, ex.toString()));
        } else {
            sb.append(String.format("Error: %s", ex.toString()));
        }
        // Attach the original exception as the cause so the stack trace is preserved
        // (the previous code threw message-only and lost it).
        throw new SecretException(sb.toString(), ex);
    } catch (AmazonClientException ex) {
        throw new SecretException(
                String.format("Error reading contents of S3. Bucket: %s, Object: %s, Region: %s.\nError: %s",
                        bucket, objName, region, ex.toString()),
                ex);
    }
}

From source file:com.streamsets.pipeline.stage.origin.s3.S3Config.java

License:Apache License

/** Builds an S3 client from the configured credentials and verifies connectivity by listing buckets. */
private void validateConnection(Stage.Context context, List<Stage.ConfigIssue> issues) {
    // Access Key ID acts as the username (unique within AWS); the secret access key is the password.
    AWSCredentials awsCredentials = new BasicAWSCredentials(accessKeyId, secretAccessKey);
    s3Client = new AmazonS3Client(awsCredentials, new ClientConfiguration());
    s3Client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));

    // A custom endpoint takes precedence over the configured region.
    boolean hasCustomEndpoint = endPoint != null && !endPoint.isEmpty();
    if (hasCustomEndpoint) {
        s3Client.setEndpoint(endPoint);
    } else {
        s3Client.setRegion(Region.getRegion(region));
    }

    try {
        // Listing buckets is a cheap call that confirms the credentials are valid.
        s3Client.listBuckets();
    } catch (AmazonS3Exception ex) {
        issues.add(context.createConfigIssue(Groups.S3.name(), "accessKeyId", Errors.S3_SPOOLDIR_20,
                ex.toString()));
    }
}

From source file:com.streamsets.pipeline.stage.origin.s3.S3ConnectionSourceConfig.java

License:Apache License

/** Verifies the configured credentials by listing at most one object under the common prefix. */
private void validateConnection(Stage.Context context, String configPrefix, List<Stage.ConfigIssue> issues) {
    ListObjectsRequest probe =
            new ListObjectsRequest(bucket, commonPrefix, null, delimiter, 1).withEncodingType("url");
    try {
        // A single-object listing is a cheap way to confirm access to the bucket/prefix.
        getS3Client().listObjects(probe);
    } catch (AmazonS3Exception ex) {
        LOG.debug(Errors.S3_SPOOLDIR_20.getMessage(), ex.toString(), ex);
        issues.add(context.createConfigIssue(Groups.S3.name(),
                configPrefix + S3ConnectionBaseConfig.AWS_CONFIG_PREFIX + "awsAccessKeyId",
                Errors.S3_SPOOLDIR_20, ex.toString()));
    }
}

From source file:ohnosequences.ivy.S3Repository.java

License:Apache License

/**
 * Attempts to create the given bucket, retrying on S3 errors with a fixed back-off.
 *
 * @param name   name of the bucket to create
 * @param region region in which the bucket is created
 * @return true once the bucket is confirmed to exist, false if all attempts fail
 *         or the thread is interrupted while backing off
 */
private boolean createBucket(String name, Region region) {
    final int attemptLimit = 5;
    final int timeout = 1000 * 20; // back-off between failed attempts, in milliseconds
    int attempt = 0;

    while (attempt < attemptLimit) {
        try {
            attempt++;

            getS3Client().createBucket(name, region);
            if (getS3Client().doesBucketExist(name)) {
                return true;
            }

        } catch (AmazonS3Exception s3e) {
            Message.warn(s3e.toString());
            try {
                Thread.sleep(timeout);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop retrying; the previous code
                // swallowed the interrupt, losing the cancellation signal.
                Thread.currentThread().interrupt();
                return false;
            }
        }
    }
    return false;
}

From source file:org.apache.nifi.processors.aws.s3.AbstractS3IT.java

License:Apache License

/**
 * Empties and deletes the integration-test bucket after all tests have run,
 * then asserts that the bucket is really gone so later runs start clean.
 */
@AfterClass
public static void oneTimeTearDown() {
    // Empty the bucket before deleting it: S3 refuses to delete a non-empty bucket.
    try {
        ObjectListing objectListing = client.listObjects(BUCKET_NAME);

        while (true) {
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                client.deleteObject(BUCKET_NAME, objectSummary.getKey());
            }

            if (objectListing.isTruncated()) {
                // Listings are paged; keep fetching until every object is removed.
                objectListing = client.listNextBatchOfObjects(objectListing);
            } else {
                break;
            }
        }

        DeleteBucketRequest dbr = new DeleteBucketRequest(BUCKET_NAME);
        client.deleteBucket(dbr);
    } catch (final AmazonS3Exception e) {
        // ": " separator added -- the original concatenation ran the bucket name
        // and the exception text together in the output.
        System.err.println("Unable to delete bucket " + BUCKET_NAME + ": " + e.toString());
    }

    if (client.doesBucketExist(BUCKET_NAME)) {
        Assert.fail("Incomplete teardown, subsequent tests might fail");
    }

}