Example usage for com.amazonaws.services.s3 AmazonS3Client AmazonS3Client

Introduction

On this page you can find usage examples for the com.amazonaws.services.s3.AmazonS3Client constructor.

Prototype

@SdkInternalApi
AmazonS3Client(AmazonS3ClientParams s3ClientParams) 

Document

Constructs a new client to invoke service methods on S3 using the specified parameters.
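
Note that the prototype shown above is the internal @SdkInternalApi constructor. Application code normally obtains a client through one of the public constructors used in the examples below, or through AmazonS3ClientBuilder. The following minimal sketch (assuming the AWS SDK for Java 1.x is on the classpath; the key values and the region are placeholders, not values from this page) shows both styles:

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class S3ClientConstructionSketch {
    public static void main(String[] args) {
        // Placeholder credentials; replace with your own keys or a credentials provider chain.
        BasicAWSCredentials credentials = new BasicAWSCredentials("ACCESS_KEY_ID", "SECRET_ACCESS_KEY");

        // Legacy style, as used in most of the examples below (deprecated in recent 1.x SDK versions).
        AmazonS3 legacyClient = new AmazonS3Client(credentials);

        // Builder style, the recommended way to construct the client in SDK 1.x.
        AmazonS3 client = AmazonS3ClientBuilder.standard()
                .withRegion(Regions.US_EAST_1)
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .build();

        System.out.println("Buckets visible to this account: " + client.listBuckets().size());
    }
}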

Usage

From source file:br.unb.cic.bionimbuz.storage.bucket.methods.CloudMethodsAmazonGoogle.java

@Override
public void StorageAuth(StorageProvider sp) throws Exception {

    switch (sp) {

    case AMAZON: {

        byte[] encoded = Files.readAllBytes(Paths.get(authFolder + "accesskey.txt"));
        String fileContent = new String(encoded, Charset.defaultCharset());
        //System.out.println("AuthString: " + fileContent);
        String accessKeyID, accessKey;
        int delimiter = fileContent.indexOf(':');
        accessKeyID = fileContent.substring(0, delimiter);
        accessKey = fileContent.substring(delimiter + 1);
        AWSCredentials credentials = new BasicAWSCredentials(accessKeyID, accessKey);
        s3client = new AmazonS3Client(credentials);

        break;
    }
    case GOOGLE: {

        String command = gcloudFolder + "gcloud auth activate-service-account --key-file=" + authFolder
                + "cred.json";
        ExecCommand(command);

        break;
    }
    default: {
        throw new Exception("Provedor incorreto!");
    }
    }
}

From source file:c3.ops.priam.aws.S3FileSystem.java

License:Apache License

@Inject
public S3FileSystem(Provider<AbstractBackupPath> pathProvider, ICompression compress,
        final IConfiguration config, ICredential cred) {
    this.pathProvider = pathProvider;
    this.compress = compress;
    this.config = config;
    int threads = config.getMaxBackupUploadThreads();
    LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>(threads);
    this.executor = new BlockingSubmitThreadPoolExecutor(threads, queue, UPLOAD_TIMEOUT);
    double throttleLimit = config.getUploadThrottle();
    rateLimiter = RateLimiter.create(throttleLimit < 1 ? Double.MAX_VALUE : throttleLimit);

    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    String mbeanName = MBEAN_NAME;
    try {
        mbs.registerMBean(this, new ObjectName(mbeanName));
    } catch (Exception e) {
        logger.warn("Fail to register " + mbeanName);
        //throw new RuntimeException(e);
    }

    s3Client = new AmazonS3Client(cred.getAwsCredentialProvider());
    s3Client.setEndpoint(getS3Endpoint());
}

From source file:ca.pgon.amazons3masscontenttype.App.java

License:Apache License

public static void main(String[] args) {

    if (args.length != 4) {
        System.out.println("Usage: AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY BUCKET_NAME CONTENT_TYPE");
        return;
    }

    // Get the parameters
    int i = 0;
    awsAccessKeyId = args[i++];
    awsSecretAccessKey = args[i++];
    bucketName = args[i++];
    contentType = args[i++];

    // Prepare service
    AWSCredentials awsCredentials = new BasicAWSCredentials(awsAccessKeyId, awsSecretAccessKey);
    amazonS3Client = new AmazonS3Client(awsCredentials);

    // Go through the first page
    ObjectListing objectListing = amazonS3Client.listObjects(bucketName);
    process(objectListing);
    // Go through the other pages
    while (objectListing.isTruncated()) {
        // Reassign so that isTruncated() reflects the newly fetched page
        objectListing = amazonS3Client.listNextBatchOfObjects(objectListing);
        process(objectListing);
    }

    System.out.println();
    System.out.println("Processed " + count + " files");
}

From source file:ch.entwine.weblounge.maven.S3DeployMojo.java

License:Open Source License

/**
 * {@inheritDoc}
 * 
 * @see org.apache.maven.plugin.Mojo#execute()
 */
public void execute() throws MojoExecutionException, MojoFailureException {

    // Setup AWS S3 client
    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
    AmazonS3Client uploadClient = new AmazonS3Client(credentials);
    TransferManager transfers = new TransferManager(credentials);

    // Make sure key prefix does not start with a slash but has one at the
    // end
    if (keyPrefix.startsWith("/"))
        keyPrefix = keyPrefix.substring(1);
    if (!keyPrefix.endsWith("/"))
        keyPrefix = keyPrefix + "/";

    // Keep track of how much data has been transferred
    long totalBytesTransferred = 0L;
    int items = 0;
    Queue<Upload> uploads = new LinkedBlockingQueue<Upload>();

    try {
        // Check if S3 bucket exists
        getLog().debug("Checking whether bucket " + bucket + " exists");
        if (!uploadClient.doesBucketExist(bucket)) {
            getLog().error("Desired bucket '" + bucket + "' does not exist!");
            return;
        }

        getLog().debug("Collecting files to transfer from " + resources.getDirectory());
        List<File> res = getResources();
        for (File file : res) {
            // Make path of resource relative to resources directory
            String filename = file.getName();
            String extension = FilenameUtils.getExtension(filename);
            String path = file.getPath().substring(resources.getDirectory().length());
            String key = concat("/", keyPrefix, path).substring(1);

            // Delete old file version in bucket
            getLog().debug("Removing existing object at " + key);
            uploadClient.deleteObject(bucket, key);

            // Setup meta data
            ObjectMetadata meta = new ObjectMetadata();
            meta.setCacheControl("public, max-age=" + String.valueOf(valid * 3600));

            FileInputStream fis = null;
            GZIPOutputStream gzipos = null;
            final File fileToUpload;

            if (gzip && ("js".equals(extension) || "css".equals(extension))) {
                try {
                    fis = new FileInputStream(file);
                    File gzFile = File.createTempFile(file.getName(), null);
                    gzipos = new GZIPOutputStream(new FileOutputStream(gzFile));
                    IOUtils.copy(fis, gzipos);
                    fileToUpload = gzFile;
                    meta.setContentEncoding("gzip");
                    if ("js".equals(extension))
                        meta.setContentType("text/javascript");
                    if ("css".equals(extension))
                        meta.setContentType("text/css");
                } catch (FileNotFoundException e) {
                    getLog().error(e);
                    continue;
                } catch (IOException e) {
                    getLog().error(e);
                    continue;
                } finally {
                    IOUtils.closeQuietly(fis);
                    IOUtils.closeQuietly(gzipos);
                }
            } else {
                fileToUpload = file;
            }

            // Do a random check for existing errors before starting the next upload
            if (erroneousUpload != null)
                break;

            // Create put object request
            long bytesToTransfer = fileToUpload.length();
            totalBytesTransferred += bytesToTransfer;
            PutObjectRequest request = new PutObjectRequest(bucket, key, fileToUpload);
            request.setProgressListener(new UploadListener(credentials, bucket, key, bytesToTransfer));
            request.setMetadata(meta);

            // Schedule put object request
            getLog().info(
                    "Uploading " + key + " (" + FileUtils.byteCountToDisplaySize((int) bytesToTransfer) + ")");
            Upload upload = transfers.upload(request);
            uploads.add(upload);
            items++;
        }
    } catch (AmazonServiceException e) {
        getLog().error("Uploading resources failed: " + e.getMessage());
    } catch (AmazonClientException e) {
        getLog().error("Uploading resources failed: " + e.getMessage());
    }

    // Wait for uploads to be finished
    String currentUpload = null;
    try {
        Thread.sleep(1000);
        getLog().info("Waiting for " + uploads.size() + " uploads to finish...");
        while (!uploads.isEmpty()) {
            Upload upload = uploads.poll();
            currentUpload = upload.getDescription().substring("Uploading to ".length());
            if (TransferState.InProgress.equals(upload.getState()))
                getLog().debug("Waiting for upload " + currentUpload + " to finish");
            upload.waitForUploadResult();
        }
    } catch (AmazonServiceException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload);
    } catch (AmazonClientException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload);
    } catch (InterruptedException e) {
        getLog().debug("Interrupted while waiting for upload to finish");
    }

    // Check for errors that happened outside of the actual uploading
    if (erroneousUpload != null) {
        throw new MojoExecutionException("Error while uploading " + erroneousUpload);
    }

    getLog().info("Deployed " + items + " files ("
            + FileUtils.byteCountToDisplaySize((int) totalBytesTransferred) + ") to s3://" + bucket);
}

From source file:cloudtrailviewer.components.S3FileChooserDialog.java

License:Open Source License

private void reloadContents() {

    List<String> tmp = new ArrayList<String>();
    this.files.setAll(tmp);

    String bucketName = PropertiesSingleton.getInstance().getProperty("Bucket");

    ListObjectsRequest listObjectsRequest = new ListObjectsRequest();
    listObjectsRequest.setBucketName(bucketName);
    listObjectsRequest.setPrefix(prefix);
    listObjectsRequest.setDelimiter("/");

    AWSCredentials credentials = new BasicAWSCredentials(PropertiesSingleton.getInstance().getProperty("Key"),
            PropertiesSingleton.getInstance().getProperty("Secret"));

    AmazonS3 s3Client = new AmazonS3Client(credentials);

    ObjectListing objectListing = s3Client.listObjects(listObjectsRequest);

    // these are directories
    List<String> directories = objectListing.getCommonPrefixes();
    for (String directory : directories) {

        tmp.add(stripPrefix(directory));
    }

    // these are files
    List<S3ObjectSummary> objectSummaries = objectListing.getObjectSummaries();
    for (final S3ObjectSummary objectSummary : objectSummaries) {

        tmp.add(stripPrefix(objectSummary.getKey()));
    }

    this.files.setAll(tmp);
}

From source file:cloudtrailviewer.events.EventLoader.java

License:Open Source License

private void readS3File(String key) throws IOException {

    AWSCredentials credentials = new BasicAWSCredentials(PropertiesSingleton.getInstance().getProperty("Key"),
            PropertiesSingleton.getInstance().getProperty("Secret"));

    AmazonS3 s3Client = new AmazonS3Client(credentials);
    String bucketName = PropertiesSingleton.getInstance().getProperty("Bucket");

    S3Object s3Object = s3Client.getObject(new GetObjectRequest(bucketName, key));

    GZIPInputStream gzis = new GZIPInputStream(s3Object.getObjectContent());
    BufferedReader bf = new BufferedReader(new InputStreamReader(gzis, "UTF-8"));

    StringBuilder outStr = new StringBuilder();
    String line;
    while ((line = bf.readLine()) != null) {
        outStr.append(line);
    }
    bf.close();
    gzis.close();

    readLogEvents(outStr.toString());
}

From source file:com.ad.mediasharing.awsclientmanager.AmazonClientManager.java

License:Open Source License

private void initClients() {
    AWSCredentials credentials = AmazonSharedPreferencesWrapper
            .getCredentialsFromSharedPreferences(this.sharedPreferences);

    Region region = Region.getRegion(Regions.US_WEST_2);

    s3Client = new AmazonS3Client(credentials);
    s3Client.setRegion(region);
}

From source file:com.adobe.acs.commons.mcp.impl.processes.asset.S3AssetIngestor.java

License:Apache License

@Override
public void buildProcess(ProcessInstance instance, ResourceResolver rr)
        throws LoginException, RepositoryException {
    if (StringUtils.isNotBlank(s3BasePath) && !s3BasePath.endsWith("/")) {
        s3BasePath = s3BasePath + "/";
    }
    instance.getInfo().setDescription(baseItemName + "->" + jcrBasePath);
    instance.defineCriticalAction("Create Folders", rr, this::createFolders);
    instance.defineCriticalAction("Import Assets", rr, this::importAssets);
    s3Client = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey));
    if (StringUtils.isNotBlank(endpointUrl)) {
        s3Client.setEndpoint(endpointUrl);
    }
}

From source file:com.aegeus.aws.SimpleStorageService.java

License:Apache License

public SimpleStorageService(S3ConfigObject config) {
    this.config = config;

    s3 = new AmazonS3Client(new BasicAWSCredentials(config.getAccessKey(), config.getSecretKey()));
    s3.setRegion(Region.getRegion(Regions.fromName(config.getRegion())));
}

From source file:com.ALC.SC2BOAserver.aws.S3StorageManager.java

License:Open Source License

/**
 * Returns a new AmazonS3 client using the default endpoint and current
 * credentials.
 */
public static AmazonS3Client createClient() {
    AWSCredentials creds = new BasicAWSCredentials(getKey(), getSecret());
    return new AmazonS3Client(creds);
}