Example usage for com.amazonaws.services.s3 AmazonS3 deleteObject

Introduction

This page lists example usages of com.amazonaws.services.s3 AmazonS3 deleteObject, collected from open-source projects.

Prototype

public void deleteObject(DeleteObjectRequest deleteObjectRequest)
        throws SdkClientException, AmazonServiceException;

Document

Deletes the specified object in the specified bucket.
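
A minimal sketch of calling deleteObject directly, assuming the default credential provider chain and a hypothetical bucket and key; error handling is omitted for brevity:

import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.DeleteObjectRequest;

public class DeleteObjectExample {
    public static void main(String[] args) {
        // Build a client using the default credential provider chain.
        AmazonS3 s3 = AmazonS3ClientBuilder.standard().withRegion(Regions.US_EAST_1).build();

        // "my-bucket" and "path/to/object.txt" are placeholder names.
        s3.deleteObject(new DeleteObjectRequest("my-bucket", "path/to/object.txt"));
    }
}

Note that the interface also offers a convenience overload, deleteObject(String bucketName, String key), which wraps the same request.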

Usage

From source file:org.alanwilliamson.amazon.s3.Rename.java

License:Open Source License

public cfData execute(cfSession _session, cfArgStructData argStruct) throws cfmRunTimeException {
    AmazonKey amazonKey = getAmazonKey(_session, argStruct);
    AmazonS3 s3Client = getAmazonS3(amazonKey);

    String bucket = getNamedStringParam(argStruct, "bucket", null);
    String srckey = getNamedStringParam(argStruct, "srckey", null);
    String deskey = getNamedStringParam(argStruct, "destkey", null);
    String aes256key = getNamedStringParam(argStruct, "aes256key", null);

    // Strip a leading '/' from the keys; guard against empty strings.
    if (srckey != null && !srckey.isEmpty() && srckey.charAt(0) == '/')
        srckey = srckey.substring(1);

    if (deskey != null && !deskey.isEmpty() && deskey.charAt(0) == '/')
        deskey = deskey.substring(1);

    CopyObjectRequest cor = new CopyObjectRequest(bucket, srckey, bucket, deskey);

    if (aes256key != null && !aes256key.isEmpty()) {
        cor.setSourceSSECustomerKey(new SSECustomerKey(aes256key));
        cor.setDestinationSSECustomerKey(new SSECustomerKey(aes256key));
    }

    try {
        // A rename in S3 is a copy to the new key followed by a delete of the old key.
        s3Client.copyObject(cor);
        s3Client.deleteObject(new DeleteObjectRequest(bucket, srckey));
        return cfBooleanData.TRUE;
    } catch (Exception e) {
        throwException(_session, "AmazonS3: " + e.getMessage());
        return cfBooleanData.FALSE;
    }
}
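
The example above implements a rename, which S3 does not support natively, as a copy to the destination key followed by a delete of the source key. A standalone sketch of the same pattern with the plain SDK, assuming a hypothetical bucket and keys:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.DeleteObjectRequest;

public class S3Rename {
    // Copies the object to destKey, then deletes srcKey.
    static void rename(AmazonS3 s3, String bucket, String srcKey, String destKey) {
        s3.copyObject(new CopyObjectRequest(bucket, srcKey, bucket, destKey));
        s3.deleteObject(new DeleteObjectRequest(bucket, srcKey));
    }
}

Note that the two calls are not atomic: if the delete fails after the copy succeeds, both objects remain.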

From source file:org.apache.nifi.processors.aws.s3.DeleteS3Object.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final long startNanos = System.nanoTime();

    final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    final String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    final String versionId = context.getProperty(VERSION_ID).evaluateAttributeExpressions(flowFile).getValue();

    final AmazonS3 s3 = getClient();

    // Deletes a key on Amazon S3
    try {
        if (versionId == null) {
            final DeleteObjectRequest r = new DeleteObjectRequest(bucket, key);
            // This call returns success if object doesn't exist
            s3.deleteObject(r);
        } else {
            final DeleteVersionRequest r = new DeleteVersionRequest(bucket, key, versionId);
            s3.deleteVersion(r);
        }
    } catch (final AmazonServiceException ase) {
        getLogger().error("Failed to delete S3 Object for {}; routing to failure",
                new Object[] { flowFile, ase });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    session.transfer(flowFile, REL_SUCCESS);
    final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    getLogger().info("Successfully delete S3 Object for {} in {} millis; routing to success",
            new Object[] { flowFile, transferMillis });
}
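
As the branch on versionId above shows, a plain deleteObject against a versioned bucket only inserts a delete marker; permanently removing a specific version requires deleteVersion. A minimal sketch, assuming a hypothetical bucket, key, and version id:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.DeleteVersionRequest;

public class DeleteVersionExample {
    // Permanently removes one version instead of adding a delete marker.
    static void deleteSpecificVersion(AmazonS3 s3, String bucket, String key, String versionId) {
        s3.deleteVersion(new DeleteVersionRequest(bucket, key, versionId));
    }
}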

From source file:org.crypto.sse.IEX2LevAMAZON.java

License:Open Source License

/**
 * @param args
 * @throws Exception
 */
@SuppressWarnings("null")
public static void main(String[] args) throws Exception {

    // First job
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "IEX-2Lev");
    job.setJarByClass(IEX2LevAMAZON.class);
    job.setMapperClass(MLK1.class);
    job.setReducerClass(RLK1.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setNumReduceTasks(1);
    job.setOutputValueClass(ArrayListWritable.class);
    job.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Second job
    Configuration conf2 = new Configuration();
    Job job2 = Job.getInstance(conf2, "IEX-2Lev");
    job2.setJarByClass(IEX2LevAMAZON.class);
    job2.setMapperClass(MLK2.class);
    job2.setReducerClass(RLK2.class);
    job2.setNumReduceTasks(1);
    job2.setMapOutputKeyClass(Text.class);
    job2.setMapOutputValueClass(Text.class);
    job2.setOutputKeyClass(Text.class);
    job2.setOutputValueClass(ArrayListWritable.class);
    job2.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));

    job.waitForCompletion(true);
    job2.waitForCompletion(true);

    // Add your Amazon credentials here

    AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");
    // create a client connection based on credentials
    AmazonS3 s3client = new AmazonS3Client(credentials);

    // bucket used by the job; S3 bucket names must be unique across all users
    String bucketName = "iexmaptest";

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, args[4]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());
    List<String> lines = new ArrayList<String>();

    String folderName = "2";

    BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    String line;
    int counter = 0;
    while ((line = reader.readLine()) != null) {
        // The content could also be copied to a local file with a buffered writer.
        lines.add(line);
        System.out.println(line);
        // Upload each line as its own object under the folder prefix.
        String fileName = folderName + "/" + Integer.toString(counter);
        ByteArrayInputStream input = new ByteArrayInputStream(line.getBytes());
        s3client.putObject(bucketName, fileName, input, new ObjectMetadata());
        counter++;
    }

    Multimap<String, String> lookup = ArrayListMultimap.create();

    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup.put(tokens[0], tokens[j]);
        }
    }

    // Loading the inverted index that associates file identifiers to keywords
    lines = new ArrayList<String>();
    s3object = s3client.getObject(new GetObjectRequest(bucketName, args[5]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    // Loading the inverted index that associates keywords to identifiers

    reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    while ((line = reader.readLine()) != null) {
        lines.add(line);
    }
    Multimap<String, String> lookup2 = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup2.put(tokens[0], tokens[j]);
        }
    }

    // Delete File
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, args[4]));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException.");
        System.out.println("Error Message: " + ace.getMessage());
    }

    /*
     * Start of IEX-2Lev construction
     */

    // Generation of keys for IEX-2Lev
    BufferedReader keyRead = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Enter your password :");
    String pass = keyRead.readLine();

    // The key size is configurable; here it is set to 128 bits

    List<byte[]> listSK = IEX2Lev.keyGen(128, pass, "salt/salt", 100);

    // Generate the local multi-maps with a mapper-only job (no reducer)

    Configuration conf3 = new Configuration();

    String testSerialization1 = new String(Base64.encodeBase64(Serializer.serialize(lookup)));
    String testSerialization2 = new String(Base64.encodeBase64(Serializer.serialize(lookup2)));

    String testSerialization3 = new String(Base64.encodeBase64(Serializer.serialize(listSK)));

    //String testSerialization2 = gson.toJson(lookup2);
    conf3.set("lookup", testSerialization1);
    conf3.set("lookup2", testSerialization2);
    conf3.set("setKeys", testSerialization3);

    Job job3 = Job.getInstance(conf3, "Local MM");

    job3.setJarByClass(IEX2LevAMAZON.class);

    job3.setMapperClass(LocalMM.class);

    job3.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job3, new Path(args[2]));
    FileOutputFormat.setOutputPath(job3, new Path(args[3]));

    job3.waitForCompletion(true);

}
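
The example constructs its client with new AmazonS3Client(credentials), a constructor that later releases of the 1.x SDK deprecate in favour of the builder. A sketch of the equivalent builder call, keeping the placeholder credentials and assuming a region:

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class ClientBuilderExample {
    public static void main(String[] args) {
        // "XXXXXXXXXXXXXXXX" placeholders stand in for real keys, as in the example above.
        AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");
        AmazonS3 s3client = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .withRegion(Regions.US_EAST_1) // assumed region
                .build();
    }
}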

From source file:squash.booking.lambdas.core.PageManager.java

License:Apache License

@Override
public void refreshAllPages(List<String> validDates, String apiGatewayBaseUrl, String revvingSuffix)
        throws Exception {

    if (!initialised) {
        throw new IllegalStateException("The page manager has not been initialised");
    }

    try {
        // Upload all bookings pages, cached booking data, famous players data,
        // and the index page to the S3 bucket. N.B. This should upload for the
        // most-future date first to ensure all links are valid during the several
        // seconds the update takes to complete.
        logger.log("About to refresh S3 website");
        logger.log("Using valid dates: " + validDates);
        logger.log("Using ApigatewayBaseUrl: " + apiGatewayBaseUrl);

        // Log time to sanity check it does occur at midnight. (_Think_ this
        // accounts for BST?). N.B. Manual executions may be at other times.
        logger.log("Current London time is: " + Calendar.getInstance().getTime().toInstant()
                .atZone(TimeZone.getTimeZone("Europe/London").toZoneId())
                .format(DateTimeFormatter.ofPattern("h:mm a")));

        ImmutablePair<ILifecycleManager.LifecycleState, Optional<String>> lifecycleState = lifecycleManager
                .getLifecycleState();

        uploadBookingsPagesToS3(validDates, apiGatewayBaseUrl, revvingSuffix, lifecycleState);
        logger.log("Uploaded new set of bookings pages to S3");

        // Save the valid dates in JSON form
        logger.log("About to create and upload cached valid dates data to S3");
        copyJsonDataToS3("NoScript/validdates", createValidDatesData(validDates));
        logger.log("Uploaded cached valid dates data to S3");

        logger.log("About to upload famous players data to S3");
        uploadFamousPlayers();
        logger.log("Uploaded famous players data to S3");

        // Remove the now-previous day's bookings page and cached data from S3.
        // (If this page does not exist then this is a no-op.)
        String yesterdaysDate = getCurrentLocalDate().minusDays(1)
                .format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
        logger.log("About to remove yesterday's booking page and cached data from S3 bucket: "
                + websiteBucketName + " and key: " + yesterdaysDate + ".html");
        IS3TransferManager transferManager = getS3TransferManager();
        DeleteObjectRequest deleteObjectRequest = new DeleteObjectRequest(websiteBucketName,
                yesterdaysDate + ".html");
        AmazonS3 client = transferManager.getAmazonS3Client();
        client.deleteObject(deleteObjectRequest);
        deleteObjectRequest = new DeleteObjectRequest(websiteBucketName, yesterdaysDate + ".json");
        client.deleteObject(deleteObjectRequest);
        logger.log("Removed yesterday's booking page and cached data successfully from S3");
    } catch (Exception exception) {
        logger.log("Exception caught while refreshing S3 booking pages - so notifying sns topic");
        getSNSClient().publish(adminSnsTopicArn,
                "Apologies - but there was an error refreshing the booking pages in S3. Please refresh the pages manually instead from the Lambda console. The error message was: "
                        + exception.getMessage(),
                "Sqawsh booking pages in S3 failed to refresh");
        // Rethrow
        throw exception;
    }
}
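
The example above issues a separate deleteObject call for each of the .html and .json keys. When several keys are removed at once, a single batched request is an alternative; a minimal sketch using DeleteObjectsRequest, assuming a hypothetical bucket and date string:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.MultiObjectDeleteException;

public class BatchDeleteExample {
    // Deletes the page and its cached data in one round trip.
    static void deletePageAndData(AmazonS3 s3, String bucket, String date) {
        try {
            DeleteObjectsResult result = s3.deleteObjects(
                    new DeleteObjectsRequest(bucket).withKeys(date + ".html", date + ".json"));
            System.out.println("Deleted " + result.getDeletedObjects().size() + " objects");
        } catch (MultiObjectDeleteException e) {
            // Some keys may succeed even when others fail; e.getErrors() lists the failures.
            System.err.println(e.getErrors().size() + " keys failed to delete");
        }
    }
}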