List of usage examples for com.amazonaws.services.s3 AmazonS3 putObject
public PutObjectResult putObject(String bucketName, String key, InputStream input, ObjectMetadata metadata) throws SdkClientException, AmazonServiceException;
Uploads the specified input stream and object metadata to Amazon S3 under the specified bucket and key name.
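A minimal, self-contained sketch of this overload. The bucket name, key, and region are illustrative placeholders, and credentials are assumed to come from the default provider chain:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectResult;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class PutObjectStreamExample {
    public static void main(String[] args) throws Exception {
        // Region, bucket, and key below are illustrative placeholders.
        AmazonS3 s3 = AmazonS3ClientBuilder.standard().withRegion("us-east-1").build();

        byte[] bytes = "hello".getBytes(StandardCharsets.UTF_8);
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(bytes.length); // a known length lets the SDK stream instead of buffering
        metadata.setContentType("text/plain");

        try (InputStream in = new ByteArrayInputStream(bytes)) {
            PutObjectResult result = s3.putObject("my-bucket", "examples/hello.txt", in, metadata);
            System.out.println("ETag: " + result.getETag());
        }
    }
}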
From source file:ingest.utility.IngestUtilities.java
License:Apache License
/**
 * Copies an external AWS S3 file to the Piazza S3 bucket.
 *
 * @param dataResource
 *            the data resource to copy
 */
public void copyS3Source(DataResource dataResource) throws InvalidInputException, IOException {
    logger.log(String.format("Copying Data %s to Piazza S3 Location.", dataResource.getDataId()),
            Severity.INFORMATIONAL, new AuditElement(INGEST, "copyS3DataToPiazza", dataResource.getDataId()));

    // Obtain file input stream
    FileLocation fileLocation = ((FileRepresentation) dataResource.getDataType()).getLocation();
    FileAccessFactory fileFactory = getFileFactoryForDataResource(dataResource);
    InputStream inputStream = fileFactory.getFile(fileLocation);

    // Write stream directly into the Piazza S3 bucket
    AmazonS3 s3Client = getAwsClient(USE_KMS.booleanValue());
    ObjectMetadata metadata = new ObjectMetadata();
    String fileKey = String.format("%s-%s", dataResource.getDataId(), fileLocation.getFileName());
    s3Client.putObject(AMAZONS3_BUCKET_NAME, fileKey, inputStream, metadata);

    // Clean up
    inputStream.close();
}
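One caveat when streaming into putObject as above: if the metadata carries no content length, the SDK buffers the whole stream in memory to compute it and logs a warning. A hedged sketch of setting the length when it is known up front; the getFileSize helper used here is hypothetical, not part of the original code:

// Sketch only: supply the content length so the SDK can stream the upload
// instead of buffering the entire InputStream in memory.
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(fileFactory.getFileSize(fileLocation)); // hypothetical helper
s3Client.putObject(AMAZONS3_BUCKET_NAME, fileKey, inputStream, metadata);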
From source file:org.crypto.sse.IEX2LevAMAZON.java
License:Open Source License
/**
 * @param args
 * @throws Exception
 */
@SuppressWarnings("null")
public static void main(String[] args) throws Exception {

    // First Job
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "IEX-2Lev");
    job.setJarByClass(IEX2LevAMAZON.class);
    job.setMapperClass(MLK1.class);
    job.setReducerClass(RLK1.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setNumReduceTasks(1);
    job.setOutputValueClass(ArrayListWritable.class);
    job.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Second Job
    Configuration conf2 = new Configuration();
    Job job2 = Job.getInstance(conf2, "IEX-2Lev");
    job2.setJarByClass(IEX2LevAMAZON.class);
    job2.setMapperClass(MLK2.class);
    job2.setReducerClass(RLK2.class);
    job2.setNumReduceTasks(1);
    job2.setMapOutputKeyClass(Text.class);
    job2.setMapOutputValueClass(Text.class);
    job2.setOutputKeyClass(Text.class);
    job2.setOutputValueClass(ArrayListWritable.class);
    job2.setInputFormatClass(FileNameKeyInputFormat.class);
    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));

    job.waitForCompletion(true);
    job2.waitForCompletion(true);

    // Add your Amazon credentials here
    AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");

    // Create a client connection based on the credentials
    AmazonS3 s3client = new AmazonS3Client(credentials);

    // Bucket name must be unique across all S3 users
    String bucketName = "iexmaptest";

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, args[4]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    List<String> lines = new ArrayList<String>();
    String folderName = "2";

    BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    String line;
    int counter = 0;
    while ((line = reader.readLine()) != null) {
        // The content could also be copied locally using a buffered writer
        lines.add(line);
        System.out.println(line);

        // Upload each line as an object under the folder prefix
        String fileName = folderName + "/" + Integer.toString(counter);
        ByteArrayInputStream input = new ByteArrayInputStream(line.getBytes());
        s3client.putObject(bucketName, fileName, input, new ObjectMetadata());
        counter++;
    }

    Multimap<String, String> lookup = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup.put(tokens[0], tokens[j]);
        }
    }

    // Loading the inverted index that associates file identifiers to keywords
    lines = new ArrayList<String>();
    s3object = s3client.getObject(new GetObjectRequest(bucketName, args[5]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    // Loading the inverted index that associates keywords to identifiers
    reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    while ((line = reader.readLine()) != null) {
        lines.add(line);
    }

    Multimap<String, String> lookup2 = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup2.put(tokens[0], tokens[j]);
        }
    }

    // Delete the file
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, args[4]));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException.");
        System.out.println("Error Message: " + ace.getMessage());
    }

    /*
     * Start of IEX-2Lev construction
     */

    // Generation of keys for IEX-2Lev
    BufferedReader keyRead = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Enter your password :");
    String pass = keyRead.readLine();

    // You can change the size of the key; here we set it to 128
    List<byte[]> listSK = IEX2Lev.keyGen(128, pass, "salt/salt", 100);

    // Generation of local multi-maps with a mapper-only job (no reducer)
    Configuration conf3 = new Configuration();
    String testSerialization1 = new String(Base64.encodeBase64(Serializer.serialize(lookup)));
    String testSerialization2 = new String(Base64.encodeBase64(Serializer.serialize(lookup2)));
    String testSerialization3 = new String(Base64.encodeBase64(Serializer.serialize(listSK)));
    conf3.set("lookup", testSerialization1);
    conf3.set("lookup2", testSerialization2);
    conf3.set("setKeys", testSerialization3);

    Job job3 = Job.getInstance(conf3, "Local MM");
    job3.setJarByClass(IEX2LevAMAZON.class);
    job3.setMapperClass(LocalMM.class);
    job3.setNumReduceTasks(0);
    FileInputFormat.addInputPath(job3, new Path(args[2]));
    FileOutputFormat.setOutputPath(job3, new Path(args[3]));
    job3.waitForCompletion(true);
}
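The hard-coded BasicAWSCredentials above are placeholders. A sketch of the same client construction using the builder and the default credential provider chain instead; the region is illustrative:

// Sketch: let the SDK resolve credentials from the environment, system properties,
// shared profile files, or an instance role instead of hard-coding them.
AmazonS3 s3client = AmazonS3ClientBuilder.standard()
        .withCredentials(new DefaultAWSCredentialsProviderChain())
        .withRegion("us-east-1") // illustrative region
        .build();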
From source file:org.deeplearning4j.aws.s3.uploader.S3Uploader.java
License:Apache License
public void upload(InputStream is, String name, String bucketName) {
    AmazonS3 client = getClient();
    bucketName = ensureValidBucketName(bucketName);

    List<Bucket> buckets = client.listBuckets();
    ObjectMetadata med = new ObjectMetadata();
    for (Bucket b : buckets) {
        if (b.getName().equals(bucketName)) {
            client.putObject(bucketName, name, is, med);
            return;
        }
    }

    // Bucket didn't exist: create it, then upload
    client.createBucket(bucketName);
    client.putObject(bucketName, name, is, med);
}
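Iterating listBuckets works, but the SDK also exposes a direct existence check. A sketch of the same logic with doesBucketExistV2 (available in later 1.11.x releases of the SDK), reusing the getClient and ensureValidBucketName helpers from the example above:

// Sketch: same behavior using the SDK's bucket-existence check.
public void upload(InputStream is, String name, String bucketName) {
    AmazonS3 client = getClient();
    bucketName = ensureValidBucketName(bucketName);
    if (!client.doesBucketExistV2(bucketName)) {
        client.createBucket(bucketName);
    }
    client.putObject(bucketName, name, is, new ObjectMetadata());
}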
From source file:pl.worker.Main.java
public static void putFile(BufferedImage image, String file) throws IOException {
    AmazonS3 s3Client = new AmazonS3Client();
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    ImageIO.write(image, "gif", os);
    InputStream is = new ByteArrayInputStream(os.toByteArray());
    s3Client.putObject("lab4-weeia", "agnieszka.leszczynska/" + generateNewNameOfFile(file), is, null);
}
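Passing null metadata is accepted, but since the image is already held in a byte array, the content length and a content type could be supplied instead. A hedged sketch, keeping the bucket and key prefix from the example above:

// Sketch: derive metadata from the in-memory byte array before uploading.
byte[] bytes = os.toByteArray();
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(bytes.length);
metadata.setContentType("image/gif");
s3Client.putObject("lab4-weeia", "agnieszka.leszczynska/" + generateNewNameOfFile(file),
        new ByteArrayInputStream(bytes), metadata);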