List of usage examples for the com.amazonaws.services.s3.model.GetObjectRequest constructor
public GetObjectRequest(String bucketName, String key)
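For orientation before the examples below, here is a minimal, hedged sketch of this constructor in use; the bucket name, key, and client construction are placeholders rather than values taken from any of the sources on this page.

    // A minimal sketch, assuming imports from com.amazonaws.services.s3,
    // com.amazonaws.services.s3.model and com.amazonaws.util.IOUtils.
    public byte[] downloadObject() throws IOException {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        GetObjectRequest request = new GetObjectRequest("example-bucket", "path/to/object.bin");
        try (S3Object object = s3.getObject(request);
                S3ObjectInputStream content = object.getObjectContent()) {
            // Drain the stream promptly; the HTTP connection stays open until it is consumed or closed.
            return IOUtils.toByteArray(content);
        }
    }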
From source file:org.onebusaway.admin.service.impl.S3FileServiceImpl.java
License:Apache License
/**
 * Retrieve the specified key from S3 and store in the given directory.
 */
@Override
public String get(String key, String tmpDir) {
    _log.debug("get(" + key + ", " + tmpDir + ")");
    NYCFileUtils fs = new NYCFileUtils();
    String filename = fs.parseFileName(key);
    _log.debug("filename=" + filename);
    GetObjectRequest request = new GetObjectRequest(this._bucketName, key);
    S3Object file = _s3.getObject(request);
    String pathAndFileName = tmpDir + File.separator + filename;
    fs.copy(file.getObjectContent(), pathAndFileName);
    return pathAndFileName;
}
From source file:org.onebusaway.admin.service.impl.S3FileServiceImpl.java
License:Apache License
public InputStream get(String key) {
    GetObjectRequest request = new GetObjectRequest(this._bucketName, key);
    S3Object file = _s3.getObject(request);
    return file.getObjectContent();
}
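Because this helper hands back the raw object content stream, the caller is responsible for draining and closing it. A hedged usage sketch (the field name and key are assumptions, and java.nio.file imports are taken as given):

    // Hypothetical caller: "fileService" stands in for an injected S3FileServiceImpl and the
    // key is a placeholder. Closing the stream releases the underlying HTTP connection.
    try (InputStream in = fileService.get("bundles/latest/bundle.json")) {
        Files.copy(in, Paths.get("/tmp/bundle.json"), StandardCopyOption.REPLACE_EXISTING);
    }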
From source file:org.opencb.opencga.core.data.source.S3Source.java
License:Apache License
public InputStream getInputStream(String path) {
    String ak = "AKIAI3BZQ2VG6GPWQBVA";
    String sk = "oDDIv+OAQeQVj9sy1CcWeeJsOMAhbh9KIpJ7hiDK";
    String bucket = "nacho-s3";
    AWSCredentials myCredentials = new BasicAWSCredentials(ak, sk);
    AmazonS3Client s3Client = new AmazonS3Client(myCredentials);
    S3Object object = s3Client.getObject(new GetObjectRequest(bucket, path));
    return object.getObjectContent();
}
From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
License:Open Source License
public byte[] getBytes(long id, int from, int to) throws IOException, DataArchivedException {
    String haName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
    this.s3clientLock.readLock().lock();
    S3Object sobj = null;
    byte[] data = null;
    try {
        long tm = System.currentTimeMillis();
        int cl = (int) to - from;
        GetObjectRequest gr = new GetObjectRequest(this.name, "blocks/" + haName);
        gr.setRange(from, to);
        sobj = s3Service.getObject(gr);
        InputStream in = sobj.getObjectContent();
        data = new byte[cl];
        IOUtils.readFully(in, data);
        IOUtils.closeQuietly(in);
        double dtm = (System.currentTimeMillis() - tm) / 1000d;
        double bps = (cl / 1024) / dtm;
        SDFSLogger.getLog().debug("read [" + id + "] at " + bps + " kbps");
        // The original source also contains commented-out bookkeeping (object metadata lookups,
        // last-accessed updates and deleted-object reclamation via CopyObjectRequest), omitted
        // from this listing.
        dtm = (System.currentTimeMillis() - tm) / 1000d;
        bps = (cl / 1024) / dtm;
    } catch (AmazonS3Exception e) {
        if (e.getErrorCode().equalsIgnoreCase("InvalidObjectState"))
            throw new DataArchivedException(id, null);
        else {
            SDFSLogger.getLog().error("unable to get block [" + id + "] at [blocks/" + haName
                    + "] pos " + from + " to " + to, e);
            throw e;
        }
    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        try {
            if (sobj != null) {
                sobj.close();
            }
        } catch (Exception e) {
        }
        this.s3clientLock.readLock().unlock();
    }
    return data;
}
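Two details are worth noting in this example: GetObjectRequest.setRange(from, to) asks S3 for only the requested byte range, and an AmazonS3Exception with error code "InvalidObjectState" signals that the object has been archived and cannot be read until it is restored. A condensed, hedged sketch of that pattern, not the SDFS implementation itself (bucket, key, and range are placeholders):

    // A minimal sketch; "s3" is an AmazonS3 client supplied by the caller.
    byte[] readRange(AmazonS3 s3) throws IOException {
        GetObjectRequest ranged = new GetObjectRequest("example-bucket", "blocks/example-block");
        ranged.setRange(0, 4095); // inclusive byte range: bytes 0 through 4095
        try (S3Object part = s3.getObject(ranged)) {
            return IOUtils.toByteArray(part.getObjectContent());
        } catch (AmazonS3Exception e) {
            if ("InvalidObjectState".equalsIgnoreCase(e.getErrorCode())) {
                // The object has been archived (for example to Glacier) and must be restored first.
                throw new IOException("object archived", e);
            }
            throw e;
        }
    }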
From source file:org.p365.S3Sample.java
License:Open Source License
public static void main(String[] args) throws IOException {
    /*
     * This credentials provider implementation loads your AWS credentials
     * from a properties file at the root of your classpath.
     *
     * Important: Be sure to fill in your AWS access credentials in the
     * AwsCredentials.properties file before you try to run this sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new ClasspathPropertiesFileCredentialsProvider());
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);

    String bucketName = "mynewbuket";
    String key = "Myobj/sd.jpg";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        /*
         * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
         * so once a bucket name has been taken by any user, you can't create
         * another bucket with that same name.
         *
         * You can optionally specify a location for your bucket if you want to
         * keep your data closer to your applications or users.
         */
        System.out.println("Creating bucket " + bucketName + "\n");
        if (!s3.doesBucketExist(bucketName)) {
            s3.createBucket(bucketName);
        }

        /*
         * List the buckets in your account.
         */
        System.out.println("Listing buckets");
        for (Bucket bucket : s3.listBuckets()) {
            System.out.println(" - " + bucket.getName());
        }
        System.out.println();

        /*
         * Upload an object to your bucket - You can easily upload a file to
         * S3, or upload an InputStream directly if you know the length of
         * the data in the stream. You can also specify your own metadata
         * when uploading to S3, which allows you to set a variety of options
         * like content-type and content-encoding, plus additional metadata
         * specific to your applications.
         */
        System.out.println("Uploading a new object to S3 from a file\n");
        String pathname = "D:\\Program Files\\apache-tomcat-7.0.42\\webapps\\WorkerForP365\\src\\AAA_1465.jpg";
        File file = new File(pathname);
        s3.putObject(new PutObjectRequest(bucketName, key, file)
                .withCannedAcl(CannedAccessControlList.PublicRead));

        /*
         * Download an object - When you download an object, you get all of
         * the object's metadata and a stream from which to read the contents.
         * It's important to read the contents of the stream as quickly as
         * possible since the data is streamed directly from Amazon S3 and your
         * network connection will remain open until you read all the data or
         * close the input stream.
         *
         * GetObjectRequest also supports several other options, including
         * conditional downloading of objects based on modification times,
         * ETags, and selectively downloading a range of an object.
         */
        System.out.println("Downloading an object");
        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        System.out.println("Content-Type: " + object.getObjectMetadata().getContentType());
        displayTextInputStream(object.getObjectContent());

        /*
         * List objects in your bucket by prefix - There are many options for
         * listing the objects in your bucket. Keep in mind that buckets with
         * many objects might truncate their results when listing their objects,
         * so be sure to check if the returned object listing is truncated, and
         * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
         * additional results.
         */
        System.out.println("Listing objects");
        ObjectListing objectListing = s3.listObjects(
                new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(" - " + objectSummary.getKey() + " "
                    + "(size = " + objectSummary.getSize() + ")");
        }
        System.out.println();

        /*
         * Delete an object - Unless versioning has been turned on for your bucket,
         * there is no way to undelete an object, so use caution when deleting objects.
         */
        //System.out.println("Deleting an object\n");
        //s3.deleteObject(bucketName, key);

        /*
         * Delete a bucket - A bucket must be completely empty before it can be
         * deleted, so remember to delete any objects from your buckets before
         * you try to delete them.
         */
        //System.out.println("Deleting bucket " + bucketName + "\n");
        //s3.deleteBucket(bucketName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
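The comment in the sample mentions that GetObjectRequest also supports conditional and partial downloads. A hedged sketch of those options follows; the ETag variable is an assumption, the bucket and key reuse the sample's placeholders, and the fragment is written as if it sat inside the same try block:

    // "previouslySeenEtag" is an assumed, previously stored ETag value.
    // getObject returns null when a constraint is specified but not satisfied.
    GetObjectRequest conditional = new GetObjectRequest(bucketName, key)
            .withNonmatchingETagConstraint(previouslySeenEtag) // skip download if unchanged
            .withRange(0, 1023);                               // fetch only the first 1 KiB
    S3Object maybeObject = s3.getObject(conditional);
    if (maybeObject == null) {
        System.out.println("Object unchanged, nothing downloaded");
    } else {
        displayTextInputStream(maybeObject.getObjectContent()); // reuses the sample's helper
        maybeObject.close();
    }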
From source file:org.pathirage.ceph.bench.ReadThroughputBench.java
License:Apache License
private void downloadVolumes(List<String> volumes) {
    for (String volume : volumes) {
        pool.submit(() -> {
            // When saving to a file, we assume the node running the benchmark has better
            // write throughput than the read throughput of Ceph.
            getS3Connection().getObject(new GetObjectRequest(VOL_BUCKET, volume),
                    Paths.get(parentDir.toAbsolutePath().toString(), volume).toFile());
            doneSignal.countDown();
        });
    }
}
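The overload used here, AmazonS3.getObject(GetObjectRequest, File), streams the object straight into a destination file and returns its ObjectMetadata. A hedged, stand-alone sketch (bucket, key, and destination path are placeholders, and "s3" is an AmazonS3 client assumed to exist):

    File destination = new File("/tmp/volume-0001.bin");
    ObjectMetadata metadata = s3.getObject(
            new GetObjectRequest("example-bucket", "volume-0001"), destination);
    System.out.println("Downloaded " + metadata.getContentLength() + " bytes to " + destination);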
From source file:org.pentaho.di.trans.steps.s3csvinput.S3ObjectsProvider.java
License:Apache License
/**
 * Returns an object representing the details and data of an item in S3.
 *
 * @param bucket
 *          the bucket containing the object.
 * @param objectKey
 *          the key identifying the object.
 * @param byteRangeStart
 *          include only a portion of the object's data - starting at this point
 * @param byteRangeEnd
 *          include only a portion of the object's data - ending at this point
 * @return the object with the given key in S3, including details and data
 * @throws SdkClientException
 */
public S3Object getS3Object(Bucket bucket, String objectKey, Long byteRangeStart, Long byteRangeEnd)
        throws SdkClientException {
    if (byteRangeStart != null && byteRangeEnd != null) {
        GetObjectRequest rangeObjectRequest = new GetObjectRequest(bucket.getName(), objectKey)
                .withRange(byteRangeStart, byteRangeEnd);
        return s3Client.getObject(rangeObjectRequest);
    } else {
        return s3Client.getObject(bucket.getName(), objectKey);
    }
}
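A hedged example of calling this helper to read only a slice of a large CSV object; the instance names, key, and offsets are placeholders, and the fragment assumes a surrounding method that declares IOException:

    // Hypothetical caller: "provider" stands in for an S3ObjectsProvider instance and
    // "bucket" for a Bucket obtained from the same client.
    S3Object slice = provider.getS3Object(bucket, "data/large-file.csv", 0L, 65535L);
    try (InputStream in = slice.getObjectContent()) {
        byte[] head = IOUtils.toByteArray(in); // only bytes 0..65535 (inclusive) are streamed
        System.out.println("Read " + head.length + " bytes");
    }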
From source file:org.pieShare.pieDrive.adapter.s3.S3Adapter.java
@Override
public void download(PieDriveFile file, OutputStream stream) throws AdaptorException {
    byte[] buf = new byte[1024];
    int count = 0;
    S3Object object = s3Auth.getClient().getObject(new GetObjectRequest(bucketName, file.getUuid()));
    InputStream objectData = object.getObjectContent();
    try {
        while ((count = objectData.read(buf)) != -1) {
            if (Thread.interrupted()) {
                throw new AdaptorException("Download interrupted.");
            }
            stream.write(buf, 0, count);
        }
        stream.close();
        objectData.close();
        PieLogger.trace(S3Adapter.class, "{} downloaded", file.getUuid());
    } catch (IOException e) {
        throw new AdaptorException(e);
    } catch (AmazonServiceException ase) {
        throw new AdaptorException(ase);
    } catch (AmazonClientException ace) {
        throw new AdaptorException(ace);
    }
}
From source file:org.rdswitchboard.importers.browser.s3.App.java
License:Open Source License
public static void main(String[] args) {
    try {
        if (args.length == 0 || StringUtils.isNullOrEmpty(args[0]))
            throw new Exception("Please provide properties file");

        String propertiesFile = args[0];

        Properties properties = new Properties();
        try (InputStream in = new FileInputStream(propertiesFile)) {
            properties.load(in);
        }

        String source = properties.getProperty("data.source.id");
        if (StringUtils.isNullOrEmpty(source))
            throw new IllegalArgumentException("Source can not be empty");
        System.out.println("Source: " + source);

        String baseUrl = properties.getProperty("base.url");
        if (StringUtils.isNullOrEmpty(baseUrl))
            throw new IllegalArgumentException("Base URL can not be empty");
        System.out.println("Base URL: " + baseUrl);

        String sessionId = properties.getProperty("session.id");
        if (StringUtils.isNullOrEmpty(sessionId))
            throw new IllegalArgumentException("Session Id can not be empty");
        System.out.println("Session Id: " + sessionId);

        String accessKey = properties.getProperty("aws.access.key");
        String secretKey = properties.getProperty("aws.secret.key");

        String bucket = properties.getProperty("s3.bucket");
        if (StringUtils.isNullOrEmpty(bucket))
            throw new IllegalArgumentException("AWS S3 Bucket can not be empty");
        System.out.println("S3 Bucket: " + bucket);

        String prefix = properties.getProperty("s3.prefix");
        if (StringUtils.isNullOrEmpty(prefix))
            throw new IllegalArgumentException("AWS S3 Prefix can not be empty");
        System.out.println("S3 Prefix: " + prefix);

        String crosswalk = properties.getProperty("crosswalk");
        Templates template = null;
        if (!StringUtils.isNullOrEmpty(crosswalk)) {
            System.out.println("Crosswalk: " + crosswalk);
            template = TransformerFactory.newInstance()
                    .newTemplates(new StreamSource(new FileInputStream(crosswalk)));
        }

        ObjectMapper mapper = new ObjectMapper();

        Client client = Client.create();
        Cookie cookie = new Cookie("PHPSESSID", properties.getProperty("session"));

        AmazonS3 s3client;
        if (!StringUtils.isNullOrEmpty(accessKey) && !StringUtils.isNullOrEmpty(secretKey)) {
            System.out.println("Connecting to AWS via Access and Secret Keys. "
                    + "This is not safe practice, consider to use IAM Role instead.");
            AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
            s3client = new AmazonS3Client(awsCredentials);
        } else {
            System.out.println("Connecting to AWS via Instance Profile Credentials");
            s3client = new AmazonS3Client(new InstanceProfileCredentialsProvider());
        }

        //String file = "rda/rif/class:collection/54800.xml";

        ListObjectsRequest listObjectsRequest;
        ObjectListing objectListing;

        String file = prefix + "/latest.txt";
        S3Object object = s3client.getObject(new GetObjectRequest(bucket, file));

        String latest;
        try (InputStream txt = object.getObjectContent()) {
            latest = prefix + "/" + IOUtils.toString(txt, StandardCharsets.UTF_8).trim() + "/";
        }

        System.out.println("S3 Repository: " + latest);

        listObjectsRequest = new ListObjectsRequest().withBucketName(bucket).withPrefix(latest);
        do {
            objectListing = s3client.listObjects(listObjectsRequest);
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                file = objectSummary.getKey();
                System.out.println("Processing file: " + file);

                object = s3client.getObject(new GetObjectRequest(bucket, file));

                String xml = null;
                if (null != template) {
                    Source reader = new StreamSource(object.getObjectContent());
                    StringWriter writer = new StringWriter();
                    Transformer transformer = template.newTransformer();
                    transformer.transform(reader, new StreamResult(writer));
                    xml = writer.toString();
                } else {
                    InputStream is = object.getObjectContent();
                    xml = IOUtils.toString(is, ENCODING);
                }

                URL url = new URL(baseUrl + "/registry/import/import_s3/");

                StringBuilder sb = new StringBuilder();
                addParam(sb, "id", source);
                addParam(sb, "xml", xml);
                //System.out.println(sb.toString());

                WebResource webResource = client.resource(url.toString());
                ClientResponse response = webResource
                        .header("User-Agent",
                                "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0")
                        .accept(MediaType.APPLICATION_JSON, "*/*").acceptLanguage("en-US", "en")
                        .type(MediaType.APPLICATION_FORM_URLENCODED).cookie(cookie)
                        .post(ClientResponse.class, sb.toString());

                if (response.getStatus() != 200) {
                    throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
                }

                String output = response.getEntity(String.class);

                Result result = mapper.readValue(output, Result.class);
                if (!result.getStatus().equals("OK")) {
                    System.err.println(result.getMessage());
                    break;
                } else
                    System.out.println(result.getMessage());
            }

            listObjectsRequest.setMarker(objectListing.getNextMarker());
        } while (objectListing.isTruncated());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:org.rdswitchboard.tests.crosswalk.App.java
License:Open Source License
public static void main(String[] args) {
    try {
        String propertiesFile = PROPERTIES_FILE;
        if (args.length != 0 && !StringUtils.isNullOrEmpty(args[0]))
            propertiesFile = args[0];

        Properties properties = new Properties();
        try (InputStream in = new FileInputStream(propertiesFile)) {
            properties.load(in);
        }

        String accessKey = properties.getProperty("aws.access.key");
        String secretKey = properties.getProperty("aws.secret.key");

        String bucket = properties.getProperty("s3.bucket");
        if (StringUtils.isNullOrEmpty(bucket))
            throw new IllegalArgumentException("AWS S3 Bucket can not be empty");
        System.out.println("S3 Bucket: " + bucket);

        String key = properties.getProperty("s3.key");
        if (StringUtils.isNullOrEmpty(key))
            throw new IllegalArgumentException("AWS S3 Key can not be empty");
        System.out.println("S3 Key: " + key);

        String crosswalk = properties.getProperty("crosswalk");
        if (StringUtils.isNullOrEmpty(crosswalk))
            throw new IllegalArgumentException("Crosswalk can not be empty");
        System.out.println("Crosswalk: " + crosswalk);

        String outFileName = properties.getProperty("out", OUT_FILE_NAME);
        System.out.println("Out: " + outFileName);

        AmazonS3 s3client;
        if (!StringUtils.isNullOrEmpty(accessKey) && !StringUtils.isNullOrEmpty(secretKey)) {
            System.out.println("Connecting to AWS via Access and Secret Keys. "
                    + "This is not safe practice, consider to use IAM Role instead.");
            AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
            s3client = new AmazonS3Client(awsCredentials);
        } else {
            System.out.println("Connecting to AWS via Instance Profile Credentials");
            s3client = new AmazonS3Client(new InstanceProfileCredentialsProvider());
        }

        S3Object object = s3client.getObject(new GetObjectRequest(bucket, key));

        Templates template = TransformerFactory.newInstance()
                .newTemplates(new StreamSource(new FileInputStream(crosswalk)));

        StreamSource reader = new StreamSource(object.getObjectContent());
        StreamResult result = (StringUtils.isNullOrEmpty(outFileName) || outFileName.equals("stdout"))
                ? new StreamResult(System.out)
                : new StreamResult(new FileOutputStream(outFileName));

        Transformer transformer = template.newTransformer();
        transformer.transform(reader, result);

        // The original source also contains a commented-out DOM/XPath variant that parses the
        // document with a DocumentBuilder and transforms each /OAI-PMH/ListRecords/record/metadata
        // "mets" element into "registryObjects"; it is omitted from this listing.
    } catch (Exception e) {
        e.printStackTrace();
    }
}