List of usage examples for com.amazonaws.services.s3.model ListObjectsV2Result getObjectSummaries
public List<S3ObjectSummary> getObjectSummaries()
From source file: alluxio.underfs.s3a.S3AUnderFileSystem.java
License: Apache License
/**
 * Lists the files in the given path; the returned names are logical names and do not
 * contain the folder suffix. Note that the list results are unsorted.
 *
 * @param path the key to list
 * @param recursive if true will list children directories as well
 * @return an array of the file and folder names in this directory, or {@code null} if
 *         the S3 listing call fails
 * @throws IOException if an I/O error occurs
 */
private String[] listInternal(String path, boolean recursive) throws IOException {
    // Normalize to a key relative to the bucket, with a single trailing separator;
    // the bucket root is represented as the empty prefix.
    path = stripPrefixIfPresent(path);
    path = PathUtils.normalizePath(path, PATH_SEPARATOR);
    path = path.equals(PATH_SEPARATOR) ? "" : path;
    // An empty delimiter makes S3 return every key under the prefix (recursive);
    // the path separator makes it group children into "common prefixes" (one level).
    String delimiter = recursive ? "" : PATH_SEPARATOR;
    Set<String> children = new HashSet<>();
    try {
        ListObjectsV2Request request = new ListObjectsV2Request().withBucketName(mBucketName).withPrefix(path)
                .withDelimiter(delimiter).withMaxKeys(LISTING_LENGTH);
        ListObjectsV2Result result = null;
        // result == null covers the first iteration; afterwards keep paging while truncated.
        while (result == null || result.isTruncated()) {
            // Query S3 for the next batch of objects
            result = mClient.listObjectsV2(request);
            // Advance the request continuation token to the next set of objects
            request.setContinuationToken(result.getNextContinuationToken());
            // Directories in S3 UFS can be encoded in two different ways:
            // (1) as file objects with FOLDER_SUFFIX, for directories created through Alluxio, or
            // (2) as "common prefixes" of other file objects, for directories not created
            //     through Alluxio.
            //
            // Case (1) (and plain file objects) is handled by iterating over
            // result.getObjectSummaries(); case (2) by iterating over result.getCommonPrefixes().
            //
            // Example, with prefix="ufs", delimiter="/" and LISTING_LENGTH=5:
            //   - objects.key = ufs/,             child =
            //   - objects.key = ufs/dir1_$folder$, child = dir1
            //   - objects.key = ufs/file,          child = file
            //   - commonPrefix = ufs/dir1/,        child = dir1
            //   - commonPrefix = ufs/dir2/,        child = dir2

            // Handle case (1)
            for (S3ObjectSummary obj : result.getObjectSummaries()) {
                // Remove parent portion of the key
                String child = getChildName(obj.getKey(), path);
                // Prune the special folder suffix
                child = CommonUtils.stripSuffixIfPresent(child, FOLDER_SUFFIX);
                // Only add if the path is not empty (removes results equal to the path)
                if (!child.isEmpty()) {
                    children.add(child);
                }
            }
            // Handle case (2)
            for (String commonPrefix : result.getCommonPrefixes()) {
                // Remove parent portion of the key
                String child = getChildName(commonPrefix, path);
                // Remove any portion after the last path delimiter
                int childNameIndex = child.lastIndexOf(PATH_SEPARATOR);
                child = childNameIndex != -1 ? child.substring(0, childNameIndex) : child;
                if (!child.isEmpty() && !children.contains(child)) {
                    // This directory has not been created through Alluxio; materialize the
                    // folder marker object so later metadata operations can see it.
                    mkdirsInternal(commonPrefix);
                    children.add(child);
                }
            }
        }
        return children.toArray(new String[children.size()]);
    } catch (AmazonClientException e) {
        // NOTE(review): failures are reported as null rather than rethrown — callers
        // apparently treat null as "listing failed"; confirm before changing.
        LOG.error("Failed to list path {}", path, e);
        return null;
    }
}
From source file: com.dustindoloff.s3websitedeploy.Main.java
License: Apache License
private static boolean emptyBucket(final AmazonS3 s3Client, final String bucket) { final ListObjectsV2Request request = new ListObjectsV2Request(); request.setBucketName(bucket);// ww w .ja v a 2 s . com String continuationToken = null; ListObjectsV2Result result; do { request.setContinuationToken(continuationToken); result = s3Client.listObjectsV2(bucket); for (final S3ObjectSummary summary : result.getObjectSummaries()) { s3Client.deleteObject(bucket, summary.getKey()); } continuationToken = result.getNextContinuationToken(); } while (result.isTruncated()); return true; }
From source file: com.epam.dlab.module.aws.S3FileList.java
License: Apache License
private List<S3ObjectSummary> notProcessedFiles(ListObjectsV2Result result) { return result.getObjectSummaries().stream().filter(this::matchBillingRegexAndWasNotProcessed) .collect(toList());/*from w w w .j a va 2s. co m*/ }
From source file: com.handywedge.binarystore.store.aws.BinaryStoreManagerImpl.java
License: MIT License
@Override public List<BinaryInfo> list(StorageInfo storage, BinaryInfo binary) throws StoreException { logger.debug("={}", storage); logger.debug("?={}", binary); List<BinaryInfo> objInfoList = new ArrayList<BinaryInfo>(); AmazonS3 s3client = getS3Client(binary.getBucketName()); try {//from w w w . j a v a 2 s .co m logger.debug("Listing binaries"); final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(binary.getBucketName()) .withMaxKeys(2); ListObjectsV2Result result; do { result = s3client.listObjectsV2(req); for (S3ObjectSummary binarySummary : result.getObjectSummaries()) { logger.debug(" - {}(size={})", binarySummary.getKey(), binarySummary.getSize()); if (binarySummary.getSize() != 0) { BinaryInfo objInfo = new BinaryInfo(binary.getBucketName()); objInfo.setFileName(binarySummary.getKey()); objInfo.setSize(binarySummary.getSize()); S3Object s3Object = s3client .getObject(new GetObjectRequest(binary.getBucketName(), binarySummary.getKey())); objInfo.setContentType(s3Object.getObjectMetadata().getContentType()); objInfo.setUrl(s3client.getUrl(binary.getBucketName(), binarySummary.getKey()).toString()); logger.debug("Generating pre-signed URL."); URL PresignedUrl = getPresignedUrl(s3client, binary.getBucketName(), binarySummary.getKey()); objInfo.setPresignedUrl(PresignedUrl.toString()); logger.debug("Pre-Signed URL = " + PresignedUrl.toString()); objInfoList.add(objInfo); } } logger.debug("Next Continuation Token : " + result.getNextContinuationToken()); req.setContinuationToken(result.getNextContinuationToken()); } while (result.isTruncated() == true); } catch (AmazonServiceException ase) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.LIST_FAIL, ase, binary.getFileName()); } catch (AmazonClientException ace) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.LIST_FAIL, ace, binary.getFileName()); } logger.info(" ={}", objInfoList.size()); return objInfoList; }
From source file: com.mesosphere.dcos.cassandra.executor.backup.S3StorageDriver.java
License: Apache License
private static Map<String, Long> listSnapshotFiles(AmazonS3Client amazonS3Client, String bucketName, String backupName) {/*from w w w.j av a2 s . c o m*/ Map<String, Long> snapshotFiles = new HashMap<>(); final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(bucketName) .withPrefix(backupName); ListObjectsV2Result result; do { result = amazonS3Client.listObjectsV2(req); for (S3ObjectSummary objectSummary : result.getObjectSummaries()) { snapshotFiles.put(objectSummary.getKey(), objectSummary.getSize()); } req.setContinuationToken(result.getNextContinuationToken()); } while (result.isTruncated()); return snapshotFiles; }
From source file: com.yrashk.etcetera.S3ConfigBackend.java
License: Mozilla Public License
@Override public Collection<String> getFilenames() { Collection<String> names = new ArrayList<>(); final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(bucket); ListObjectsV2Result listing; do {/*w ww. j av a2 s . c o m*/ listing = client.listObjectsV2(req); names.addAll(listing.getObjectSummaries().stream().map(S3ObjectSummary::getKey) .collect(Collectors.toList())); } while (listing.isTruncated()); return names; }
From source file: edu.harvard.hms.dbmi.bd2k.irct.aws.event.result.S3AfterGetResult.java
License: Mozilla Public License
/**
 * After a result becomes AVAILABLE, downloads its files from S3 into the local save
 * directory and rewrites the result's location to point at the local copy.
 * No-op when the result is not AVAILABLE, or when a non-S3 location already exists
 * on local disk. AWS failures are logged and swallowed; the result is left unchanged.
 */
@Override
public void fire(Result result) {
    if (result.getResultStatus() != ResultStatus.AVAILABLE) {
        return;
    }
    if (!result.getResultSetLocation().startsWith("S3://")) {
        // A non-S3 location that exists locally needs no download.
        File temp = new File(result.getResultSetLocation());
        if (temp.exists()) {
            return;
        } else {
            // Rewrite the local path into the expected S3 location before fetching.
            result.setResultSetLocation(
                    "S3://" + s3Folder + result.getResultSetLocation().replaceAll(irctSaveLocation + "/", ""));
        }
    }
    // Strip the "S3://" scheme prefix (5 characters) to get the bucket-relative key.
    String location = result.getResultSetLocation().substring(5);
    // List the files in that bucket path
    try {
        final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(bucketName)
                .withPrefix(location);
        // Loop through all the listing pages
        ListObjectsV2Result s3Files;
        do {
            s3Files = s3client.listObjectsV2(req);
            for (S3ObjectSummary objectSummary : s3Files.getObjectSummaries()) {
                // Download each file into the local save directory, preserving the
                // path relative to the listed prefix.
                String keyName = objectSummary.getKey();
                String fileName = irctSaveLocation + keyName.replace(location, "");
                log.info("Downloading: " + keyName + " --> " + fileName);
                s3client.getObject(new GetObjectRequest(bucketName, keyName), new File(fileName));
            }
            // Advance to the next page of results.
            req.setContinuationToken(s3Files.getNextContinuationToken());
        } while (s3Files.isTruncated() == true);
        // Point the result at the downloaded local copy.
        result.setResultSetLocation(irctSaveLocation + "/" + location.replace(s3Folder, ""));
    } catch (AmazonServiceException ase) {
        // S3 received the request but rejected it; log the diagnostic details.
        log.warn("Caught an AmazonServiceException, which " + "means your request made it "
                + "to Amazon S3, but was rejected with an error response" + " for some reason.");
        log.warn("Error Message: " + ase.getMessage());
        log.warn("HTTP Status Code: " + ase.getStatusCode());
        log.warn("AWS Error Code: " + ase.getErrorCode());
        log.warn("Error Type: " + ase.getErrorType());
        log.warn("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        // Client-side failure (e.g. no network); nothing was executed on S3.
        log.warn("Caught an AmazonClientException, which " + "means the client encountered "
                + "an internal error while trying to " + "communicate with S3, "
                + "such as not being able to access the network.");
        log.warn("Error Message: " + ace.getMessage());
    }
}
From source file: io.druid.storage.s3.S3DataSegmentMover.java
License: Apache License
/**
 * Copies an object and after that checks that the object is present at the target location,
 * via a separate API call. If it is not, an exception is thrown, and the object is not
 * deleted at the old location. This "paranoic" check is added after it was observed that S3
 * may report a successful move, and the object is not found at the target location.
 */
private void selfCheckingMove(String s3Bucket, String targetS3Bucket, String s3Path, String targetS3Path,
        String copyMsg) throws IOException, SegmentLoadingException {
    // Moving onto itself would delete the only copy; skip.
    if (s3Bucket.equals(targetS3Bucket) && s3Path.equals(targetS3Path)) {
        log.info("No need to move file[s3://%s/%s] onto itself", s3Bucket, s3Path);
        return;
    }
    if (s3Client.doesObjectExist(s3Bucket, s3Path)) {
        // List with maxKeys=1 to retrieve the source object's summary (storage class).
        final ListObjectsV2Result listResult = s3Client.listObjectsV2(
                new ListObjectsV2Request().withBucketName(s3Bucket).withPrefix(s3Path).withMaxKeys(1));
        if (listResult.getKeyCount() == 0) {
            // should never happen — doesObjectExist() just confirmed the object is there
            throw new ISE("Unable to list object [s3://%s/%s]", s3Bucket, s3Path);
        }
        final S3ObjectSummary objectSummary = listResult.getObjectSummaries().get(0);
        // Glacier objects cannot be copied directly; fail loudly instead of silently skipping.
        if (objectSummary.getStorageClass() != null
                && StorageClass.fromValue(StringUtils.toUpperCase(objectSummary.getStorageClass()))
                        .equals(StorageClass.Glacier)) {
            throw new AmazonServiceException(StringUtils.format(
                    "Cannot move file[s3://%s/%s] of storage class glacier, skipping.", s3Bucket, s3Path));
        } else {
            log.info("Moving file %s", copyMsg);
            final CopyObjectRequest copyRequest = new CopyObjectRequest(s3Bucket, s3Path, targetS3Bucket,
                    targetS3Path);
            if (!config.getDisableAcl()) {
                copyRequest
                        .setAccessControlList(S3Utils.grantFullControlToBucketOwner(s3Client, targetS3Bucket));
            }
            s3Client.copyObject(copyRequest);
            // The paranoic post-copy check: verify the target really exists before deleting
            // the source, since S3 has been observed to report success without the object
            // appearing at the target.
            if (!s3Client.doesObjectExist(targetS3Bucket, targetS3Path)) {
                throw new IOE(
                        "After copy was reported as successful the file doesn't exist in the target location [%s]",
                        copyMsg);
            }
            // Source delete failures are tolerated (silent retries); the copy has succeeded.
            deleteWithRetriesSilent(s3Bucket, s3Path);
            log.debug("Finished moving file %s", copyMsg);
        }
    } else {
        // ensure object exists in target location
        if (s3Client.doesObjectExist(targetS3Bucket, targetS3Path)) {
            // A previous (possibly interrupted) move already placed the object; idempotent no-op.
            log.info("Not moving file [s3://%s/%s], already present in target location [s3://%s/%s]", s3Bucket,
                    s3Path, targetS3Bucket, targetS3Path);
        } else {
            throw new SegmentLoadingException(
                    "Unable to move file %s, not present in either source or target location", copyMsg);
        }
    }
}
From source file: io.druid.storage.s3.S3Utils.java
License: Apache License
/** * Gets a single {@link S3ObjectSummary} from s3. Since this method might return a wrong object if there are multiple * objects that match the given key, this method should be used only when it's guaranteed that the given key is unique * in the given bucket.//ww w . j a v a 2 s .c o m * * @param s3Client s3 client * @param bucket s3 bucket * @param key unique key for the object to be retrieved */ public static S3ObjectSummary getSingleObjectSummary(AmazonS3 s3Client, String bucket, String key) { final ListObjectsV2Request request = new ListObjectsV2Request().withBucketName(bucket).withPrefix(key) .withMaxKeys(1); final ListObjectsV2Result result = s3Client.listObjectsV2(request); if (result.getKeyCount() == 0) { throw new ISE("Cannot find object for bucket[%s] and key[%s]", bucket, key); } final S3ObjectSummary objectSummary = result.getObjectSummaries().get(0); if (!objectSummary.getBucketName().equals(bucket) || !objectSummary.getKey().equals(key)) { throw new ISE("Wrong object[%s] for bucket[%s] and key[%s]", objectSummary, bucket, key); } return objectSummary; }
From source file: mx.iteso.desi.cloud.hw3.AWSFaceCompare.java
License: Apache License
public Face compare(ByteBuffer imageBuffer) { final ListObjectsV2Request req = new ListObjectsV2Request().withBucketName(srcBucket).withPrefix("Faces/"); ListObjectsV2Result result; ArrayList<String> s3Files = new ArrayList<>(); Face empty = new Face("", 0.0f); try {/*from w ww . ja v a 2s . c om*/ String sourceFile = "source.jpg"; Path file = Paths.get(sourceFile); Files.write(file, imageBuffer.array(), StandardOpenOption.CREATE); upload(sourceFile); } catch (IOException ex) { Logger.getLogger(FaceAddFrame.class.getName()).log(Level.SEVERE, null, ex); } do { result = s3.listObjectsV2(req); for (S3ObjectSummary objectSummary : result.getObjectSummaries()) { s3Files.add(objectSummary.getKey()); } req.setContinuationToken(result.getNextContinuationToken()); } while (result.isTruncated() == true); for (String s3File : s3Files) { if (s3File.endsWith(".jpg")) { System.out.println("Checking " + s3File + "..."); Face face = compare("Compare/source.jpg", s3File); if (face.getCofidence() > SIMILARITY_THRESHOLD) { return face; } } } return empty; }