Example usage for com.amazonaws.services.s3.model GetObjectRequest GetObjectRequest

List of usage examples for com.amazonaws.services.s3.model GetObjectRequest GetObjectRequest

Introduction

On this page you can find example usage for the GetObjectRequest(String bucketName, String key) constructor from com.amazonaws.services.s3.model.

Prototype

public GetObjectRequest(String bucketName, String key) 

Source Link

Document

Constructs a new GetObjectRequest with all the required parameters.
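
Before the longer examples below, here is a minimal, self-contained sketch of this constructor in use. The bucket name, object key, and output file name are placeholder values, and the client is assumed to obtain its credentials and region from the default provider chain.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;

public class GetObjectRequestExample {
    public static void main(String[] args) throws IOException {
        // Placeholder bucket and key; replace with values from your own account.
        String bucketName = "my-example-bucket";
        String key = "path/to/object.txt";

        // Assumes credentials and region are resolved by the default provider chain.
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // The constructor shown in the prototype above: bucket name plus object key.
        GetObjectRequest request = new GetObjectRequest(bucketName, key);

        S3Object object = s3.getObject(request);
        try (InputStream in = object.getObjectContent()) {
            // Copy the object content to a local file.
            Files.copy(in, Paths.get("object-copy.txt"));
        }
    }
}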

Usage

From source file:org.alfresco.provision.AWSService.java

License:Open Source License

public void get(String key, String filename) throws IOException {
    S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
    InputStream in = new BufferedInputStream(object.getObjectContent());
    OutputStream out = new BufferedOutputStream(new FileOutputStream(filename));
    try {
        IOUtils.copy(in, out);
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            out.close();
        }
    }
}

From source file:org.apache.beam.sdk.io.aws.s3.S3ReadableSeekableByteChannel.java

License:Apache License

@Override
public int read(ByteBuffer destinationBuffer) throws IOException {
    if (!isOpen()) {
        throw new ClosedChannelException();
    }
    if (!destinationBuffer.hasRemaining()) {
        return 0;
    }
    if (position == contentLength) {
        return -1;
    }

    if (s3Object == null) {
        GetObjectRequest request = new GetObjectRequest(path.getBucket(), path.getKey());
        request.setSSECustomerKey(options.getSSECustomerKey());
        if (position > 0) {
            request.setRange(position, contentLength);
        }
        try {
            s3Object = amazonS3.getObject(request);
        } catch (AmazonClientException e) {
            throw new IOException(e);
        }
        s3ObjectContentChannel = Channels
                .newChannel(new BufferedInputStream(s3Object.getObjectContent(), 1024 * 1024));
    }

    int totalBytesRead = 0;
    int bytesRead = 0;

    do {
        totalBytesRead += bytesRead;
        try {
            bytesRead = s3ObjectContentChannel.read(destinationBuffer);
        } catch (AmazonClientException e) {
            // TODO replace all catch AmazonServiceException with client exception
            throw new IOException(e);
        }
    } while (bytesRead > 0);

    position += totalBytesRead;
    return totalBytesRead;
}

From source file:org.apache.camel.component.aws.s3.S3Consumer.java

License:Apache License

@Override
protected int poll() throws Exception {
    // must reset for each poll
    shutdownRunningTask = null;
    pendingExchanges = 0;

    String fileName = getConfiguration().getFileName();
    String bucketName = getConfiguration().getBucketName();
    Queue<Exchange> exchanges = null;

    if (fileName != null) {
        LOG.trace("Getting object in bucket [{}] with file name [{}]...", bucketName, fileName);

        S3Object s3Object = getAmazonS3Client().getObject(new GetObjectRequest(bucketName, fileName));
        exchanges = createExchanges(s3Object);
    } else {
        LOG.trace("Queueing objects in bucket [{}]...", bucketName);

        ListObjectsRequest listObjectsRequest = new ListObjectsRequest();
        listObjectsRequest.setBucketName(bucketName);
        listObjectsRequest.setPrefix(getConfiguration().getPrefix());
        listObjectsRequest.setMaxKeys(maxMessagesPerPoll);

        ObjectListing listObjects = getAmazonS3Client().listObjects(listObjectsRequest);

        if (LOG.isTraceEnabled()) {
            LOG.trace("Found {} objects in bucket [{}]...", listObjects.getObjectSummaries().size(),
                    bucketName);
        }

        exchanges = createExchanges(listObjects.getObjectSummaries());
    }
    return processBatch(CastUtils.cast(exchanges));
}

From source file:org.apache.druid.firehose.s3.StaticS3FirehoseFactory.java

License:Apache License

@Override
protected InputStream openObjectStream(URI object, long start) throws IOException {
    final String bucket = object.getAuthority();
    final String key = S3Utils.extractS3Key(object);

    final GetObjectRequest request = new GetObjectRequest(bucket, key);
    request.setRange(start);
    try {
        final S3Object s3Object = s3Client.getObject(request);
        if (s3Object == null) {
            throw new ISE("Failed to get an s3 object for bucket[%s], key[%s], and start[%d]", bucket, key,
                    start);
        }
        return s3Object.getObjectContent();
    } catch (AmazonS3Exception e) {
        throw new IOException(e);
    }
}

From source file:org.apache.druid.storage.s3.S3TaskLogs.java

License:Apache License

private Optional<ByteSource> streamTaskFile(final long offset, String taskKey) throws IOException {
    try {
        final ObjectMetadata objectMetadata = service.getObjectMetadata(config.getS3Bucket(), taskKey);

        return Optional.of(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long start;
                    final long end = objectMetadata.getContentLength() - 1;

                    if (offset > 0 && offset < objectMetadata.getContentLength()) {
                        start = offset;
                    } else if (offset < 0 && (-1 * offset) < objectMetadata.getContentLength()) {
                        start = objectMetadata.getContentLength() + offset;
                    } else {
                        start = 0;
                    }

                    final GetObjectRequest request = new GetObjectRequest(config.getS3Bucket(), taskKey)
                            .withMatchingETagConstraint(objectMetadata.getETag()).withRange(start, end);

                    return service.getObject(request).getObjectContent();
                } catch (AmazonServiceException e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (AmazonS3Exception e) {
        if (404 == e.getStatusCode() || "NoSuchKey".equals(e.getErrorCode())
                || "NoSuchBucket".equals(e.getErrorCode())) {
            return Optional.absent();
        } else {
            throw new IOE(e, "Failed to stream logs from: %s", taskKey);
        }
    }
}

From source file:org.apache.fineract.infrastructure.documentmanagement.contentrepository.S3ContentRepository.java

License:Apache License

@Override
public ImageData fetchImage(final ImageData imageData) {
    final S3Object s3object = this.s3Client
            .getObject(new GetObjectRequest(this.s3BucketName, imageData.location()));
    imageData.updateContent(s3object.getObjectContent());
    return imageData;
}

From source file:org.apache.hadoop.fs.s3a.S3AInputStream.java

License:Apache License

private synchronized void reopen(long pos) throws IOException {
    if (wrappedStream != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Aborting old stream to open at pos " + pos);
        }
        wrappedStream.abort();
    }

    LOG.info("Actually opening file " + key + " at pos " + pos);

    GetObjectRequest request = new GetObjectRequest(bucket, key);
    request.setRange(pos, contentLength - 1);

    wrappedObject = client.getObject(request);
    wrappedStream = wrappedObject.getObjectContent();

    if (wrappedStream == null) {
        throw new IOException("Null IO stream");
    }

    this.pos = pos;
}

From source file:org.apache.hadoop.fs.s3r.S3RInputStream.java

License:Apache License

/**
 * Opens up the stream at specified target position and for given length.
 *
 * @param targetPos target position to open the stream at
 * @param length number of bytes requested from the target position
 * @throws IOException if the stream cannot be opened
 */
private synchronized void reopen(long targetPos, long length) throws IOException {

    requestedStreamLen = (length < 0) ? this.contentLength : Math.max(CLOSE_THRESHOLD, (targetPos + length));

    if (s3InputStream != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Closing the previous stream");
        }
        closeStream(requestedStreamLen);
    }

    validateTargetPosition(targetPos);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Requesting for " + "targetPos=" + targetPos + ", length=" + length + ", requestedStreamLen="
                + requestedStreamLen + ", streamPosition=" + pos + ", nextReadPosition=" + nextReadPos);
    }

    GetObjectRequest request = new GetObjectRequest(bucket, key).withRange(targetPos, requestedStreamLen);
    s3InputStream = client.getObject(request).getObjectContent();

    if (s3InputStream == null) {
        throw new IOException("Null IO stream");
    }

    this.pos = targetPos;
}

From source file:org.apache.manifoldcf.crawler.connectors.amazons3.AmazonS3Connector.java

License:Apache License

@Override
public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec,
        IProcessActivity activities, int jobMode, boolean usesDefaultAuthority)
        throws ManifoldCFException, ServiceInterruption {
    AmazonS3 amazons3Client = getClient();
    if (amazons3Client == null)
        throw new ManifoldCFException("Amazon client can not connect at the moment");
    String[] acls = null;

    // loop documents and process
    for (String documentIdentifier : documentIdentifiers) {
        try {
            if (documentIdentifier != null && StringUtils.isNotEmpty(documentIdentifier)) {
                String versionString;
                String[] aclsToUse;

                if (documentIdentifier.split(STD_SEPARATOR_BUCKET_AND_KEY) == null
                        && documentIdentifier.length() < 1) {
                    continue;
                }

                S3Artifact s3Artifact = getS3Artifact(documentIdentifier);
                S3Object s3Obj = amazons3Client
                        .getObject(new GetObjectRequest(s3Artifact.getBucketName(), s3Artifact.getKey()));

                if (s3Obj == null) {
                    // no such document in the bucket now
                    // delete document
                    activities.deleteDocument(documentIdentifier);
                    continue;
                }

                Logging.connectors.info("Content-Type: " + s3Obj.getObjectMetadata().getContentType());
                ObjectMetadata objectMetadata = s3Obj.getObjectMetadata();
                Date lastModified = objectMetadata.getLastModified();
                StringBuilder sb = new StringBuilder();
                if (lastModified == null) {
                    // remove the content
                    activities.deleteDocument(documentIdentifier);
                    continue;
                }

                aclsToUse = new String[0];

                AccessControlList objectAcl = amazons3Client.getObjectAcl(s3Artifact.getBucketName(),
                        s3Artifact.getKey());

                Set<Grant> grants = objectAcl.getGrants();
                String[] users = getUsers(grants);
                // sort

                aclsToUse = users;
                Arrays.sort(aclsToUse);
                packList(sb, aclsToUse, '+');
                if (aclsToUse.length > 0) {
                    sb.append('+');
                    pack(sb, AmazonS3Config.defaultAuthorityDenyToken, '+');
                } else
                    sb.append('-');

                //
                sb.append(lastModified.toString());
                versionString = sb.toString();

                Logging.connectors.debug("version string : " + versionString);

                if (versionString.length() > 0
                        && !activities.checkDocumentNeedsReindexing(documentIdentifier, versionString)) {
                    Logging.connectors.info("Document need not to be reindexed : " + documentIdentifier);
                    continue;
                }

                Logging.connectors.debug("JIRA: Processing document identifier '" + documentIdentifier + "'");

                long startTime = System.currentTimeMillis();
                String errorCode = null;
                String errorDesc = null;
                Long fileSize = null;

                try {
                    String mimeType = "text/plain";// default

                    // tika works starts
                    InputStream in = null;

                    String document = null;
                    try {
                        in = s3Obj.getObjectContent();

                        parser.parse(in, handler, metadata, context);
                        mimeType = tika.detect(in);
                        document = handler.toString();
                        if (document == null)
                            continue;
                        metadata.set(Metadata.CONTENT_TYPE, mimeType);
                    } catch (Exception e) {
                        Logging.connectors.error("Error while parsing tika contents", e);
                    } finally {
                        if (in != null)
                            IOUtils.closeQuietly(in);
                    }

                    String documentURI = getDocumentURI(s3Artifact);

                    Logging.connectors.debug("document : " + documentURI);

                    // need some investigation
                    if (!activities.checkURLIndexable(documentURI)) {
                        errorCode = activities.EXCLUDED_URL;
                        errorDesc = "Excluded because of URL ('" + documentURI + "')";
                        activities.noDocument(documentIdentifier, versionString);
                        continue;
                    }
                    if (!activities.checkMimeTypeIndexable(mimeType)) {
                        errorCode = activities.EXCLUDED_MIMETYPE;
                        errorDesc = "Excluded because of mime type ('" + mimeType + "')";
                        activities.noDocument(documentIdentifier, versionString);
                        continue;
                    }
                    if (!activities.checkDateIndexable(lastModified)) {
                        errorCode = activities.EXCLUDED_DATE;
                        errorDesc = "Excluded because of date (" + lastModified + ")";
                        activities.noDocument(documentIdentifier, versionString);
                        continue;
                    }

                    // otherwise process
                    RepositoryDocument rd = new RepositoryDocument();
                    // Turn into acls and add into
                    // description
                    String[] denyAclsToUse;
                    if (aclsToUse.length > 0)
                        denyAclsToUse = new String[] { AmazonS3Config.defaultAuthorityDenyToken };
                    else
                        denyAclsToUse = new String[0];
                    rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT, aclsToUse, denyAclsToUse);

                    rd.setMimeType(mimeType);

                    if (lastModified != null)
                        rd.setModifiedDate(lastModified);

                    // set all meta-data fields
                    addAllMetaData(rd, metadata);

                    // get document

                    try {
                        byte[] documentBytes = document.getBytes(StandardCharsets.UTF_8);
                        long fileLength = documentBytes.length;

                        if (!activities.checkLengthIndexable(fileLength)) {
                            errorCode = activities.EXCLUDED_LENGTH;
                            errorDesc = "Excluded because of document length (" + fileLength + ")";
                            activities.noDocument(documentIdentifier, versionString);
                            continue;
                        }

                        InputStream is = new ByteArrayInputStream(documentBytes);
                        try {
                            rd.setBinary(is, fileLength);
                            activities.ingestDocumentWithException(documentIdentifier, versionString,
                                    documentURI, rd);

                            errorCode = "OK";
                            fileSize = new Long(fileLength);
                        } finally {
                            if (is != null)
                                IOUtils.closeQuietly(is);
                        }
                    } catch (Exception e) {
                        Logging.connectors.error(e);
                    }
                } catch (Exception e) {
                    Logging.connectors.error(e);
                }

            }
        } catch (AmazonServiceException e) {
            Logging.connectors.error(e);
        } catch (AmazonClientException e) {
            Logging.connectors.error(e);
        }

    }

}

From source file:org.apache.nifi.minifi.c2.cache.s3.S3CacheFileInfoImpl.java

License:Apache License

@Override
public WriteableConfiguration getConfiguration(Integer version) throws ConfigurationProviderException {

    if (version == null) {

        try {
            return getCachedConfigurations().findFirst()
                    .orElseThrow(() -> new ConfigurationProviderException("No configurations found."));
        } catch (IOException e) {
            throw new ConfigurationProviderException("Unable to get cached configurations.", e);
        }

    } else {

        final S3Object s3Object;

        if (StringUtils.isEmpty(prefix) || StringUtils.equals(prefix, "/")) {
            s3Object = s3.getObject(new GetObjectRequest(bucket, expectedFilename + version.toString()));
        } else {
            s3Object = s3
                    .getObject(new GetObjectRequest(bucket, prefix + expectedFilename + version.toString()));
        }

        if (s3Object == null) {
            throw new ConfigurationProviderException("No configurations found for object key.");
        }

        return new S3WritableConfiguration(s3, s3Object, Integer.toString(version));

    }

}