Example usage for com.amazonaws.services.s3.model S3Object getObjectContent

List of usage examples for com.amazonaws.services.s3.model S3Object getObjectContent

Introduction

On this page you can find example usage for com.amazonaws.services.s3.model S3Object getObjectContent.

Prototype

public S3ObjectInputStream getObjectContent() 

Document

Gets the input stream containing the contents of this object.
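For reference, here is a minimal sketch of typical usage (the AmazonS3 client "s3" and the "bucketName" and "key" values are assumed inputs, not taken from the examples below). The S3ObjectInputStream returned by getObjectContent() streams data directly from the HTTP connection, so it should be read to completion and closed (or aborted) promptly to release that connection.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;

// Minimal sketch: read an object's full content into a byte array.
// "s3", "bucketName" and "key" are assumed to be supplied by the caller.
public static byte[] readObjectBytes(final AmazonS3 s3, final String bucketName, final String key)
        throws IOException {
    final S3Object s3Object = s3.getObject(new GetObjectRequest(bucketName, key));
    // try-with-resources closes the stream, which releases the underlying HTTP connection
    try (S3ObjectInputStream content = s3Object.getObjectContent();
            ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
        final byte[] chunk = new byte[8 * 1024];
        int bytesRead;
        while ((bytesRead = content.read(chunk)) != -1) {
            buffer.write(chunk, 0, bytesRead);
        }
        return buffer.toByteArray();
    }
}

Draining the stream (or calling abort()) before closing avoids the SDK warning about not all bytes being read from the S3ObjectInputStream; the examples below apply the same idea with IOUtils, manual buffers, or by handing the stream along directly.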

Usage

From source file:com.clicktravel.infrastructure.persistence.aws.s3.S3FileStore.java

License:Apache License

@Override
public FileItem read(final FilePath filePath) throws NonExistentItemException {
    checkInitialization();
    final GetObjectRequest getObjectRequest = new GetObjectRequest(bucketNameForFilePath(filePath),
            filePath.filename());
    try {
        final S3Object s3Object = amazonS3Client.getObject(getObjectRequest);
        final Map<String, String> userMetaData = getUserMetaData(s3Object);
        final String filename = userMetaData.get(USER_METADATA_FILENAME);
        final String lastUpdatedTimeStr = userMetaData.get(USER_METADATA_LAST_UPDATED_TIME);
        DateTime lastUpdatedTime = null;
        if (lastUpdatedTimeStr != null) {
            try {
                lastUpdatedTime = formatter.parseDateTime(lastUpdatedTimeStr);
            } catch (final Exception e) {
                logger.warn(e.getMessage(), e);
            }
        }
        try {
            final FileItem fileItem = new FileItem(filename, s3Object.getObjectContent(), lastUpdatedTime);
            return fileItem;
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    } catch (final AmazonS3Exception e) {
        if (missingItemErrorCodes.contains(e.getErrorCode())) {
            throw new NonExistentItemException(
                    "Item does not exist: " + filePath.directory() + " -> " + filePath.filename());
        }
        throw e;
    }
}

From source file:com.clouddrive.parth.AmazonOperations.java

public byte[] downloadFile(String fileName, String userName) {
    S3Object object = s3.getObject(new GetObjectRequest(userName, fileName));
    byte[] fileBytes = null;
    try {
        IOUtils.copy(object.getObjectContent(), new FileOutputStream(userName + "" + fileName));

        File file = new File(userName + "" + fileName);

        FileInputStream fis = new FileInputStream(file);
        BufferedInputStream inputStream = new BufferedInputStream(fis);
        fileBytes = new byte[(int) file.length()];
        inputStream.read(fileBytes);
        inputStream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return fileBytes;
}

From source file:com.cloudhub.aws.extractor.AWSCSVExtractor.java

License:Apache License

/**
 * Persists the Amazon S3 object to disk
 *
 * @param objectSummary - the S3 object to be persisted to disk.
 * @throws IOException - if an I/O error occurs.
 */
private String persist(final CSVMatcher matcher, final S3ObjectSummary objectSummary) throws IOException {
    log.debug("Downloading the body of " + objectSummary.getKey() + " from Amazon S3.");
    final S3Object object = s3client.getObject(bucketName, objectSummary.getKey());
    log.debug("Downloaded " + objectSummary.getSize() + " bytes.");

    log.debug("Writing the body of " + objectSummary.getKey() + " to disk path: " + dataFolder + File.separator
            + bucketName);
    final File objectCSVFile = writeOutObjectToFile(objectSummary, object.getObjectContent());

    return getTotal(matcher, objectCSVFile);
}

From source file:com.crickdata.upload.s3.UploadLiveData.java

License:Open Source License

public Map<String, Date> uploadToS3(String fileName, boolean type) throws IOException {

    Statistics statistics = new Statistics();
    Map<String, Date> perfMap = new HashMap<String, Date>();
    AWSCredentials credentials = null;
    try {
        credentials = new BasicAWSCredentials("AKIAI6QKTRAQE7MXQOIQ",
                "wIG6u1yI5ZaseeJbvYSUmD98qelIJNSCVBzt5k2q");
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load the credentials from the credential profiles file. "
                + "Please make sure that your credentials file is at the correct "
                + "location (C:\\Users\\bssan_000\\.aws\\credentials), and is in valid format.", e);
    }

    AmazonS3 s3 = new AmazonS3Client(credentials);
    Region usWest2 = Region.getRegion(Regions.US_WEST_2);
    s3.setRegion(usWest2);
    String bucketName;
    if (!type)
        bucketName = "cricmatchinfo";
    else
        bucketName = "cricmatchinfoseries";
    String key = fileName.replace(".json", "").trim();
    try {
        perfMap.put("S3INSERTREQ", new Date());
        statistics.setS3Req(new Date());
        File f = readMatchFile(fileName);

        double bytes = f.length();
        double kilobytes = (bytes / 1024);
        System.out.println("Details :" + kilobytes);
        s3.putObject(new PutObjectRequest(bucketName, key, f));
        statistics.setSize(String.valueOf(kilobytes));

        S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
        perfMap.put("S3SAVERES", object.getObjectMetadata().getLastModified());
        statistics.setKey(key);
        statistics.setS3Res(object.getObjectMetadata().getLastModified());
        MyUI.stats.add(statistics);

        displayTextInputStream(object.getObjectContent());

        ObjectListing objectListing = s3
                .listObjects(new ListObjectsRequest().withBucketName(bucketName).withPrefix("My"));
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            System.out.println(
                    " - " + objectSummary.getKey() + "  " + "(size = " + objectSummary.getSize() + ")");
        }
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
    return perfMap;
}

From source file:com.dateofrock.simpledbmapper.Reflector.java

License:Apache License

@SuppressWarnings("unchecked")
<T> void setAttributeAndBlobValueToField(AmazonS3 s3, T instance, Field field, String attributeName,
        String attributeValue) throws IllegalAccessException, ParseException {
    Class<?> type;
    type = field.getType();

    // SimpleDBAttribute
    SimpleDBAttribute sdbAttrAnnotation = field.getAnnotation(SimpleDBAttribute.class);
    if (sdbAttrAnnotation != null && getAttributeName(field).equals(attributeName)) {
        if (Set.class.isAssignableFrom(type)) {
            // Set
            Set<?> s = (Set<?>) field.get(instance);
            ParameterizedType genericType = (ParameterizedType) field.getGenericType();
            Class<?> setClass = (Class<?>) genericType.getActualTypeArguments()[0];
            if (Number.class.isAssignableFrom(setClass)) {
                // the Set's element type is a Number
                if (s == null) {
                    Set<Number> newSet = new HashSet<Number>();
                    field.set(instance, newSet);
                    s = (Set<Number>) field.get(instance);
                }
                Number n = null;
                if (isIntegerType(setClass)) {
                    n = new Integer(attributeValue);
                } else if (isFloatType(setClass)) {
                    n = new Float(attributeValue);
                } else if (isLongType(setClass)) {
                    n = new Long(attributeValue);
                }
                ((Set<Number>) s).add(n);
                return;
            } else if (isStringType(setClass)) {
                // the Set's element type is a String
                if (s == null) {
                    Set<String> newSet = new HashSet<String>();
                    field.set(instance, newSet);
                    s = (Set<String>) field.get(instance);
                }
                ((Set<String>) s).add(attributeValue);
                return;
            } else {
                // FIXME
                throw new SimpleDBMapperUnsupportedTypeException(
                        s.toString() + " genericType: " + setClass + " is not supported.");
            }
        } else if (isDateType(type)) {
            Date parsedDate = decodeDate(attributeValue);
            field.set(instance, parsedDate);
            return;
        } else if (isStringType(type)) {
            field.set(instance, attributeValue);
            return;
        } else if (isIntegerType(type)) {
            field.set(instance, new Integer(attributeValue));
            return;
        } else if (isFloatType(type)) {
            field.set(instance, new Float(attributeValue));
            return;
        } else if (isLongType(type)) {
            field.set(instance, new Long(attributeValue));
            return;
        } else if (isBooleanType(type)) {
            field.set(instance, new Boolean(attributeValue));
        } else {
            if (type.isAssignableFrom(List.class)) {
                throw new SimpleDBMapperUnsupportedTypeException(type + " is not supported. Use java.util.Set.");
            }
            throw new SimpleDBMapperUnsupportedTypeException(type + " is not supported.");
        }
    }

    // SimpleDBBlob
    SimpleDBBlob sdbBlobAnnotation = field.getAnnotation(SimpleDBBlob.class);
    if (sdbBlobAnnotation != null && getAttributeName(field).equals(attributeName)) {
        S3TaskResult taskResult = new S3TaskResult(Operation.DOWNLOAD, attributeName, null, null);
        taskResult.setSimpleDBAttributeValue(attributeValue);
        S3Object s3Obj = s3.getObject(taskResult.getBucketName(), taskResult.getKey());
        InputStream input = s3Obj.getObjectContent();
        if (isStringType(type)) {
            // FIXME: the encoding is hard-coded to UTF-8
            String stringValue = IOUtils.readString(input, "UTF-8");
            field.set(instance, stringValue);
        } else if (isPrimitiveByteArrayType(type)) {
            byte[] bytes = IOUtils.readBytes(input);
            field.set(instance, bytes);
        }
    }
}

From source file:com.davidsoergel.s3napback.StreamingServiceUtils.java

License:Apache License

/**
 * Downloads an S3Object, as returned from {@link com.amazonaws.services.s3.AmazonS3Client#getObject(com.amazonaws.services.s3.model.GetObjectRequest)},
 * to the specified output stream.
 *
 * @param s3Object             The S3Object containing a reference to an InputStream containing the object's data.
 * @param eventualOutputStream The output stream to write the object's data to.
 */
public static void downloadObjectToStream(S3Object s3Object, BufferedOutputStream eventualOutputStream) {
    /*
          // attempt to create the parent if it doesn't exist
          File parentDirectory = destinationFile.getParentFile();
          if (parentDirectory != null && !parentDirectory.exists())
             {
             parentDirectory.mkdirs();
             }
    */

    ByteArrayOutputStream byteOS = new ByteArrayOutputStream(
            (int) s3Object.getObjectMetadata().getContentLength());
    OutputStream outputStream = null;
    try {
        // performance note: extra copy, left over from the FileOutputStream version
        outputStream = new BufferedOutputStream(byteOS);
        byte[] buffer = new byte[1024 * 10];
        int bytesRead;
        while ((bytesRead = s3Object.getObjectContent().read(buffer)) > -1) {
            outputStream.write(buffer, 0, bytesRead);
        }
    } catch (IOException e) {
        try {
            s3Object.getObjectContent().abort();
        } catch (IOException abortException) {
            log.warn("Couldn't abort stream", e);
        }
        throw new AmazonClientException("Unable to store object contents to disk: " + e.getMessage(), e);
    } finally {
        try {
            outputStream.close();
        } catch (Exception e) {
        }
        try {
            s3Object.getObjectContent().close();
        } catch (Exception e) {
        }
    }

    try {
        // Multipart Uploads don't have an MD5 calculated on the service side
        if (ServiceUtils.isMultipartUploadETag(s3Object.getObjectMetadata().getETag()) == false) {
            byte[] clientSideHash = Md5Utils.computeMD5Hash(byteOS.toByteArray()); //new FileInputStream(destinationFile));
            byte[] serverSideHash = BinaryUtils.fromHex(s3Object.getObjectMetadata().getETag());

            if (!Arrays.equals(clientSideHash, serverSideHash)) {
                throw new AmazonClientException("Unable to verify integrity of data download.  "
                        + "Client calculated content hash didn't match hash calculated by Amazon S3.  "
                        + "The data may be corrupt; please try again.");
            }
        }
    } catch (Exception e) {
        log.warn("Unable to calculate MD5 hash to validate download: " + e.getMessage(), e);
    }

    try {
        eventualOutputStream.write(byteOS.toByteArray());
    } catch (Exception e) {

        log.warn("Unable to write to output stream: " + e.getMessage(), e);
    }
}

From source file:com.digitalpebble.stormcrawler.aws.s3.S3CacheChecker.java

License:Apache License

@Override
public void execute(Tuple tuple) {
    String url = tuple.getStringByField("url");
    Metadata metadata = (Metadata) tuple.getValueByField("metadata");

    // normalises URL
    String key = "";
    try {//from w  ww .j  ava  2 s .c  om
        key = URLEncoder.encode(url, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        // ignore it - we know UTF-8 is valid
    }
    // check size of the key
    if (key.length() >= 1024) {
        LOG.info("Key too large : {}", key);
        eventCounter.scope("result_keytoobig").incrBy(1);
        _collector.emit(tuple, new Values(url, metadata));
        // ack it no matter what
        _collector.ack(tuple);
        return;
    }

    long preCacheQueryTime = System.currentTimeMillis();
    S3Object obj = null;
    try {
        obj = client.getObject(bucketName, key);
    } catch (AmazonS3Exception e) {
        eventCounter.scope("result_misses").incrBy(1);
        // key does not exist?
        // no need for logging
    }
    long postCacheQueryTime = System.currentTimeMillis();
    LOG.debug("Queried S3 cache in {} msec", (postCacheQueryTime - preCacheQueryTime));

    if (obj != null) {
        try {
            byte[] content = IOUtils.toByteArray(obj.getObjectContent());
            eventCounter.scope("result_hits").incrBy(1);
            eventCounter.scope("bytes_fetched").incrBy(content.length);

            metadata.setValue(INCACHE, "true");

            _collector.emit(CACHE_STREAM, tuple, new Values(url, content, metadata));
            _collector.ack(tuple);
            return;
        } catch (Exception e) {
            eventCounter.scope("result.exception").incrBy(1);
            LOG.error("IOException when extracting byte array", e);
        }
    }

    _collector.emit(tuple, new Values(url, metadata));
    _collector.ack(tuple);
}

From source file:com.digitaslbi.helios.mock.utils.ConnectionHelper.java

public static InputStream getObject(String key) {
    try {
        log.debug("Downloading an object");

        S3Object s3object = s3Client.getObject(
                new GetObjectRequest(prop.getProperty(MocksConstants.AWS_BUCKET_NAME.getValue()), key));

        log.debug("Content-Type: " + s3object.getObjectMetadata().getContentType());
        //displayTextInputStream(s3object.getObjectContent());

        return s3object.getObjectContent();
    } catch (AmazonServiceException ase) {
        log.error("Caught an AmazonServiceException, which" + " means your request made it "
                + "to Amazon S3, but was rejected with an error response" + " for some reason.");
        log.error("Error Message:    " + ase.getMessage());
        log.error("HTTP Status Code: " + ase.getStatusCode());
        log.error("AWS Error Code:   " + ase.getErrorCode());
        log.error("Error Type:       " + ase.getErrorType());
        log.error("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        log.error("Caught an AmazonClientException, which means" + " the client encountered "
                + "an internal error while trying to " + "communicate with S3, "
                + "such as not being able to access the network.");
        log.error("Error Message: " + ace.getMessage());
    }

    return null;
}

From source file:com.digitaslbi.helios.utils.S3Helper.java

public static File getObject(String key) {
    connect();

    try {
        log.info("[S3Helper][getObject] Downloading an object");

        S3Object s3object = s3Client
                .getObject(new GetObjectRequest(S3Properties.getInstance().getBucketName(), key));
        byte[] contentBytes = IOUtils.toByteArray(s3object.getObjectContent());

        log.info("Content-Type: " + s3object.getObjectMetadata().getContentType());

        File aux = new File();
        aux.setPath(s3object.getKey());
        aux.setIsFile(true);
        aux.setContent(new String(Base64.encodeBase64String(contentBytes)));

        return aux;
    } catch (AmazonServiceException ase) {
        log.error(
                "[S3Helper][getObject] Caught an AmazonServiceException, which" + " means your request made it "
                        + "to Amazon S3, but was rejected with an error response" + " for some reason.");
        log.error("Error Message:    " + ase.getMessage());
        log.error("HTTP Status Code: " + ase.getStatusCode());
        log.error("AWS Error Code:   " + ase.getErrorCode());
        log.error("Error Type:       " + ase.getErrorType());
        log.error("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        log.error("[S3Helper][getObject] Caught an AmazonClientException, which means"
                + " the client encountered " + "an internal error while trying to " + "communicate with S3, "
                + "such as not being able to access the network.");
        log.error("Error Message: " + ace.getMessage());
    } catch (IOException e) {
        log.error("[S3Helper][getObject] Error: " + e);
    }

    return null;
}

From source file:com.dongli.model.MyJSONData.java

License:Open Source License

public static MyJSONObject queryObject(String uid) throws MyRESTException {

    String jsonStr = "";
    MyJSONObject myJSONObject = new MyJSONObject();

    try {
        // send query command to AWS S3
        S3Object s3object = MyAWSStorage.getInstance().s3client
                .getObject(new GetObjectRequest(MyConfiguration.getInstance().bucket, uid));
        jsonStr = displayTextInputStream(s3object.getObjectContent());
        // convert the object file to JSON object
        myJSONObject.setJSONObjectFromString(jsonStr);
    } catch (AmazonServiceException ase) {
        throw new MyRESTException("Failed to query the object " + uid + ".");
    } catch (AmazonClientException ace) {
        throw new MyRESTException("Failed to query the object " + uid + ".");
    } catch (IOException e) {
        throw new MyRESTException("Failed to query the object " + uid + ".");
    }

    return myJSONObject;

}