Example usage for com.amazonaws.util IOUtils toByteArray

List of usage examples for com.amazonaws.util IOUtils toByteArray

Introduction

On this page you can find example usage of com.amazonaws.util IOUtils.toByteArray.

Prototype

public static byte[] toByteArray(InputStream is) throws IOException 

Source Link

Document

Reads and returns the rest of the given input stream as a byte array.

Usage

From source file:com.casadocodigo.ecommerce.infra.AmazonFileSaver.java

/**
 * Uploads a multipart file part to S3 under {@code baseFolder} and returns the
 * public URL of the stored object.
 *
 * @param baseFolder    folder prefix under which the object is stored
 * @param multipartFile servlet {@link Part} carrying the uploaded content
 * @return public endpoint URL of the uploaded object
 * @throws IOException if the part's content cannot be read
 */
public String write(String baseFolder, Part multipartFile) throws IOException {

    String fileName = extractFilename(multipartFile.getHeader(CONTENT_DISPOSITION));

    String path = baseFolder + File.separator + fileName;

    AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());

    // Read the upload exactly once; reuse the buffered bytes both to set
    // Content-Length and as the upload payload.
    byte[] bytes = IOUtils.toByteArray(multipartFile.getInputStream());

    ObjectMetadata metaData = new ObjectMetadata();
    metaData.setContentLength(bytes.length);

    // Upload from the in-memory copy instead of calling getInputStream() a
    // second time: the first stream was fully consumed above, and the servlet
    // spec does not guarantee a fresh stream per call — reusing the exhausted
    // one would upload a zero-byte object.
    s3client.putObject(
            new PutObjectRequest(BUCKET_NAME, path, new java.io.ByteArrayInputStream(bytes), metaData)
                    .withCannedAcl(CannedAccessControlList.PublicRead));

    // NOTE(review): File.separator is "\" on Windows; S3 keys and URLs use "/".
    // Confirm this only runs where the separator is "/" or switch to a literal "/".
    return END_POINT + File.separator + BUCKET_NAME + File.separator + path;

}

From source file:com.jeet.s3.util.HashUtil.java

/**
 * Computes an HMAC-SHA256 digest of the file's full contents, keyed with
 * {@code Constants.HASH_SECRET}, and returns it Base64-encoded.
 *
 * @param file file whose contents are hashed
 * @return Base64 HMAC-SHA256 of the file, or "" if reading/hashing failed
 */
public static String generateFileHash(File file) {
    String hash = "";
    // try-with-resources: the original leaked the FileInputStream on every call.
    try (FileInputStream in = new FileInputStream(file)) {
        Mac sha256_HMAC = Mac.getInstance("HmacSHA256");
        // NOTE(review): getBytes() uses the platform default charset pre-Java 18;
        // consider StandardCharsets.UTF_8 for a stable key — confirm with callers.
        SecretKeySpec secret_key = new SecretKeySpec(Constants.HASH_SECRET.getBytes(), "HmacSHA256");
        sha256_HMAC.init(secret_key);

        hash = Base64.encodeBase64String(sha256_HMAC.doFinal(IOUtils.toByteArray(in)));
    } catch (Exception ex) {
        // Include the cause instead of silently discarding it.
        System.out.println("Error in generating hash: " + ex);
    }
    return hash;
}

From source file:com.mrbjoern.blog.api.service.s3.S3Wrapper.java

License:Open Source License

/**
 * Downloads the S3 object stored under {@code key} and returns its full
 * contents as a byte array.
 *
 * @param key object key within the configured bucket
 * @return the object's bytes
 * @throws IOException if the object content cannot be read
 */
@Override
public byte[] download(final String key) throws IOException {
    GetObjectRequest getObjectRequest = new GetObjectRequest(bucket, key);

    // Close both the S3Object and its content stream when done; leaking the
    // stream keeps the underlying HTTP connection checked out of the
    // client's connection pool.
    try (S3Object s3Object = amazonS3Client.getObject(getObjectRequest);
            S3ObjectInputStream objectInputStream = s3Object.getObjectContent()) {
        return IOUtils.toByteArray(objectInputStream);
    }
}

From source file:com.sangupta.urn.service.impl.AmazonS3UrnStorageServiceImpl.java

License:Apache License

/**
 * Fetches the object stored under {@code objectKey} from the configured S3
 * bucket, buffers its full contents in memory, and wraps bytes plus selected
 * metadata in a {@link UrnObject}.
 *
 * @param objectKey key of the object to fetch
 * @return populated UrnObject, or null if the object is missing or unreadable
 */
@Override
protected UrnObject get(String objectKey) {
    S3Object object = this.client.getObject(this.bucketName, objectKey);
    if (object == null) {
        return null;
    }

    try {
        InputStream stream = object.getObjectContent();

        // Entire object is buffered in memory — UrnObject carries the raw bytes.
        byte[] bytes = IOUtils.toByteArray(stream);

        UrnObject urnObject = new UrnObject(objectKey, bytes);

        // TODO: read and populate metadata
        ObjectMetadata metadata = object.getObjectMetadata();
        if (metadata != null) {
            if (metadata.getHttpExpiresDate() != null) {
                urnObject.expiry = metadata.getHttpExpiresDate().getTime();
            }

            urnObject.mime = metadata.getContentType();
            // NOTE(review): getLastModified() is dereferenced without a null
            // check, unlike getHttpExpiresDate() above — confirm S3 always
            // returns Last-Modified here.
            urnObject.stored = metadata.getLastModified().getTime();

            // TODO:parse the value to extract the filename if available
            urnObject.name = metadata.getContentDisposition();
        }

        // return the object
        return urnObject;
    } catch (IOException e) {
        // happens when we cannot read data from S3
        LOGGER.debug("Exception reading data from S3 for object key: " + objectKey, e);
        return null;
    } finally {
        // Always release the S3 object (and its HTTP connection), whether we
        // returned the UrnObject or bailed out on an IOException.
        if (object != null) {
            try {
                object.close();
            } catch (IOException e) {
                LOGGER.warn("Unable to close S3 object during/after reading the object");
            }
        }
    }
}

From source file:org.nuxeo.ecm.core.storage.sql.CloudFrontBinaryManager.java

License:Apache License

/**
 * Loads an RSA private key from a .pem or .der file on disk.
 *
 * @param privateKeyPath path to the key file; may be null
 * @return the parsed key, or null when {@code privateKeyPath} is null
 * @throws InvalidKeySpecException if the key material cannot be parsed
 * @throws IOException             if the file cannot be read
 * @throws AmazonClientException   if the extension is neither .pem nor .der
 */
private static PrivateKey loadPrivateKey(String privateKeyPath) throws InvalidKeySpecException, IOException {
    if (privateKeyPath == null) {
        return null;
    }

    try (FileInputStream is = new FileInputStream(new File(privateKeyPath))) {
        // Locale-independent lowercase: default-locale case mapping can vary
        // (e.g. Turkish dotted/dotless i), so pin to ROOT; compute it once.
        String lowerPath = privateKeyPath.toLowerCase(java.util.Locale.ROOT);

        if (lowerPath.endsWith(".pem")) {
            return PEM.readPrivateKey(is);
        }

        if (lowerPath.endsWith(".der")) {
            return RSA.privateKeyFromPKCS8(IOUtils.toByteArray(is));
        }

        throw new AmazonClientException("Unsupported file type for private key");
    }
}

From source file:org.symphonyoss.vb.util.AwsS3Client.java

License:Apache License

/**
 * Uploads the given stream to {@code s3://destBucket/key}, buffering it in
 * memory so an accurate Content-Length can be set on the request.
 * All failures are logged and swallowed (best-effort upload).
 *
 * @param destBucket  destination bucket name
 * @param key         destination object key
 * @param inputStream payload; fully read into memory
 * @param metaData    optional metadata; a fresh instance is created when null
 */
public void putObject(String destBucket, String key, InputStream inputStream, ObjectMetadata metaData) {

    try {
        logger.info("Put object for s3://{}/{}", destBucket, key);

        // Buffer the payload so Content-Length can be set up front.
        byte[] bytes = IOUtils.toByteArray(inputStream);

        if (metaData == null) {
            metaData = new ObjectMetadata();
        }

        metaData.setContentLength(bytes.length);
        ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);

        s3Client.putObject(new PutObjectRequest(destBucket, key, byteArrayInputStream, metaData));

    } catch (AmazonServiceException ase) {
        // Request reached S3 but was rejected; log the details AND keep the
        // stack trace (the original discarded it).
        logger.error("S3 rejected put for s3://{}/{}: status={} awsErrorCode={} errorType={} requestId={}",
                destBucket, key, ase.getStatusCode(), ase.getErrorCode(), ase.getErrorType(),
                ase.getRequestId(), ase);
    } catch (AmazonClientException ace) {
        // Client-side failure (e.g. network unreachable) before S3 responded.
        logger.error("Client error putting s3://{}/{}", destBucket, key, ace);
    } catch (IOException e) {
        logger.error("Failed reading input stream for s3://{}/{}", destBucket, key, e);
    }

}

From source file:oulib.aws.s3.S3Util.java

/**
 * Generate a small tiff file from a large Tiff S3 bucket object.<br>
 * Note: the small tiff file will have the same key path as the original one.
 *
 * @param s3client : S3 client
 * @param s3 : S3 object that contains the source tiff content
 * @param targetBucketName : the bucket that stores the small tiff file
 * @param targetKey : key of the object in the target bucket
 * @param compressionRate : compression rate
 * @return : PutObjectResult of the upload, or null on any failure
 */
public static PutObjectResult generateSmallTiff(AmazonS3 s3client, S3Object s3, String targetBucketName,
        String targetKey, double compressionRate) {

    PutObjectResult result = null;
    ByteArrayOutputStream bos = null;
    ByteArrayOutputStream os = null;
    ByteArrayInputStream is = null;
    S3ObjectInputStream s = null;
    ByteArrayInputStream byteInputStream = null;

    try {
        // Force the pure-Java JAI codec path; the native mediaLib acceleration
        // may be unavailable or unstable on the host platform.
        System.setProperty("com.sun.media.jai.disableMediaLib", "true");

        bos = new ByteArrayOutputStream();
        s = s3.getObjectContent();
        // Buffer the entire source object in memory before decoding.
        byte[] bytes = IOUtils.toByteArray(s);
        byteInputStream = new ByteArrayInputStream(bytes);

        TIFFDecodeParam param = new TIFFDecodeParam();
        ImageDecoder dec = ImageCodec.createImageDecoder("TIFF", byteInputStream, param);

        RenderedImage image = dec.decodeAsRenderedImage();

        RenderingHints qualityHints = new RenderingHints(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);

        // Downscale by compressionRate along both axes with quality rendering.
        RenderedOp resizedImage = JAI.create("SubsampleAverage", image, compressionRate, compressionRate,
                qualityHints);

        TIFFEncodeParam params = new com.sun.media.jai.codec.TIFFEncodeParam();

        // NOTE(review): this "encode" op targets bos, yet bos is never read —
        // the upload below re-encodes via ImageIO into os. Left untouched
        // because JAI ops evaluate lazily; confirm before removing.
        resizedImage = JAI.create("encode", resizedImage, bos, "TIFF", params);

        BufferedImage imagenew = resizedImage.getSourceImage(0).getAsBufferedImage();

        os = new ByteArrayOutputStream();
        ImageIO.write(imagenew, "tif", os);
        is = new ByteArrayInputStream(os.toByteArray());

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(os.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        // Closing a ByteArrayOutputStream is a no-op; the second close in the
        // finally block below is therefore harmless.
        os.close();

        imagenew.flush();

        result = s3client.putObject(new PutObjectRequest(targetBucketName, targetKey, is, metadata));
    } catch (IOException | AmazonClientException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Best-effort cleanup of every stream that may have been opened.
        try {
            if (bos != null) {
                bos.close();
            }
            if (os != null) {
                os.close();
            }
            if (is != null) {
                is.close();
            }
            if (s != null) {
                s.close();
            }
            if (byteInputStream != null) {
                byteInputStream.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    return result;
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Pull out Tiff metadata from input S3 object and inject into the 
 * content of target S3 Object;<br>
 * Generate the new output S3 object that has the metadata from input object.
 * /*from w  w  w  .  j  a v a 2 s  .com*/
 * @param s3client : S3 client
 * @param obj1 : input object that provides metadata
 * @param obj2 : target object that receives metadata
 * 
 * @return PutObjectResult
 */
public static PutObjectResult copyS3ObjectTiffMetadata(AmazonS3 s3client, S3Object obj1, S3Object obj2) {

    PutObjectResult result = null;

    BufferedInputStream bufferedInputStrean = null;
    ByteArrayOutputStream byteArrayOutputStream = null;
    ByteArrayInputStream byteArrayInputStream = null;
    ByteArrayInputStream bis = null;
    S3ObjectInputStream content1 = null;
    S3ObjectInputStream content2 = null;
    String targetBucketName = obj2.getBucketName();
    String outputKey = obj2.getKey().split(".tif")[0] + "-copied.tif";

    ImageMetadata metadata1, metadata2;
    TiffImageMetadata tiffMetadata1, tiffMetadata2;
    TiffOutputSet output1, output2;

    try {
        content1 = obj1.getObjectContent();
        content2 = obj2.getObjectContent();

        byte[] bytes1 = IOUtils.toByteArray(content1);
        byte[] bytes2 = IOUtils.toByteArray(content2);

        metadata1 = Imaging.getMetadata(bytes1);
        metadata2 = Imaging.getMetadata(bytes2);

        tiffMetadata1 = (TiffImageMetadata) metadata1;
        tiffMetadata2 = (TiffImageMetadata) metadata2;

        output1 = tiffMetadata1.getOutputSet();
        output2 = tiffMetadata2.getOutputSet();

        TiffOutputDirectory rootDir = output2.getOrCreateRootDirectory();
        TiffOutputDirectory exifDir = output2.getOrCreateExifDirectory();
        TiffOutputDirectory gpsDir = output2.getOrCreateGPSDirectory();

        if (null != output1.getRootDirectory()) {
            List<TiffOutputField> fs = output1.getRootDirectory().getFields();
            for (TiffOutputField f1 : fs) {
                if (null == rootDir.findField(f1.tag)
                        // CANNOT create the output image with this tag included!
                        && !"PlanarConfiguration".equals(f1.tagInfo.name)) {
                    rootDir.add(f1);
                }
            }
        }

        if (null != output1.getExifDirectory()) {
            for (TiffOutputField f2 : output1.getExifDirectory().getFields()) {
                exifDir.removeField(f2.tagInfo);
                exifDir.add(f2);
            }
        }

        if (null != output1.getGPSDirectory()) {
            for (TiffOutputField f3 : output1.getGPSDirectory().getFields()) {
                gpsDir.removeField(f3.tagInfo);
                gpsDir.add(f3);
            }
        }

        byteArrayOutputStream = new ByteArrayOutputStream();
        TiffImageWriterLossy writerLossy = new TiffImageWriterLossy(output2.byteOrder);
        writerLossy.write(byteArrayOutputStream, output2);

        byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(byteArrayOutputStream.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        result = s3client
                .putObject(new PutObjectRequest(targetBucketName, outputKey, byteArrayInputStream, metadata));

    } catch (ImageReadException | IOException | ImageWriteException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (null != content1) {
                content1.close();
            }
            if (null != content2) {
                content2.close();
            }
            if (null != bufferedInputStrean) {
                bufferedInputStrean.close();
            }
            if (null != byteArrayInputStream) {
                byteArrayInputStream.close();
            }
            if (null != byteArrayOutputStream) {
                byteArrayOutputStream.close();
            }
            if (null != bis) {
                bis.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return result;
}