Example usage for com.amazonaws.services.s3.model S3ObjectInputStream close

List of usage examples for com.amazonaws.services.s3.model S3ObjectInputStream close

Introduction

In this page you can find the example usage for com.amazonaws.services.s3.model S3ObjectInputStream close.

Prototype

@Override
public void close() throws IOException 

Source Link

Document

If the stream still contains unread data, the underlying HTTP request will be aborted.

Usage

From source file:aws.example.s3.GetObject.java

License:Open Source License

/**
 * Downloads a single object from S3 and writes it to a local file named
 * after the object key.
 *
 * @param args args[0] = bucket name, args[1] = object key
 */
public static void main(String[] args) {
    final String USAGE = "\n" + "To run this example, supply the name of an S3 bucket and object to\n"
            + "download from it.\n" + "\n" + "Ex: GetObject <bucketname> <filename>\n";

    if (args.length < 2) {
        System.out.println(USAGE);
        System.exit(1);
    }

    String bucket_name = args[0];
    String key_name = args[1];

    System.out.format("Downloading %s from S3 bucket %s...\n", key_name, bucket_name);
    final AmazonS3 s3 = new AmazonS3Client();
    try {
        S3Object o = s3.getObject(bucket_name, key_name);
        // try-with-resources guarantees both streams are closed even when the
        // copy loop throws; the original leaked them on any mid-copy IOException.
        try (S3ObjectInputStream s3is = o.getObjectContent();
                FileOutputStream fos = new FileOutputStream(new File(key_name))) {
            byte[] read_buf = new byte[1024];
            int read_len;
            while ((read_len = s3is.read(read_buf)) > 0) {
                fos.write(read_buf, 0, read_len);
            }
        }
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    } catch (FileNotFoundException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    } catch (IOException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }
    System.out.println("Done!");
}

From source file:com.amazon.photosharing.servlets.PrivateMediaServlet.java

License:Open Source License

/**
 * Streams an S3 object into the HTTP response. If the object is not found
 * (eventually-consistent reads), backs off one second and retries once.
 *
 * @param p_s3_bucket source bucket name
 * @param p_s3_file   source object key
 * @param p_resp      response to stream the content into
 * @param p_no_retry  when true, do not retry on a missing object
 * @throws IOException if the response output stream cannot be obtained/flushed
 */
private void streamS3Content(String p_s3_bucket, String p_s3_file, HttpServletResponse p_resp,
        boolean p_no_retry) throws IOException {
    S3ObjectInputStream stream = ContentHelper.getInstance().downloadContent(p_s3_bucket, p_s3_file);
    if (stream != null) {
        // try-with-resources closes the S3 stream even when the copy fails;
        // a close() failure is also caught here so the flush below still runs
        // (the original could skip the flush if close() threw).
        try (S3ObjectInputStream in = stream) {
            IOUtils.copy(in, p_resp.getOutputStream());
        } catch (IOException e) {
            // usually a broken pipe when the user cancels the download
        }
        p_resp.getOutputStream().flush();
        p_resp.getOutputStream().close();
    } else {
        try {
            Thread.sleep(1000); // back off; eventually-consistent S3 responses
        } catch (InterruptedException e) {
            // restore the interrupt flag instead of swallowing it
            Thread.currentThread().interrupt();
        }
        if (!p_no_retry)
            streamS3Content(p_s3_bucket, p_s3_file, p_resp, true);
    }
}

From source file:com.bigstep.S3Sampler.java

License:Apache License

/**
 * JMeter sampler: times a single S3 GET or PUT.
 *
 * <p>GET fetches the object and immediately closes the content stream
 * (closing with unread data aborts the HTTP request), so the sample measures
 * request latency rather than transfer time. PUT uploads the local file.
 *
 * @param context sampler parameters: bucket, object, method, local_file_path,
 *                key_id, secret_key, proxy_host, proxy_port, endpoint
 * @return the timed {@link SampleResult}; 500 + stack trace on any exception
 */
@Override
public SampleResult runTest(JavaSamplerContext context) {
    // pull parameters
    String bucket = context.getParameter("bucket");
    String object = context.getParameter("object");
    String method = context.getParameter("method");
    String local_file_path = context.getParameter("local_file_path");
    String key_id = context.getParameter("key_id");
    String secret_key = context.getParameter("secret_key");
    String proxy_host = context.getParameter("proxy_host");
    String proxy_port = context.getParameter("proxy_port");
    String endpoint = context.getParameter("endpoint");

    log.debug("runTest:method=" + method + " local_file_path=" + local_file_path + " bucket=" + bucket
            + " object=" + object);

    SampleResult result = new SampleResult();
    result.sampleStart(); // start stopwatch

    try {
        ClientConfiguration config = new ClientConfiguration();
        if (proxy_host != null && !proxy_host.isEmpty()) {
            config.setProxyHost(proxy_host);
        }
        if (proxy_port != null && !proxy_port.isEmpty()) {
            config.setProxyPort(Integer.parseInt(proxy_port));
        }

        AWSCredentials credentials = new BasicAWSCredentials(key_id, secret_key);

        AmazonS3 s3Client = new AmazonS3Client(credentials, config);
        if (endpoint != null && !endpoint.isEmpty()) {
            s3Client.setEndpoint(endpoint);
        }
        ObjectMetadata meta = null;

        if (method.equals("GET")) {
            S3Object s3object = s3Client.getObject(bucket, object);
            // Populate meta so the success message can report the length;
            // the original never assigned it, making the "Length=" branch dead.
            meta = s3object.getObjectMetadata();
            // Close without reading: aborts the underlying HTTP request.
            try (S3ObjectInputStream stream = s3object.getObjectContent()) {
                // intentionally not consumed
            }
        } else if (method.equals("PUT")) {
            File file = new File(local_file_path);
            s3Client.putObject(bucket, object, file);
        }

        result.sampleEnd(); // stop stopwatch
        result.setSuccessful(true);
        if (meta != null) {
            result.setResponseMessage(
                    "OK on url:" + bucket + "/" + object + ". Length=" + meta.getContentLength());
        } else {
            result.setResponseMessage("OK on url:" + bucket + "/" + object + ".No metadata");
        }
        result.setResponseCodeOK(); // 200 code

    } catch (Exception e) {
        result.sampleEnd(); // stop stopwatch
        result.setSuccessful(false);
        result.setResponseMessage("Exception: " + e);

        // get stack trace as a String to return as document data
        java.io.StringWriter stringWriter = new java.io.StringWriter();
        e.printStackTrace(new java.io.PrintWriter(stringWriter));
        result.setResponseData(stringWriter.toString());
        result.setDataType(org.apache.jmeter.samplers.SampleResult.TEXT);
        result.setResponseCode("500");
    }

    return result;
}

From source file:dataprocessing.amazonwebservices.S3Client.java

License:GNU General Public License

/** *************************************************************
 * Gets a file from S3 and reads it into memory line by line.
 * @param filename name of file to retrieve from s3
 * @return the lines of the file, in order; empty on failure
 */
/**
 * Reads the named S3 object and returns its contents as a list of lines.
 * Any failure is logged to stderr and an empty (or partial) list is returned.
 *
 * @param filename key of the object to read from the configured bucket
 * @return the lines of the object, in order
 */
public List<String> readS3File(String filename) {

    List<String> lines = new ArrayList<>();

    try {
        S3Object object = client.getObject(bucket, filename);
        // Closing the reader also closes the wrapped S3 stream; the original
        // leaked both if readLine() threw mid-file.
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(object.getObjectContent()))) {
            String line;
            while ((line = bufferedReader.readLine()) != null)
                lines.add(line);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    return lines;
}

From source file:org.apache.oodt.cas.filemgr.datatransfer.S3DataTransferer.java

License:Apache License

/**
 * Copies an S3 object to a local file inside {@code directory}, named after
 * the final path component of the reference's data-store URI.
 *
 * @param file      S3 object whose content is staged
 * @param ref       reference providing the data-store path (for the filename)
 * @param directory destination directory
 * @throws IOException if the download or local write fails
 */
private void stageFile(S3Object file, Reference ref, File directory) throws IOException {
    File target = new File(directory,
            new File(stripProtocol(ref.getDataStoreReference(), false)).getName());
    // try-with-resources closes both streams in every case. The original's
    // finally block dereferenced possibly-null streams (the resulting NPE was
    // silently swallowed) and hid genuine close() failures.
    try (S3ObjectInputStream inStream = file.getObjectContent();
            FileOutputStream outStream = new FileOutputStream(target)) {
        IOUtils.copy(inStream, outStream);
    }
}

From source file:org.pentaho.amazon.client.impl.S3ClientImpl.java

License:Apache License

/**
 * Downloads a gzip-compressed EMR log object and returns its text content.
 * Returns an empty string if the object does not exist or cannot be read.
 *
 * @param stagingBucketName bucket holding the log
 * @param key               object key of the gzipped log
 * @return the decompressed log text, lines joined with the platform separator
 */
private String readLogFromS3(String stagingBucketName, String key) {

    String lineSeparator = System.getProperty("line.separator");
    StringBuilder logContents = new StringBuilder();

    try {
        if (s3Client.doesObjectExist(stagingBucketName, key)) {
            S3Object outObject = s3Client.getObject(stagingBucketName, key);
            // Resources are declared in dependency order; try-with-resources
            // closes them in reverse (scanner -> gzip -> s3 stream), replacing
            // the original's error-prone manual finally block.
            try (S3ObjectInputStream s3ObjectInputStream = outObject.getObjectContent();
                    GZIPInputStream gzipInputStream = new GZIPInputStream(s3ObjectInputStream);
                    Scanner logScanner = new Scanner(gzipInputStream)) {
                while (logScanner.hasNextLine()) {
                    logContents.append(logScanner.nextLine()).append(lineSeparator);
                }
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return logContents.toString();
}

From source file:org.shareok.data.commons.recipes.S3BookRecipeFileGenerator.java

/**
 * Read out the manifest file information and convert it into a map with keys of bookname and values of hash codes
 * /*from   w w w.j  av a2  s  .c om*/
 * @param bookName
 * @return 
 */
/**
 * Reads the book's {@code manifest-md5.txt} from S3 and returns a map of
 * "{bookName}/{filePath}" to MD5 hash for every .tif entry.
 *
 * <p>Each manifest line is expected to be "&lt;md5&gt; ... &lt;path&gt;";
 * malformed lines and non-TIFF entries are skipped.
 *
 * @param bookName folder/prefix of the book in the source bucket
 * @return map of TIFF file paths to their MD5 hashes; empty on failure
 */
public Map<String, String> getManifest(String bookName) {

    Map<String, String> manifest = new HashMap<>();

    // Closing the reader also closes the underlying S3 stream, so the two
    // separate close blocks of the original are unnecessary.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            s3client.getObject(new GetObjectRequest(sourceBucket, bookName + "/manifest-md5.txt"))
                    .getObjectContent()))) {
        String line;
        while ((line = reader.readLine()) != null) {
            // (the original re-checked line != null here; the loop condition
            // already guarantees it)
            String[] manifestInfo = line.split(" ");
            int length = manifestInfo.length;
            if (length < 2) {
                continue; // malformed line: need at least hash and path
            }
            String hash = manifestInfo[0];
            String filePath = manifestInfo[length - 1];
            if (!filePath.endsWith(".tif")) {
                continue; // only TIFF page images are tracked
            }
            manifest.put(bookName + "/" + filePath, hash);
        }
    } catch (IOException ex) {
        Logger.getLogger(S3BookRecipeFileGenerator.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return manifest;
}

From source file:org.springframework.integration.aws.support.S3Session.java

License:Apache License

@Override
public void read(String source, OutputStream outputStream) throws IOException {
    // Split "bucket/key" and stream the object's bytes to the caller,
    // always closing the S3 content stream when the copy finishes.
    String[] bucketAndKey = splitPathToBucketAndKey(source);
    S3Object remoteObject = this.amazonS3.getObject(bucketAndKey[0], bucketAndKey[1]);
    try (S3ObjectInputStream body = remoteObject.getObjectContent()) {
        StreamUtils.copy(body, outputStream);
    }
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Generate a small tiff file from large Tiff S3 bucket object <br>
 * Note: the small tiff file will have the same key path as the original one
 * //w  ww  .  ja v  a 2s .c  om
 * @param s3client : S3 client
 * @param s3 : S3 object that contains the source Tiff content
 * @param targetBucketName : the bucket that stores the small tiff file
 * @param targetKey : key of the object in the target bucket
 * @param compressionRate : compression rate
 * @return : PutObjectResult
 */
public static PutObjectResult generateSmallTiff(AmazonS3 s3client, S3Object s3, String targetBucketName,
        String targetKey, double compressionRate) {

    PutObjectResult result = null;
    ByteArrayOutputStream bos = null;
    ByteArrayOutputStream os = null;
    ByteArrayInputStream is = null;
    S3ObjectInputStream s = null;
    ByteArrayInputStream byteInputStream = null;

    try {
        // Force the pure-Java JAI code path (avoids native mediaLib issues).
        System.setProperty("com.sun.media.jai.disableMediaLib", "true");

        bos = new ByteArrayOutputStream();
        // Buffer the entire S3 object in memory before decoding as TIFF.
        s = s3.getObjectContent();
        byte[] bytes = IOUtils.toByteArray(s);
        byteInputStream = new ByteArrayInputStream(bytes);

        TIFFDecodeParam param = new TIFFDecodeParam();
        ImageDecoder dec = ImageCodec.createImageDecoder("TIFF", byteInputStream, param);

        RenderedImage image = dec.decodeAsRenderedImage();

        RenderingHints qualityHints = new RenderingHints(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);

        // Downscale by compressionRate in both axes using area averaging.
        RenderedOp resizedImage = JAI.create("SubsampleAverage", image, compressionRate, compressionRate,
                qualityHints);

        TIFFEncodeParam params = new com.sun.media.jai.codec.TIFFEncodeParam();

        // NOTE(review): this "encode" writes into bos, but bos is never read
        // afterwards — the upload below uses the ImageIO re-encode in os.
        // It may exist only to force rendering of the lazy JAI pipeline;
        // confirm before removing.
        resizedImage = JAI.create("encode", resizedImage, bos, "TIFF", params);

        BufferedImage imagenew = resizedImage.getSourceImage(0).getAsBufferedImage();

        // Re-encode the downscaled image as TIFF via ImageIO for the upload.
        os = new ByteArrayOutputStream();
        ImageIO.write(imagenew, "tif", os);
        is = new ByteArrayInputStream(os.toByteArray());

        // Content length must be set so the SDK does not buffer the stream.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(os.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        os.close();

        imagenew.flush();

        // Upload the small tiff to the target bucket under the given key.
        result = s3client.putObject(new PutObjectRequest(targetBucketName, targetKey, is, metadata));
    } catch (IOException | AmazonClientException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Best-effort cleanup of every stream opened above (os is closed a
        // second time here; ByteArrayOutputStream.close() is a no-op).
        try {
            if (bos != null) {
                bos.close();
            }
            if (os != null) {
                os.close();
            }
            if (is != null) {
                is.close();
            }
            if (s != null) {
                s.close();
            }
            if (byteInputStream != null) {
                byteInputStream.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    return result;
}

From source file:oulib.aws.s3.S3Util.java

/**
 * Pull out Tiff metadata from input S3 object and inject into the 
 * content of target S3 Object;<br>
 * Generate the new output S3 object that has the metadata from input object.
 * //from w w  w. j  a  va2 s.  co m
 * @param s3client : S3 client
 * @param obj1 : input object that provides metadata
 * @param obj2 : target object that receives metadata
 * 
 * @return PutObjectResult
 */
public static PutObjectResult copyS3ObjectTiffMetadata(AmazonS3 s3client, S3Object obj1, S3Object obj2) {

    PutObjectResult result = null;

    // NOTE(review): bufferedInputStrean and bis are declared and closed in the
    // finally block but never assigned — they are always null (dead code).
    BufferedInputStream bufferedInputStrean = null;
    ByteArrayOutputStream byteArrayOutputStream = null;
    ByteArrayInputStream byteArrayInputStream = null;
    ByteArrayInputStream bis = null;
    S3ObjectInputStream content1 = null;
    S3ObjectInputStream content2 = null;
    String targetBucketName = obj2.getBucketName();
    // Output key: obj2's key with "-copied.tif" replacing the ".tif" suffix.
    String outputKey = obj2.getKey().split(".tif")[0] + "-copied.tif";

    ImageMetadata metadata1, metadata2;
    TiffImageMetadata tiffMetadata1, tiffMetadata2;
    TiffOutputSet output1, output2;

    try {
        // Buffer both objects fully in memory to parse their TIFF metadata.
        content1 = obj1.getObjectContent();
        content2 = obj2.getObjectContent();

        byte[] bytes1 = IOUtils.toByteArray(content1);
        byte[] bytes2 = IOUtils.toByteArray(content2);

        metadata1 = Imaging.getMetadata(bytes1);
        metadata2 = Imaging.getMetadata(bytes2);

        tiffMetadata1 = (TiffImageMetadata) metadata1;
        tiffMetadata2 = (TiffImageMetadata) metadata2;

        output1 = tiffMetadata1.getOutputSet();
        output2 = tiffMetadata2.getOutputSet();

        TiffOutputDirectory rootDir = output2.getOrCreateRootDirectory();
        TiffOutputDirectory exifDir = output2.getOrCreateExifDirectory();
        TiffOutputDirectory gpsDir = output2.getOrCreateGPSDirectory();

        // Root IFD: copy only fields obj2 does not already have, preserving
        // obj2's values on conflict.
        if (null != output1.getRootDirectory()) {
            List<TiffOutputField> fs = output1.getRootDirectory().getFields();
            for (TiffOutputField f1 : fs) {
                if (null == rootDir.findField(f1.tag)
                        // CANNOT create the output image with this tag included!
                        && !"PlanarConfiguration".equals(f1.tagInfo.name)) {
                    rootDir.add(f1);
                }
            }
        }

        // EXIF directory: obj1's fields overwrite obj2's (remove then add).
        if (null != output1.getExifDirectory()) {
            for (TiffOutputField f2 : output1.getExifDirectory().getFields()) {
                exifDir.removeField(f2.tagInfo);
                exifDir.add(f2);
            }
        }

        // GPS directory: obj1's fields overwrite obj2's (remove then add).
        if (null != output1.getGPSDirectory()) {
            for (TiffOutputField f3 : output1.getGPSDirectory().getFields()) {
                gpsDir.removeField(f3.tagInfo);
                gpsDir.add(f3);
            }
        }

        // Serialize the merged metadata set.
        // NOTE(review): only the metadata output set is written here — verify
        // this produces the intended full image and not just its directory
        // structure before relying on the uploaded object.
        byteArrayOutputStream = new ByteArrayOutputStream();
        TiffImageWriterLossy writerLossy = new TiffImageWriterLossy(output2.byteOrder);
        writerLossy.write(byteArrayOutputStream, output2);

        byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());

        // Content length must be set so the SDK does not buffer the stream.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(byteArrayOutputStream.toByteArray().length);
        metadata.setContentType("image/tiff");
        metadata.setLastModified(new Date());

        result = s3client
                .putObject(new PutObjectRequest(targetBucketName, outputKey, byteArrayInputStream, metadata));

    } catch (ImageReadException | IOException | ImageWriteException ex) {
        Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Best-effort cleanup; null checks guard the never-assigned streams.
        try {
            if (null != content1) {
                content1.close();
            }
            if (null != content2) {
                content2.close();
            }
            if (null != bufferedInputStrean) {
                bufferedInputStrean.close();
            }
            if (null != byteArrayInputStream) {
                byteArrayInputStream.close();
            }
            if (null != byteArrayOutputStream) {
                byteArrayOutputStream.close();
            }
            if (null != bis) {
                bis.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(S3Util.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return result;
}