Example usage for com.amazonaws.services.glacier.model InitiateMultipartUploadRequest InitiateMultipartUploadRequest

Introduction

On this page you can find example usage for com.amazonaws.services.glacier.model InitiateMultipartUploadRequest InitiateMultipartUploadRequest.

Prototype

public InitiateMultipartUploadRequest(String vaultName, String archiveDescription, String partSize) 

Document

Constructs a new InitiateMultipartUploadRequest object.
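As a quick orientation before the full example below, here is a minimal sketch of how the constructor is typically passed to AmazonGlacierClient.initiateMultipartUpload. The class and method names, the vault name, the archive description and the 16 MB part size are placeholder values; note that the part size is supplied as a String of bytes:

import com.amazonaws.services.glacier.AmazonGlacierClient;
import com.amazonaws.services.glacier.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.glacier.model.InitiateMultipartUploadResult;

public class InitiateExample {
    public static String startMultipartUpload(AmazonGlacierClient client) {
        // Placeholder vault name, archive description and 16 MB part size (passed as a String of bytes)
        InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest("my-vault",
                "nightly backup", Integer.toString(16 * 1024 * 1024));
        InitiateMultipartUploadResult result = client.initiateMultipartUpload(request);
        return result.getUploadId();
    }
}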

Usage

From source file: maebackup.MaeBackup.java

License: Open Source License

public static void upload(String lrzname) {
    try {
        System.out.println("Uploading to Glacier...");
        ClientConfiguration config = new ClientConfiguration();
        config.setProtocol(Protocol.HTTPS);
        AmazonGlacierClient client = new AmazonGlacierClient(credentials, config);
        client.setEndpoint(endpoint);

        File file = new File(lrzname);
        String archiveid = "";
        if (file.length() < 5 * 1024 * 1024) {
            System.out.println("File is small, uploading as single chunk");
            String treehash = TreeHashGenerator.calculateTreeHash(file);

            // Read the whole file into memory so the upload can stream from a byte array
            InputStream is = new FileInputStream(file);
            byte[] buffer = new byte[(int) file.length()];
            int bytes = is.read(buffer);
            is.close();
            if (bytes != file.length())
                throw new RuntimeException("Only read " + bytes + " of " + file.length()
                        + " byte file when preparing for upload.");
            InputStream bais = new ByteArrayInputStream(buffer);

            UploadArchiveRequest request = new UploadArchiveRequest(vaultname, lrzname, treehash, bais);
            UploadArchiveResult result = client.uploadArchive(request);
            archiveid = result.getArchiveId();
        } else {
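            // Glacier caps a multipart upload at 10,000 parts, so grow the part size until the file fits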
            long chunks = file.length() / chunksize;
            while (chunks > 10000) {
                chunksize <<= 1;
                chunks = file.length() / chunksize;
            }
            String chunksizestr = Integer.toString(chunksize);
            System.out.println(
                    "Starting multipart upload: " + chunks + " full chunks of " + chunksizestr + " bytes");

            InitiateMultipartUploadResult imures = client.initiateMultipartUpload(
                    new InitiateMultipartUploadRequest(vaultname, lrzname, chunksizestr));

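            // The upload ID identifies this multipart upload in every subsequent part and completion call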
            String uploadid = imures.getUploadId();
            RandomAccessFile raf = new RandomAccessFile(file, "r");

            byte[] buffer = new byte[chunksize];

            for (long x = 0; x < chunks; x++) {
                try {
                    System.out.println("Uploading chunk " + x + "/" + chunks);

                    raf.seek(x * chunksize);
                    raf.readFully(buffer);

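                    // Each part carries its own tree hash and a range of the form "bytes start-end/*"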
                    String parthash = TreeHashGenerator.calculateTreeHash(new ByteArrayInputStream(buffer));
                    String range = "bytes " + (x * chunksize) + "-" + ((x + 1) * chunksize - 1) + "/*";

                    client.uploadMultipartPart(new UploadMultipartPartRequest(vaultname, uploadid, parthash,
                            range, new ByteArrayInputStream(buffer)));
                } catch (Exception e) {
                    e.printStackTrace();
                    System.err.println("Error uploading chunk " + x + ", retrying...");
                    x--;
                }
            }

            if (file.length() > chunks * chunksize) {
                // Retry the final partial part until it uploads successfully
                boolean uploaded = false;
                while (!uploaded) {
                    try {
                        System.out.println("Uploading final partial chunk");
                        int bytes = (int) (file.length() - chunks * chunksize);
                        raf.seek(chunks * chunksize);
                        raf.readFully(buffer, 0, bytes);

                        String parthash = TreeHashGenerator
                                .calculateTreeHash(new ByteArrayInputStream(buffer, 0, bytes));
                        String range = "bytes " + (chunks * chunksize) + "-" + (file.length() - 1) + "/*";

                        client.uploadMultipartPart(new UploadMultipartPartRequest(vaultname, uploadid, parthash,
                                range, new ByteArrayInputStream(buffer, 0, bytes)));
                        uploaded = true;
                    } catch (Exception e) {
                        e.printStackTrace();
                        System.err.println("Error uploading final chunk, retrying...");
                    }
                }
            }

            System.out.println("Completing upload");
            String treehash = TreeHashGenerator.calculateTreeHash(file);
            CompleteMultipartUploadResult result = client
                    .completeMultipartUpload(new CompleteMultipartUploadRequest(vaultname, uploadid,
                            new Long(file.length()).toString(), treehash));
            archiveid = result.getArchiveId();
        }

        System.out.println("Uploaded " + lrzname + " to Glacier as ID " + archiveid);

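        // Append the new archive ID and file name to the local archive list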
        File listfile = new File(cachedir, "archives.lst");
        FileWriter fw = new FileWriter(listfile, true);
        fw.write(archiveid + " " + lrzname + "\n");
        fw.close();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
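
In this example, credentials, endpoint, vaultname, chunksize and cachedir are fields of the enclosing MaeBackup class that are assumed to be initialized elsewhere in the source file.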