Example usage for com.amazonaws.event ProgressEvent getEventCode

List of usage examples for com.amazonaws.event ProgressEvent getEventCode

Introduction

This page shows example usages of com.amazonaws.event ProgressEvent getEventCode.

Prototype

@Deprecated
public int getEventCode() 

Document

Returns the unique event code identifying the type of event this object represents.
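
The prototype above is marked @Deprecated; in later 1.x releases of the AWS SDK for Java the event type is exposed through ProgressEvent#getEventType() rather than this raw int code. As a quick orientation before the real-world examples below, here is a minimal, hypothetical sketch (not taken from any of the source files on this page) of the common pattern: branching on the legacy event-code constants inside a ProgressListener.

import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;

public class EventCodeListenerSketch {

    // Builds a listener that reacts to the legacy int event codes.
    public static ProgressListener newListener() {
        return new ProgressListener() {
            @Override
            public void progressChanged(ProgressEvent event) {
                if (event.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE) {
                    // the whole transfer finished successfully
                    System.out.println("transfer completed");
                } else if (event.getEventCode() == ProgressEvent.PART_COMPLETED_EVENT_CODE) {
                    // one part of a multipart transfer finished
                    System.out.println("part completed, " + event.getBytesTransferred() + " bytes in this event");
                } else if (event.getEventCode() == ProgressEvent.FAILED_EVENT_CODE) {
                    // the transfer failed; the owning Upload/Download can report the cause
                    System.out.println("transfer failed");
                }
            }
        };
    }
}

In the examples that follow, the same codes drive listener cleanup (removeProgressListener), status-event dispatching, and Hadoop's write-operation statistics.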

Usage

From source file: com.zahdoo.android.extension.GCM.DownloadModel.java

License: Open Source License

public DownloadModel(Context context, String key, String downloadType) {
    super(context, Uri.parse(key));

    this.context = context;
    strFileName = key;//key is the name of the file;
    fileDownloadType = downloadType;

    mStatus = Status.IN_PROGRESS;
    mListener = new ProgressListener() {
        @Override
        public void progressChanged(ProgressEvent event) {
            Log.d("CADIE S3",
                    "Download Progress - " + (int) Global.mDownload.getProgress().getPercentTransferred());

            try {

                //                    if(!fileDownloadType.contentEquals("THUMB_DOWNLOAD"))
                //                    {
                if ((int) Global.mDownload.getProgress().getPercentTransferred() != 0
                        && progressVal != (int) Global.mDownload.getProgress().getPercentTransferred()) {
                    progressVal = (int) Global.mDownload.getProgress().getPercentTransferred();
                    Log.d("CADIE S3", "Download Progress Event Dispatch - " + progressVal);
                    try {
                        FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                "TRANSFER_PROGRESS^" + progressVal);
                    } catch (Exception e) {
                        Log.d("CADIE S3", "Download err - " + e.toString());
                    }
                }
                //}

                if (event.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE) {
                    Global.mDownload.removeProgressListener(mListener);
                    mStatus = Status.COMPLETED;

                    if (fileDownloadType.contentEquals("THUMB_DOWNLOAD")) {
                        Log.d("CADIE S3", "Thumb Downloaded");
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                    "THUMB_DOWNLOAD");
                        } catch (Exception e) {
                            Log.d("CADIE S3", "Thumb Download err - " + e.toString());
                        }
                    } else {
                        Log.d("CADIE S3", "File Downloaded");
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                    "DOWNLOAD_SUCCESSFUL");
                        } catch (Exception e) {
                            Log.d("CADIE S3", "File Download err - " + e.toString());
                        }
                    }
                    ((FileTransferService) DownloadModel.this.context).stopSelf();
                } else if (event.getEventCode() == ProgressEvent.FAILED_EVENT_CODE) {
                    Global.mDownload.removeProgressListener(mListener);

                    try {
                        AmazonClientException e = Global.mDownload.waitForException();
                        Log.e("CADIE ",
                                "CADIE S3 Exception - " + e.toString() + " " + event.getBytesTransferred());

                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "ERROR ");
                        } catch (Exception e1) {
                            Log.d("CADIE S3", "CADIE S3 Exception 100 - " + e1.toString());
                        }

                        Global.mTransferManager.shutdownNow();
                        Global.mTransferManager = null;
                        ((FileTransferService) DownloadModel.this.context).stopSelf();
                    } catch (InterruptedException e) {
                    }
                }

            } catch (Exception e) {
                Log.d("CADIE S3", "S3 Download Exc - " + e.toString());
            }
        }
    };
}

From source file: com.zahdoo.android.extension.GCM.UploadModel.java

License: Open Source License

public UploadModel(Context context, Uri uri, String fileName, final String uploadType,
        final String serverResponse) {
    super(context, uri);

    this.context = context;

    strFileName = fileName;

    isTempFile = false;
    fileUploadType = uploadType;

    try {
        if (uploadType.contentEquals("THUMB")) {
            Log.d("CADIE GCM", "UPLOADING THUMB");
            filePath = "/data/data/air.com.zahdoo.cadie/com.zahdoo.cadie/Local Store/thumbnails/" + fileName;
        } else {
            filePath = Environment.getExternalStorageDirectory() + "/cadie/" + fileName;

            if (!(new File(filePath).exists())) {
                filePath = "/data/data/air.com.zahdoo.cadie/com.zahdoo.cadie/Local Store/cadie/" + fileName;
            }
        }

        mFile = new File(filePath);

        if (mFile.exists()) {
            Log.d("CADIE S3", "File Exists");
        } else {
            isTempFile = true;
            Log.d("CADIE S3", "File does not exist");
        }

        int i = fileName.lastIndexOf('.');

        if (i >= 0) {
            mExtension = fileName.substring(i + 1);
        }

        Log.d("CADIE S3", "File Extension - " + mExtension);

        mListener = new ProgressListener() {
            @Override
            public void progressChanged(ProgressEvent event) {
                Log.d("CADIE S3",
                        "Upload Progress - " + (int) Global.mUpload.getProgress().getPercentTransferred());

                if (fileUploadType.contentEquals("ALL_FILES")) {
                    if ((int) Global.mUpload.getProgress().getPercentTransferred() != 0
                            && progressVal != (int) Global.mUpload.getProgress().getPercentTransferred()) {
                        progressVal = (int) Global.mUpload.getProgress().getPercentTransferred();
                        Log.d("CADIE S3", "Upload Progress Event Dispatch - " + progressVal);
                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                    "TRANSFER_PROGRESS^" + progressVal);
                        } catch (Exception e) {
                            Log.d("CADIE S3", "Upload Progress Event Dispatch Error - " + e.toString());
                        }
                    }
                }

                if (event.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE) {
                    Log.d("CADIE S3", "File Uploaded");

                    Global.mUpload.removeProgressListener(mListener);

                    if (mFile != null) {
                        if (isTempFile)
                            mFile.delete();

                        Log.d("CADIE S3", "File Deleted");

                        if (fileUploadType.contentEquals("ALL_FILES")) {
                            String[] vStrings = serverResponse.split("\\^");

                            try {
                                if (vStrings[0].contentEquals("FILE_UPLOADED")) {
                                    FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                            "FILE_UPLOADED^" + vStrings[1]);
                                } else {
                                    FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                            "ERROR ");
                                }
                            } catch (Exception e) {
                                Log.d("CADIE S3", "File Upload Error - " + e.toString());
                            }
                        } else//THUMB
                        {
                            Log.d(CommonUtilities.TAG, "THUMB UPLOADED");
                            try {
                                FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED",
                                        "THUMB_UPLOADED");
                            } catch (Exception e) {
                                Log.d(CommonUtilities.TAG, "THUMB UPLOADED Error - " + e.toString());
                            }
                        }

                        //Global.mTransferManager.shutdownNow(false);
                        //Global.mTransferManager = null;
                        ((FileTransferService) UploadModel.this.context).stopSelf();
                    }
                } else if (event.getEventCode() == ProgressEvent.FAILED_EVENT_CODE) {
                    Global.mUpload.removeProgressListener(mListener);
                    //upload();

                    try {
                        AmazonClientException e = Global.mUpload.waitForException();
                        Log.e("CADIE ",
                                "CADIE S3 Exception - " + e.toString() + " " + event.getBytesTransferred());

                        try {
                            FileTransferFunction.gcmCon.dispatchStatusEventAsync("REGISTERED", "ERROR ");
                        } catch (Exception e1) {
                            Log.d(CommonUtilities.TAG, "CADIE S3 Exception - " + e1.toString());
                        }

                        Global.mTransferManager.shutdownNow(false);
                        Global.mTransferManager = null;
                        ((FileTransferService) UploadModel.this.context).stopSelf();
                    } catch (InterruptedException e) {
                    }
                }
            }
        };
    } catch (Exception e) {
        Log.d(CommonUtilities.TAG, "UPLOAD EXCEPTIOn -  " + e.toString());
    }
}

From source file: org.apache.hadoop.fs.s3a.S3AFileSystem.java

License: Apache License

/**
 * The src file is on the local disk.  Add it to FS at
 * the given dst name.
 *
 * This version doesn't need to create a temporary file to calculate the md5. Sadly this doesn't seem to be
 * used by the shell cp :(
 *
 * delSrc indicates if the source should be removed
 * @param delSrc whether to delete the src
 * @param overwrite whether to overwrite an existing file
 * @param src path
 * @param dst path
 */
@Override
public void copyFromLocalFile(boolean delSrc, boolean overwrite, Path src, Path dst) throws IOException {
    String key = pathToKey(dst);

    if (!overwrite && exists(dst)) {
        throw new IOException(dst + " already exists");
    }

    LOG.info("Copying local file from " + src + " to " + dst);

    // Since we have a local file, we don't need to stream into a temporary file
    LocalFileSystem local = getLocal(getConf());
    File srcfile = local.pathToFile(src);

    TransferManagerConfiguration transferConfiguration = new TransferManagerConfiguration();
    transferConfiguration.setMinimumUploadPartSize(partSize);
    transferConfiguration.setMultipartUploadThreshold(partSizeThreshold);

    TransferManager transfers = new TransferManager(s3);
    transfers.setConfiguration(transferConfiguration);

    final ObjectMetadata om = new ObjectMetadata();
    if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) {
        om.setServerSideEncryption(serverSideEncryptionAlgorithm);
    }

    PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, srcfile);
    putObjectRequest.setCannedAcl(cannedACL);
    putObjectRequest.setMetadata(om);

    ProgressListener progressListener = new ProgressListener() {
        public void progressChanged(ProgressEvent progressEvent) {
            switch (progressEvent.getEventCode()) {
            case ProgressEvent.PART_COMPLETED_EVENT_CODE:
                statistics.incrementWriteOps(1);
                break;
            }
        }
    };

    Upload up = transfers.upload(putObjectRequest);
    up.addProgressListener(progressListener);
    try {
        up.waitForUploadResult();
        statistics.incrementWriteOps(1);
    } catch (InterruptedException e) {
        throw new IOException("Got interrupted, cancelling");
    } finally {
        transfers.shutdownNow(false);
    }

    // This will delete unnecessary fake parent directories
    finishedWrite(key);

    if (delSrc) {
        local.delete(src, false);
    }
}

From source file: org.apache.hadoop.fs.s3a.S3AFileSystem.java

License: Apache License

private void copyFile(String srcKey, String dstKey) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("copyFile " + srcKey + " -> " + dstKey);
    }

    TransferManagerConfiguration transferConfiguration = new TransferManagerConfiguration();
    transferConfiguration.setMultipartCopyPartSize(partSize);

    TransferManager transfers = new TransferManager(s3);
    transfers.setConfiguration(transferConfiguration);

    ObjectMetadata srcom = s3.getObjectMetadata(bucket, srcKey);
    final ObjectMetadata dstom = srcom.clone();
    if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) {
        dstom.setServerSideEncryption(serverSideEncryptionAlgorithm);
    }

    CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
    copyObjectRequest.setCannedAccessControlList(cannedACL);
    copyObjectRequest.setNewObjectMetadata(dstom);

    ProgressListener progressListener = new ProgressListener() {
        public void progressChanged(ProgressEvent progressEvent) {
            switch (progressEvent.getEventCode()) {
            case ProgressEvent.PART_COMPLETED_EVENT_CODE:
                statistics.incrementWriteOps(1);
                break;
            }
        }
    };

    Copy copy = transfers.copy(copyObjectRequest);
    copy.addProgressListener(progressListener);
    try {
        copy.waitForCopyResult();
        statistics.incrementWriteOps(1);
    } catch (InterruptedException e) {
        throw new IOException("Got interrupted, cancelling");
    } finally {
        transfers.shutdownNow(false);
    }
}

From source file: org.apache.hadoop.fs.s3r.S3RFileSystem.java

License: Apache License

/**
 * The src file is on the local disk.  Add it to FS at
 * the given dst name.
 *
 * This version doesn't need to create a temporary file to calculate the md5.
 * Sadly this doesn't seem to be used by the shell cp :(
 *
 * delSrc indicates if the source should be removed
 * @param delSrc whether to delete the src
 * @param overwrite whether to overwrite an existing file
 * @param src path
 * @param dst path
 */
@Override
public void copyFromLocalFile(boolean delSrc, boolean overwrite, Path src, Path dst) throws IOException {
    String key = pathToKey(dst);

    if (!overwrite && exists(dst)) {
        throw new IOException(dst + " already exists");
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Copying local file from " + src + " to " + dst);
    }

    // Since we have a local file, we don't need to stream into a temporary file
    LocalFileSystem local = getLocal(getConf());
    File srcfile = local.pathToFile(src);

    final ObjectMetadata om = new ObjectMetadata();
    if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) {
        om.setServerSideEncryption(serverSideEncryptionAlgorithm);
    }
    PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, srcfile);
    putObjectRequest.setCannedAcl(cannedACL);
    putObjectRequest.setMetadata(om);

    ProgressListener progressListener = new ProgressListener() {
        public void progressChanged(ProgressEvent progressEvent) {
            switch (progressEvent.getEventCode()) {
            case ProgressEvent.PART_COMPLETED_EVENT_CODE:
                statistics.incrementWriteOps(1);
                break;
            default:
                break;
            }
        }
    };

    Upload up = transfers.upload(putObjectRequest);
    up.addProgressListener(progressListener);
    try {
        up.waitForUploadResult();
        statistics.incrementWriteOps(1);
    } catch (InterruptedException e) {
        throw new IOException("Got interrupted, cancelling");
    }

    // This will delete unnecessary fake parent directories
    finishedWrite(key);

    if (delSrc) {
        local.delete(src, false);
    }
}

From source file: org.apache.hadoop.fs.s3r.S3RFileSystem.java

License: Apache License

private void copyFile(String srcKey, String dstKey) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("copyFile " + srcKey + " -> " + dstKey);
    }

    ObjectMetadata srcom = s3.getObjectMetadata(bucket, srcKey);
    final ObjectMetadata dstom = srcom.clone();
    if (StringUtils.isNotBlank(serverSideEncryptionAlgorithm)) {
        dstom.setServerSideEncryption(serverSideEncryptionAlgorithm);
    }
    CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
    copyObjectRequest.setCannedAccessControlList(cannedACL);
    copyObjectRequest.setNewObjectMetadata(dstom);

    ProgressListener progressListener = new ProgressListener() {
        public void progressChanged(ProgressEvent progressEvent) {
            switch (progressEvent.getEventCode()) {
            case ProgressEvent.PART_COMPLETED_EVENT_CODE:
                statistics.incrementWriteOps(1);
                break;
            default:
                break;
            }
        }
    };

    Copy copy = transfers.copy(copyObjectRequest);
    copy.addProgressListener(progressListener);
    try {
        copy.waitForCopyResult();
        statistics.incrementWriteOps(1);
    } catch (InterruptedException e) {
        throw new IOException("Got interrupted, cancelling");
    }
}