Example usage for com.amazonaws.services.s3.model S3Object close

Introduction

On this page you can find example usage of com.amazonaws.services.s3.model.S3Object.close().

Prototype

@Override
public void close() throws IOException 

Document

Releases any underlying system resources.
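
Because close() overrides java.io.Closeable.close(), an S3Object can be managed with try-with-resources so the underlying HTTP connection is released even if reading the content fails. The snippet below is a minimal sketch; the bucket name and key are placeholder values.

import java.io.IOException;

import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.util.IOUtils;

public class S3ObjectCloseExample {
    public static void main(String[] args) throws IOException {
        AmazonS3 s3 = new AmazonS3Client();
        s3.setRegion(Region.getRegion(Regions.AP_NORTHEAST_1));

        // try-with-resources calls S3Object.close() automatically, releasing
        // the underlying HTTP connection. Bucket and key are placeholders.
        try (S3Object object = s3.getObject("example-bucket", "example-key")) {
            String content = new String(IOUtils.toByteArray(object.getObjectContent()), "UTF-8");
            System.out.println(content);
        }
    }
}

Closing the object (or fully reading and then closing its content stream) matters because each open S3Object holds an HTTP connection from the client's connection pool.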

Usage

From source file:jp.sanix.Rawdata.java

License:Open Source License

public TreeMap<Date, JSONObject> get() throws IOException, SQLException, ParseException {

    TreeMap<Date, JSONObject> data = new TreeMap<Date, JSONObject>();
    AmazonS3 s3 = new AmazonS3Client();
    s3.setRegion(Region.getRegion(Regions.AP_NORTHEAST_1));

    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
    Calendar cal = Calendar.getInstance();
    cal.setTime(this.getDate());

    /* List objects from AWS S3 */
    String bucketName = "pvdata-storage-production";
    ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName)
            .withPrefix("data/" + this.getUniqueCode() + "/" + toDate(cal) + "/"));

    /* get data from s3 */
    do {
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            String keyname = objectSummary.getKey();
            S3Object object = s3.getObject(new GetObjectRequest(BUCKET, keyname));
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(new GZIPInputStream(object.getObjectContent())));
            while (reader.ready()) {
                String line = reader.readLine();
                int pos;
                try {
                    pos = line.indexOf("{");
                } catch (NullPointerException e) {
                    continue;
                }
                try {
                    String jsons = line.substring(pos);
                    JSONObject json = new JSONObject(jsons);
                    JSONArray arr = json.getJSONObject("data").getJSONArray("sample");
                    for (int i = 0; i < arr.length(); i++) {
                        JSONObject obj = arr.getJSONObject(i);
                        Date date = fmt.parse(obj.getString("time"));
                        for (int j = 0; j < obj.getJSONArray("powcon").length(); j++) {
                            if (obj.getJSONArray("powcon").getJSONObject(j).get("genKwh") != JSONObject.NULL) {
                                Double genkwh;
                                genkwh = obj.getJSONArray("powcon").getJSONObject(j).getDouble("genKwh");
                                JSONObject jobj;
                                if (data.containsKey(date)) {
                                    jobj = data.get(date);
                                } else {
                                    jobj = new JSONObject();
                                }
                                if (genkwh != null) {
                                    jobj.put(String.valueOf(j), genkwh);
                                }
                                if (jobj.length() > 0) {
                                    data.put(date, jobj);
                                }
                            }
                        }
                    }
                } catch (JSONException e) {
                    e.printStackTrace();
                } catch (StringIndexOutOfBoundsException e) {
                    e.printStackTrace();
                }
            }
            reader.close();
            object.close();
        }
        objectListing = s3.listNextBatchOfObjects(objectListing);
    } while (objectListing.getMarker() != null);

    Calendar today = Calendar.getInstance();
    today.setTimeZone(TimeZone.getTimeZone("JST"));
    if (toDate(cal).equals(toDate(today))) {
        SimpleDateFormat pgfmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        pgfmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        Calendar recent = Calendar.getInstance();
        recent.setTimeZone(TimeZone.getTimeZone("UTC"));
        recent.add(Calendar.HOUR, -2);
        Connection db;
        if (System.getProperty("user.name").toString().equals("ec2-user")) {
            db = DriverManager.getConnection(PG_CON, PG_USER, PG_PASS);
        } else {
            db = DriverManager.getConnection(PG_CON_LOCAL, PG_USER, PG_PASS);
        }
        Statement st = db.createStatement();
        String sql = "SELECT data FROM data WHERE pvs_unique_code='" + this.getUniqueCode()
                + "' AND created_at > '" + pgfmt.format(recent.getTime()) + "';";
        ResultSet rs = st.executeQuery(sql);
        while (rs.next()) {
            String jsons = rs.getString(1);
            try {
                JSONObject json = new JSONObject(jsons);
                JSONArray arr = json.getJSONObject("data").getJSONArray("sample");
                for (int i = 0; i < arr.length(); i++) {
                    JSONObject obj = arr.getJSONObject(i);
                    Date date = fmt.parse(obj.getString("time"));
                    for (int j = 0; j < obj.getJSONArray("powcon").length(); j++) {
                        if (obj.getJSONArray("powcon").getJSONObject(j).get("genKwh") != JSONObject.NULL) {
                            Double genkwh;
                            genkwh = obj.getJSONArray("powcon").getJSONObject(j).getDouble("genKwh");
                            JSONObject jobj;
                            if (data.containsKey(date)) {
                                jobj = data.get(date);
                            } else {
                                jobj = new JSONObject();
                            }
                            if (genkwh != null) {
                                jobj.put(String.valueOf(j), genkwh);
                            }
                            if (jobj.length() > 0) {
                                data.put(date, jobj);
                            }
                        }
                    }
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        rs.close();
        db.close();
    }
    return data;
}

From source file:jp.sanix.weatherData.java

License:Open Source License

public static void main(String[] args) throws IOException, SQLException, AmazonServiceException, JSONException,
        NullPointerException, ParseException {

    AmazonS3 s3 = new AmazonS3Client();
    s3.setRegion(Region.getRegion(Regions.AP_NORTHEAST_1));

    BufferedReader br = new BufferedReader(new InputStreamReader(
            new FileInputStream(new File("src/main/web/?.csv")), "SJIS"));

    String line = null;
    br.readLine(); // skip the header line
    while ((line = br.readLine()) != null) {

        String[] col = line.split(",");

        /* List objects from AWS S3 */
        String bucketName = "weather-forecast";
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName)
                .withPrefix("day_insolation/" + col[0] + "/"));

        File file = File.createTempFile("temp", ".csv");
        file.deleteOnExit();
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "SJIS"));
        bw.write(
                ",?,?,???,,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23\n");

        System.out.println(col[0] + ":" + col[1] + col[2]);
        /* get data from s3 */
        int i = 0;
        do {
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                String keyname = objectSummary.getKey();
                S3Object object = s3.getObject(new GetObjectRequest(bucketName, keyname));
                BufferedReader reader = new BufferedReader(new InputStreamReader(object.getObjectContent()));
                StringBuilder sb = new StringBuilder();
                String line2 = null;
                System.out.print(String.valueOf(i++) + "\r");
                while ((line2 = reader.readLine()) != null) {
                    sb.append(line2);
                }
                reader.close();
                object.close();
                try {
                    JSONObject json = new JSONObject(sb.toString());
                    bw.write(String.join(",", col) + "," + json.get("date").toString().replace("-", "/") + ",");
                    JSONArray jarr = json.getJSONArray("hour_data");
                    bw.write(jarr.join(",") + "\n");
                    //                  System.out.println(String.join(",", col) + "," + json.get("date").toString().replace("-", "/") + "," + jarr.join(","));
                } catch (JSONException e) {
                    //                  System.exit(1);
                }
            }
            objectListing = s3.listNextBatchOfObjects(objectListing);
        } while (objectListing.getMarker() != null);

        bw.flush();
        bw.close();
        if (i > 0) {
            s3.putObject(new PutObjectRequest("sanix-data-analysis",
                    STORE_PATH + col[1] + col[2] + "_insolation.csv", file));
        }
    }
    br.close();

    br = new BufferedReader(new InputStreamReader(
            new FileInputStream(new File("src/main/web/?.csv")), "SJIS"));
    br.readLine(); // skip the header line
    while ((line = br.readLine()) != null) {

        String[] col = line.split(",");

        /* List objects from AWS S3 */
        String bucketName = "weather-forecast";
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName)
                .withPrefix("day_temperature/" + col[0] + "/"));

        File file = File.createTempFile("temp", ".csv");
        file.deleteOnExit();
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "SJIS"));
        bw.write(
                ",?,?,???,,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23\n");

        System.out.println(col[0] + ":" + col[1] + col[2]);
        /* get data from s3 */
        int i = 0;
        do {
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                String keyname = objectSummary.getKey();
                S3Object object = s3.getObject(new GetObjectRequest(bucketName, keyname));
                BufferedReader reader = new BufferedReader(new InputStreamReader(object.getObjectContent()));
                StringBuilder sb = new StringBuilder();
                String line2 = null;
                System.out.print(String.valueOf(i++) + "\r");
                while ((line2 = reader.readLine()) != null) {
                    sb.append(line2);
                }
                reader.close();
                object.close();
                try {
                    JSONObject json = new JSONObject(sb.toString());
                    bw.write(String.join(",", col) + "," + json.get("date").toString().replace("-", "/") + ",");
                    JSONArray jarr = json.getJSONArray("hour_data");
                    bw.write(jarr.join(",") + "\n");
                    //                  System.out.println(String.join(",", col) + "," + json.get("date").toString().replace("-", "/") + "," + jarr.join(","));
                } catch (JSONException e) {
                    //                  System.exit(1);
                }
            }
            objectListing = s3.listNextBatchOfObjects(objectListing);
        } while (objectListing.getMarker() != null);

        bw.flush();
        bw.close();
        if (i > 0) {
            s3.putObject(new PutObjectRequest("sanix-data-analysis",
                    STORE_PATH + col[1] + col[2] + "_temperture.csv", file));
        }
    }
    br.close();
}

From source file:jp.sanix.yokusei.java

License:Open Source License

public static void main(String[] args) throws IOException, SQLException, AmazonServiceException, JSONException,
        NullPointerException, ParseException {

    String id = "A0002441";
    String datefrom = "2015/10/01";
    String dateto = "2015/10/12";

    SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd");
    format.setTimeZone(TimeZone.getTimeZone("JST"));
    SimpleDateFormat pgformat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    pgformat.setTimeZone(TimeZone.getTimeZone("UTC"));

    Calendar cal = Calendar.getInstance();
    Calendar end = Calendar.getInstance();
    String today = toDate(cal);

    try {
        cal.setTime(format.parse(datefrom));
    } catch (ParseException e) {
    }
    try {
        end.setTime(format.parse(dateto));
        end.add(Calendar.DAY_OF_MONTH, 1);
    } catch (ParseException e) {
    }

    AmazonS3 s3 = new AmazonS3Client();
    s3.setRegion(Region.getRegion(Regions.AP_NORTHEAST_1));

    Connection db = DriverManager.getConnection(PG_CON, PG_USER, PG_PASS);
    Statement st = db.createStatement();
    ResultSet rs = st.executeQuery(
            "SELECT data, pvs_unique_code FROM data WHERE pvs_serial_id='" + id + "' OFFSET 0 LIMIT 1;");
    rs.next();
    String json = rs.getString(1);
    String key = rs.getString(2);
    rs.close();
    db.close();
    Date recent = new Date();

    xlsSheetYokusei xls = new xlsSheetYokusei(json);
    while (cal.before(end)) {
        System.out.println("Getting data of " + toDate(cal));

        /* List objects from AWS S3 */
        String bucketName = "pvdata-storage-production";
        System.out.println("Get s3 data by key='" + bucketName + "/data/" + key + "/" + toDate(cal) + "/'");
        ObjectListing objectListing = s3.listObjects(new ListObjectsRequest().withBucketName(bucketName)
                .withPrefix("data/" + key + "/" + toDate(cal) + "/"));

        /* get data from s3 */
        do {
            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
                String keyname = objectSummary.getKey();
                S3Object object = s3.getObject(new GetObjectRequest(bucketName, keyname));
                BufferedReader reader = new BufferedReader(
                        new InputStreamReader(new GZIPInputStream(object.getObjectContent())));
                String line;
                while ((line = reader.readLine()) != null) {
                    try {
                        json = line.substring(line.indexOf("{"));
                        xls.putData(json);
                    } catch (NullPointerException e) {
                    }
                }
                reader.close();
                object.close();
            }
            objectListing = s3.listNextBatchOfObjects(objectListing);
        } while (objectListing.getMarker() != null);

        /* if today, read postgres to get recent data */
        if (toDate(cal).equals(today)) {
            System.out.println("Get recent data from postgres");
            try {
                db = DriverManager.getConnection(PG_CON, PG_USER, PG_PASS);
                st = db.createStatement();
                String sql = "SELECT data FROM data WHERE pvs_unique_code='" + key + "' AND created_at > '"
                        + pgformat.format(recent) + "';";
                System.out.println(sql);
                rs = st.executeQuery(sql);
                while (rs.next()) {
                    json = rs.getString(1);
                    xls.putData(json);
                }
                rs.close();
                db.close();
            } catch (PSQLException e) {
            } catch (ParseException e) {
            }
        }
        System.out.println("Write Buffer");
        xls.writeBuffer();

        cal.add(Calendar.DAY_OF_MONTH, 1);
    }
    File file = new File(
            "C:\\Users\\SANIX_CORERD\\Desktop\\" + id + "-Diamond" + toDate(cal).replace("/", "-") + ".xlsx");
    xls.putFile(new FileOutputStream(file));
    System.out.println("Finished: " + toDate(cal));
}

From source file:org.deeplearning4j.aws.s3.reader.S3Downloader.java

License:Apache License

public void download(String bucket, String key, File to) throws IOException {
    AmazonS3 s3 = getClient();
    S3Object obj = s3.getObject(bucket, key);
    InputStream is = obj.getObjectContent();
    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(to));
    IOUtils.copy(is, bos);
    bos.close();
    is.close();
    obj.close();
}

From source file:org.deeplearning4j.aws.s3.reader.S3Downloader.java

License:Apache License

public void download(String bucket, String key, OutputStream to) throws IOException {
    AmazonS3 s3 = getClient();
    S3Object obj = s3.getObject(bucket, key);
    InputStream is = obj.getObjectContent();
    BufferedOutputStream bos = new BufferedOutputStream(to);

    IOUtils.copy(is, bos);
    bos.close();
    is.close();
    obj.close();
}

From source file:org.exem.flamingo.web.filesystem.s3.S3BrowserServiceImpl.java

License:Apache License

@Override
public String getObjectAsString(String bucketName, String key, long size) throws IOException {
    // Return the object directly if it is no larger than MAX_PREVIEW_SIZE.
    if (size <= S3Constansts.MAX_PREVIEW_SIZE) {
        return this.s3.getObjectAsString(bucketName, key);
    }

    byte[] buffer = new byte[S3Constansts.MAX_PREVIEW_SIZE];
    ByteArrayOutputStream output = new ByteArrayOutputStream();

    S3Object object = this.s3.getObject(bucketName, key);
    InputStream is = object.getObjectContent();

    try {
        int readCount;
        int totalReadCount = 0;
        int len = S3Constansts.MAX_PREVIEW_SIZE;
        while (totalReadCount < S3Constansts.MAX_PREVIEW_SIZE) {
            readCount = is.read(buffer, 0, len);
            if (readCount < 0) {
                break; // stream ended before MAX_PREVIEW_SIZE bytes were read
            }
            output.write(buffer, 0, readCount);
            totalReadCount += readCount;
            len = S3Constansts.MAX_PREVIEW_SIZE - totalReadCount;
        }
    } finally {
        object.close();
    }

    return new String(output.toByteArray());
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License:Open Source License

public byte[] getBytes(long id, int from, int to) throws IOException, DataArchivedException {
    // SDFSLogger.getLog().info("Downloading " + id);
    // SDFSLogger.getLog().info("Current readers :" + rr.incrementAndGet());
    String haName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
    this.s3clientLock.readLock().lock();
    S3Object sobj = null;
    byte[] data = null;
    // int ol = 0;
    try {

        long tm = System.currentTimeMillis();
        // ObjectMetadata omd = s3Service.getObjectMetadata(this.name,
        // "blocks/" + haName);
        // Map<String, String> mp = this.getUserMetaData(omd);
        // ol = Integer.parseInt(mp.get("compressedsize"));
        // if (ol <= to) {
        // to = ol;
        // SDFSLogger.getLog().info("change to=" + to);
        // }
        int cl = (int) to - from;
        GetObjectRequest gr = new GetObjectRequest(this.name, "blocks/" + haName);
        gr.setRange(from, to);
        sobj = s3Service.getObject(gr);
        InputStream in = sobj.getObjectContent();
        data = new byte[cl];
        IOUtils.readFully(in, data);
        IOUtils.closeQuietly(in);
        double dtm = (System.currentTimeMillis() - tm) / 1000d;
        double bps = (cl / 1024) / dtm;
        SDFSLogger.getLog().debug("read [" + id + "] at " + bps + " kbps");
        // mp = this.getUserMetaData(omd);
        /*
         * try { mp.put("lastaccessed",
         * Long.toString(System.currentTimeMillis()));
         * omd.setUserMetadata(mp); CopyObjectRequest req = new
         * CopyObjectRequest(this.name, "blocks/" + haName, this.name,
         * "blocks/" + haName) .withNewObjectMetadata(omd);
         * s3Service.copyObject(req); } catch (Exception e) {
         * SDFSLogger.getLog().debug("error setting last accessed", e); }
         */
        /*
         * if (mp.containsKey("deleted")) { boolean del =
         * Boolean.parseBoolean((String) mp.get("deleted")); if (del) {
         * S3Object kobj = s3Service.getObject(this.name, "keys/" + haName);
         * 
         * int claims = this.getClaimedObjects(kobj, id);
         * 
         * int delobj = 0; if (mp.containsKey("deleted-objects")) { delobj =
         * Integer.parseInt((String) mp .get("deleted-objects")) - claims;
         * if (delobj < 0) delobj = 0; } mp.remove("deleted");
         * mp.put("deleted-objects", Integer.toString(delobj));
         * mp.put("suspect", "true"); omd.setUserMetadata(mp);
         * CopyObjectRequest req = new CopyObjectRequest(this.name, "keys/"
         * + haName, this.name, "keys/" + haName)
         * .withNewObjectMetadata(omd); s3Service.copyObject(req); int _size
         * = Integer.parseInt((String) mp.get("size")); int _compressedSize
         * = Integer.parseInt((String) mp .get("compressedsize"));
         * HashBlobArchive.currentLength.addAndGet(_size);
         * HashBlobArchive.compressedLength.addAndGet(_compressedSize);
         * SDFSLogger.getLog().warn( "Reclaimed [" + claims +
         * "] blocks marked for deletion"); kobj.close(); } }
         */
        dtm = (System.currentTimeMillis() - tm) / 1000d;
        bps = (cl / 1024) / dtm;
    } catch (AmazonS3Exception e) {
        if (e.getErrorCode().equalsIgnoreCase("InvalidObjectState"))
            throw new DataArchivedException(id, null);
        else {
            SDFSLogger.getLog().error(
                    "unable to get block [" + id + "] at [blocks/" + haName + "] pos " + from + " to " + to, e);
            throw e;

        }
    } catch (Exception e) {
        throw new IOException(e);
    } finally {
        try {
            if (sobj != null) {
                sobj.close();
            }
        } catch (Exception e) {

        }
        this.s3clientLock.readLock().unlock();
    }
    return data;
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License:Open Source License

private void getData(long id, File f) throws Exception {
    // SDFSLogger.getLog().info("Downloading " + id);
    // SDFSLogger.getLog().info("Current readers :" + rr.incrementAndGet());
    String haName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
    this.s3clientLock.readLock().lock();
    S3Object sobj = null;
    try {

        long tm = System.currentTimeMillis();
        ObjectMetadata omd = s3Service.getObjectMetadata(this.name, "blocks/" + haName);

        try {
            sobj = s3Service.getObject(this.name, "blocks/" + haName);
        } catch (Exception e) {
            throw new IOException(e);
        }
        int cl = (int) omd.getContentLength();
        if (this.simpleS3) {

            FileOutputStream out = null;
            InputStream in = null;
            try {
                out = new FileOutputStream(f);
                in = sobj.getObjectContent();
                IOUtils.copy(in, out);
                out.flush();

            } catch (Exception e) {
                throw new IOException(e);
            } finally {
                IOUtils.closeQuietly(out);
                IOUtils.closeQuietly(in);

            }
        } else {
            this.multiPartDownload("blocks/" + haName, f);
        }
        double dtm = (System.currentTimeMillis() - tm) / 1000d;
        double bps = (cl / 1024) / dtm;
        SDFSLogger.getLog().debug("read [" + id + "] at " + bps + " kbps");
        Map<String, String> mp = this.getUserMetaData(omd);
        if (md5sum && mp.containsKey("md5sum")) {
            byte[] shash = BaseEncoding.base64().decode(mp.get("md5sum"));

            InputStream in = new FileInputStream(f);
            byte[] chash = ServiceUtils.computeMD5Hash(in);
            IOUtils.closeQuietly(in);
            if (!Arrays.equals(shash, chash))
                throw new IOException("download corrupt at " + id);
        }

        try {
            mp.put("lastaccessed", Long.toString(System.currentTimeMillis()));
            omd.setUserMetadata(mp);

            updateObject("blocks/" + haName, omd);
        } catch (Exception e) {
            SDFSLogger.getLog().debug("error setting last accessed", e);
        }
        if (mp.containsKey("deleted")) {
            boolean del = Boolean.parseBoolean((String) mp.get("deleted"));
            if (del) {
                S3Object kobj = s3Service.getObject(this.name, "keys/" + haName);

                int claims = this.getClaimedObjects(kobj, id);

                int delobj = 0;
                if (mp.containsKey("deleted-objects")) {
                    delobj = Integer.parseInt((String) mp.get("deleted-objects")) - claims;
                    if (delobj < 0)
                        delobj = 0;
                }
                mp.remove("deleted");
                mp.put("deleted-objects", Integer.toString(delobj));
                mp.put("suspect", "true");
                omd.setUserMetadata(mp);

                updateObject("keys/" + haName, omd);
                int _size = Integer.parseInt((String) mp.get("size"));
                int _compressedSize = Integer.parseInt((String) mp.get("compressedsize"));
                HashBlobArchive.currentLength.addAndGet(_size);
                HashBlobArchive.compressedLength.addAndGet(_compressedSize);
                SDFSLogger.getLog().warn("Reclaimed [" + claims + "] blocks marked for deletion");
                kobj.close();
            }
        }
        dtm = (System.currentTimeMillis() - tm) / 1000d;
        bps = (cl / 1024) / dtm;
    } catch (AmazonS3Exception e) {
        if (e.getErrorCode().equalsIgnoreCase("InvalidObjectState"))
            throw new DataArchivedException(id, null);
        else {
            SDFSLogger.getLog().error("unable to get block [" + id + "] at [blocks/" + haName + "]", e);
            throw e;

        }
    } finally {
        try {
            if (sobj != null) {
                sobj.close();
            }
        } catch (Exception e) {

        }
        this.s3clientLock.readLock().unlock();
    }
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License:Open Source License

private int verifyDelete(long id) throws IOException, Exception {
    this.s3clientLock.readLock().lock();
    String haName = EncyptUtils.encHashArchiveName(id, Main.chunkStoreEncryptionEnabled);
    ObjectMetadata om = null;
    S3Object kobj = null;

    int claims = 0;
    try {
        kobj = s3Service.getObject(this.name, "keys/" + haName);
        claims = this.getClaimedObjects(kobj, id);
        Map<String, String> mp = this.getUserMetaData(om);
        if (claims > 0) {
            if (this.clustered)
                om = this.getClaimMetaData(id);
            else {
                om = s3Service.getObjectMetadata(this.name, "keys/" + haName);
            }

            int delobj = 0;
            if (mp.containsKey("deleted-objects")) {
                delobj = Integer.parseInt((String) mp.get("deleted-objects")) - claims;
                if (delobj < 0)
                    delobj = 0;
            }
            mp.remove("deleted");
            mp.put("deleted-objects", Integer.toString(delobj));
            mp.put("suspect", "true");
            om.setUserMetadata(mp);
            String kn = null;
            if (this.clustered)
                kn = this.getClaimName(id);
            else
                kn = "keys/" + haName;

            this.updateObject(kn, om);

            SDFSLogger.getLog().warn("Reclaimed [" + claims + "] blocks marked for deletion");

        }

        if (claims == 0) {
            if (!clustered) {
                s3Service.deleteObject(this.name, "blocks/" + haName);
                s3Service.deleteObject(this.name, "keys/" + haName);
                SDFSLogger.getLog().debug("deleted block " + "blocks/" + haName + " id " + id);
            } else {
                s3Service.deleteObject(this.name, this.getClaimName(id));
                int _size = Integer.parseInt((String) mp.get("size"));
                int _compressedSize = Integer.parseInt((String) mp.get("compressedsize"));
                HashBlobArchive.currentLength.addAndGet(-1 * _size);
                HashBlobArchive.compressedLength.addAndGet(-1 * _compressedSize);
                ObjectListing ol = s3Service.listObjects(this.getName(), "claims/keys/" + haName);
                if (ol.getObjectSummaries().size() == 0) {
                    s3Service.deleteObject(this.name, "blocks/" + haName);
                    s3Service.deleteObject(this.name, "keys/" + haName);
                    SDFSLogger.getLog().debug("deleted block " + "blocks/" + haName + " id " + id);
                }
            }
        }
    } finally {
        try {
            kobj.close();
        } catch (Exception e) {
        }
        this.s3clientLock.readLock().unlock();
    }
    return claims;
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

License:Open Source License

public StringResult getStringResult(String key) throws IOException, InterruptedException {
    this.s3clientLock.readLock().lock();
    S3Object sobj = null;
    try {

        ObjectMetadata md = null;
        try {
            sobj = s3Service.getObject(getName(), key);
            md = s3Service.getObjectMetadata(this.name, key);
        } catch (Exception e) {
            throw new IOException(e);
        }
        int cl = (int) md.getContentLength();

        byte[] data = new byte[cl];
        DataInputStream in = null;
        try {
            in = new DataInputStream(sobj.getObjectContent());
            in.readFully(data);

        } catch (Exception e) {
            throw new IOException(e);
        } finally {
            if (in != null)
                in.close();
        }
        boolean encrypt = false;
        boolean compress = false;
        boolean lz4compress = false;
        Map<String, String> mp = this.getUserMetaData(md);
        byte[] ivb = null;
        if (mp.containsKey("ivspec")) {
            ivb = BaseEncoding.base64().decode(mp.get("ivspec"));
        }
        if (mp.containsKey("md5sum")) {
            try {
                byte[] shash = BaseEncoding.base64().decode(mp.get("md5sum"));
                byte[] chash = ServiceUtils.computeMD5Hash(data);
                if (!Arrays.equals(shash, chash))
                    throw new IOException("download corrupt at " + sobj.getKey());
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e);
            }
        }
        int size = Integer.parseInt(mp.get("size"));
        encrypt = Boolean.parseBoolean(mp.get("encrypt"));

        lz4compress = Boolean.parseBoolean(mp.get("lz4compress"));
        boolean changed = false;

        Long hid = EncyptUtils.decHashArchiveName(sobj.getKey().substring(5), encrypt);
        if (this.clustered)
            mp = s3Service.getObjectMetadata(this.name, this.getClaimName(hid)).getUserMetadata();
        if (mp.containsKey("deleted")) {
            mp.remove("deleted");
            changed = true;
        }
        if (mp.containsKey("deleted-objects")) {
            mp.remove("deleted-objects");
            changed = true;
        }

        if (encrypt) {

            if (ivb != null) {
                data = EncryptUtils.decryptCBC(data, new IvParameterSpec(ivb));
            } else {
                data = EncryptUtils.decryptCBC(data);
            }
        }
        if (compress)
            data = CompressionUtils.decompressZLIB(data);
        else if (lz4compress) {
            data = CompressionUtils.decompressLz4(data, size);
        }

        String hast = new String(data);
        SDFSLogger.getLog().debug("reading hashes " + (String) mp.get("objects") + " from " + hid + " encn "
                + sobj.getKey().substring(5));
        StringTokenizer ht = new StringTokenizer(hast, ",");
        StringResult st = new StringResult();
        st.id = hid;
        st.st = ht;
        if (mp.containsKey("bsize")) {
            HashBlobArchive.currentLength.addAndGet(Integer.parseInt(mp.get("bsize")));
        }
        if (mp.containsKey("bcompressedsize")) {
            HashBlobArchive.compressedLength.addAndGet(Integer.parseInt(mp.get("bcompressedsize")));
        }
        if (changed) {
            try {
                md = sobj.getObjectMetadata();
                md.setUserMetadata(mp);
                String kn = null;
                if (this.clustered)
                    kn = this.getClaimName(hid);
                else
                    kn = sobj.getKey();

                this.updateObject(kn, md);
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
        return st;
    } finally {
        if (sobj != null)
            sobj.close();
        this.s3clientLock.readLock().unlock();
    }
}