Example usage for com.mongodb.client MongoCursor next

List of usage examples for com.mongodb.client MongoCursor next

Introduction

On this page you can find example usages of com.mongodb.client MongoCursor.next().

Prototype

@Override
    TResult next();

Source Link

Usage

From source file:com.dawsonsystems.session.MongoManager.java

License:Apache License

/**
 * Returns the "_id" values of every document in the session collection.
 *
 * @return an array of ids; empty (never null) when the collection is empty
 */
private String[] keys() {
    List<String> ids = new ArrayList<>();
    // MongoCursor is Closeable; use try-with-resources so the server-side
    // cursor is released even if iteration throws (the original leaked it).
    try (MongoCursor<String> cursor = getCollection().find(new BsonDocument()).projection(include("_id"))
            .map(doc -> (String) doc.get("_id")).iterator()) {
        while (cursor.hasNext()) {
            ids.add(cursor.next());
        }
    }
    // new String[0] is the idiomatic, JIT-friendly toArray form
    return ids.toArray(new String[0]);
}

From source file:com.dilmus.dilshad.scabi.db.DBackFile.java

License:Open Source License

/**
 * Stamps the metadata document matching {@code fileID} with the "Put*" bookkeeping
 * fields (file name, server/client timestamps, type information) and marks the
 * upload as {@code PutStatus=Completed} / {@code PutLatestNumber=1}, then demotes
 * or removes older versions via {@code handlePreviousVersions}.
 *
 * @param fileName    logical file name of the upload
 * @param fileID      object id of the freshly uploaded file's metadata document
 * @param type        caller-supplied type tag, stored as "PutType"
 * @param contentType caller-supplied content type, stored as "PutContentType"
 * @return the number of documents modified by the update (1 on success)
 * @throws DScabiException if zero or multiple documents match {@code fileID}, if the
 *                         stored "uploadDate" is missing, or if the update did not
 *                         modify exactly one document
 */
public long updateMetaData(String fileName, ObjectId fileID, String type, String contentType)
        throws IOException, DScabiException, ParseException {
    long n = 0;
    String uploadDate = null;
    Date datefromDB = null;

    Document documentWhere = new Document();
    documentWhere.put("_id", fileID);

    n = m_table.count(documentWhere);
    if (1 == n) {
        log.debug("updateMetaData() Inside 1 == n");
        // try-with-resources: the original leaked the MongoCursor
        try (MongoCursor<Document> cursorExist = m_table.find(documentWhere).iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                log.debug("updateMetaData() result from ob {}", ob.toString());
                datefromDB = ob.getDate("uploadDate");
                if (null == datefromDB) {
                    throw new DScabiException("updateMetaData() Unable to get uploadDate for file : " + fileName
                            + " fileID : " + fileID.toHexString(), "DBF.UMD.1");
                }
                log.debug("datefromDB : {}", datefromDB);
            }
        }
    } else if (0 == n) {
        log.debug("updateMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString());
        throw new DScabiException(
                "updateMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString(),
                "DBF.UMD.2");
    } else {
        log.debug("updateMetaData() Multiple matches for file : " + fileName + " fileID : "
                + fileID.toHexString());
        throw new DScabiException("updateMetaData() Multiple matches for file : " + fileName + " fileID : "
                + fileID.toHexString(), "DBF.UMD.3");
    }

    Date date = new Date();
    // NOTE(review): SimpleDateFormat is not thread-safe; if this method can run
    // concurrently, consider java.time.DateTimeFormatter instead.
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmssSSS");
    // The original used getTimeZone("ISO"), which is not a valid zone ID and
    // silently falls back to GMT. "UTC" is explicit and yields the same offset.
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    String putClientDateTime = dateFormat.format(date);
    // To parse from string : Date date2 = dateFormat.parse(putDateTime);
    String millisTime = "" + System.currentTimeMillis();
    String nanoTime = "" + System.nanoTime();

    // uploadDate is rendered in the same compact yyyyMMddHHmmssSSS format so
    // handlePreviousVersions() can compare versions numerically.
    uploadDate = dateFormat.format(datefromDB);
    log.debug("uploadDate : {}", uploadDate);

    Document documentUpdate = new Document();
    documentUpdate.append("PutFileName", fileName);
    documentUpdate.append("PutServerFileID", fileID.toHexString());
    documentUpdate.append("PutServerUploadDateTime", uploadDate);
    documentUpdate.append("PutType", type);
    documentUpdate.append("PutContentType", contentType);
    documentUpdate.append("PutClientDateTime", putClientDateTime);
    documentUpdate.append("PutClientDateTimeInMillis", millisTime);
    documentUpdate.append("PutClientDateTimeInNano", nanoTime);
    documentUpdate.append("PutStatus", "Completed");
    documentUpdate.append("PutLatestNumber", "1");

    Document updateObj = new Document();
    updateObj.put("$set", documentUpdate);

    UpdateResult result = m_table.updateMany(documentWhere, updateObj);
    if (1 != result.getModifiedCount())
        throw new DScabiException(
                "Update meta data failed for file : " + fileName + " fileID : " + fileID.toHexString(),
                "DBF.UMD.4");

    handlePreviousVersions(fileName, fileID.toHexString(), uploadDate);

    return result.getModifiedCount();

}

From source file:com.dilmus.dilshad.scabi.db.DBackFile.java

License:Open Source License

/**
 * After a successful upload, demotes older completed versions of {@code fileName}
 * (sets their "PutLatestNumber" to "2") and deletes versions that were already
 * demoted. Must be called only after the current upload's metadata is complete.
 *
 * @param fileName                  logical file name whose versions are managed
 * @param strFileID                 hex file id of the current (just-uploaded) version
 * @param strPutServerUploadDateTime compact numeric timestamp of the current version,
 *                                  used to order versions
 * @return always 0
 * @throws DScabiException if the current version's metadata is missing/duplicated,
 *                         if required "Put*" fields are absent, or if a demotion
 *                         update fails
 */
private int handlePreviousVersions(String fileName, String strFileID, String strPutServerUploadDateTime)
        throws IOException, DScabiException {
    long m = 0;
    long n = 0;

    // It is better to call this only after meta data is updated for currently uploaded file
    // This will skip checking for given input strFileID, file ID of currently uploaded file
    removeFilesIncompleteMetaData(fileName, strFileID);

    Document documentFind = new Document();
    documentFind.put("PutFileName", fileName);
    documentFind.append("PutServerFileID", strFileID);
    documentFind.append("PutStatus", "Completed");
    documentFind.append("PutLatestNumber", "1");

    // Sanity check: exactly one completed, latest entry must exist for the
    // current upload. (The original also built an unused FindIterable here.)
    m = m_table.count(documentFind);

    if (1 == m) {
        log.debug("handlePreviousVersions() Inside 1 == n");
    } else if (0 == m) {
        log.debug("handlePreviousVersions() No matches for file : " + fileName + " strFileID : " + strFileID);
        throw new DScabiException(
                "handlePreviousVersions() No matches for file : " + fileName + " strFileID : " + strFileID,
                "DBF.HPV.1");
    } else {
        log.debug("handlePreviousVersions() Multiple matches for file : " + fileName + " strFileID : "
                + strFileID);
        throw new DScabiException("handlePreviousVersions() Multiple matches for file : " + fileName
                + " strFileID : " + strFileID, "DBF.HPV.2");
    }

    Document documentQuery = new Document();
    documentQuery.put("PutFileName", fileName);
    documentQuery.append("PutStatus", "Completed");

    n = m_table.count(documentQuery);
    if (1 == n) {
        log.debug(
                "handlePreviousVersions() Information only : Inside 1 == n. Only one file / current file is found. No previous versions for file : "
                        + fileName + " with PutStatus=Completed");
        return 0;
    } else if (0 == n) {
        log.debug("handlePreviousVersions() No matches for file : " + fileName + " with PutStatus=Completed");
        throw new DScabiException(
                "handlePreviousVersions()() No matches for file : " + fileName + " with PutStatus=Completed",
                "DBF.HPV.3");
    } else {
        long lf1 = Long.parseLong(strPutServerUploadDateTime);
        // try-with-resources: the original leaked the MongoCursor
        try (MongoCursor<Document> cursorExist = m_table.find(documentQuery).iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                log.debug("handlePreviousVersions() result from ob {}", ob.toString());

                String fid = ob.getString("PutServerFileID");
                if (null == fid) {
                    throw new DScabiException("PutServerFileID is missing for one version of file : " + fileName,
                            "DBF.HPV.4");
                }
                // Versioning decisions are based on date-time, not on file ID.
                String f = ob.getString("PutServerUploadDateTime");
                if (null == f) {
                    throw new DScabiException("PutServerUploadDateTime is missing for one version of file : "
                            + fileName + " file ID : " + fid, "DBF.HPV.5");
                }
                String f2 = ob.getString("PutLatestNumber");
                if (null == f2) {
                    throw new DScabiException("PutLatestNumber is missing for one version of file : " + fileName
                            + " file ID : " + fid, "DBF.HPV.6");
                }
                // Skip the current version itself...
                if (f.equals(strPutServerUploadDateTime) && f2.equals("1")) {
                    continue;
                }
                long lf2 = Long.parseLong(f);
                // ...and anything newer that is already marked latest.
                if (lf1 < lf2 && f2.equals("1")) {
                    continue;
                }
                if (f2.equals("1")) {
                    // All remaining entries are strictly older than the current
                    // upload; there can be multiple previous versions with
                    // PutLatestNumber=1 — demote each to "2".
                    Document documentWhere = new Document();
                    documentWhere.put("PutServerFileID", fid);

                    Document documentUpdate = new Document();
                    documentUpdate.append("PutLatestNumber", "2");

                    Document updateObj = new Document();
                    updateObj.put("$set", documentUpdate);
                    // there should be only one entry for file ID fid
                    UpdateResult result = m_table.updateMany(documentWhere, updateObj);
                    if (result.getModifiedCount() <= 0)
                        throw new DScabiException("Update meta data to PutLatestNumber=2 failed for file : "
                                + fileName + " file ID : " + fid, "DBF.HPV.7");
                } else {
                    // Already-demoted versions are removed outright.
                    m_gridFSBucket.delete(new ObjectId(fid));
                }

            }
        }
    }
    return 0;
}

From source file:com.dilmus.dilshad.scabi.db.DBackFile.java

License:Open Source License

/**
 * Deletes every stored file named {@code fileName} whose metadata document is
 * missing one or more of the required "Put*" bookkeeping keys — i.e. uploads
 * that never completed. The entry with id {@code strFileID} (the in-progress or
 * just-completed upload) is always skipped.
 *
 * @param fileName  GridFS "filename" to scan
 * @param strFileID hex id of the file to skip (current upload)
 * @return always 0
 */
private int removeFilesIncompleteMetaData(String fileName, String strFileID) {
    long n = 0;
    // The full set of metadata keys a completed upload must carry.
    Set<String> stMetaKeys = new HashSet<String>();
    stMetaKeys.add("PutFileName");
    stMetaKeys.add("PutServerFileID");
    stMetaKeys.add("PutServerUploadDateTime");
    stMetaKeys.add("PutType");
    stMetaKeys.add("PutContentType");
    stMetaKeys.add("PutClientDateTime");
    stMetaKeys.add("PutClientDateTimeInMillis");
    stMetaKeys.add("PutClientDateTimeInNano");
    stMetaKeys.add("PutStatus");
    stMetaKeys.add("PutLatestNumber");

    Document documentQuery = new Document();
    // "filename" is MongoDB/GridFS specific meta data name inside fs.files collection for each file
    documentQuery.put("filename", fileName);

    n = m_table.count(documentQuery);
    if (0 == n) {
        log.debug("removeFilesIncompleteMetaData() Information only : No file found for file : " + fileName);
        return 0;
    } else {
        // try-with-resources: the original leaked the MongoCursor
        try (MongoCursor<Document> cursorExist = m_table.find(documentQuery).iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                log.debug("removeFilesIncompleteMetaData() result from ob {}", ob.toString());
                // "_id" is MongoDB/GridFS specific meta data name inside fs.files collection for each file
                ObjectId oid = ob.getObjectId("_id");
                if (null == oid) {
                    // No exception here: continue so we clean up as much as possible.
                    continue;
                }
                if (oid.toHexString().equals(strFileID)) {
                    log.debug(
                            "removeFilesIncompleteMetaData() Information only : skipping given input file ID : {}",
                            strFileID);
                    continue;
                }
                Set<String> st = ob.keySet();
                if (st.containsAll(stMetaKeys)) {
                    continue;
                } else {
                    // Metadata incomplete — remove the file.
                    m_gridFSBucket.delete(oid);
                }
            }
        }
    }
    return 0;
}

From source file:com.dilmus.dilshad.scabi.db.DBackFile.java

License:Open Source License

/**
 * Scans the whole collection and deletes every stored file whose metadata
 * document is missing one or more of the required "Put*" bookkeeping keys —
 * i.e. uploads that never completed. Unlike the per-file variant, nothing is
 * skipped, so do not run this while an upload is in progress.
 *
 * @return always 0
 */
public int removeAllFilesIncompleteMetaData() {
    long n = 0;
    // The full set of metadata keys a completed upload must carry.
    Set<String> stMetaKeys = new HashSet<String>();
    stMetaKeys.add("PutFileName");
    stMetaKeys.add("PutServerFileID");
    stMetaKeys.add("PutServerUploadDateTime");
    stMetaKeys.add("PutType");
    stMetaKeys.add("PutContentType");
    stMetaKeys.add("PutClientDateTime");
    stMetaKeys.add("PutClientDateTimeInMillis");
    stMetaKeys.add("PutClientDateTimeInNano");
    stMetaKeys.add("PutStatus");
    stMetaKeys.add("PutLatestNumber");

    n = m_table.count();
    if (0 == n) {
        log.debug("removeAllFilesIncompleteMetaData() Information only : No file found");
        return 0;
    } else {
        // try-with-resources: the original leaked the MongoCursor
        try (MongoCursor<Document> cursorExist = m_table.find().iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                log.debug("removeAllFilesIncompleteMetaData() result from ob {}", ob.toString());
                // "_id" is MongoDB/GridFS specific meta data name inside fs.files collection for each file
                ObjectId oid = ob.getObjectId("_id");
                if (null == oid) {
                    // No exception here: continue so we clean up as much as possible.
                    continue;
                }
                Set<String> st = ob.keySet();
                if (st.containsAll(stMetaKeys)) {
                    continue;
                } else {
                    // Metadata incomplete — remove the file.
                    m_gridFSBucket.delete(oid);
                }
            }
        }
    }
    return 0;
}

From source file:com.dilmus.dilshad.scabi.db.DBackFile.java

License:Open Source License

/**
 * Returns the file id of the newest completed version of {@code fileName}:
 * among documents with PutStatus=Completed and PutLatestNumber="1", the one
 * with the greatest numeric "PutServerUploadDateTime".
 *
 * @param fileName logical file name to look up
 * @return hex file id of the latest version; may be null if multiple completed
 *         entries exist but none has PutLatestNumber="1"
 * @throws DScabiException if no completed entry exists, or required "Put*"
 *                         fields are missing from a matching entry
 */
public String getLatestFileID(String fileName) throws DScabiException {

    // This call to removeFilesIncompleteMetaData() is needed because if the last file upload failed (network issue, etc.)
    // that incomplete file entry will cause getLatestFileID() to throw exception.
    // So good complete files already in DB will not be served.
    // The "" as file id below is just to enable method removeFilesIncompleteMetaData() to cleanup all incomplete files with this fileName
    // Don't call this as if a put is in progress for the same fileName, it will get deleted!!
    // // // removeFilesIncompleteMetaData(fileName, "");

    String latestFileID = null;
    long latestServerDateTime = 0;
    long n = 0;

    // take only those file entries for fileName with complete meta-data
    Document documentQuery = new Document();
    documentQuery.put("PutFileName", fileName);
    documentQuery.append("PutStatus", "Completed");

    n = m_table.count(documentQuery);
    if (1 == n) {
        // Single match: return its id directly (no date comparison needed).
        // try-with-resources: the original leaked the MongoCursor
        try (MongoCursor<Document> cursorExist = m_table.find(documentQuery).iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                // Fixed copy-paste bug: the original logged "handlePreviousVersions()" here.
                log.debug("getLatestFileID() result from ob {}", ob.toString());

                String fid = ob.getString("PutServerFileID");
                if (null == fid) {
                    throw new DScabiException("PutServerFileID is missing for file : " + fileName, "DBF.GLF.1");
                }
                return fid;
            }
        }
    } else if (0 == n) {
        log.debug("getLatestFileID() No matches for file : " + fileName + " with PutStatus=Completed");
        throw new DScabiException(
                "getLatestFileID() No matches for file : " + fileName + " with PutStatus=Completed",
                "DBF.GLF.2");
    } else {
        // Multiple versions: pick the latest by numeric upload date-time.
        try (MongoCursor<Document> cursorExist = m_table.find(documentQuery).iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                log.debug("getLatestFileID() result from ob {}", ob.toString());

                // Analysis needed : can we just continue with next file entry instead of throwing exception?
                String fid = ob.getString("PutServerFileID");
                if (null == fid) {
                    throw new DScabiException("PutServerFileID is missing for one version of file : " + fileName,
                            "DBF.GLF.3");
                }
                String f = ob.getString("PutServerUploadDateTime");
                if (null == f) {
                    throw new DScabiException("PutServerUploadDateTime is missing for one version of file : "
                            + fileName + " file ID : " + fid, "DBF.GLF.4");
                }
                String f2 = ob.getString("PutLatestNumber");
                if (null == f2) {
                    throw new DScabiException("PutLatestNumber is missing for one version of file : " + fileName
                            + " file ID : " + fid, "DBF.GLF.5");
                }
                long lf2 = Long.parseLong(f);
                if (latestServerDateTime < lf2 && f2.equals("1")) {
                    latestServerDateTime = lf2;
                    latestFileID = fid;
                }

            }
        }
    }
    return latestFileID;
}

From source file:com.dilmus.dilshad.scabi.db.DBackFile.java

License:Open Source License

/**
 * Checks whether the metadata document for {@code strFileID} carries every
 * required "Put*" bookkeeping key, i.e. whether the upload completed fully.
 *
 * @param fileName  logical file name, used only in error messages
 * @param strFileID hex object id of the metadata document to validate
 * @return true if all required keys are present, false otherwise
 * @throws DScabiException if zero or multiple documents match the id
 */
public boolean isValidMetaData(String fileName, String strFileID) throws IOException, DScabiException {
    long n = 0;
    // The full set of metadata keys a completed upload must carry.
    Set<String> stMetaKeys = new HashSet<String>();
    stMetaKeys.add("PutFileName");
    stMetaKeys.add("PutServerFileID");
    stMetaKeys.add("PutServerUploadDateTime");
    stMetaKeys.add("PutType");
    stMetaKeys.add("PutContentType");
    stMetaKeys.add("PutClientDateTime");
    stMetaKeys.add("PutClientDateTimeInMillis");
    stMetaKeys.add("PutClientDateTimeInNano");
    stMetaKeys.add("PutStatus");
    stMetaKeys.add("PutLatestNumber");

    Document documentQuery = new Document();
    ObjectId fileID = new ObjectId(strFileID);
    // "_id" is MongoDB/GridFS specific meta data name inside fs.files collection for each file
    documentQuery.put("_id", fileID);

    n = m_table.count(documentQuery);
    if (1 == n) {
        log.debug("isValidMetaData() Inside 1 == n");
        // try-with-resources: the original leaked the MongoCursor
        try (MongoCursor<Document> cursorExist = m_table.find(documentQuery).iterator()) {
            while (cursorExist.hasNext()) {
                Document ob = cursorExist.next();
                log.debug("isValidMetaData() result from ob {}", ob.toString());
                Set<String> st = ob.keySet();
                if (st.containsAll(stMetaKeys)) {
                    return true;
                } else {
                    return false;
                }
            }
        }
    } else if (0 == n) {
        log.debug("isValidMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString());
        throw new DScabiException(
                "isValidMetaData() No matches for file : " + fileName + " fileID : " + fileID.toHexString(),
                "DBF.IVM.1");
    } else {
        log.debug("isValidMetaData() Multiple matches for file : " + fileName + " fileID : "
                + fileID.toHexString());
        throw new DScabiException("isValidMetaData() Multiple matches for file : " + fileName + " fileID : "
                + fileID.toHexString(), "DBF.IVM.2");
    }
    return false;
}

From source file:com.dilmus.dilshad.scabi.db.DTable.java

License:Open Source License

/**
 * Deletes every document in the table by iterating the collection and issuing
 * deleteMany for each document seen.
 *
 * @return always 0
 * @throws DScabiException declared for failed deletes (see note below)
 */
public long removeAll() throws DScabiException {
    // NOTE(review): deleting documents while iterating a cursor over the same
    // collection is fragile; m_table.deleteMany(new Document()) would remove
    // everything in one server-side call — confirm before changing behavior.
    // try-with-resources: the original leaked the MongoCursor
    try (MongoCursor<Document> cursorExist = m_table.find().iterator()) {
        while (cursorExist.hasNext()) {
            Document d = cursorExist.next();
            DeleteResult result = m_table.deleteMany(d);
            log.debug("remove() result is : {}", result.getDeletedCount());
            // NOTE(review): getDeletedCount() is never negative, so this check can
            // never fire; kept as-is to preserve behavior. It probably should be
            // "== 0", but an already-deleted duplicate would then throw spuriously.
            if (result.getDeletedCount() < 0)
                throw new DScabiException("Remove failed for DBackObject : " + d.toString(), "DBT.REE.1");
        }
    }
    return 0;
}

From source file:com.dilmus.dilshad.scabi.db.DTable.java

License:Open Source License

/**
 * Runs an equality query built from the key/value pairs of {@code jsonQuery}
 * against the table and returns the matching rows (minus "_id") serialized as
 * a JSON document with a count, via {@code DMJson.createDJsonWithCount}.
 *
 * @param jsonQuery JSON object whose key/value pairs become the query filter
 * @return JSON string containing the matching rows and their count
 * @throws DScabiException if a query key is not a known field name, a result row
 *                         is missing an expected field, or serialization fails
 */
public String executeQuery(String jsonQuery) throws DScabiException, IOException {
    ArrayList<String> fieldList = fieldNamesUsingFindOne(); // fieldNames();
    DMJson djson = new DMJson(jsonQuery);
    Set<String> st = djson.keySet();
    Document document = new Document();
    ArrayList<String> finalList = new ArrayList<String>();
    HashMap<String, String> hmap = new HashMap<String, String>();
    DMJson djson3 = null;

    // Every key in the query must be a known field name (when we know the fields).
    if (false == isEmpty(fieldList)) {
        if (false == fieldList.containsAll(st)) {
            throw new DScabiException(
                    "One or more field name in jsonQuery doesn't exist in fieldNames list. jsonQuery : "
                            + jsonQuery + " Field Names list : " + fieldList,
                    "DBT.EQY.1");
        }
    }

    // Build an equality filter from the query's key/value pairs.
    for (String key : st) {
        document.put(key, djson.getString(key));
    }
    // try-with-resources: the original leaked the MongoCursor
    try (MongoCursor<Document> cursorExist = m_table.find(document).iterator()) {
        while (cursorExist.hasNext()) {

            hmap.clear();
            Document ob = cursorExist.next();
            Set<String> obkeys = ob.keySet();
            obkeys.remove("_id"); // exclude _id field
            if (false == isEmpty(fieldList)) {
                if (false == obkeys.containsAll(fieldList)) {
                    throw new DScabiException(
                            "One or more field name in fieldList doesn't exist in obkeys key set. obkeys : "
                                    + obkeys + " Field Names list : " + fieldList,
                            "DBT.EQY.2");
                }
                for (String field : obkeys) {
                    String f = ob.getString(field);
                    if (null == f) {
                        throw new DScabiException(
                                "Field name " + field + " doesn't exist in dbobject in dbcursor. jsonQuery : "
                                        + jsonQuery + " Field Names list : " + fieldList,
                                "DBT.EQY.3");
                    }
                    hmap.put(field, f);
                }
            } else {
                // No known field list: copy every (non-_id) string field of the row.
                for (String key : obkeys) {
                    String f2 = ob.getString(key);
                    if (null == f2) {
                        throw new DScabiException("Field name " + key
                                + " doesn't exist in dbobject in dbcursor. jsonQuery : " + jsonQuery, "DBT.EQY.4");
                    }
                    hmap.put(key, f2);
                }
            }
            DMJson djson2 = null;
            if (false == obkeys.isEmpty())
                djson2 = DMJson.createDJsonSet(hmap, obkeys);
            if (null == djson2) {
                throw new DScabiException("djson2 is null. jsonQuery : " + jsonQuery, "DBT.EQY.5");
            }
            finalList.add(djson2.toString());
        }
    }
    djson3 = DMJson.createDJsonWithCount(finalList);

    return djson3.toString();
}

From source file:com.erudika.para.persistence.MongoDBDAO.java

License:Apache License

/**
 * Reads all objects whose "_id" is in {@code keys} from the given app's table.
 *
 * @param appid         application id selecting the table
 * @param keys          ids to fetch; null/empty yields an empty map
 * @param getAllColumns unused here (all columns are always read by find)
 * @return map of id to object for every row found; never null
 */
@Override
public <P extends ParaObject> Map<String, P> readAll(String appid, List<String> keys, boolean getAllColumns) {
    if (keys == null || keys.isEmpty() || StringUtils.isBlank(appid)) {
        return new LinkedHashMap<String, P>();
    }
    // access-ordered map, presized to the number of requested keys
    Map<String, P> results = new LinkedHashMap<String, P>(keys.size(), 0.75f, true);
    BasicDBObject inQuery = new BasicDBObject();
    inQuery.put(_ID, new BasicDBObject("$in", keys));

    // try-with-resources: the original leaked the MongoCursor
    try (MongoCursor<Document> cursor = getTable(appid).find(inQuery).iterator()) {
        while (cursor.hasNext()) {
            Document d = cursor.next();
            P obj = fromRow(d);
            results.put(d.getString(_ID), obj);
        }
    }

    logger.debug("DAO.readAll() {}", results.size());
    return results;
}