Example usage for com.mongodb DBObject removeField

List of usage examples for com.mongodb DBObject removeField

Introduction

On this page you can find example usages of the com.mongodb DBObject removeField method.

Prototype

Object removeField(String key);

Document

Removes a field with a given name from this object.
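
A minimal, self-contained sketch of the call (illustrative, not taken from the examples below). Per the prototype above, removeField returns the value that was removed, or null if the field was not present:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class RemoveFieldDemo {
    public static void main(String[] args) {
        // Build a document with two fields.
        DBObject doc = new BasicDBObject("name", "alice").append("age", 30);

        // removeField deletes the field and returns its previous value.
        Object removed = doc.removeField("age");
        System.out.println(removed);                  // 30
        System.out.println(doc.containsField("age")); // false
    }
}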

Usage

From source file:com.stratio.qa.assertions.DBObjectsAssert.java

License:Apache License

private boolean isContained(List<DBObject> item, BasicDBObject doc) {
    boolean res = false;
    for (int i = 0; i < item.size() && !res; i++) {
        DBObject aux = item.get(i);
        aux.removeField("_id");
        aux.removeField("timestamp");

        if (aux.keySet().equals(doc.keySet())) {
            res = true;
        }
        // Get the column names.
        List<String> cols = new ArrayList<String>(doc.keySet());
        for (int x = 0; x < cols.size() && res; x++) {
            if (!aux.get(cols.get(x)).equals(doc.get(cols.get(x)))) {
                res = false;
            } else {
                res = true;
            }
        }
    }
    return res;
}
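
The comparison above mutates each candidate in place and assumes non-null values (aux.get(...).equals(...) would throw if a stored value is null). A hypothetical, null-safe restatement of the same idea, stripping the volatile _id and timestamp fields before comparing key sets and values:

import java.util.Objects;

import com.mongodb.DBObject;

final class DocCompare {
    // Hypothetical helper, not part of the original class: strip the
    // volatile fields, then compare remaining keys and values null-safely.
    static boolean sameContent(DBObject a, DBObject b) {
        a.removeField("_id");
        a.removeField("timestamp");
        if (!a.keySet().equals(b.keySet())) {
            return false;
        }
        for (String key : a.keySet()) {
            if (!Objects.equals(a.get(key), b.get(key))) {
                return false;
            }
        }
        return true;
    }
}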

From source file:com.tomtom.speedtools.mongodb.migratedb.MongoDBMigrator.java

License:Apache License

/**
 * Migrate the database to the requested toVersion.
 *
 * @param db     Database to migrate.
 * @param dryRun In dry-run mode, no modifications will be made to the database.
 * @return True if the database was modified (other than updating the schema version).
 * @throws MigrationException If an error was encountered during migration.
 */
public boolean migrate(@Nonnull final MongoDB db, final boolean dryRun) throws MigrationException {
    assert db != null;
    LOG.info("MigrateDB starting..." + (dryRun ? " (dryRun mode)" : ""));

    final Map<String, MongoDBMigration> migrationMap = new HashMap<>();
    for (final MongoDBMigration migration : migrations) {
        if (migrationMap.put(migration.getFromVersion(), migration) != null) {
            throw new MigrationException(
                    "Multiple migrations found with 'from'-version: " + migration.getFromVersion());
        }
    }

    // Read the current version from the database.
    final DBCollection collection = db.getCollection(MIGRATOR_COLLECTION_NAME);
    DBObject info = collection.findOne(new BasicDBObject("_id", INFO_ID));
    if (info == null) {
        info = new BasicDBObject("_id", INFO_ID);
    }
    Object currentVersionObj = info.get(CURRENT_VERSION);
    if (currentVersionObj == null) {
        currentVersionObj = getFirstVersion();
        info.put(CURRENT_VERSION, currentVersionObj);
    }
    final String currentVersion = currentVersionObj.toString().trim();

    // Check whether a previous migration was in progress.
    if (info.get(BUSY) != null) {
        throw new MigrationException("Previous migration was unsuccesful. Please restore database.");
    }

    // Indicate that migration is in progress.
    info.put(BUSY, "true");
    if (!dryRun) {
        info.put(CURRENT_VERSION, getTargetVersion());
        collection.save(info);
    }

    // Create migration path to toVersion.
    final List<MongoDBMigration> migrationPath = new ArrayList<>();
    String version = currentVersion;

    // Create a migration path.
    while (!version.equals(getTargetVersion())) {
        final MongoDBMigration migration = migrationMap.get(version);
        if (migration == null) {
            throw new MigrationException(
                    "No migration possible from version: " + version + " to version " + getTargetVersion());
        }
        migrationPath.add(migration);
        version = migration.getToVersion();
    }

    // Start migrating.
    boolean databaseChanged = false;
    List<MongoDBMigrationProblem> problems = Collections.emptyList();
    for (final MongoDBMigration migration : migrationPath) {
        LOG.info("Migrating database from version " + migration.getFromVersion() + " to version "
                + migration.getToVersion());
        try {
            migration.setDryRun(dryRun); // Do not change order:
            databaseChanged = migration.migrateChangedDatabase(db) || databaseChanged; // Always execute migrate!
            problems = migration.flush();
            if (!problems.isEmpty()) {
                break;
            }
        } catch (final MigrationException e) {
            LOG.error("Migration failed, please restore database from backup: " + e.getMessage());
            throw e;
        } catch (final RuntimeException e) {
            LOG.error("Migration failed, please restore database from backup: " + e.getMessage());
            if (e.getCause() instanceof MigrationException) {
                throw (MigrationException) e.getCause();
            }
            throw new MigrationException(e);
        }
    }

    // Close migration.
    info.put(CURRENT_VERSION, getTargetVersion());
    info.removeField(BUSY);
    if (!dryRun) {
        collection.save(info);
    }

    // Show problems.
    if (!problems.isEmpty()) {
        final StringBuilder problemString = new StringBuilder();
        problemString.append("Migration problems encountered:");
        for (final MongoDBMigrationProblem problem : problems) {
            problemString.append("\n  ").append(problem.getPath()).append(" - ").append(problem.getProblem());
        }
        final String str = problemString.toString();
        LOG.error(str);
    } else {
        LOG.info("Migration OK");
    }

    // Dry-run info.
    if (dryRun) {
        LOG.info("Migration was run in dry-run mode. No modifications were made to the database.");
        return false;
    }

    // Real mode.
    if (databaseChanged) {
        LOG.info("Database records have been modified (and schema version was updated).");
    } else {
        LOG.info("No database records have been modified (but schema version was updated).");
    }

    // Now, throw an exception if something went wrong.
    if (!problems.isEmpty()) {
        throw new MigrationException("Migration was not successful. Please restore database.");
    }
    return databaseChanged;
}
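
The info.removeField(BUSY) call near the end is the success path of a simple in-progress marker: the flag is written before migration starts and removed only after it completes, so a leftover flag on the next run signals an interrupted migration. A compact sketch of the pattern, with illustrative names (infoId, "busy") in place of the constants used above:

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

final class BusyFlag {
    // Set a marker before risky work, remove it only on success, so a
    // leftover marker signals an interrupted run. Names are assumptions.
    static void withBusyFlag(DBCollection coll, Object infoId, Runnable work) {
        DBObject info = coll.findOne(new BasicDBObject("_id", infoId));
        if (info == null) {
            info = new BasicDBObject("_id", infoId);
        }
        if (info.get("busy") != null) {
            throw new IllegalStateException("Previous run did not finish cleanly");
        }
        info.put("busy", "true");
        coll.save(info);
        work.run();                 // on failure, the flag stays set
        info.removeField("busy");
        coll.save(info);
    }
}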

From source file:eu.cassandra.server.mongo.MongoCopyEntities.java

License:Apache License

private static void stripAppliances(DBObject obj) {
    if (obj.containsField("containsAppliances"))
        obj.removeField("containsAppliances");
}
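
One detail worth noting: with the driver's BasicDBObject implementation, removeField on an absent key simply returns null, so the containsField guard above is defensive documentation rather than a requirement. A quick illustration, assuming a BasicDBObject-backed document:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class NoOpRemoveDemo {
    public static void main(String[] args) {
        DBObject obj = new BasicDBObject("x", 1);
        // Removing an absent field returns null without throwing.
        System.out.println(obj.removeField("containsAppliances")); // null
    }
}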

From source file:eu.cassandra.server.mongo.util.MongoDBQueries.java

License:Apache License

/**
 * @param qKey
 * @param qValue
 * @param jsonToUpdate
 * @param collection
 * @param successMsg
 * @param refColl
 * @param refKeyName
 * @param intDocKey
 * @return
 */
public DBObject updateDocument(String qKey, String qValue, String jsonToUpdate, String collection,
        String successMsg, String refColl, String refKeyName, String intDocKey, int schemaType) {
    Vector<String> keysUpdated = new Vector<String>();
    try {
        DBObject dbObject = (DBObject) JSON.parse(jsonToUpdate);
        if (dbObject.containsField("_id")) {
            dbObject.removeField("_id");
            jsonToUpdate = dbObject.toString();
        }
        new JSONValidator().isValid(jsonToUpdate, schemaType, true);
        if (intDocKey != null && refKeyName != null && dbObject.containsField(refKeyName)) {
            ensureThatRefKeysMatch(dbObject, collection, refKeyName, intDocKey, qValue);
        } else if ((refColl != null || refKeyName != null) && dbObject.get(refKeyName) != null) {
            ensureThatRefKeyExists(dbObject, refColl, refKeyName, false);
        }
        for (String key : dbObject.keySet()) {
            if (!key.equalsIgnoreCase("id")) {
                keysUpdated.add(key);
                BasicDBObject keyToUpdate;
                if (qKey.equalsIgnoreCase("_id") || qKey.endsWith(".cid")) {
                    keyToUpdate = new BasicDBObject().append(qKey, new ObjectId(qValue));
                } else {
                    keyToUpdate = new BasicDBObject().append(qKey, qValue);
                }
                String keyName = key;
                if (intDocKey != null)
                    keyName = intDocKey + "." + key;
                DBConn.getConn().getCollection(collection).update(keyToUpdate,
                        new BasicDBObject().append("$set", new BasicDBObject(keyName, dbObject.get(key))));
            }
        }
    } catch (Exception e) {
        return jSON2Rrn.createJSONError("Update Failed for " + jsonToUpdate, e);
    }
    return getEntity(null, collection, qKey, qValue, successMsg, false,
            keysUpdated.toArray(new String[keysUpdated.size()]));
}

From source file:eu.cassandra.server.mongo.util.MongoDBQueries.java

License:Apache License

/**
 * @param coll
 * @param dataToInsert
 * @param successMessage
 * @param refColl
 * @param refKeyName
 * @param canBeNull
 * @param schemaType
 * @param httpHeaders
 * @return
 */
public DBObject insertData(String coll, String dataToInsert, String successMessage, String[] refColl,
        String[] refKeyName, boolean[] canBeNull, int schemaType, HttpHeaders httpHeaders) {
    DBObject data;
    try {
        data = (DBObject) JSON.parse(dataToInsert);
        if (data.containsField("_id")) {
            data.removeField("_id");
            dataToInsert = data.toString();
        }
        new JSONValidator().isValid(dataToInsert, schemaType);
        if (refColl != null && refKeyName != null) {
            for (int i = 0; i < refColl.length; i++) {
                ensureThatRefKeyExists(data, refColl[i], refKeyName[i], canBeNull[i]);
            }
        }
        if (httpHeaders == null)
            DBConn.getConn().getCollection(coll).insert(data);
        else
            DBConn.getConn(MongoDBQueries.getDbNameFromHTTPHeader(httpHeaders)).getCollection(coll)
                    .insert(data);
    } catch (com.mongodb.util.JSONParseException e) {
        return jSON2Rrn.createJSONError("Error parsing JSON input", e.getMessage());
    } catch (Exception e) {
        return jSON2Rrn.createJSONError(dataToInsert, e);
    }
    return jSON2Rrn.createJSONInsertPostMessage(successMessage, data);
}

From source file:eu.eubrazilcc.lvl.storage.mongodb.MongoDBConnector.java

License:EUPL

/**
 * Saves a file into the current database using the specified <tt>namespace</tt> and <tt>filename</tt>. All files sharing the same 
 * <tt>namespace</tt> and <tt>filename</tt> are considered versions of the same file, so inserting a new file with an existing 
 * <tt>namespace</tt> and <tt>filename</tt> will create a new entry in the database. The method {@link #readFile(String, String)} 
 * retrieves the latest version of the file, and the corresponding remove method deletes all versions of the file. 
 * Another option would be to define a unique index in the <tt>files</tt> collection to prevent duplicate versions from being 
 * created: <code>createIndex("filename", namespace + ".files");</code>
 * @param namespace - (optional) namespace under which the file is saved. When nothing is specified, the default bucket is used
 * @param filename - filename to be assigned to the file in the database
 * @param file - file to be saved to the database
 * @param metadata - optional file metadata
 * @return the id associated to the file in the collection
 */
public String saveFile(final @Nullable String namespace, final String filename, final File file,
        final @Nullable DBObject metadata) {
    checkArgument(isNotBlank(filename), "Uninitialized or invalid filename");
    checkArgument(file != null && file.canRead() && file.isFile(), "Uninitialized or invalid file");
    String objectId = null;
    final String namespace2 = trimToEmpty(namespace);
    final String filename2 = filename.trim();
    if (metadata != null) {
        metadata.removeField(IS_LATEST_VERSION_ATTR);
    }
    final DB db = client().getDB(CONFIG_MANAGER.getDbName());
    final GridFS gfsNs = isNotBlank(namespace2) ? new GridFS(db, namespace2) : new GridFS(db);
    // enforce isolation property: each namespace has its own bucket (encompassing 2 collections: files and chunks) and indexes in the database
    createSparseIndexWithUniqueConstraint(FILE_VERSION_PROP,
            gfsNs.getBucketName() + "." + GRIDFS_FILES_COLLECTION, false);
    // index open access links
    createNonUniqueIndex(FILE_OPEN_ACCESS_LINK_PROP, gfsNs.getBucketName() + "." + GRIDFS_FILES_COLLECTION,
            false);
    try {
        // insert new file/version in the database
        final GridFSInputFile gfsFile = gfsNs.createFile(file);
        gfsFile.setFilename(filename2);
        gfsFile.setContentType(mimeType(file));
        gfsFile.setMetaData(metadata);
        gfsFile.save();
        objectId = ObjectId.class.cast(gfsFile.getId()).toString();
        // unset the latest version in the database
        final GridFSDBFile latestVersion = getLatestVersion(gfsNs, filename2);
        if (latestVersion != null && latestVersion.getMetaData() != null) {
            latestVersion.getMetaData().removeField(IS_LATEST_VERSION_ATTR);
            latestVersion.save();
        }
    } catch (DuplicateKeyException dke) {
        throw new MongoDBDuplicateKeyException(dke.getMessage());
    } catch (IOException ioe) {
        throw new IllegalStateException("Failed to save file", ioe);
    } finally {
        // enforce versioning property by always restoring the latest version in the database
        restoreLatestVersion(gfsNs, filename2);
    }
    return objectId;
}
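
Here removeField clears the latest-version marker from GridFS metadata in two places: on the caller-supplied metadata before saving, and on the previous latest version afterwards. A simplified sketch of the second step, assuming an "isLatestVersion" flag in place of the IS_LATEST_VERSION_ATTR constant, and using the driver's findOne rather than the class's getLatestVersion helper:

import com.mongodb.DB;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;

final class DemoteLatest {
    // Clears the latest-version flag on the stored file's metadata and
    // persists the change; which file counts as "latest" is app-defined.
    static void demoteLatest(DB db, String bucket, String filename) {
        GridFS gfs = new GridFS(db, bucket);
        GridFSDBFile latest = gfs.findOne(filename);
        if (latest != null && latest.getMetaData() != null) {
            latest.getMetaData().removeField("isLatestVersion");
            latest.save(); // writes the updated metadata back
        }
    }
}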

From source file:eu.vital.vitalcep.restApp.alert.Alerts.java

private void createAlertObject(DBObject dbObject, String randomUUIDString, JSONObject dsjo)
        throws JSONException {
    dbObject.put("@context", "http://vital-iot.eu/contexts/sensor.jsonld");
    dbObject.put("id", host + "/sensor/" + randomUUIDString);
    dbObject.put("uri", host + "/sensor/" + randomUUIDString);
    dbObject.removeField("type");
    dbObject.put("type", "vital:AlertSensor");
    dbObject.put("status", "vital:Running");
    JSONArray observes = new JSONArray();
    JSONArray compl = dsjo.getJSONArray("complex");
    for (int i = 0; i < compl.length(); i++) {
        JSONObject oComplex = new JSONObject(compl.get(i).toString());
        JSONObject oObserves = new JSONObject();
        oObserves.put("type", "vital:ComplexEvent");
        oObserves.put("uri", host + "/sensor/" + randomUUIDString + "/" + oComplex.getString("id").toString());
        oObserves.put("id", host + "/sensor/" + randomUUIDString + "/" + oComplex.getString("id").toString());
        observes.put(oObserves);
    }
    DBObject dbObject2 = (DBObject) JSON.parse(observes.toString());
    dbObject.put("ssn:observes", dbObject2);
}
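
The removeField("type") / put("type", ...) pair above replaces a field's value. With BasicDBObject, put alone already overwrites an existing key; the only practical difference is that remove-then-put moves the key to the end of the document's field order. A small sketch:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class ReplaceFieldDemo {
    public static void main(String[] args) {
        DBObject doc = new BasicDBObject("type", "old").append("status", "x");

        // Same effect as removeField("type") followed by put(...), except
        // that a plain put keeps "type" in its original position.
        doc.put("type", "vital:AlertSensor");
        System.out.println(doc); // { "type" : "vital:AlertSensor" , "status" : "x" }
    }
}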

From source file:eu.vital.vitalcep.restApp.filteringApi.StaticFiltering.java

private DBObject createCEPFilterStaticSensorJsonld(String info, String randomUUIDString, JSONObject jo,
        JSONObject dsjo, String type) throws JSONException {
    DBObject dbObject = (DBObject) JSON.parse(info);

    dbObject.removeField("id");
    dbObject.put("@context", "http://vital-iot.eu/contexts/sensor.jsonld");
    dbObject.put("id", host + "/sensor/" + randomUUIDString);
    dbObject.put("name", jo.getString("name"));
    dbObject.put("type", "vital:" + type);
    //dbObject.put("type", "vital:CEPSensor");
    dbObject.put("description", jo.getString("description"));
    // demo
    //                        JSONArray data =  jo.getJSONArray("data");
    //                        dbObject.put("data",data.toString());
    JSONArray compl = dsjo.getJSONArray("complex");
    JSONArray observes = new JSONArray();
    for (int i = 0; i < compl.length(); i++) {
        JSONObject oComplex = new JSONObject(compl.get(i).toString());
        JSONObject oObserves = new JSONObject();
        oObserves.put("type", "vital:ComplexEvent");
        //oObserves.put("uri",  host.toString()
        //        +"/sensor/"+randomUUIDString
        //        +"/"+oComplex.getString("id").toString());
        oObserves.put("id", host + "/sensor/" + randomUUIDString + "/" + oComplex.getString("id").toString());
        observes.put(oObserves);
    }
    DBObject dbObject2 = (DBObject) JSON.parse(observes.toString());
    dbObject.put("ssn:observes", dbObject2);
    dbObject.put("status", "vital:running");
    return dbObject;
}

From source file:GeoHazardServices.Inst.java

License:Apache License

private DBObject getUserObj(String username) {

    DBCollection coll = db.getCollection("users");

    DBCursor cursor = coll.find(new BasicDBObject("username", username));

    if (!cursor.hasNext())
        return null;

    DBObject obj = cursor.next();
    cursor.close();

    BasicDBObject userObj = new BasicDBObject("username", obj.get("username"));
    userObj.put("_id", obj.get("_id"));
    userObj.put("permissions", obj.get("permissions"));
    userObj.put("properties", obj.get("properties"));
    userObj.put("notify", obj.get("notify"));
    userObj.put("api", obj.get("api"));

    ObjectId instId = (ObjectId) obj.get("inst");

    cursor = db.getCollection("institutions").find(new BasicDBObject("_id", instId));

    String instName = null;

    if (cursor.hasNext()) {

        DBObject inst = cursor.next();
        inst.removeField("_id");
        inst.removeField("secret");
        userObj.put("inst", inst);
        instName = (String) inst.get("name");
    }

    cursor.close();

    if (instName == null || instName.equals("gfz") || instName.equals("tdss15"))
        instName = "gfz_ex_test";

    /* get all available country codes and count elements in each group */
    DBObject groupFields = new BasicDBObject("_id", "$country");
    groupFields.put("count", new BasicDBObject("$sum", 1));

    DBObject group = new BasicDBObject("$group", groupFields);

    BasicDBList types = new BasicDBList();
    types.add(new BasicDBObject("sensor", "rad"));
    types.add(new BasicDBObject("sensor", "prs"));
    types.add(new BasicDBObject("sensor", "pr1"));
    types.add(new BasicDBObject("sensor", "flt"));
    types.add(new BasicDBObject("sensor", null));

    DBObject filterFields = new BasicDBObject("$or", types);

    BasicDBList andList = new BasicDBList();
    andList.add(filterFields);
    andList.add(new BasicDBObject("inst", instName));

    DBObject andObj = new BasicDBObject("$and", andList);
    DBObject filter = new BasicDBObject("$match", andObj);

    /* sort alphabetically */
    DBObject sortFields = new BasicDBObject("_id", 1);
    DBObject sort = new BasicDBObject("$sort", sortFields);

    AggregationOutput output = db.getCollection("stations").aggregate(filter, group, sort);
    BasicDBList countries = new BasicDBList();

    /* convert answer into string list */
    @SuppressWarnings("unchecked")
    List<String> clist = (List<String>) obj.get("countries");

    for (DBObject res : output.results()) {
        String code = (String) res.get("_id");
        if (code == null)
            continue;
        boolean isOn = (clist != null) && clist.contains(code);
        res.put("on", isOn);
        countries.add(res);
    }

    userObj.put("countries", countries);

    return userObj;
}
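
Here removeField serves as a redaction step: the institution document is stripped of its internal _id and its secret before being embedded in the response. The same pattern in isolation:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class RedactDemo {
    public static void main(String[] args) {
        // Strip internal/sensitive fields before exposing a document
        // to callers, as getUserObj does for the institution record.
        DBObject inst = new BasicDBObject("_id", 1).append("name", "gfz").append("secret", "s3cr3t");
        inst.removeField("_id");
        inst.removeField("secret");
        System.out.println(inst); // { "name" : "gfz" }
    }
}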

From source file:GeoHazardServices.Inst.java

License:Apache License

@POST
@Path("/fetch")
@Produces(MediaType.APPLICATION_JSON)
public String fetch(@Context HttpServletRequest request, @FormParam("limit") @DefaultValue("0") int limit,
        @FormParam("delay") @DefaultValue("0") int delay,
        @FormParam("undersea") @DefaultValue("false") boolean undersea,
        @CookieParam("server_cookie") String session) {

    /* check session key and find out if the request comes from an authorized user */
    User user = signedIn(session); /* returns null if user is not logged in */

    /* create lists for general and user specific earthquake entries */
    ArrayList<DBObject> mlist = new ArrayList<DBObject>();
    ArrayList<DBObject> ulist = new ArrayList<DBObject>();

    /* we want all entries since the beginning of time */
    Date maxTimestamp = new Date();

    /* used to convert to desired time format used by MongoDB */
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    sdf.setTimeZone(TimeZone.getTimeZone("UTC"));

    /* select collection which contain the earthquake entries */
    DBCollection coll = db.getCollection("eqs");

    //ArrayList<User> users = new ArrayList<User>( institutions.values() );
    ArrayList<User> users = new ArrayList<User>();
    users.add(user);

    if (user != null) {

        if (user.inst != null) {
            users.add(institutions.get(user.inst));
        } else {
            users.add(institutions.get("gfz"));
        }

        DBCursor csr = db.getCollection("users").find(
                new BasicDBObject("username", user.name).append("provider", new BasicDBObject("$ne", null)));
        if (csr.hasNext()) {
            for (Object p : (BasicDBList) csr.next().get("provider")) {
                DBObject inst = db.getCollection("institutions").findOne(new BasicDBObject("_id", p));
                if (inst != null)
                    users.add(institutions.get(inst.get("name")));
            }
        }
    }

    /* return only entries that are older than 'delay' minutes */
    Date upperTimeLimit = new Date(System.currentTimeMillis() - delay * 60 * 1000);

    /* get earthquakes for each of the given users */
    for (User curUser : users) {

        if (curUser == null)
            continue;

        /* create DB query */
        BasicDBObject inQuery = new BasicDBObject("user", curUser.objId);

        if (undersea)
            inQuery.append("prop.sea_area", new BasicDBObject("$ne", null));

        if (delay > 0)
            inQuery.append("prop.date", new BasicDBObject("$lt", upperTimeLimit));

        inQuery.append("depr", new BasicDBObject("$ne", true));
        inQuery.append("evtset", null);

        /* query DB, sort the results by date and limit the number of returned entries */
        DBCursor cursor = coll.find(inQuery).sort(new BasicDBObject("prop.date", -1));

        if (limit > 0)
            cursor = cursor.limit(limit);

        /* walk through the returned entries */
        for (DBObject obj : cursor) {

            obj.removeField("image");

            /* check if entry belongs to general or user specific list */
            if (user != null && obj.get("user").equals(user.objId)) {

                ulist.add(obj);
            } else {
                mlist.add(obj);
            }

            /* update timestamp */
            Date timestamp = (Date) obj.get("timestamp");
            if (timestamp.after(maxTimestamp)) {
                maxTimestamp = timestamp;
            }
        }

        /* clean up query */
        cursor.close();
    }

    /* create new JSON object that can be used directly within JavaScript */
    JsonObject jsonObj = new JsonObject();
    jsonObj.add("main", gson.toJsonTree(mlist));
    jsonObj.add("user", gson.toJsonTree(ulist));

    if (user != null) {

        List<DBObject> msglist = msg(limit, user);

        if (!msglist.isEmpty()) {
            Date timestamp = (Date) msglist.get(0).get("CreatedTime");
            if (timestamp.after(maxTimestamp)) {
                maxTimestamp = timestamp;
            }
        }

        jsonObj.add("msg", gson.toJsonTree(msglist));

    } else {

        jsonObj.add("msg", gson.toJsonTree(new ArrayList<DBObject>()));
    }

    List<DBObject> evtsets = new ArrayList<DBObject>();
    if (user != null) {
        BasicDBObject query = new BasicDBObject("user", user.objId);
        query.append("timestamp", new BasicDBObject("$lte", maxTimestamp));
        DBCursor cursor = db.getCollection("evtsets").find(query).sort(new BasicDBObject("timestamp", -1))
                .limit(100);
        evtsets = cursor.toArray();
    }
    jsonObj.add("evtsets", gson.toJsonTree(evtsets));

    /* TODO */
    if (user != null) {
        for (Map.Entry<String, IDataProvider> entry : providers.entrySet()) {
            List<DBObject> list = entry.getValue().fetch(user, maxTimestamp, limit);
            jsonObj.add(entry.getKey(), gson.toJsonTree(list));
        }
    }

    jsonObj.addProperty("ts", sdf.format(maxTimestamp));
    return jsonObj.toString();
}
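
In this handler, obj.removeField("image") drops a bulky field from every earthquake entry before the lists are serialized to JSON, trimming the response payload. A minimal sketch of the idea, with an illustrative document:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class SlimPayloadDemo {
    public static void main(String[] args) {
        // Drop large fields (here an image blob) before serializing a
        // query result, as fetch() does for each earthquake entry.
        DBObject eq = new BasicDBObject("prop", new BasicDBObject("mag", 6.1))
                .append("image", "...large base64 blob...");
        eq.removeField("image");
        System.out.println(eq); // { "prop" : { "mag" : 6.1 } }
    }
}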