Example usage for com.mongodb DBObject removeField

List of usage examples for com.mongodb DBObject removeField

Introduction

On this page you can find example usage for com.mongodb DBObject removeField.

Prototype

Object removeField(String key);

Document

Removes a field with a given name from this object.
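
removeField returns the value that was stored under the removed key, or null if the key was absent. Before the full examples, a minimal sketch of the call itself (a hypothetical snippet against the legacy 2.x driver; the class name and document contents are illustrative, not taken from the sources below):

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class RemoveFieldSketch {
    public static void main(String[] args) {
        DBObject doc = new BasicDBObject("_id", "abc123").append("name", "original");

        // removeField returns the removed value, or null if the key was absent
        Object oldId = doc.removeField("_id"); // "abc123"
        Object none = doc.removeField("nope"); // null

        System.out.println(doc.containsField("_id")); // false
    }
}

A recurring idiom in the examples below is removeField("_id"): stripping the driver-generated key so that a document can be re-inserted, duplicated, or embedded elsewhere without a duplicate-key conflict.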

Usage

From source file:org.teiid.translator.mongodb.MongoDBUpdateExecution.java

License:Open Source License

private void executeInternal() throws TranslatorException {

    DBCollection collection = getCollection(this.visitor.mongoDoc.getTargetTable());
    MongoDocument mongoDoc = this.visitor.mongoDoc;
    AggregationOptions options = this.executionFactory.getOptions(this.executionContext.getBatchSize());

    List<WriteResult> executionResults = new ArrayList<WriteResult>();

    if (this.command instanceof Insert) {
        // get the pull-key-based documents to embed
        LinkedHashMap<String, DBObject> embeddedDocuments = fetchEmbeddedDocuments();

        // check if this document needs to be embedded in any other document
        if (mongoDoc.isMerged()) {
            DBObject match = getInsertMatch(mongoDoc, this.visitor.columnValues);
            BasicDBObject insert = this.visitor.getInsert(embeddedDocuments);

            if (mongoDoc.getMergeKey().getAssociation() == Association.MANY) {
                removeParentKey(mongoDoc, insert);
                BasicDBObject insertDoc = new BasicDBObject(mongoDoc.getQualifiedName(true), insert);
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$push\": {" + insertDoc + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                executionResults.add(collection.update(match, new BasicDBObject("$push", insertDoc), false, //$NON-NLS-1$
                        true, WriteConcern.ACKNOWLEDGED));
            } else {
                insert.remove("_id"); //$NON-NLS-1$
                BasicDBObject insertDoc = new BasicDBObject(mongoDoc.getQualifiedName(true), insert);
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$set\": {" + insertDoc + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                executionResults.add(collection.update(match, new BasicDBObject("$set", insertDoc), false, true, //$NON-NLS-1$
                        WriteConcern.ACKNOWLEDGED));
            }
        } else {
            for (String docName : embeddedDocuments.keySet()) {
                DBObject embeddedDoc = embeddedDocuments.get(docName);
                embeddedDoc.removeField("_id"); //$NON-NLS-1$
            }
            // gets its own collection
            BasicDBObject in = this.visitor.getInsert(embeddedDocuments);
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "{\"insert\": {" + in + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.insert(in, WriteConcern.ACKNOWLEDGED));
        }
    } else if (this.command instanceof Update) {
        // get the pull-key-based documents to embed
        LinkedHashMap<String, DBObject> embeddedDocuments = fetchEmbeddedDocuments();
        DBObject match = new BasicDBObject();
        if (this.visitor.match != null) {
            match = this.visitor.match;
        }
        if (mongoDoc.isMerged()) {
            // updating multiple items in an array is not available, see http://jira.mongodb.org/browse/SERVER-1243
            // this is a work-around for the above issue
            List<String> parentKeyNames = parentKeyNames(mongoDoc);

            DBObject documentMatch = new BasicDBObject("$match", match); //$NON-NLS-1$                  
            DBObject projection = new BasicDBObject("$project", buildProjectForUpdate(mongoDoc)); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(documentMatch, projection), options);
            while (output.hasNext()) {
                BasicDBObject row = (BasicDBObject) output.next();
                buildUpdate(mongoDoc, collection, row, parentKeyNames, 0, null, executionResults,
                        new UpdateOperationImpl());
            }
        } else {
            for (String docName : embeddedDocuments.keySet()) {
                DBObject embeddedDoc = embeddedDocuments.get(docName);
                embeddedDoc.removeField("_id"); //$NON-NLS-1$
            }
            BasicDBObject u = this.visitor.getUpdate(embeddedDocuments);
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$set\": {" + u + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.update(match, new BasicDBObject("$set", u), false, true, //$NON-NLS-1$
                    WriteConcern.ACKNOWLEDGED));
        }

        // if the update is for the "embeddable" table, then since it is copied to other tables
        // those references need to be updated. I know this is not an atomic operation, but not sure
        // how else to handle it.
        if (mongoDoc.isEmbeddable()) {
            updateReferenceTables(collection, mongoDoc, match, options);
        }
    } else {
        // Delete
        DBObject match = new BasicDBObject();
        if (this.visitor.match != null) {
            match = this.visitor.match;
        }

        if (mongoDoc.isEmbeddable()) {
            DBObject m = new BasicDBObject("$match", match); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(m), options);
            while (output.hasNext()) {
                DBObject row = output.next();
                if (row != null) {
                    for (MergeDetails ref : mongoDoc.getEmbeddedIntoReferences()) {
                        DBCollection parent = getCollection(ref.getParentTable());
                        DBObject parentMatch = buildParentMatch(row, ref);
                        DBObject refMatch = new BasicDBObject("$match", parentMatch); //$NON-NLS-1$
                        Cursor referenceOutput = parent.aggregate(Arrays.asList(refMatch), options);
                        if (referenceOutput.hasNext()) {
                            throw new TranslatorException(MongoDBPlugin.Util.gs(MongoDBPlugin.Event.TEIID18010,
                                    this.visitor.mongoDoc.getTargetTable().getName(), ref.getParentTable()));
                        }
                    }
                }
            }
        }

        if (mongoDoc.isMerged()) {
            List<String> parentKeyNames = parentKeyNames(mongoDoc);

            DBObject documentMatch = new BasicDBObject("$match", match); //$NON-NLS-1$                      
            DBObject projection = new BasicDBObject("$project", buildProjectForUpdate(mongoDoc)); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(documentMatch, projection), options);
            while (output.hasNext()) {
                BasicDBObject row = (BasicDBObject) output.next();
                buildUpdate(mongoDoc, collection, row, parentKeyNames, 0, null, executionResults,
                        new DeleteOperationImpl(match));
            }
        } else {
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "remove - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.remove(match, WriteConcern.ACKNOWLEDGED));
        }
    }

    if (!executionResults.isEmpty()) {
        if (this.command instanceof Insert) {
            if (this.executionContext.getCommandContext().isReturnAutoGeneratedKeys()) {
                addAutoGeneretedKeys(executionResults.get(0));
            }
        }

        int updated = 0;
        for (WriteResult result : executionResults) {
            updated += result.getN();
        }

        this.results = new int[1];
        this.results[0] = updated;
    }
}
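
Note that several branches above rely on removeField: both the Insert and Update paths strip the driver-generated "_id" from each fetched embedded document before it is nested into the parent, so the child's storage key is not duplicated inside the parent document.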

From source file:org.teiid.translator.mongodb.MongoDBUpdateExecution.java

License:Open Source License

private void updateReferenceTables(DBCollection collection, MongoDocument mongoDoc, DBObject match,
        AggregationOptions options) throws TranslatorException {
    DBObject m = new BasicDBObject("$match", match); //$NON-NLS-1$
    Cursor output = collection.aggregate(Arrays.asList(m), options);
    while (output.hasNext()) {
        DBObject row = output.next();
        if (row != null) {
            for (MergeDetails ref : mongoDoc.getEmbeddedIntoReferences()) {
                DBCollection parent = getCollection(ref.getParentTable());
                //DBObject parentmatch = new BasicDBObject(ref.getReferenceName()+".$id", row.get("_id")); //$NON-NLS-1$ //$NON-NLS-2$
                DBObject parentmatch = buildParentMatch(row, ref);
                row.removeField("_id"); //$NON-NLS-1$
                parent.update(parentmatch, new BasicDBObject("$set", new BasicDBObject(ref.getName(), row)), //$NON-NLS-1$
                        false, true, WriteConcern.ACKNOWLEDGED);

                // see if there are nested references
                Table parentTable = this.metadata.getTable(mongoDoc.getTable().getParent().getName(),
                        ref.getParentTable());
                MongoDocument parentMongoDocument = new MongoDocument(parentTable, this.metadata);
                if (parentMongoDocument.isEmbeddable()) {
                    updateReferenceTables(parent, parentMongoDocument, parentmatch, options);
                }
            }
        }
    }
}

From source file:org.wrml.contrib.runtime.service.mongo.MongoService.java

License:Apache License

private Model convertToModel(final DBObject mongoObject, final Keys keys, final Dimensions dimensions)
        throws ModelReadingException {

    mongoObject.removeField("_id");

    // Is JSON serialization fast enough here?
    final String jsonStringRepresentation = JSON.serialize(mongoObject);
    final byte[] jsonStringBytes = jsonStringRepresentation.getBytes();
    final InputStream inStream = new ByteArrayInputStream(jsonStringBytes);

    final Context context = getContext();
    final Model model = context.readModel(inStream, keys, dimensions, SystemFormat.json.getFormatUri());
    return model;
}
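
Here removeField strips MongoDB's internal "_id" before the document is serialized to JSON and read back as a model, so the storage-level key does not leak into the model representation.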

From source file:others.Capped.java

License:Apache License

public static void main(String[] args) throws MongoException, UnknownHostException {

    MongoURI uri = new MongoURI("mongodb://10.11.0.52:27017/test");
    DB db = new Mongo(uri).getDB("test");
    DBObject foo = new BasicDBObject();
    foo.put("create", "capped1");
    foo.put("capped", true);
    foo.put("size", 100000000);
    DBObject dbobj = db.command(foo);
    DBCollection c = db.getCollection("capped1");

    DBObject obj = new BasicDBObject();
    Date begin = new Date();
    for (int i = 1; i <= 1000000; i++) {
        obj.removeField("x");
        obj.removeField("_id"); // the driver assigns "_id" on insert; strip it so the reused object can be inserted again
        obj.put("x", i);
        c.insert(obj);
    }
    Date end = new Date();
    System.out.println("One by one:" + ((end.getTime() - begin.getTime()) / 1000));

    DBObject foo2 = new BasicDBObject();
    foo2.put("create", "capped2");
    foo2.put("capped", true);
    foo2.put("size", 100000000);
    DBObject dbobj2 = db.command(foo2);
    DBCollection c2 = db.getCollection("capped2");

    begin = new Date();
    for (int i = 1; i <= 1000; i++) {
        List<DBObject> list = new ArrayList<DBObject>(1000);
        for (int j = 1; j <= 1000; j++) {
            DBObject dbo = new BasicDBObject();
            dbo.put("x", j + (i - 1) * 1000);
            list.add(dbo);
        }
        c2.insert(list);
    }
    end = new Date();
    System.out.println("Batch(per 1000):" + ((end.getTime() - begin.getTime()) / 1000));
}
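
As an aside, the raw create command used above can also be expressed through the driver's createCollection helper. A minimal sketch, assuming the same db handle (the collection name "capped3" is illustrative):

    DBObject options = new BasicDBObject("capped", true).append("size", 100000000);
    DBCollection capped3 = db.createCollection("capped3", options);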

From source file:tango.mongo.MongoConnector.java

License:Open Source License

public synchronized void duplicateExperiment(MongoConnector sourceMongo, String source, String destination) {
    DBObject xp = sourceMongo.getExperiment(source);
    xp.removeField("_id");
    xp.put("name", destination);
    experiment.insert(xp);
}
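
Removing "_id" before the insert is what makes the duplicate possible: re-inserting a document that still carries the source's "_id" would fail with a duplicate-key error, while a document without "_id" receives a fresh ObjectId from the driver. The two settings-duplication methods below follow the same pattern.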

From source file:tango.mongo.MongoConnector.java

License:Open Source License

public synchronized void duplicateNucSettings(String name, String newName) {
    DBObject type = (nucleusSettings.findOne(new BasicDBObject("name", name)));
    if (type == null) {
        ij.IJ.error("duplicate error: settings not found");
        return;
    }
    type.removeField("_id");
    type.put("name", newName);
    nucleusSettings.insert(type);
}

From source file:tango.mongo.MongoConnector.java

License:Open Source License

public synchronized void duplicateChannelSettings(String name, String newName) {
    DBObject type = (channelSettings.findOne(new BasicDBObject("name", name)));
    if (type == null) {
        ij.IJ.error("duplicate error: settings not found");
        return;
    }
    type.removeField("_id");
    type.put("name", newName);
    channelSettings.insert(type);
}

From source file:Tsunami.TsunamiAdapter.java

License:Apache License

protected void finalizeLocations(EQTask task) {
    long t0, t1;
    t0 = System.nanoTime();
    HashMap<Integer, Double> maxEWH = new HashMap<Integer, Double>();
    HashMap<Integer, Double> minETA = new HashMap<Integer, Double>();
    /* Temporary data structure used to avoid many database interactions. */
    List<DBObject> comps = new ArrayList<DBObject>();
    List<DBObject> sldata = new ArrayList<DBObject>();
    /* translate date into time stamp */
    long time = task.eqparams.date.getTime() / 1000;
    for (String id : locations.keySet()) {
        DBObject loc = locations.get(id);
        if (loc.get("type").equals("TFP") || loc.get("type").equals("TSP")) {
            loc.removeField("lat");
            loc.removeField("lon");
            comps.add(loc);
        } else if (loc.get("type").equals("STATION")) {
            @SuppressWarnings("unchecked")
            List<DBObject> values = (List<DBObject>) loc.get("values");
            for (DBObject obj : values) {
                long rel_time = (long) obj.get("reltime") / task.accel;
                obj.put("inst", task.user.inst);
                obj.put("timestamp", time + rel_time);
                obj.put("reltime", rel_time);
                obj.put("station", id);
                obj.put("evid", task.id);
                sldata.add(obj);
            }
        }

        /* Update maximal and minimal CFZ values. */
        if (loc.get("type").equals("TSP")) {
            Double ewh = (Double) loc.get("ewh");
            Double eta = (Double) loc.get("eta");
            Integer cfz = (Integer) loc.get("cfcz");
            /* TODO: hack! */
            if (cfz == null)
                cfz = 100000 + (Integer) loc.get("ref");
            if (!maxEWH.containsKey(cfz)) {
                maxEWH.put(cfz, ewh);
                minETA.put(cfz, eta);
            }
            maxEWH.put(cfz, Math.max(maxEWH.get(cfz), ewh));
            minETA.put(cfz, Math.min(minETA.get(cfz), eta));
        }
    }
    /* Insert CFZ values into database. */
    for (Integer key : maxEWH.keySet()) {
        DBObject cfz = new BasicDBObject();
        /* TODO: hack! */
        if (key < 100000) {
            cfz.put("code", key);
            cfz.put("type", "CFZ");
            cfz.put("ewh", maxEWH.get(key));
            cfz.put("eta", minETA.get(key));
            cfz.put("EventID", task.id);
        } else {
            cfz.put("ref", key - 100000);
            cfz.put("type", "city");
            cfz.put("ewh", maxEWH.get(key));
            cfz.put("eta", minETA.get(key));
            cfz.put("EventID", task.id);
        }
        comps.add(cfz);
    }
    /* Bulk insert. */
    t1 = System.nanoTime();
    db.getCollection("comp").insert(comps);
    System.out.println("Comp: " + (System.nanoTime() - t1) / 1000000000.);
    t1 = System.nanoTime();
    db.getCollection("simsealeveldata").insert(sldata);
    System.out.println("Sealevel: " + (System.nanoTime() - t1) / 1000000000.);
    System.out.println("Total: " + (System.nanoTime() - t0) / 1000000000.);
}
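
In this example removeField serves a different purpose: rather than stripping "_id", it trims the "lat" and "lon" fields from TFP/TSP locations before they are bulk-inserted into the "comp" collection.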

From source file:v7db.files.mongodb.V7GridFS.java

License:Open Source License

void updateContents(DBObject metaData, ContentPointer newContents) throws IOException {
    ContentPointer oldContents = getContentPointer(metaData);

    if (newContents.contentEquals(oldContents))
        return;

    String filename = (String) metaData.get("filename");
    String contentType = (String) metaData.get("contentType");
    Object fileId = metaData.get("_id");

    BSONObject newContent = storage.updateBackRefs(newContents, fileId, filename, contentType);

    metaData.removeField("sha");
    metaData.removeField("length");
    metaData.removeField("in");

    metaData.putAll(newContent);

    updateMetaData(metaData);
}
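
The three removeField calls clear the previous content-addressing fields ("sha", "length", "in") before putAll merges in newContent; since putAll only overwrites keys actually present in newContent, removing them first ensures no stale fields from the old version survive the merge. insertContents below applies the same pattern when new contents are supplied.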

From source file:v7db.files.mongodb.V7GridFS.java

License:Open Source License

void insertContents(DBObject metaData, ContentPointer newContents) throws IOException {

    String filename = (String) metaData.get("filename");
    String contentType = (String) metaData.get("contentType");
    Object fileId = metaData.get("_id");

    if (newContents != null) {
        BSONObject newContent = storage.updateBackRefs(newContents, fileId, filename, contentType);

        metaData.removeField("sha");
        metaData.removeField("length");
        metaData.removeField("in");

        metaData.putAll(newContent);
    }

    insertMetaData(metaData);
}