Example usage for com.mongodb.client.model Updates inc

List of usage examples for com.mongodb.client.model Updates inc

Introduction

On this page you can find example usage for com.mongodb.client.model Updates inc.

Prototype

public static Bson inc(final String fieldName, final Number number) 

Source Link

Document

Creates an update that increments the value of the field with the given name by the given value.

Usage

From source file:com.exorath.exodata.impl.IExoDocument.java

License:Apache License

@Override
public Observable<UpdateResult> inc(String key, Number amount) {
    // Increment `key` by `amount`; the named flag documents that the
    // underlying update is allowed to upsert the document.
    final boolean upsert = true;
    return update(Updates.inc(key, amount), upsert);
}

From source file:com.exorath.exodata.impl.IExoDocument.java

License:Apache License

private Observable<UpdateResult> incIfHas(String key, Number has, Number increment) {
    // Conditional increment: only matches when the stored value of `key`
    // is at least `has`. Never upserts — a missing document must not be created.
    final boolean upsert = false;
    return update(and(getIdQuery(), Filters.gte(key, has)), Updates.inc(key, increment), upsert);
}

From source file:com.exorath.service.currency.service.MongoService.java

License:Apache License

/**
 * Atomically adjusts the account balance for the request's currency/uuid.
 *
 * <p>When {@code req.getMin()} is set, the increment only applies if the current
 * balance is at least that minimum (guarded debit); otherwise the account is
 * upserted so first-time increments succeed.
 *
 * @param req currency, uuid, amount and optional minimum-balance guard
 * @return success holding the post-update balance, or an error code
 *         (1 = insufficient funds, -1 = unknown error)
 */
@Override
public IncrementSuccess increment(IncrementReq req) {
    Document filter = getFilter(req.getCurrency(), req.getUuid());
    // Single definition of the balance increment, shared by both branches.
    org.bson.conversions.Bson balanceInc = Updates.inc("balance", req.getAmount());

    Document result;
    if (req.getMin() != null) {
        // Guarded path: only match accounts whose balance is >= the minimum,
        // so the balance can never drop below it.
        filter.append("balance", new Document("$gte", req.getMin()));
        result = accounts.findOneAndUpdate(filter, balanceInc,
                new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
        if (result == null) {
            return new IncrementSuccess(1, "Insufficient funds");
        }
    } else {
        // Unguarded path: upsert so a missing account is created on first use.
        result = accounts.findOneAndUpdate(filter, balanceInc,
                new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER).upsert(true));
    }
    if (result == null) {
        return new IncrementSuccess(-1, "Unknown error");
    }
    return new IncrementSuccess(result.getInteger("balance"));
}

From source file:eu.project.ttc.models.occstore.MongoDBOccurrenceStore.java

License:Apache License

/**
 * Drains the in-memory buffers to MongoDB: inserts buffered term occurrences,
 * upserts document URLs, and increments stored term frequencies, then resets
 * the buffers. The actual writes run on {@code executor}, so this method may
 * return before they complete.
 */
@Override
public void flush() {

    // bulk write occurrences
    // Snapshot the buffer into plain BSON documents first: the async task
    // below must not read the buffers, which are reset at the end of flush().
    final List<org.bson.Document> occDocuments = Lists.newArrayListWithCapacity(occurrencesBuffer.size());
    for (TermOccurrence o : this.occurrencesBuffer) {

        occDocuments.add(new org.bson.Document().append(TERM_ID, o.getTerm().getId())
                .append(DOC_ID, o.getSourceDocument().getId()).append(BEGIN, o.getBegin())
                .append(END, o.getEnd()).append(COVERED_TEXT, o.getCoveredText()));
    }
    if (!occurrencesBuffer.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                occurrenceCollection.insertMany(occDocuments);
            }
        });

    // bulk write documents
    // Upsert each (doc id -> URL) mapping so new documents are created and
    // existing ones updated in a single unordered bulk operation.
    final List<WriteModel<org.bson.Document>> documentUrlsOps = Lists
            .newArrayListWithCapacity(documentsUrls.size());
    for (Map.Entry<Integer, String> e : this.documentsUrls.entrySet()) {
        UpdateOneModel<org.bson.Document> w = new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, e.getKey()),
                Updates.set(DOC_URL, e.getValue()), new UpdateOptions().upsert(true));
        documentUrlsOps.add(w);
    }

    if (!documentUrlsOps.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                // ordered(false): the upserts are independent, so MongoDB may
                // apply them in any order (and continue past individual errors).
                documentUrlCollection.bulkWrite(documentUrlsOps, new BulkWriteOptions().ordered(false));
            }
        });

    // bulk write terms
    // $inc folds the buffered per-term counts into the stored FREQUENCY field,
    // upserting terms not yet present in the collection.
    final List<WriteModel<org.bson.Document>> termsOps = Lists.newArrayList();
    for (Term t : termsBuffer.keySet()) {
        UpdateOneModel<org.bson.Document> w = new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, t.getId()),
                Updates.inc(FREQUENCY, termsBuffer.get(t).intValue()), new UpdateOptions().upsert(true));
        termsOps.add(w);
    }
    if (!termsOps.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                termCollection.bulkWrite(termsOps, new BulkWriteOptions().ordered(false));
            }
        });

    resetBuffers();

}

From source file:org.nuxeo.directory.mongodb.MongoDBSession.java

License:Apache License

/**
 * Creates a new directory entry from {@code fieldMap}: resolves or generates
 * the entry id, hashes the password field if present, inserts the BSON
 * document, then materializes reference links and invalidates caches.
 *
 * @param fieldMap field name to value mapping for the new entry
 * @return the created entry as a DocumentModel
 * @throws DirectoryException if the id already exists or the insert fails
 */
@Override
public DocumentModel createEntry(Map<String, Object> fieldMap) throws DirectoryException {
    checkPermission(SecurityConstants.WRITE);
    String id;
    if (autoincrementId) {
        // Draw the next id atomically from the per-directory counter document
        // ($inc via findOneAndUpdate, returning the post-update value).
        // NOTE(review): no upsert here — assumes the counter document already
        // exists, else findOneAndUpdate returns null and getLong NPEs; confirm
        // counter initialization elsewhere.
        Document filter = MongoDBSerializationHelper.fieldMapToBson(MONGODB_ID, directoryName);
        Bson update = Updates.inc(MONGODB_SEQ, 1L);
        FindOneAndUpdateOptions options = new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER);
        Long longId = getCollection(countersCollectionName).findOneAndUpdate(filter, update, options)
                .getLong(MONGODB_SEQ);
        fieldMap.put(getIdField(), longId);
        id = String.valueOf(longId);
    } else {
        // Caller-supplied id: reject duplicates up front.
        id = String.valueOf(fieldMap.get(getIdField()));
        if (hasEntry(id)) {
            throw new DirectoryException(String.format("Entry with id %s already exists", id));
        }
    }
    // Hash the password before it is persisted; fieldMap is mutated in place.
    if (fieldMap.get(getPasswordField()) != null) {
        String password = (String) fieldMap.get(getPasswordField());
        password = PasswordHelper.hashPassword(password, passwordHashAlgorithm);
        fieldMap.put(getPasswordField(), password);
    }
    try {
        Document bson = MongoDBSerializationHelper.fieldMapToBson(fieldMap);
        getCollection().insertOne(bson);

        DocumentModel docModel = BaseSession.createEntryModel(null, schemaName, id, fieldMap, isReadOnly());

        // Add references fields
        Field schemaIdField = schemaFieldMap.get(getIdField());
        String idFieldName = schemaIdField.getName().getPrefixedName();

        String sourceId = docModel.getId();
        for (Reference reference : getDirectory().getReferences()) {
            String referenceFieldName = schemaFieldMap.get(reference.getFieldName()).getName()
                    .getPrefixedName();
            // A field backed by more than one reference is ambiguous: warn and skip.
            if (getDirectory().getReferences(reference.getFieldName()).size() > 1) {
                log.warn("Directory " + getDirectory().getName() + " cannot create field "
                        + reference.getFieldName() + " for entry " + fieldMap.get(idFieldName)
                        + ": this field is associated with more than one reference");
                continue;
            }

            @SuppressWarnings("unchecked")
            List<String> targetIds = (List<String>) fieldMap.get(referenceFieldName);
            // MongoDB-backed references reuse this session; other reference
            // implementations manage their own storage.
            if (reference instanceof MongoDBReference) {
                MongoDBReference mongodbReference = (MongoDBReference) reference;
                mongodbReference.addLinks(sourceId, targetIds, this);
            } else {
                reference.addLinks(sourceId, targetIds);
            }
        }

        getDirectory().invalidateCaches();
        return docModel;
    } catch (MongoWriteException e) {
        throw new DirectoryException(e);
    }
}

From source file:org.opencb.opencga.catalog.db.mongodb.CatalogMongoMetaDBAdaptor.java

License:Apache License

/**
 * Atomically obtains the next value of the auto-increment counter stored in
 * the catalog metadata document, using findAndUpdate with an {@code $inc} so
 * concurrent callers never receive the same id.
 *
 * @param field name of the counter field on the metadata document
 * @return the counter value after the increment
 */
public long getNewAutoIncrementId(String field) {
    Bson query = Filters.eq(PRIVATE_ID, CatalogMongoDBAdaptorFactory.METADATA_OBJECT_ID);
    Document projection = new Document(field, true);
    Bson inc = Updates.inc(field, 1L);
    // "returnNew" asks for the document state AFTER the update is applied.
    QueryOptions queryOptions = new QueryOptions("returnNew", true);
    QueryResult<Document> result = metaCollection.findAndUpdate(query, projection, null, inc, queryOptions);
    // NOTE(review): assumes the metadata document exists and stores `field` as
    // a Long — getLong fails otherwise; confirm initialization.
    return result.getResult().get(0).getLong(field);
}

From source file:org.opencb.opencga.catalog.db.mongodb.CatalogMongoStudyDBAdaptor.java

License:Apache License

/***
 * This method is called every time a file has been inserted, modified or deleted to keep track of the current study diskUsage.
 *
 * @param studyId   Study Identifier/*  w  ww.  java2 s . c om*/
 * @param diskUsage disk usage of a new created, updated or deleted file belonging to studyId. This argument
 *                  will be > 0 to increment the diskUsage field in the study collection or < 0 to decrement it.
 * @throws CatalogDBException An exception is launched when the update crashes.
 */
public void updateDiskUsage(long studyId, long diskUsage) throws CatalogDBException {
    Bson query = new Document(QueryParams.ID.key(), studyId);
    Bson update = Updates.inc(QueryParams.DISK_USAGE.key(), diskUsage);
    if (studyCollection.update(query, update, null).getNumTotalResults() == 0) {
        throw new CatalogDBException(
                "CatalogMongoStudyDBAdaptor updateDiskUsage: Couldn't update the diskUsage field of"
                        + " the study " + studyId);
    }
}

From source file:org.opencb.opencga.catalog.db.mongodb.MetaMongoDBAdaptor.java

License:Apache License

/**
 * Atomically obtains the next value of the auto-increment counter stored in
 * the metadata document, using findAndUpdate with an {@code $inc} so
 * concurrent callers never receive the same id.
 *
 * @param field name of the counter field on the metadata document
 * @return the counter value after the increment
 */
public long getNewAutoIncrementId(String field) {
    Bson query = Filters.eq(PRIVATE_ID, MongoDBAdaptorFactory.METADATA_OBJECT_ID);
    Document projection = new Document(field, true);
    Bson inc = Updates.inc(field, 1L);
    // "returnNew" asks for the document state AFTER the update is applied.
    QueryOptions queryOptions = new QueryOptions("returnNew", true);
    QueryResult<Document> result = metaCollection.findAndUpdate(query, projection, null, inc, queryOptions);
    // NOTE(review): assumes the metadata document exists and stores `field` as
    // a Long — getLong fails otherwise; confirm initialization.
    return result.getResult().get(0).getLong(field);
}

From source file:org.opencb.opencga.catalog.db.mongodb.MongoDBUtils.java

License:Apache License

/**
 * Atomically increments the metadata counter {@code field} and returns its
 * new value.
 *
 * @param field          name of the counter field on the metadata document
 * @param metaCollection collection holding the metadata document
 * @return the counter value after the increment
 * @deprecated superseded by the adaptor-level {@code getNewAutoIncrementId}
 *             implementations in this package.
 */
@Deprecated
static long getNewAutoIncrementId(String field, MongoDBCollection metaCollection) {
    Bson query = Filters.eq("_id", MongoDBAdaptorFactory.METADATA_OBJECT_ID);
    Document projection = new Document(field, true);
    // NOTE(review): increments with int 1 and reads back via getInteger, unlike
    // the non-deprecated variants which use 1L/getLong — confirm the stored
    // counter type before relying on this method.
    Bson inc = Updates.inc(field, 1);
    // "returnNew" asks for the document state AFTER the update is applied.
    QueryOptions queryOptions = new QueryOptions("returnNew", true);
    QueryResult<Document> result = metaCollection.findAndUpdate(query, projection, null, inc, queryOptions);
    return result.getResult().get(0).getInteger(field);
}
}