Example usage for the com.mongodb.client.model.UpdateOneModel constructor

Introduction

This page lists example usages of the com.mongodb.client.model.UpdateOneModel constructor, collected from open source projects.

Prototype

public UpdateOneModel(final Bson filter, final List<? extends Bson> update, final UpdateOptions options) 

Document

Construct a new instance.
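
Before the project examples, here is a minimal, self-contained sketch of this constructor in a bulk write. The connection string, database, collection, and field names are illustrative assumptions, not taken from the examples below. Note that this overload takes the update as an aggregation pipeline (a List of stages), which requires a correspondingly recent server and driver; most of the examples that follow use the simpler (Bson filter, Bson update, UpdateOptions options) overload instead.

import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import org.bson.Document;

import java.util.Arrays;

public class UpdateOneModelSketch {
    public static void main(String[] args) {
        // Hypothetical connection string and namespace.
        MongoCollection<Document> collection = MongoClients.create("mongodb://localhost:27017")
                .getDatabase("test").getCollection("items");

        // The update is expressed as an aggregation pipeline: a list of stages.
        UpdateOneModel<Document> model = new UpdateOneModel<>(
                Filters.eq("_id", "item-1"),                                          // filter
                Arrays.asList(new Document("$set", new Document("status", "done"))),  // pipeline update
                new UpdateOptions().upsert(true));                                    // insert if no match

        // UpdateOneModel is a WriteModel, so it is typically submitted via bulkWrite.
        collection.bulkWrite(Arrays.asList(model));
    }
}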

Usage

From source file: com.mastfrog.asyncpromises.mongo.BulkWriteBuilderImpl.java

License: Open Source License

public BulkWriteBuilder<T> updateOne(Bson filter, Bson update, UpdateOptions options) {
    requests.add(new UpdateOneModel<T>(filter, update, options));
    return this;
}

From source file: com.yahoo.ycsb.db3.MongoDbClient.java

License: Open Source License

/**
 * Insert a record in the database. Any field/value pairs in the specified
 * values HashMap will be written into the record with the specified record
 * key.
 * 
 * @param table
 *          The name of the table
 * @param key
 *          The record key of the record to insert.
 * @param values
 *          A HashMap of field/value pairs to insert in the record
 * @return Zero on success, a non-zero error code on error. See the {@link DB}
 *         class's description for a discussion of error codes.
 */
@Override
public Status insert(String table, String key, HashMap<String, ByteIterator> values) {
    try {
        MongoCollection<Document> collection = database.getCollection(table);
        Document toInsert = new Document("_id", key);
        for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
            toInsert.put(entry.getKey(), entry.getValue().toArray());
        }

        if (batchSize == 1) {
            if (useUpsert) {
                // this is effectively an insert, but using an upsert instead due
                // to current inability of the framework to clean up after itself
                // between test runs.
                collection.replaceOne(new Document("_id", toInsert.get("_id")), toInsert, UPDATE_WITH_UPSERT);
            } else {
                collection.insertOne(toInsert);
            }
        } else {
            bulkInserts.add(toInsert);
            if (bulkInserts.size() == batchSize) {
                if (useUpsert) {
                    List<UpdateOneModel<Document>> updates = new ArrayList<UpdateOneModel<Document>>(
                            bulkInserts.size());
                    for (Document doc : bulkInserts) {
                        updates.add(new UpdateOneModel<Document>(new Document("_id", doc.get("_id")), doc,
                                UPDATE_WITH_UPSERT));
                    }
                    collection.bulkWrite(updates);
                } else {
                    collection.insertMany(bulkInserts, INSERT_UNORDERED);
                }
                bulkInserts.clear();
            } else {
                return Status.BATCHED_OK;
            }
        }
        return Status.OK;
    } catch (Exception e) {
        System.err.println("Exception while trying bulk insert with " + bulkInserts.size());
        e.printStackTrace();
        return Status.ERROR;
    }

}

From source file: eu.project.ttc.models.occstore.MongoDBOccurrenceStore.java

License: Apache License

@Override
public void flush() {

    // bulk write occurrences
    final List<org.bson.Document> occDocuments = Lists.newArrayListWithCapacity(occurrencesBuffer.size());
    for (TermOccurrence o : this.occurrencesBuffer) {

        occDocuments.add(new org.bson.Document().append(TERM_ID, o.getTerm().getId())
                .append(DOC_ID, o.getSourceDocument().getId()).append(BEGIN, o.getBegin())
                .append(END, o.getEnd()).append(COVERED_TEXT, o.getCoveredText()));
    }
    if (!occurrencesBuffer.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                occurrenceCollection.insertMany(occDocuments);
            }
        });

    // bulk write documents
    final List<WriteModel<org.bson.Document>> documentUrlsOps = Lists
            .newArrayListWithCapacity(documentsUrls.size());
    for (Map.Entry<Integer, String> e : this.documentsUrls.entrySet()) {
        UpdateOneModel<org.bson.Document> w = new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, e.getKey()),
                Updates.set(DOC_URL, e.getValue()), new UpdateOptions().upsert(true));
        documentUrlsOps.add(w);
    }

    if (!documentUrlsOps.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                documentUrlCollection.bulkWrite(documentUrlsOps, new BulkWriteOptions().ordered(false));
            }
        });

    // bulk write terms
    final List<WriteModel<org.bson.Document>> termsOps = Lists.newArrayList();
    for (Term t : termsBuffer.keySet()) {
        UpdateOneModel<org.bson.Document> w = new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, t.getId()),
                Updates.inc(FREQUENCY, termsBuffer.get(t).intValue()), new UpdateOptions().upsert(true));
        termsOps.add(w);
    }
    if (!termsOps.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                termCollection.bulkWrite(termsOps, new BulkWriteOptions().ordered(false));
            }
        });

    resetBuffers();

}

From source file: io.mandrel.metrics.impl.MongoMetricsRepository.java

License: Apache License

@Override
public void sync(Map<String, Long> accumulators) {

    LocalDateTime now = LocalDateTime.now();
    LocalDateTime keytime = now.withMinute(0).withSecond(0).withNano(0);

    // {global.XXX=0, global.YYY=0, ...} to {global{XXX=0, YYY=0}, ...}
    Stream<Pair<String, Pair<String, Long>>> map = accumulators.entrySet().stream().map(e -> {
        Iterable<String> results = splitter.split(e.getKey());
        List<String> elts = Lists.newArrayList(results);
        return Pair.of(elts.get(0), Pair.of(elts.get(1), e.getValue()));
    });
    Map<String, List<Pair<String, Long>>> byKey = map.collect(Collectors.groupingBy(e -> e.getLeft(),
            Collectors.mapping(e -> e.getRight(), Collectors.toList())));

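    // First pass: upsert one counters document per key group, applying all increments in a single $inc.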
    List<? extends WriteModel<Document>> requests = byKey.entrySet().stream().map(e -> {
        Document updates = new Document();

        e.getValue().stream().forEach(i -> {
            Iterable<String> results = splitter.split(i.getKey());
            List<String> elts = Lists.newArrayList(results);
            if (elts.size() > 1) {
                updates.put(elts.get(0) + "." + JsonBsonCodec.toBson(elts.get(1)), i.getValue());
            } else {
                updates.put(i.getKey(), i.getValue());
            }
        });

        return new UpdateOneModel<Document>(Filters.eq("_id", e.getKey()), new Document("$inc", updates),
                new UpdateOptions().upsert(true));
    }).collect(Collectors.toList());

    counters.bulkWrite(requests);

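    // Second pass: for global metrics, $inc the current minute's slot in an hourly time-series document.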
    requests = byKey.entrySet().stream().map(e -> {
        List<UpdateOneModel<Document>> tsUpdates = Lists.newArrayList();

        e.getValue().stream().forEach(i -> {
            Iterable<String> results = splitter.split(i.getKey());
            List<String> elts = Lists.newArrayList(results);

            if (elts.size() == 1 && e.getKey().equalsIgnoreCase(MetricKeys.global())) {
                tsUpdates.add(new UpdateOneModel<Document>(
                        Filters.and(Filters.eq("type", e.getKey() + MetricKeys.METRIC_DELIM + i.getKey()),
                                Filters.eq("timestamp_hour", keytime)),
                        new Document("$inc",
                                new Document("values." + Integer.toString(now.getMinute()), i.getValue())),
                        new UpdateOptions().upsert(true)));
            }
        });

        return tsUpdates;
    }).flatMap(list -> list.stream()).collect(Collectors.toList());

    timeseries.bulkWrite(requests);

}

From source file: io.mandrel.metrics.impl.MongoMetricsRepository.java

License: Apache License

public void prepareMinutes(LocalDateTime keytime) {
    List<? extends WriteModel<Document>> requests = Arrays
            .asList(MetricKeys.globalTotalSize(), MetricKeys.globalNbPages()).stream().map(el -> {
                Document document = new Document();
                document.append("type", el).append("timestamp_hour", keytime);
                document.append("values", IntStream.range(0, 60).collect(Document::new,
                        (doc, val) -> doc.put(Integer.toString(val), Long.valueOf(0)), Document::putAll));
                return document;
            })
            .map(doc -> new UpdateOneModel<Document>(
                    Filters.and(Filters.eq("type", doc.getString("type")),
                            Filters.eq("timestamp_hour", keytime)),
                    new Document("$set", doc), new UpdateOptions().upsert(true)))
            .collect(Collectors.toList());

    timeseries.bulkWrite(requests);
}

From source file: net.acesinc.nifi.processors.mongodb.PartialUpdateMongo.java

protected BulkWriteResult performBlukUpdate(List<Map<String, Document>> updateDocs, ProcessContext context,
        ProcessSession session) {
    final ProcessorLog logger = getLogger();
    StopWatch watch = new StopWatch(true);

    logger.debug("Performing Bulk Update of [ " + updateDocs.size() + " ] documents");

    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    List<WriteModel<Document>> updates = new ArrayList<>();

    for (Map<String, Document> update : updateDocs) {
        UpdateOneModel<Document> upOne = new UpdateOneModel<>(update.get("query"), // find part
                update.get("update"), // update part
                new UpdateOptions().upsert(true) // options like upsert
        );
        updates.add(upOne);
    }

    BulkWriteResult bulkWriteResult = collection.bulkWrite(updates, new BulkWriteOptions().ordered(false));
    return bulkWriteResult;

}

From source file: org.eclipse.ditto.services.thingsearch.persistence.write.model.ThingDeleteModel.java

License: Open Source License

@Override
public WriteModel<Document> toMongo() {
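    // Soft delete: set the delete-at marker to the epoch instead of removing the document.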
    final Bson filter = getFilter();
    final Bson update = new BsonDocument().append(SET,
            new BsonDocument().append(FIELD_DELETE_AT, new BsonDateTime(0L)));
    final UpdateOptions updateOptions = new UpdateOptions().bypassDocumentValidation(true);
    return new UpdateOneModel<>(filter, update, updateOptions);
}

From source file: org.opencb.commons.datastore.mongodb.MongoDBNativeQuery.java

License: Apache License

public BulkWriteResult update(List<? extends Bson> documentList, List<? extends Bson> updatesList,
        boolean upsert, boolean multi) {
    if (documentList.size() != updatesList.size()) {
        throw wrongQueryUpdateSize(documentList, updatesList);
    }

    Iterator<? extends Bson> queryIterator = documentList.iterator();
    Iterator<? extends Bson> updateIterator = updatesList.iterator();

    List<WriteModel<Document>> actions = new ArrayList<>(documentList.size());
    UpdateOptions updateOptions = new UpdateOptions().upsert(upsert);

    while (queryIterator.hasNext()) {
        Bson query = queryIterator.next();
        Bson update = updateIterator.next();

        if (multi) {
            actions.add(new UpdateManyModel<>(query, update, updateOptions));
        } else {
            actions.add(new UpdateOneModel<>(query, update, updateOptions));
        }

    }
    return dbCollection.bulkWrite(actions, new BulkWriteOptions().ordered(false));
}

From source file: org.restheart.db.DAOUtils.java

License: Open Source License

private static List<WriteModel<BsonDocument>> getBulkWriteModel(final MongoCollection<BsonDocument> mcoll,
        final BsonArray documents, BsonDocument shardKeys, final ObjectId etag) {
    Objects.requireNonNull(mcoll);
    Objects.requireNonNull(documents);

    List<WriteModel<BsonDocument>> updates = new ArrayList<>();

    documents.stream().filter(_document -> _document.isDocument()).forEach(new Consumer<BsonValue>() {
        @Override
        public void accept(BsonValue _document) {
            BsonDocument document = _document.asDocument();

            // generate new id if missing, will be an insert
            if (!document.containsKey("_id")) {
                document.put("_id", new BsonObjectId(new ObjectId()));
            }

            // add the _etag
            document.put("_etag", new BsonObjectId(etag));

            Bson filter = eq("_id", document.get("_id"));

            if (shardKeys != null) {
                filter = and(filter, shardKeys);
            }

            updates.add(new UpdateOneModel<>(filter, getUpdateDocument(document),
                    new UpdateOptions().upsert(true)));
        }
    });

    return updates;
}