Example usage for com.mongodb.client.model BulkWriteOptions BulkWriteOptions

List of usage examples for com.mongodb.client.model BulkWriteOptions BulkWriteOptions

Introduction

In this page you can find the example usage for com.mongodb.client.model BulkWriteOptions BulkWriteOptions.

Prototype

BulkWriteOptions

Source Link

Usage

From source file:at.rocworks.oa4j.logger.dbs.NoSQLMongoDB.java

/**
 * Persists every {@link EventItem} in the given list as an "event" document
 * in the events collection via a single unordered bulk write.
 *
 * Each document carries the datapoint tag, the millisecond timestamp ("ts")
 * and the nanosecond part ("ns", stored separately because ISODate has no
 * nanosecond resolution), a map of the value's available representations,
 * and — when present — status/manager/user attributes. If the text value
 * parses as JSON it is additionally stored in parsed form under "document".
 *
 * @param events buffered items; entries that are not EventItems are skipped
 * @return INoSQLInterface.OK on success, ERR_UNRECOVERABLE on a bulk-write
 *         error (retrying the same documents would fail again), or
 *         ERR_REPEATABLE on any other failure
 */
private int storeDataEvents(DataList events) {
    Date t1 = new Date();

    DataItem item;
    EventItem event;
    ValueItem value;

    ArrayList<WriteModel<Document>> list = new ArrayList<>();

    try {
        for (int i = 0; i <= events.getHighWaterMark() && (item = events.getItem(i)) != null; i++) {
            if (!(item instanceof EventItem))
                continue;
            event = (EventItem) item;

            Document obj = new Document("type", "event").append("tag", getTagOfDp(event.getDp()))
                    .append("ns", event.getTimeNS()) // ISODate does not support nanoseconds
                    .append("ts", new Date(event.getTimeMS())); // ISODate

            // value: collect only the representations the value actually has
            value = event.getValue();
            Map<String, Object> map = new HashMap<>();

            Double dval = value.getDouble();
            if (dval != null)
                map.put("number", dval);

            if (value.getString() != null)
                map.put("text", value.getString());

            if (value.getTime() != null)
                map.put("time", value.getTime());

            // dynamic arrays are flattened into a JSON array of raw value objects
            if (value.getVariableType() == VariableType.DynVar) {
                if (value.getValueObject() instanceof DynVar) {
                    DynVar dyn = (DynVar) value.getValueObject();
                    JSONArray arr = new JSONArray();
                    dyn.asList().forEach((row) -> arr.add(row.getValueObject()));
                    map.put("array", arr);
                }
            }

            obj.append("value", map);

            // optional event attributes
            if (event.hasAttributes()) {
                obj.append("status", event.getStatus());
                obj.append("manager", event.getManager());
                obj.append("user", event.getUser());
            }

            // if the text value is itself a JSON document, store the parsed form too
            if (documents && value.getString() != null) {
                try {
                    Object doc = (new JSONParser()).parse(value.getString());
                    obj.append("document", doc);
                } catch (ParseException ex) {
                    // not JSON — fine, the raw text is already stored above
                }
            }

            list.add(new InsertOneModel<>(obj));
        }

        // bulkWrite() rejects an empty request list with an
        // IllegalArgumentException, so skip the round trip entirely when
        // the buffer contained no EventItems.
        if (!list.isEmpty())
            evcoll.bulkWrite(list, new BulkWriteOptions().ordered(false));

        Date t2 = new Date();
        addServerStats(events.getHighWaterMark(), t2.getTime() - t1.getTime());
        return INoSQLInterface.OK;
    } catch (MongoBulkWriteException ex) {
        // bulk errors are treated as unrecoverable: the same documents
        // would fail again on a retry
        return INoSQLInterface.ERR_UNRECOVERABLE;
    } catch (Exception ex) {
        JDebug.StackTrace(Level.SEVERE, ex);
        return INoSQLInterface.ERR_REPEATABLE;
    }
}

From source file:com.erudika.para.persistence.MongoDBDAO.java

License:Apache License

/**
 * Bulk-updates the given objects in the app's table with a single ordered
 * bulk write. Each non-null object gets its "updated" timestamp refreshed
 * and is written as a {@code $set} of its row representation (locked fields
 * excluded). Errors are logged and swallowed — this method never throws.
 *
 * @param appid   application id used to select the table; blank → no-op
 * @param objects objects to update; null → no-op, null elements are skipped
 */
@Override
public <P extends ParaObject> void updateAll(String appid, List<P> objects) {
    if (StringUtils.isBlank(appid) || objects == null) {
        return;
    }
    try {
        ArrayList<WriteModel<Document>> updates = new ArrayList<WriteModel<Document>>();
        List<String> ids = new ArrayList<String>(objects.size());
        for (P object : objects) {
            if (object != null) {
                object.setUpdated(Utils.timestamp());
                Document id = new Document(_ID, object.getId());
                Document data = new Document("$set", toRow(object, Locked.class, true));
                updates.add(new UpdateOneModel<Document>(id, data));
                ids.add(object.getId());
            }
        }
        // bulkWrite() throws IllegalArgumentException for an empty request
        // list, so only call it when at least one object was collected.
        if (!updates.isEmpty()) {
            BulkWriteResult res = getTable(appid).bulkWrite(updates, new BulkWriteOptions().ordered(true));
            // parameterized logging, consistent with the call below
            logger.debug("Updated: {}, keys: {}", res.getModifiedCount(), ids);
        }
    } catch (Exception e) {
        logger.error(null, e);
    }
    // objects is guaranteed non-null here (checked above)
    logger.debug("DAO.updateAll() {}", objects.size());
}

From source file:com.hazelcast.loader.MongoMapStore.java

License:Open Source License

/**
 * Stores all map entries as documents in a single unordered bulk write.
 * The map key becomes the document's {@code _id}; name and price are taken
 * from the {@link Supplement} value.
 *
 * @param map entries to persist; an empty map results in no database call
 */
@Override
public void storeAll(Map<String, Supplement> map) {
    // typed models + presized ArrayList instead of a raw-typed LinkedList
    List<WriteModel<Document>> batch = new ArrayList<WriteModel<Document>>(map.size());
    for (Map.Entry<String, Supplement> entry : map.entrySet()) {
        String key = entry.getKey();
        Supplement value = entry.getValue();
        batch.add(new InsertOneModel<Document>(
                new Document("name", value.getName()).append("price", value.getPrice()).append("_id", key)));
    }
    // bulkWrite() rejects an empty request list with IllegalArgumentException
    if (!batch.isEmpty()) {
        this.collection.bulkWrite(batch, new BulkWriteOptions().ordered(false));
    }
}

From source file:com.qwazr.connectors.MongoDbConnector.java

License:Apache License

/**
 * Creates a fresh {@link BulkWriteOptions} instance with the requested
 * ordering behavior.
 *
 * @param ordered whether the bulk operations must be applied in order
 * @return a new BulkWriteOptions configured accordingly
 */
@JsonIgnore
public BulkWriteOptions getNewBulkWriteOptions(boolean ordered) {
    final BulkWriteOptions options = new BulkWriteOptions();
    return options.ordered(ordered);
}

From source file:com.qwazr.library.mongodb.MongoDbConnector.java

License:Apache License

/**
 * Factory for {@link BulkWriteOptions}: returns a new instance whose
 * ordered flag matches the given argument.
 *
 * @param ordered true for ordered bulk writes, false for unordered
 * @return a newly built BulkWriteOptions
 */
@JsonIgnore
public BulkWriteOptions getNewBulkWriteOptions(final boolean ordered) {
    BulkWriteOptions result = new BulkWriteOptions().ordered(ordered);
    return result;
}

From source file:eu.project.ttc.models.occstore.MongoDBOccurrenceStore.java

License:Apache License

/**
 * Flushes the three in-memory buffers to MongoDB, each as an asynchronous
 * task on the executor: occurrences via insertMany, document URLs and term
 * frequencies via unordered bulk upserts. Buffers are reset afterwards.
 */
@Override
public void flush() {

    // 1) occurrences → plain documents, inserted in one batch
    final List<org.bson.Document> occDocs = Lists.newArrayListWithCapacity(occurrencesBuffer.size());
    for (TermOccurrence occ : this.occurrencesBuffer) {
        org.bson.Document d = new org.bson.Document();
        d.append(TERM_ID, occ.getTerm().getId());
        d.append(DOC_ID, occ.getSourceDocument().getId());
        d.append(BEGIN, occ.getBegin());
        d.append(END, occ.getEnd());
        d.append(COVERED_TEXT, occ.getCoveredText());
        occDocs.add(d);
    }
    if (!occurrencesBuffer.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                occurrenceCollection.insertMany(occDocs);
            }
        });

    // 2) document URLs → upsert one URL per document id
    final List<WriteModel<org.bson.Document>> urlModels = Lists
            .newArrayListWithCapacity(documentsUrls.size());
    for (Map.Entry<Integer, String> entry : this.documentsUrls.entrySet()) {
        urlModels.add(new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, entry.getKey()),
                Updates.set(DOC_URL, entry.getValue()), new UpdateOptions().upsert(true)));
    }
    if (!urlModels.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                documentUrlCollection.bulkWrite(urlModels, new BulkWriteOptions().ordered(false));
            }
        });

    // 3) terms → accumulate frequencies with $inc, creating terms as needed
    final List<WriteModel<org.bson.Document>> termModels = Lists.newArrayList();
    for (Term term : termsBuffer.keySet()) {
        termModels.add(new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, term.getId()),
                Updates.inc(FREQUENCY, termsBuffer.get(term).intValue()), new UpdateOptions().upsert(true)));
    }
    if (!termModels.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                termCollection.bulkWrite(termModels, new BulkWriteOptions().ordered(false));
            }
        });

    resetBuffers();

}

From source file:examples.tour.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * Walks through the core MongoDB Java driver API against a running server:
 * insert (single and batch), queries with filters/sort/projection, an
 * aggregation, updates, deletes, and ordered vs. unordered bulk writes.
 * The "mydb" database is dropped at the end.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    // (cursor closed in finally — it holds server-side resources)
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // for-each iteration closes the cursor automatically when exhausted
    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {

        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection: exclude the _id field from the result
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation: filter i > 0, then project i*10 as ITimes10
    collection
            .aggregate(
                    asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    // group all documents (null key) and sum the i field
    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes: operations are applied in sequence, stopping at
    // the first error
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    // unordered bulk write: the server may reorder/parallelize the operations
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongodb.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * Demonstrates the MongoDB Java driver against the "test" database on a
 * hard-coded host: inserts, cursor iteration, filtered/sorted/projected
 * queries, updates, deletes, and ordered vs. unordered bulk writes.
 *
 * @param args takes an optional single argument for the connection string
 *             (NOTE(review): args is never read here — the host/port are
 *             hard-coded below; verify which behavior is intended)
 */
public static void main(final String[] args) {

    //represents a pool of connections to the database
    // NOTE(review): connection details are hard-coded — consider using args
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    // (cursor closed in finally — it holds server-side resources)
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // for-each iteration closes the cursor automatically when exhausted
    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection: exclude the _id field from the result
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes: operations are applied in sequence, stopping at
    // the first error
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    // unordered bulk write: the server may reorder/parallelize the operations
    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}

From source file:mongoSample.MongoSample.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args/*  www  .j  av a  2 s. c  o  m*/
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest
    // earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can
    // explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where; 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}

From source file:net.acesinc.nifi.processors.mongodb.PartialUpdateMongo.java

/**
 * Applies a batch of partial updates to the configured collection as a
 * single unordered bulk write. Each entry in {@code updateDocs} supplies a
 * "query" document (the filter) and an "update" document; every operation
 * is an upsert, so documents missing from the collection are created.
 *
 * @param updateDocs query/update pairs; expected to be non-empty —
 *                   bulkWrite() throws IllegalArgumentException on an
 *                   empty request list (NOTE(review): verify callers
 *                   guarantee this)
 * @param context    processor context providing write concern and collection
 * @param session    process session (currently unused here)
 * @return the driver's bulk write result
 */
protected BulkWriteResult performBlukUpdate(List<Map<String, Document>> updateDocs, ProcessContext context,
        ProcessSession session) {
    final ProcessorLog logger = getLogger();

    logger.debug("Performing Bulk Update of [ " + updateDocs.size() + " ] documents");

    final WriteConcern writeConcern = getWriteConcern(context);
    final MongoCollection<Document> collection = getCollection(context).withWriteConcern(writeConcern);

    // presized: one UpdateOneModel per incoming query/update pair
    List<WriteModel<Document>> updates = new ArrayList<>(updateDocs.size());
    for (Map<String, Document> update : updateDocs) {
        updates.add(new UpdateOneModel<>(update.get("query"), // find part
                update.get("update"), // update part
                new UpdateOptions().upsert(true))); // create if absent
    }

    // removed an unused StopWatch local that was started but never read
    return collection.bulkWrite(updates, new BulkWriteOptions().ordered(false));
}