Example usage for the com.mongodb.client.model.DeleteOneModel constructor

Introduction

This page lists example usages of the com.mongodb.client.model.DeleteOneModel constructor, collected from open-source projects.

Prototype

public DeleteOneModel(final Bson filter) 

Document

Construct a new instance.
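
The sketch below shows the typical pattern: each DeleteOneModel wraps a filter and is submitted through bulkWrite, which deletes at most one document matching that filter. It is a minimal sketch only; it assumes the legacy MongoClient API used by the examples that follow, and the database and collection names are illustrative.

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.WriteModel;
import org.bson.Document;

import java.util.ArrayList;
import java.util.List;

public class DeleteOneModelSketch {
    public static void main(String[] args) {
        // connects to a local mongod on the default port; "mydb" and "test" are illustrative names
        MongoClient mongoClient = new MongoClient();
        MongoCollection<Document> collection = mongoClient.getDatabase("mydb").getCollection("test");

        // each DeleteOneModel wraps a filter; bulkWrite removes at most one document per model
        List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
        writes.add(new DeleteOneModel<Document>(Filters.eq("_id", 2)));

        collection.bulkWrite(writes);
        mongoClient.close();
    }
}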

Usage

From source file: at.grahsl.kafka.connect.mongodb.cdc.debezium.mongodb.MongoDbDelete.java

License: Apache License

@Override
public WriteModel<BsonDocument> perform(SinkDocument doc) {

    BsonDocument keyDoc = doc.getKeyDoc()
            .orElseThrow(() -> new DataException("error: key doc must not be missing for delete operation"));

    try {
        BsonDocument filterDoc = BsonDocument.parse("{" + DBCollection.ID_FIELD_NAME + ":"
                + keyDoc.getString(MongoDbHandler.JSON_ID_FIELD_PATH).getValue() + "}");
        return new DeleteOneModel<>(filterDoc);
    } catch (Exception exc) {
        throw new DataException(exc);
    }

}

From source file: at.grahsl.kafka.connect.mongodb.cdc.debezium.rdbms.RdbmsDelete.java

License: Apache License

@Override
public WriteModel<BsonDocument> perform(SinkDocument doc) {

    BsonDocument keyDoc = doc.getKeyDoc()
            .orElseThrow(() -> new DataException("error: key doc must not be missing for delete operation"));

    BsonDocument valueDoc = doc.getValueDoc()
            .orElseThrow(() -> new DataException("error: value doc must not be missing for delete operation"));

    try {
        BsonDocument filterDoc = RdbmsHandler.generateFilterDoc(keyDoc, valueDoc, OperationType.DELETE);
        return new DeleteOneModel<>(filterDoc);
    } catch (Exception exc) {
        throw new DataException(exc);
    }

}

From source file: com.everydots.kafka.connect.mongodb.cdc.debezium.mysql.MysqlDelete.java

License: Apache License

@Override
public WriteModel<BsonDocument> perform(SinkDocument doc) {

    BsonDocument keyDoc = doc.getKeyDoc()
            .orElseThrow(() -> new DataException("error: key doc must not be missing for delete operation"));

    BsonDocument valueDoc = doc.getValueDoc()
            .orElseThrow(() -> new DataException("error: value doc must not be missing for delete operation"));

    try {
        BsonDocument filterDoc = MysqlHandler.generateFilterDoc(keyDoc, valueDoc, OperationType.DELETE);
        return new DeleteOneModel<>(filterDoc);
    } catch (Exception exc) {
        throw new DataException(exc);
    }

}

From source file: com.mastfrog.asyncpromises.mongo.BulkWriteBuilderImpl.java

License: Open Source License

@Override
public BulkWriteBuilder<T> deleteOne(Bson filter) {
    requests.add(new DeleteOneModel<T>(filter));
    return this;
}

From source file: com.mastfrog.asyncpromises.mongo.BulkWriteBuilderImpl.java

License: Open Source License

@Override
public QueryBuilder<T, BulkWriteBuilder<T>> deleteOne() {
    return new QueryBuilderImpl<>(new QueryBuilderImpl.Factory<T, BulkWriteBuilder<T>>() {

        @Override
        public BulkWriteBuilderImpl<T> create(Document document) {
            requests.add(new DeleteOneModel<T>(document));
            return BulkWriteBuilderImpl.this;
        }
    });
}

From source file: com.px100systems.data.plugin.storage.mongo.MongoDatabaseStorage.java

License: Open Source License

private void batchSave(MongoDatabase db, List<StoredBean> inserts, List<StoredBean> updates,
        List<Delete> deletes) {
    Map<String, List<WriteModel<Document>>> batches = new HashMap<>();

    for (StoredBean bean : inserts) {
        String unitName = bean.unitName();
        List<WriteModel<Document>> batch = batches.get(unitName);
        if (batch == null) {
            batch = new ArrayList<>();
            batches.put(unitName, batch);
        }

        batch.add(new InsertOneModel<Document>(serialize(bean)));
    }

    for (StoredBean bean : updates) {
        String unitName = bean.unitName();
        List<WriteModel<Document>> batch = batches.get(unitName);
        if (batch == null) {
            batch = new ArrayList<>();
            batches.put(unitName, batch);
        }

        batch.add(new ReplaceOneModel<Document>(Filters.eq("id", bean.getId()), serialize(bean)));
    }

    for (Delete delete : deletes) {
        String unitName = delete.getUnitName();
        List<WriteModel<Document>> batch = batches.get(unitName);
        if (batch == null) {
            batch = new ArrayList<>();
            batches.put(unitName, batch);
        }

        batch.add(delete.getId() == null
                ? new DeleteManyModel<Document>(delete.getCriteria().convert(new FilterQueryBuilder()))
                : new DeleteOneModel<Document>(Filters.eq("id", delete.getId())));
    }

    for (Map.Entry<String, List<WriteModel<Document>>> e : batches.entrySet())
        db.getCollection(e.getKey()).bulkWrite(e.getValue());
}

From source file: com.streamsets.pipeline.stage.destination.mongodb.MongoDBTarget.java

License: Apache License

@Override
public void write(Batch batch) throws StageException {
    Iterator<Record> records = batch.getRecords();
    List<WriteModel<Document>> documentList = new ArrayList<>();
    List<Record> recordList = new ArrayList<>();
    while (records.hasNext()) {
        Record record = records.next();
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream(DEFAULT_CAPACITY);
            DataGenerator generator = generatorFactory.getGenerator(baos);
            generator.write(record);
            generator.close();
            Document document = Document.parse(new String(baos.toByteArray()));

            //create a write model based on record header
            if (!record.getHeader().getAttributeNames().contains(OPERATION_KEY)) {
                LOG.error(Errors.MONGODB_15.getMessage(), record.getHeader().getSourceId());
                throw new OnRecordErrorException(Errors.MONGODB_15, record.getHeader().getSourceId());
            }

            String operation = record.getHeader().getAttribute(OPERATION_KEY);
            switch (operation) {
            case INSERT:
                documentList.add(new InsertOneModel<>(document));
                recordList.add(record);
                break;
            case UPSERT:
                validateUniqueKey(operation, record);
                recordList.add(record);
                documentList.add(new ReplaceOneModel<>(
                        new Document(removeLeadingSlash(mongoTargetConfigBean.uniqueKeyField),
                                record.get(mongoTargetConfigBean.uniqueKeyField).getValueAsString()),
                        document, new UpdateOptions().upsert(true)));
                break;
            case DELETE:
                recordList.add(record);
                documentList.add(new DeleteOneModel<Document>(document));
                break;
            default:
                LOG.error(Errors.MONGODB_14.getMessage(), operation, record.getHeader().getSourceId());
                throw new StageException(Errors.MONGODB_14, operation, record.getHeader().getSourceId());
            }
        } catch (IOException | StageException e) {
            errorRecordHandler.onError(new OnRecordErrorException(record, Errors.MONGODB_13, e.toString(), e));
        }
    }

    if (!documentList.isEmpty()) {
        try {
            BulkWriteResult bulkWriteResult = coll.bulkWrite(documentList);
            if (bulkWriteResult.wasAcknowledged()) {
                LOG.trace("Wrote batch with {} inserts, {} updates and {} deletes",
                        bulkWriteResult.getInsertedCount(), bulkWriteResult.getModifiedCount(),
                        bulkWriteResult.getDeletedCount());
            }
        } catch (MongoException e) {
            for (Record record : recordList) {
                errorRecordHandler
                        .onError(new OnRecordErrorException(record, Errors.MONGODB_17, e.toString(), e));
            }
        }
    }
}

From source file: examples.tour.QuickTour.java

License: Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {

        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation
    collection
            .aggregate(
                    asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}

From source file: mongodb.QuickTour.java

License: Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {

    //represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}

From source file: mongoSample.MongoSample.java

License: Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    String mongoServer = args[0];

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (since it's the only one in there since we dropped the rest
    // earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, lets add lots of little documents to the collection so we can
    // explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // lets get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}