List of usage examples for com.mongodb.client.result UpdateResult getMatchedCount
public abstract long getMatchedCount();
From source file:BlogPostDAO.java
License:Apache License
public void addPostComment(final String name, final String email, final String body, final String permalink) { Document comment = new Document("author", name).append("body", body); if (email != null && !email.equals("")) { comment.append("email", email); }//from w w w. ja va 2 s. c om UpdateResult result = postsCollection.updateOne(new Document("permalink", permalink), new Document("$push", new Document("comments", comment))); logger.info("Matches: " + result.getMatchedCount()); logger.info("Modified: " + result.getModifiedCount()); }
From source file:com.amertkara.tinkerpop.blueprints.impl.mongodb.MongoDBElement.java
License:Apache License
@Override public void setProperty(final String key, final Object value) { ElementHelper.validateProperty(this, key, value); UpdateResult result = getMongoCollection().updateOne(this.rawElement, new Document("$set", new Document(MongoDBConstants.FIELD_PROPERTIES, new Document(key, value)))); if (result.getMatchedCount() == result.getModifiedCount()) { logger.info("Property of element " + this.rawElement.get(MongoDBConstants.FIELD_ID) + " is set."); }//from w ww .j a va 2s .c o m // Refresh the rawElement this.reload(); }
From source file:com.amertkara.tinkerpop.blueprints.impl.mongodb.MongoDBElement.java
License:Apache License
@Override public <T> T removeProperty(final String key) { if (getProperty(key) == null) { return null; } else {/*from w ww . j a v a 2 s .com*/ UpdateResult result = getMongoCollection().updateOne(this.rawElement, new Document("$unset", new Document(MongoDBConstants.FIELD_PROPERTIES + "." + key, ""))); if (result.getMatchedCount() == result.getModifiedCount()) { logger.info("Property of element " + this.rawElement.get(MongoDBConstants.FIELD_ID) + " is unset."); } T removedProperty = this.getProperty(key); // Refresh the rawElement reload(); return removedProperty; } }
From source file:com.github.cherimojava.data.mongo.entity.EntityInvocationHandler.java
License:Apache License
/**
 * Stores the given EntityInvocationHandler represented Entity in the given Collection.
 * Flow: validate all declared validation properties, attempt a {@code $set} update keyed
 * by the entity's id, and fall back to an insert when no document matched (upsert-like).
 * Finally marks the handler as persisted.
 *
 * @param handler EntityInvocationHandler (Entity) to save
 * @param coll    MongoCollection to save entity into
 */
@SuppressWarnings("unchecked")
static <T extends Entity> void save(EntityInvocationHandler handler, MongoCollection<T> coll) {
    // Run declared validations before touching the database.
    for (ParameterProperty cpp : handler.properties.getValidationProperties()) {
        cpp.validate(handler.data.get(cpp.getMongoName()));
    }
    // Wrap the proxy so the collection's codec registry serializes it lazily.
    BsonDocumentWrapper wrapper = new BsonDocumentWrapper<>(handler.proxy,
            (org.bson.codecs.Encoder<Entity>) coll.getCodecRegistry().get(handler.properties.getEntityClass()));
    UpdateResult res = coll.updateOne(
            new BsonDocument("_id",
                    BsonDocumentWrapper.asBsonDocument(EntityCodec._obtainId(handler.proxy), idRegistry)),
            new BsonDocument("$set", wrapper), new UpdateOptions());
    if (res.getMatchedCount() == 0) {
        // No existing document matched the id: insert instead.
        // TODO this seems too nasty, there must be a better way. For now live with it.
        coll.insertOne((T) handler.proxy);
    }
    handler.persist();
}
From source file:com.telefonica.iot.cygnus.backends.mongo.MongoBackend.java
License:Open Source License
/** * Inserts a new document with given resolution in the given aggregated collection within the given database * (row-like mode)./*from w w w. ja v a 2 s.c o m*/ * @param dbName * @param collectionName * @param recvTimeTs * @param entityId * @param entityType * @param attrName * @param attrType * @param attrValue * @param resolution */ private void insertContextDataAggregatedForResoultion(String dbName, String collectionName, GregorianCalendar calendar, String entityId, String entityType, String attrName, String attrType, double attrValue, Resolution resolution) { // get database and collection MongoDatabase db = getDatabase(dbName); MongoCollection collection = db.getCollection(collectionName); // build the query BasicDBObject query = buildQueryForInsertAggregated(calendar, entityId, entityType, attrName, resolution); // prepopulate if needed BasicDBObject insert = buildInsertForPrepopulate(attrType, resolution); UpdateResult res = collection.updateOne(query, insert, new UpdateOptions().upsert(true)); if (res.getMatchedCount() == 0) { LOGGER.debug("Prepopulating data, database=" + dbName + ", collection=" + collectionName + ", query=" + query.toString() + ", insert=" + insert.toString()); } // if // do the update BasicDBObject update = buildUpdateForUpdate(attrType, attrValue); LOGGER.debug("Updating data, database=" + dbName + ", collection=" + collectionName + ", query=" + query.toString() + ", update=" + update.toString()); collection.updateOne(query, update); }
From source file:com.yahoo.ycsb.db3.MongoDbClient.java
License:Open Source License
/** * Update a record in the database. Any field/value pairs in the specified * values HashMap will be written into the record with the specified record * key, overwriting any existing values with the same field name. * /*from www . j a v a 2 s . c o m*/ * @param table * The name of the table * @param key * The record key of the record to write. * @param values * A HashMap of field/value pairs to update in the record * @return Zero on success, a non-zero error code on error. See this class's * description for a discussion of error codes. */ @Override public Status update(String table, String key, HashMap<String, ByteIterator> values) { try { MongoCollection<Document> collection = database.getCollection(table); Document query = new Document("_id", key); Document fieldsToSet = new Document(); for (Map.Entry<String, ByteIterator> entry : values.entrySet()) { fieldsToSet.put(entry.getKey(), entry.getValue().toArray()); } Document update = new Document("$set", fieldsToSet); UpdateResult result = collection.updateOne(query, update); if (result.wasAcknowledged() && result.getMatchedCount() == 0) { System.err.println("Nothing updated for key " + key); return Status.NOT_FOUND; } return Status.OK; } catch (Exception e) { System.err.println(e.toString()); return Status.ERROR; } }
From source file:joliex.mongodb.MongoDbConnector.java
/**
 * Updates all documents matching the request's {@code filter} with the request's
 * {@code documentUpdate}, honoring optional {@code writeConcern} and {@code options}
 * (upsert, bypassDocumentValidation) children. Returns a Value with
 * {@code matchedCount} and {@code modifiedCount}.
 *
 * @param request Jolie request value
 * @return Value carrying matchedCount and modifiedCount
 * @throws FaultException on MongoException or JsonParseException
 */
@RequestResponse
public Value updateMany(Value request) throws FaultException {
    try {
        String collectionName = request.getFirstChild("collection").strValue();
        Value v = Value.create();
        BsonDocument bsonQueryDocument = BsonDocument.parse(request.getFirstChild("filter").strValue());
        prepareBsonQueryData(bsonQueryDocument, request.getFirstChild("filter"));
        printlnJson("Update filter", bsonQueryDocument);
        BsonDocument bsonDocument = BsonDocument.parse(request.getFirstChild("documentUpdate").strValue());
        printlnJson("Update documentUpdate", bsonDocument);
        prepareBsonQueryData(bsonDocument, request.getFirstChild("documentUpdate"));
        printlnJson("Update documentUpdate", bsonDocument);
        MongoCollection<BsonDocument> collection = db.getCollection(collectionName, BsonDocument.class);
        if (request.hasChildren("writeConcern")) {
            // BUGFIX: WriteConcern is immutable — the original discarded the results
            // of withJournal/withW/withWTimeout (no-ops) and also discarded the
            // collection returned by withWriteConcern, so the concern never applied.
            Value wcValue = request.getFirstChild("writeConcern");
            WriteConcern writeConcern = WriteConcern.ACKNOWLEDGED;
            if (wcValue.hasChildren("journal")) {
                writeConcern = writeConcern.withJournal(wcValue.getFirstChild("journal").boolValue());
            }
            if (wcValue.hasChildren("w")) {
                if (wcValue.getFirstChild("w").isInt()) {
                    writeConcern = writeConcern.withW(wcValue.getFirstChild("w").intValue());
                }
                if (wcValue.getFirstChild("w").isString()) {
                    writeConcern = writeConcern.withW(wcValue.getFirstChild("w").strValue());
                }
            }
            if (wcValue.hasChildren("timeout")) {
                writeConcern = writeConcern.withWTimeout(
                        wcValue.getFirstChild("timeout").longValue(), TimeUnit.MILLISECONDS);
            }
            collection = collection.withWriteConcern(writeConcern);
        }
        UpdateResult resultUpdate;
        if (request.hasChildren("options")) {
            UpdateOptions updateOptions = new UpdateOptions();
            updateOptions.upsert(request.getFirstChild("options").getFirstChild("upsert").boolValue());
            updateOptions.bypassDocumentValidation(
                    request.getFirstChild("options").getFirstChild("bypassDocumentValidation").boolValue());
            resultUpdate = collection.updateMany(bsonQueryDocument, bsonDocument, updateOptions);
        } else {
            resultUpdate = collection.updateMany(bsonQueryDocument, bsonDocument);
        }
        v.getNewChild("matchedCount").assignValue(Value.create(resultUpdate.getMatchedCount()));
        v.getNewChild("modifiedCount").assignValue(Value.create(resultUpdate.getModifiedCount()));
        return v;
    } catch (MongoException ex) {
        throw new FaultException("MongoException", ex);
    } catch (JsonParseException ex) {
        throw new FaultException("JsonParseException", ex);
    }
}
From source file:joliex.mongodb.MongoDbConnector.java
@RequestResponse public Value update(Value request) throws FaultException { try {//from w ww . j a va 2 s . c o m Value v = Value.create(); String collectionName = request.getFirstChild("collection").strValue(); BsonDocument bsonQueryDocument = BsonDocument.parse(request.getFirstChild("filter").strValue()); prepareBsonQueryData(bsonQueryDocument, request.getFirstChild("filter")); printlnJson("Update filter", bsonQueryDocument); BsonDocument bsonDocument = BsonDocument.parse(request.getFirstChild("documentUpdate").strValue()); printlnJson("Update documentUpdate", bsonDocument); prepareBsonQueryData(bsonDocument, request.getFirstChild("documentUpdate")); printlnJson("Update documentUpdate", bsonDocument); showLog(); if (request.hasChildren("writeConcern")) { WriteConcern writeConcern = new WriteConcern(); if (request.getFirstChild("writeConcern").hasChildren("journal")) { writeConcern.withJournal( request.getFirstChild("writeConcern").getFirstChild("journal").boolValue()); } if (request.getFirstChild("writeConcern").hasChildren("w")) { if (request.getFirstChild("writeConcern").getFirstChild("w").isInt()) { writeConcern.withW(request.getFirstChild("writeConcern").getFirstChild("w").intValue()); } if (request.getFirstChild("writeConcern").getFirstChild("w").isString()) { writeConcern.withW(request.getFirstChild("writeConcern").getFirstChild("w").strValue()); } } if (request.getFirstChild("writeConcern").hasChildren("timeout")) { writeConcern.withWTimeout( request.getFirstChild("writeConcern").getFirstChild("timeout").longValue(), TimeUnit.MILLISECONDS); } db.getCollection(collectionName, BsonDocument.class).withWriteConcern(writeConcern); } if (request.hasChildren("options")) { UpdateOptions updateOptions = new UpdateOptions(); updateOptions.upsert(request.getFirstChild("options").getFirstChild("upsert").boolValue()); updateOptions.bypassDocumentValidation( request.getFirstChild("options").getFirstChild("bypassDocumentValidation").boolValue()); UpdateResult 
resultUpdate = db.getCollection(collectionName, BsonDocument.class) .updateOne(bsonQueryDocument, bsonDocument, updateOptions); v.getNewChild("matchedCount").assignValue(Value.create(resultUpdate.getMatchedCount())); v.getNewChild("modifiedCount").assignValue(Value.create(resultUpdate.getModifiedCount())); } else { UpdateResult resultUpdate = db.getCollection(collectionName, BsonDocument.class) .updateOne(bsonQueryDocument, bsonDocument); v.getNewChild("matchedCount").assignValue(Value.create(resultUpdate.getMatchedCount())); v.getNewChild("modifiedCount").assignValue(Value.create(resultUpdate.getModifiedCount())); } return v; } catch (MongoException ex) { throw new FaultException("MongoException", ex); } }
From source file:module.CreateStudy.java
License:Open Source License
@SuppressWarnings("unchecked") public CreateStudy() { // ===== Connection ===== MongoClient mongoClient = MongoUtil.buildMongoClient(); MongoDatabase db = mongoClient.getDatabase("epimed_experiments"); // === Excel data loader === String inputfile = this.getInputDirectory() + this.getDirSeparator() + "prolung2_expgrp4.xlsx"; System.out.println("LOADING \t " + inputfile); ExcelService excelService = new ExcelService(); excelService.load(inputfile);// ww w . ja va 2s . c o m List<Object> listCel = excelService.extractColumn(0); Integer indCel = excelService.getHeaderMap().get("gse8894_sample_cel"); // === New Series === MongoCollection<Document> collectionSeries = db.getCollection("series"); Document docSeries = new Document(); docSeries.append("_id", "PROLUNG").append("title", "Lung cancerous and non-cancerous samples") .append("platforms", null).append("submission_date", today).append("last_update", today) .append("import_date", today); UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", docSeries.get("_id")), new Document("$set", docSeries)); if (updateResult.getMatchedCount() == 0) { collectionSeries.insertOne(docSeries); } // === Add samples to new series === MongoCollection<Document> collectionSamples = db.getCollection("samples"); for (int i = 0; i < listCel.size(); i++) { String gsm = this.getGsm(listCel.get(i)); Document docSample = collectionSamples.find(Filters.eq("_id", gsm)).first(); if (docSample == null) { System.err.println("ERROR! Sample " + gsm + "doesn't exist. Try another column."); gsm = this.getGsm(excelService.getData().get(i).get(indCel)); docSample = collectionSamples.find(Filters.eq("_id", gsm)).first(); if (docSample == null) { System.err.println("ERROR! Sample " + gsm + " doesn't exist. 
Exit."); System.exit(0); } else { System.err.println("Found " + gsm); } } Document expGroup = (Document) docSample.get("exp_group"); setGpl.add(expGroup.get("id_platform").toString()); List<String> listSeries = (List<String>) docSample.get("series"); listSeries.add(docSeries.getString("_id")); docSample.put("series", listSeries); System.out.println(docSample); // updateResult = collectionSamples.updateOne(Filters.eq("_id", docSample.get("_id")), new Document("$set", docSample)); } // === Update platforms of the series === System.out.println(setGpl); docSeries.put("platforms", setGpl); updateResult = collectionSeries.updateOne(Filters.eq("_id", docSeries.get("_id")), new Document("$set", docSeries)); if (updateResult.getMatchedCount() == 0) { collectionSeries.insertOne(docSeries); } }
From source file:module.ImportArrayExpress.java
License:Open Source License
/**
 * Imports ArrayExpress experiments (IDF + SDRF files) into the epimed_experiments
 * database for every accession in {@code listAccessions}: creates/updates the series
 * document, parses the clinical SDRF table into per-sample documents, and refreshes
 * the series' platform list. Accessions already imported as a GEO GSE are skipped.
 */
public ImportArrayExpress() {

    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    // NOTE(review): collection name is "sample" (singular) here while other modules
    // use "samples" — confirm this is intentional.
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // ===== Pattern ===== (matches bracketed tokens like "[label]" in SDRF headers)
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    ;
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====
    for (String accession : listAccessions) {

        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        // Download and parse the IDF (series-level metadata) file.
        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));
        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE =====
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;
            }
        }

        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series (upsert-like: update then insert if unmatched) =====
            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());
            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }
            if (commit) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }
            System.out.println(docSeries);

            // ===== Import clinical data (SDRF: one sample per line) =====
            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);
            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples ===== (first line is the header)
            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);
            for (int i = 1; i < data.size(); i++) {
                Integer nbSamples = data.size() - 1;
                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);
                if (idSample == null) {
                    // Hard failure: without a sample id the import cannot continue.
                    System.err.println("ERROR: idSample is not recongnized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(0);
                } else {
                    if (formatIdSample) {
                        // Prefix with the accession and normalize spaces to dashes.
                        idSample = accession + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                // Keep only the first whitespace-delimited token of the id.
                idSample = idSample.split(" ")[0].trim();

                // === Organism === (falls back to the configured default)
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }
                // === Platform === (LIBRARY_STRATEGY, normalized; default otherwise)
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;
                boolean analysed = false;
                if (docAlreadyExist) {
                    // NOTE(review): assumes "analyzed" is present and non-null on
                    // existing documents — an absent field would NPE on unboxing.
                    analysed = (Boolean) docSampleExist.get("analyzed");
                }

                // ===== Sample Document =====
                Document docSample = mongoService.createSample(idSample, (String) docSeries.get("_id"),
                        accessionAsList, organism, (Date) docSeries.get("submission_date"),
                        (Date) docSeries.get("last_update"), analysed);

                Document expGroup = null;
                Document parameters = null;

                System.out.println("------------------------------------------------------------------");
                if (docAlreadyExist) {
                    // === ID sample alredy exists: reuse exp_group, merge parameters ===
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample
                            + ": already exists in the database, analyzed=" + analysed);
                    expGroup = docSampleExist.get("exp_group", Document.class);
                    parameters = mongoService.updateParameters(docSampleExist, mapParameters);
                } else {
                    // === New sample ===
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample);
                    expGroup = mongoService.createExpGroup(docSample, platform, null, null, organism);
                    parameters = mongoService.createParameters(docSample, mapParameters);
                    nbImportedSamples++;
                }

                // === Update sample_title, sample_source, layout from SDRF columns ===
                expGroup.put("sample_title", parameters.getString("organism part"));
                expGroup.put("sample_source", parameters.getString("Source Name"));
                expGroup.put("layout", parameters.getString("LIBRARY_LAYOUT"));

                docSample.append("exp_group", expGroup);
                docSample.append("parameters", parameters);

                if (commit) {
                    // === Update old if already exist ===
                    if (docAlreadyExist) {
                        // collectionSamples.deleteOne(eq("_id", idSample));
                        collectionSamples.updateOne(Filters.eq("_id", idSample),
                                new Document("$set", docSample));
                    } else {
                        // ===== Insert data =====
                        collectionSamples.insertOne(docSample);
                    }
                    // ===== Update series for platforms (distinct over this series' samples) =====
                    List<String> listPlatforms = collectionSamples
                            .distinct("exp_group.id_platform", Filters.in("series", accession), String.class)
                            .into(new ArrayList<String>());
                    docSeries.append("platforms", listPlatforms);
                    collectionSeries.updateOne(Filters.eq("_id", accession), new Document("$set", docSeries));
                }
            }
        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to " + accession
                    + " exists already. Skip import.");
        }
        System.out.println("Number of imported samples: " + nbImportedSamples);
    }

    mongoClient.close();
}