List of usage examples for com.mongodb.client.model.Filters.eq
public static <TItem> Bson eq(final String fieldName, @Nullable final TItem value)
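Filters.eq creates a filter that matches all documents where the value of the named field equals the specified value. Before the collected examples below, here is a minimal, self-contained sketch; the connection string, database, collection, and field names are assumptions for illustration only.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import org.bson.Document;

public class FiltersEqSketch {
    public static void main(String[] args) {
        // Assumed local server, database, and collection names.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = client.getDatabase("test").getCollection("users");
            // Equivalent to the query filter {username: "alice123"}.
            Document first = users.find(Filters.eq("username", "alice123")).first();
            System.out.println(first);
        }
    }
}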
From source file:de.gwdg.europeanaqa.client.rest.DocumentTransformer.java
private Document resolveReference(String collection, ObjectId id, boolean withFieldRename) {
    Document doc = mongoDb.getCollection(collection).find(Filters.eq("_id", id)).first();
    if (doc != null) {
        doc.remove("_id");
        doc.remove("className");
        transformLanguageStructure(doc);
        if (collection.equals("PhysicalThing") && withFieldRename) {
            doc.put("europeanaProxy", Arrays.asList(((Boolean) doc.get("europeanaProxy")).toString()));
        }
        if (withFieldRename)
            replaceKeys(doc);
        for (String key : subEntities.keySet()) {
            if (doc.containsKey(key)) {
                List<Document> subDocs = new ArrayList<Document>();
                List<DBRef> subRefs = (List<DBRef>) doc.get(key);
                for (DBRef subRef : subRefs) {
                    subDocs.add(resolveReference(subRef, withFieldRename));
                }
                doc.remove(key);
                doc.put(subEntities.get(key), subDocs);
            }
        }
    }
    return doc;
}
From source file:de.pvsnp.chat.mongodb.Massage.java
public Massage(String key, ChatAPI main) {
    this.key = key;
    this.main = main;
    this.massage = main.getManager().getMassages();
    this.find = massage.find(Filters.eq("name", key));
    this.document = find.first();
    massages = new ArrayList<>();
}
From source file:de.pvsnp.chat.mongodb.Massage.java
public void save() {
    Document doc = new Document("name", key).append("msg", massages);
    massage.updateOne(Filters.eq("name", key), new Document("$set", doc));
}
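As a variant of the save() method above (a sketch only, reusing the same collection and field names from that example), the write can be turned into an upsert so that a missing "name" document is created instead of the update being silently skipped:

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOptions;
import org.bson.Document;
import java.util.List;

public class MassageUpsertSketch {
    // Updates the document matching "name" = key, or inserts it when no match exists.
    public static void saveOrCreate(MongoCollection<Document> massage, String key, List<String> massages) {
        Document doc = new Document("name", key).append("msg", massages);
        massage.updateOne(Filters.eq("name", key), new Document("$set", doc),
                new UpdateOptions().upsert(true));
    }
}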
From source file:documentation.ChangeStreamSamples.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // Select the MongoDB database.
    MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
    database.drop();
    sleep();

    // Select the collection to query.
    MongoCollection<Document> collection = database.getCollection("documents");

    /*
     * Example 1
     * Create a simple change stream against an existing collection.
     */
    System.out.println("1. Initial document from the Change Stream:");

    // Create the change stream cursor.
    MongoChangeStreamCursor<ChangeStreamDocument<Document>> cursor = collection.watch().cursor();

    // Insert a test document into the collection.
    collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
    ChangeStreamDocument<Document> next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 2
     * Create a change stream with 'lookup' option enabled.
     * The test document will be returned with a full version of the updated document.
     */
    System.out.println("2. Document from the Change Stream, with lookup enabled:");

    // Create the change stream cursor.
    cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Update the test document.
    collection.updateOne(Document.parse("{username: 'alice123'}"),
            Document.parse("{$set : { email: 'alice@example.com'}}"));

    // Block until the next result is returned.
    next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 3
     * Create a change stream with 'lookup' option using a $match and ($redact or $project) stage.
     */
    System.out.println(
            "3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");

    // Insert some dummy data.
    collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));

    // Create $match pipeline stage.
    List<Bson> pipeline = singletonList(
            Aggregates.match(Filters.or(Document.parse("{'fullDocument.username': 'alice123'}"),
                    Filters.in("operationType", asList("update", "replace", "delete")))));

    // Create the change stream cursor with $match.
    cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Forward to the end of the change stream.
    next = cursor.tryNext();

    // Update the test document.
    collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
    next = cursor.next();
    System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));

    // Replace the test document.
    collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
    next = cursor.next();
    System.out.println(format("Replace operationType: %s", next));

    // Delete the test document.
    collection.deleteOne(Filters.eq("username", "alice123"));
    next = cursor.next();
    System.out.println(format("Delete operationType: %s", next));
    cursor.close();
    sleep();

    /*
     * Example 4
     * Resume a change stream using a resume token.
     */
    System.out.println("4. Document from the Change Stream including a resume token:");

    // Get the resume token from the last document we saw in the previous change stream cursor.
    BsonDocument resumeToken = cursor.getResumeToken();
    System.out.println(resumeToken);

    // Pass the resume token to the resumeAfter method to continue the change stream cursor.
    cursor = collection.watch().resumeAfter(resumeToken).cursor();

    // Insert a test document.
    collection.insertOne(Document.parse("{test: 'd'}"));

    // Block until the next result is returned.
    next = cursor.next();
    System.out.println(next);
    cursor.close();
}
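The change-stream example above matches on operationType with Filters.in; the same pipeline position also accepts Filters.eq when only a single operation type is of interest. A small sketch under that assumption (the collection is supplied by the caller):

import com.mongodb.client.MongoChangeStreamCursor;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.changestream.ChangeStreamDocument;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.List;

import static java.util.Collections.singletonList;

public class InsertOnlyChangeStreamSketch {
    // Opens a change stream that reports insert events only.
    public static MongoChangeStreamCursor<ChangeStreamDocument<Document>> watchInserts(
            MongoCollection<Document> collection) {
        List<Bson> pipeline = singletonList(Aggregates.match(Filters.eq("operationType", "insert")));
        return collection.watch(pipeline).cursor();
    }
}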
From source file:es.dheraspi.sums.model.DAOMongoDB.java
@Override
public void insertSummoner(Summoner summoner, String region) {
    MongoCredential credential = MongoCredential.createCredential(user, dbname, password.toCharArray());
    try (MongoClient mongoClient = new MongoClient(new ServerAddress(host), Arrays.asList(credential))) {
        db = mongoClient.getDatabase(dbname);
        MongoCollection<Document> coll = db.getCollection("summoners");

        int profileIconID = summoner.getProfileIconID();
        // Build the $set update document with the summoner's data
        // (all fields belong inside the $set sub-document, otherwise updateOne rejects the update).
        Bson doc = new Document("$set",
                new Document("_id", summoner.getID()).append("name", summoner.getName())
                        .append("level", summoner.getLevel())
                        .append("profileIconID", profileIconID < 0 ? 0 : profileIconID));
        Bson filter = Filters.eq("_id", region);

        // No per-region handling is performed here.
        switch (region) {
        case "EUW": case "EUNE": case "NA": case "LAN": case "LAS": case "BR":
        case "TR": case "RU": case "OCE": case "KR": case "JP":
            break;
        }

        UpdateOptions options = new UpdateOptions().upsert(true);
        coll.updateOne(filter, doc, options);
    } catch (APIException ex) {
        // Some unknown error when trying to get matchList
    }
}
From source file:eu.project.ttc.models.occstore.MongoDBOccurrenceStore.java
License:Apache License
@Override
public void flush() {

    // bulk write occurrences
    final List<org.bson.Document> occDocuments = Lists.newArrayListWithCapacity(occurrencesBuffer.size());
    for (TermOccurrence o : this.occurrencesBuffer) {
        occDocuments.add(new org.bson.Document().append(TERM_ID, o.getTerm().getId())
                .append(DOC_ID, o.getSourceDocument().getId()).append(BEGIN, o.getBegin())
                .append(END, o.getEnd()).append(COVERED_TEXT, o.getCoveredText()));
    }
    if (!occurrencesBuffer.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                occurrenceCollection.insertMany(occDocuments);
            }
        });

    // bulk write documents
    final List<WriteModel<org.bson.Document>> documentUrlsOps = Lists
            .newArrayListWithCapacity(documentsUrls.size());
    for (Map.Entry<Integer, String> e : this.documentsUrls.entrySet()) {
        UpdateOneModel<org.bson.Document> w = new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, e.getKey()),
                Updates.set(DOC_URL, e.getValue()), new UpdateOptions().upsert(true));
        documentUrlsOps.add(w);
    }
    if (!documentUrlsOps.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                documentUrlCollection.bulkWrite(documentUrlsOps, new BulkWriteOptions().ordered(false));
            }
        });

    // bulk write terms
    final List<WriteModel<org.bson.Document>> termsOps = Lists.newArrayList();
    for (Term t : termsBuffer.keySet()) {
        UpdateOneModel<org.bson.Document> w = new UpdateOneModel<org.bson.Document>(Filters.eq(_ID, t.getId()),
                Updates.inc(FREQUENCY, termsBuffer.get(t).intValue()), new UpdateOptions().upsert(true));
        termsOps.add(w);
    }
    if (!termsOps.isEmpty())
        executor.execute(new Runnable() {
            public void run() {
                termCollection.bulkWrite(termsOps, new BulkWriteOptions().ordered(false));
            }
        });

    resetBuffers();
}
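The flush() above combines Filters.eq on _id with upserting UpdateOneModel writes inside a bulkWrite. A condensed, self-contained sketch of that pattern (the collection, field name, and id are assumptions for illustration):

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.Updates;
import com.mongodb.client.model.WriteModel;
import org.bson.Document;

import java.util.ArrayList;
import java.util.List;

public class BulkUpsertSketch {
    // Increments a per-term frequency counter, creating the document if it does not exist yet.
    public static void incrementFrequency(MongoCollection<Document> termCollection, int termId, int delta) {
        List<WriteModel<Document>> ops = new ArrayList<>();
        ops.add(new UpdateOneModel<>(Filters.eq("_id", termId),
                Updates.inc("frequency", delta),
                new UpdateOptions().upsert(true)));
        termCollection.bulkWrite(ops, new BulkWriteOptions().ordered(false));
    }
}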
From source file:eu.project.ttc.models.occstore.MongoDBOccurrenceStore.java
License:Apache License
@Override
public void removeTerm(final Term t) {
    executor.execute(new Runnable() {
        public void run() {
            termCollection.deleteOne(new org.bson.Document(_ID, t.getId()));
            occurrenceCollection.deleteMany(Filters.eq(_ID, t.getId()));
        }
    });
}
From source file:eu.vital.vitalcep.collector.Collector.java
private Collector() throws IOException {
    ConfigReader configReader = ConfigReader.getInstance();
    mongoURL = configReader.get(ConfigReader.MONGO_URL);
    mongoDB = configReader.get(ConfigReader.MONGO_DB);

    getCollectorList();

    ScheduledExecutorService exec = Executors.newScheduledThreadPool(2);
    Runnable collectoRunnable;
    collectoRunnable = new Runnable() {
        @Override
        public void run() {
            if (sensors.length() > 0) {
                mongo = new MongoClient(new MongoClientURI(mongoURL));
                db = mongo.getDatabase(mongoDB);
            }
            Date NOW = new Date();
            String nowString = getXSDDateTime(NOW);
            for (int i = 0; i < sensors.length(); i++) {
                try {
                    String cookie = getListenerCredentials(i);

                    JSONArray aData = new JSONArray();
                    String type = sensors.getJSONObject(i).getString("cepType");
                    if (type.equals("CONTINUOUS")) {
                        try {
                            DMSListener oDMS = new DMSListener(cookie);
                            aData = oDMS.getObservations(sensors.getJSONObject(i).getJSONArray("sources"),
                                    sensors.getJSONObject(i).getJSONArray("properties"),
                                    sensors.getJSONObject(i).getString("lastRequest"));
                        } catch (IOException | KeyManagementException | NoSuchAlgorithmException
                                | KeyStoreException ex) {
                            java.util.logging.Logger.getLogger(Collector.class.getName()).log(Level.SEVERE, null, ex);
                        }
                        if (aData.length() > 0) {
                            sendData2CEP(aData, i);
                        }
                    } else {
                        try {
                            JSONObject sensor = sensors.getJSONObject(i);
                            JSONArray requests = sensor.getJSONArray("requests");
                            PPIListener oPPI = new PPIListener(cookie);
                            aData = oPPI.getObservations(requests, sensor.getString("lastRequest"));
                            if (aData.length() > 0) {
                                sendData2CEP(aData, i);
                            }
                        } catch (IOException | KeyManagementException | NoSuchAlgorithmException
                                | KeyStoreException ex) {
                            java.util.logging.Logger.getLogger(Collector.class.getName()).log(Level.SEVERE, null, ex);
                        }
                    }

                    sensors.getJSONObject(i).put("lastRequest", nowString);

                    if (mongo == null)
                        mongo = new MongoClient(new MongoClientURI(mongoURL));
                    if (db == null)
                        db = mongo.getDatabase(mongoDB);

                    // Record the time of this request on the matching cepinstances document.
                    Bson filter = Filters.eq("_id", new ObjectId(sensors.getJSONObject(i).getString("id")));
                    Bson update = new Document("$set", new Document("lastRequest", nowString));
                    UpdateOptions options = new UpdateOptions().upsert(false);
                    UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter, update, options);

                } catch (GeneralSecurityException | IOException | ParseException ex) {
                    java.util.logging.Logger.getLogger(Collector.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (db != null)
                        db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                }
            }
            if (db != null)
                db = null;
            if (mongo != null) {
                mongo.close();
                mongo = null;
            }
        }

        private void sendData2CEP(JSONArray aData, int i) throws JSONException, ParseException {
            Decoder decoder = new Decoder();
            ArrayList<String> simpleEventAL = decoder.JsonldArray2DolceInput(aData);
            MqttAllInOne oMqtt = new MqttAllInOne();
            TMessageProc MsgProcc = new TMessageProc();
            // TODO: check the client name; see the CEP instances and what happens if the topic already exists.
            String clientName = "collector_" + RandomStringUtils.randomAlphanumeric(4);
            oMqtt.sendMsg(MsgProcc, clientName, simpleEventAL, sensors.getJSONObject(i).getString("mqin"),
                    sensors.getJSONObject(i).getString("mqout"), true);
        }

        private String getListenerCredentials(int i)
                throws IOException, GeneralSecurityException, JSONException {
            StringBuilder ck = new StringBuilder();
            Security slogin = new Security();
            JSONObject credentials = new JSONObject();
            // Boolean token = slogin.login(sensors.getJSONArray(i).getJSONObject(0).getString("username"),
            //         decrypt(sensors.getJSONArray(i).getJSONObject(0).getString("password")), false, ck);
            Boolean token = slogin.login("elisa", "elisotas1", false, ck);
            if (!token) {
                //throw new
            }
            String cookie = ck.toString();
            return cookie;
        }
    };

    exec.scheduleAtFixedRate(collectoRunnable, 0, 10, TimeUnit.SECONDS);
}
From source file:eu.vital.vitalcep.restApp.alert.Alerts.java
/**
 * Deletes an alert.
 *
 * @param filterId
 * @param req
 * @return the response status
 */
@DELETE
@Path("deletealert")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteAlert(String filterId, @Context HttpServletRequest req) throws IOException {

    MongoClient mongo = null;
    MongoDatabase db = null;
    try {
        StringBuilder ck = new StringBuilder();
        Security slogin = new Security();
        Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
        if (!token) {
            return Response.status(Response.Status.UNAUTHORIZED).build();
        }
        this.cookie = ck.toString();

        JSONObject jo = new JSONObject(filterId);
        String idjo = jo.getString("id");

        mongo = new MongoClient(new MongoClientURI(mongoURL));
        db = mongo.getDatabase(mongoDB);
        try {
            db.getCollection("alerts");
        } catch (Exception e) {
            // Mongo is down
            mongo.close();
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }

        MongoCollection<Document> coll = db.getCollection("alerts");

        // Find the alert by its "id" field.
        Bson filter = Filters.eq("id", idjo);
        FindIterable<Document> iterable = coll.find(filter);

        String cepInstance;
        CEP cepProcess = new CEP();

        if (iterable != null && iterable.first() != null) {
            Document doc = iterable.first();
            cepInstance = doc.getString("cepinstance");

            MongoCollection<Document> collInstances = db.getCollection("cepinstances");
            ObjectId ci = new ObjectId(cepInstance);
            Bson filterInstances = Filters.eq("_id", ci);
            FindIterable<Document> iterable2 = collInstances.find(filterInstances);
            if (iterable2 != null) {
                Document doc2 = iterable2.first();
                cepProcess.PID = doc2.getInteger("PID");
                cepProcess.fileName = doc2.getString("fileName");
                cepProcess.cepFolder = doc2.getString("cepFolder");
                cepProcess.type = CEP.CEPType.ALERT.toString();

                CepProcess cp = new CepProcess(null, null, null, null);
                cp.PID = doc2.getInteger("PID");
                cepProcess.cp = cp;

                if (!cepProcess.cepDispose()) {
                    java.util.logging.Logger.getLogger(Alerts.class.getName()).log(Level.SEVERE,
                            "bcep Instance not terminated");
                } else {
                    // Mark the CEP instance as terminated.
                    Bson filter1 = Filters.eq("_id", ci);
                    Bson update = new Document("$set", new Document("status", "terminated"));
                    UpdateOptions options = new UpdateOptions().upsert(false);
                    UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter1, update, options);
                }
                CepContainer.deleteCepProcess(cp.PID);
            }
        } else {
            return Response.status(Response.Status.NOT_FOUND).build();
        }

        DeleteResult deleteResult = coll.deleteOne(eq("id", idjo));
        if (deleteResult.getDeletedCount() < 1) {
            return Response.status(Response.Status.NOT_FOUND).build();
        } else {
            return Response.status(Response.Status.OK).build();
        }
    } catch (Exception e) {
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
    } finally {
        db = null;
        if (mongo != null) {
            mongo.close();
            mongo = null;
        }
    }
}
From source file:eu.vital.vitalcep.restApp.cepRESTApi.CEPICO.java
/**
 * Deletes a CEPICO.
 *
 * @param filterId
 * @param req
 * @return the response status
 */
@DELETE
@Path("deletecepico")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteCEPICO(String filterId, @Context HttpServletRequest req) throws IOException {

    StringBuilder ck = new StringBuilder();
    Security slogin = new Security();
    Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
    if (!token) {
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }
    this.cookie = ck.toString();

    JSONObject jo = new JSONObject(filterId);
    String idjo = jo.getString("id");

    MongoClient mongo = new MongoClient(new MongoClientURI(mongoURL));
    MongoDatabase db = mongo.getDatabase(mongoDB);
    try {
        db.getCollection("cepicos");
    } catch (Exception e) {
        // Mongo is down
        mongo.close();
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
    }

    MongoCollection<Document> coll = db.getCollection("cepicos");

    // Find the CEPICO by its "id" field.
    Bson filter = Filters.eq("id", idjo);
    FindIterable<Document> iterable = coll.find(filter);

    String cepInstance;
    CEP cepProcess = new CEP();

    if (iterable != null && iterable.first() != null) {
        Document doc = iterable.first();
        cepInstance = doc.getString("cepinstance");

        MongoCollection<Document> collInstances = db.getCollection("cepinstances");
        ObjectId ci = new ObjectId(cepInstance);
        Bson filterInstances = Filters.eq("_id", ci);
        FindIterable<Document> iterable2 = collInstances.find(filterInstances);
        if (iterable2 != null) {
            Document doc2 = iterable2.first();
            cepProcess.PID = doc2.getInteger("PID");
            cepProcess.fileName = doc2.getString("fileName");
            cepProcess.cepFolder = doc2.getString("cepFolder");
            cepProcess.type = CEP.CEPType.CEPICO.toString();

            CepProcess cp = new CepProcess(null, null, null, null);
            cp.PID = doc2.getInteger("PID");
            cepProcess.cp = cp;

            if (!cepProcess.cepDispose()) {
                java.util.logging.Logger.getLogger(CEPICO.class.getName()).log(Level.SEVERE,
                        "bcep Instance not terminated");
            } else {
                // Mark the CEP instance as terminated.
                Bson filter1 = Filters.eq("_id", ci);
                Bson update = new Document("$set", new Document("status", "terminated"));
                UpdateOptions options = new UpdateOptions().upsert(false);
                UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter1, update, options);
            }
            CepContainer.deleteCepProcess(cp.PID);
        }
    } else {
        return Response.status(Response.Status.NOT_FOUND).build();
    }

    DeleteResult deleteResult = coll.deleteOne(eq("id", idjo));
    if (deleteResult.getDeletedCount() < 1) {
        return Response.status(Response.Status.NOT_FOUND).build();
    } else {
        return Response.status(Response.Status.OK).build();
    }
}