Example usage for com.mongodb Block

List of usage examples for com.mongodb.Block

Introduction

This page collects usage examples for com.mongodb.Block, the callback interface that the 3.x MongoDB Java driver passes to MongoIterable.forEach to process each result document.

Prototype

void apply(T t)
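
Block<T> is a single-method callback interface; the driver invokes apply once per result. The declaration below is only a sketch of its shape for reference (the real interface ships with the 3.x driver):

public interface Block<T> {
    // Called once for each value produced by the operation.
    void apply(T t);
}

Block was removed in the 4.x driver, whose forEach accepts a java.util.function.Consumer instead.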

Usage

From source file:kiaanfx.Kiaanfx.java

private static void agg() {
    MongoClient mongoClient = new MongoClient("localhost", 27017);
    MongoDatabase db = mongoClient.getDatabase("kiaan");
    AggregateIterable<Document> iterable = db.getCollection("banks")
            .aggregate(asList(new Document("$unwind", "$branches")));

    iterable.forEach(new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    });

    // release resources
    mongoClient.close();
}
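
Because Block declares a single abstract method, the anonymous class above can be written as a lambda on Java 8+. A cast is usually needed, since MongoIterable also inherits Iterable.forEach(Consumer) and a bare lambda would be ambiguous (a sketch against the 3.x driver):

    iterable.forEach((Block<Document>) document -> System.out.println(document.toJson()));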

From source file:module.script.QueryAvailableData.java

License:Open Source License

public QueryAvailableData() {

    // ===== Service =====
    FormatService formatService = new FormatService();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Print block =====
    Block<Document> printBlock = new Block<Document>() {
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };

    // ===== Group by topology =====
    // db.getCollection('samples').aggregate({ $group: { "_id" : "$exp_group.topology", "total" : {$sum : 1} }}, {$sort : {total : -1}} )
    /*
    List<Document> listDocuments = collectionSamples.aggregate(
    Arrays.asList(
          Aggregates.group("$exp_group.topology", Accumulators.sum("total", 1)),
          Aggregates.sort(Sorts.orderBy(Sorts.descending("total")))
          ))
    .into(new ArrayList<Document>());
     */

    // ===== Group by sample =====
    /*
    List<Document> listSeries = collectionSeries
    .find()
    .projection(Projections.fields(Projections.include("title")))
    .sort(Sorts.ascending("_id"))
    .into(new ArrayList<Document>());
            
    for (Document doc : listSeries) {
            
       String idSeries = doc.getString("_id");
       Long nbSamples = collectionSamples.count((Filters.eq("series", idSeries)));
       doc.append("nbSamples", nbSamples);
    } 
    display(listSeries);
    */

    // === Export Geo for a list of idSeries ===

    // String[] listIdSeries = {"GSE11092","GSE13309", "GSE13159"};

    /*
    List<Document> docExpGroup = collectionSamples
    .find(Filters.in("series", listIdSeries))
    .projection(Projections.fields(Projections.include("exp_group"), Projections.excludeId()))
    .into(new ArrayList<Document>());
    // display(docExpGroup);
            
    List<String> header = formatService.extractHeader(docExpGroup, "exp_group");
    List<Object> data = formatService.extractData(docExpGroup, header, "exp_group");
    System.out.println(header);
    displayMatrix(data);
            
    */
    // List<Object> listObjects = formatService.convertHeterogeneousMongoDocuments(docExpGroup, "exp_group");
    // displayMatrix(listObjects);

    // List<Object> listObjects = formatService.convertHomogeneousMongoDocuments(listDocuments);

    // === Find series ===

    String[] listIdSamples = { "GSM80908", "GSM274639", "GSM274638", "GSM280213" };
    List<Document> listDocuments = collectionSamples
            .aggregate(Arrays.asList(Aggregates.match(Filters.in("_id", listIdSamples)),
                    Aggregates.group("$main_gse_number"),
                    Aggregates.sort(Sorts.orderBy(Sorts.ascending("main_gse_number")))))
            .into(new ArrayList<Document>());
    List<Object> listObjects = formatService.convertHomogeneousMongoDocuments(listDocuments);
    displayMatrix(listObjects);

    mongoClient.close();
}

From source file:modules.MongoDBClient.java

private void ReadVals() {
    try {

        eKeys = pReadAssociation.keys();
        while (eKeys.hasMoreElements()) {
            try {
                if (iConnected == 0) {
                    break;
                }

                sMongoDBAttr = (String) eKeys.nextElement();
                sIntDataPath = sIntPrefix + pReadAssociation.getProperty(sMongoDBAttr, "");

                sMongoDBAttr = sReadPrefix + sMongoDBAttr;
                ssID = sMongoDBAttr.split(sFind);
                sDataID = ssID[ssID.length - 2];
                sDataAttr = ssID[ssID.length - 1];
                sDataPath = sMongoDBAttr.replaceAll(sFind, sRepl);
                iterable = collection.find(eq(sIDName, sDataID));
                iterable.forEach(new Block<Document>() {
                    @Override
                    public void apply(final Document document) {

                        pDataSet.put(sIntDataPath, (String) document.get(sDataAttr));

                        // System.out.println(document);
                    }
                });
            } catch (Exception ex) {

                if (Debug == 1) {
                    System.out.println(ex.getMessage());
                }
            }
        }

    } catch (Exception e) {
        // errors here are silently ignored
    }
}

From source file:mongodb.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {

    //represents a pool of connections to the database
    MongoClient mongoClient = new MongoClient("10.9.17.105", 27017);

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("test");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (it's the only document in there, since we dropped the rest earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset, printing each result with a Block
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    collection.find().forEach(printBlock);

    // Clean up
    //        database.drop();

    // release resources
    mongoClient.close();
}
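
For comparison, the 4.x driver removes Block entirely, so the same iteration targets Iterable.forEach with a java.util.function.Consumer lambda (a sketch, not part of the original example):

    collection.find(gt("i", 50)).forEach(doc -> System.out.println(doc.toJson()));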

From source file:mongoSample.MongoSample.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    // the connection string argument is optional; default to localhost when absent
    String mongoServer = args.length > 0 ? args[0] : "localhost";

    MongoClient mongoClient = new MongoClient(mongoServer);
    MongoDatabase database = mongoClient.getDatabase("sakila");
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (it's the only document in there, since we dropped the rest
    // earlier on)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can
    // explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc);
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset, printing each result with a Block
    Block<Document> printBlock = new Block<Document>() {
        @Override
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), new Document("$set", new Document("i", 110)));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100),
            new Document("$inc", new Document("i", 100)));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    // collection.find().forEach(printBlock);

    // Clean up
    //database.drop();

    // release resources
    mongoClient.close();
}

From source file:mx.com.tecnomotum.testmongodb.Principal.java

public static void main(String args[]) {
    MongoClient mongoClient = new MongoClient("localhost", 27017);
    MongoDatabase db = mongoClient.getDatabase("test");
    MongoCollection<Document> coleccion = db.getCollection("restaurants");
    long totalElementos = coleccion.count();
    System.out.println("Total de elementos en la coleccin:" + totalElementos);

    // Obtener el primer elemento de la coleccin
    Document myDoc = coleccion.find().first();
    System.out.println("Primer object:" + myDoc.toJson());

    //Crear y aadir un nuevo documento a la coleccin
    Document nuevoDoc = new Document("name", "CARLITOS buf");
    nuevoDoc.append("borough", "Elvia");
    nuevoDoc.append("cuisine", "Gourmet");
    List<Document> puntuaciones = new ArrayList<>();
    Document punt = new Document();
    punt.append("grade", "A");
    punt.append("date", new Date());
    punt.append("score", 9);
    puntuaciones.add(punt);
    nuevoDoc.append("grades", puntuaciones);
    coleccion.insertOne(nuevoDoc);
    System.out.println("Total de elementos en la coleccin:" + coleccion.count());

    //OBtener un objeto de una coleccin
    Document objetoResp = coleccion.find(eq("name", "CARLITOS buf")).first();
    System.out.println("OBjeto encontrado:" + objetoResp.toJson());

    //OBtener la proyeccin del documento
    Document objetoResp2 = coleccion.find(eq("name", "CARLITOS buf"))
            .projection(fields(excludeId(), include("name"), include("grades.score"))).first();
    System.out.println("OBjeto encontrado:" + objetoResp2.toJson());

    //OBtener conjuntos de datos
    Block<Document> printBlock = new Block<Document>() {

        @Override
        public void apply(final Document doc) {
            System.out.println(doc.toJson());
        }
    };

    coleccion.find(eq("cuisine", "Hamburgers")).projection(fields(excludeId(), include("name")))
            .sort(Sorts.ascending("name")).forEach(printBlock);

}

From source file:MyLibrary.DoMongodb.java

public List<Document> doMongodbAggregation(MongoDatabase MongoDatabase1, String collections,
        JSONObject matchJSONObject, JSONObject JsonObject1) throws Exception {
    final List<Document> result = new ArrayList<>();
    Document groupDocument = JsonToDocument(JsonObject1);
    Document matchDocument = JsonToDocument(matchJSONObject);
    AggregateIterable<Document> AggregateIterable1 = MongoDatabase1.getCollection(collections)
            .aggregate(asList(new Document("$match", matchDocument), new Document("$group", groupDocument)));
    AggregateIterable1.forEach(new Block<Document>() {
        @Override
        public void apply(final Document document) {
            result.add(document);
        }
    });
    return result;
}
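
A hypothetical call site for this helper, assuming a no-arg constructor, a connected MongoDatabase named db, and that JSONObject is org.json.JSONObject (the excerpt does not show the class's imports, so all names and field values here are invented for illustration):

    // Count documents per "type" among those whose "status" is "active".
    JSONObject match = new JSONObject().put("status", "active");
    JSONObject group = new JSONObject().put("_id", "$type")
            .put("total", new JSONObject().put("$sum", 1));
    List<Document> rows = new DoMongodb().doMongodbAggregation(db, "events", match, group);
    for (Document row : rows) {
        System.out.println(row.toJson());
    }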

From source file:org.apache.eagle.alert.engine.publisher.dedup.MongoDedupEventsStore.java

License:Apache License

@Override
public Map<EventUniq, ConcurrentLinkedDeque<DedupValue>> getEvents() {
    try {
        Map<EventUniq, ConcurrentLinkedDeque<DedupValue>> result = new ConcurrentHashMap<EventUniq, ConcurrentLinkedDeque<DedupValue>>();
        BsonDocument filter = new BsonDocument();
        filter.append(DEDUP_PUBLISH_ID, new BsonString(this.publishName));
        stateCollection.find(filter).forEach(new Block<Document>() {
            @Override
            public void apply(final Document doc) {
                DedupEntity entity = TransformerUtils.transform(DedupEntity.class,
                        BsonDocument.parse(doc.toJson()));
                result.put(entity.getEventEniq(), entity.getDedupValuesInConcurrentLinkedDeque());
            }
        });
        if (LOG.isDebugEnabled()) {
            LOG.debug("Found {} dedup events from mongoDB", result.size());
        }
        return result;
    } catch (Exception e) {
        LOG.error("find dedup state failed, but the state in memory is good, could be ingored.", e);
    }
    return new HashMap<EventUniq, ConcurrentLinkedDeque<DedupValue>>();
}

From source file:org.apache.eagle.alert.metadata.impl.MongoMetadataDaoImpl.java

License:Apache License

private <T> Map<String, T> maps(MongoCollection<Document> collection, Class<T> clz, String version) {
    BsonDocument doc = new BsonDocument();
    doc.append("version", new BsonString(version));

    Map<String, T> maps = new HashMap<String, T>();
    String mapKey = (clz == SpoutSpec.class) ? "topologyId" : "topologyName";
    collection.find(doc).forEach(new Block<Document>() {
        @Override
        public void apply(Document document) {
            String json = document.toJson();
            try {
                //Due to some field name in SpoutSpec contains dot(.) which is invalid Mongo Field name,
                // we need to transform the format while reading from Mongo.
                if (clz == SpoutSpec.class) {
                    Document doc = Document.parse(json);
                    String[] metadataMapArrays = { "kafka2TupleMetadataMap", "tuple2StreamMetadataMap",
                            "streamRepartitionMetadataMap" };
                    for (String metadataMapName : metadataMapArrays) {
                        ArrayList<Document> subDocs = (ArrayList) doc.get(metadataMapName);
                        doc.remove(metadataMapName);

                        Document replaceDoc = new Document();
                        for (Document subDoc : subDocs) {
                            replaceDoc.put((String) subDoc.get("topicName"), subDoc.get(metadataMapName));
                        }
                        doc.put(metadataMapName, replaceDoc);
                    }

                    json = doc.toJson();
                }
                T t = mapper.readValue(json, clz);
                maps.put(document.getString(mapKey), t);
            } catch (IOException e) {
                LOG.error("deserialize config item failed!", e);
            }
        }
    });

    return maps;
}

From source file:org.apache.rya.forwardchain.strategy.MongoPipelineStrategy.java

License:Apache License

/**
 * Execute a CONSTRUCT rule by converting it into a pipeline, iterating
 * through the resulting documents, and inserting them back to the data
 * store as new triples. If pipeline conversion fails, falls back on
 * default execution strategy.
 * @param rule A construct query rule; not null.
 * @param metadata StatementMetadata to attach to new triples; not null.
 * @return The number of new triples inferred.
 * @throws ForwardChainException if execution fails.
 */
@Override
public long executeConstructRule(AbstractConstructRule rule, StatementMetadata metadata)
        throws ForwardChainException {
    Preconditions.checkNotNull(rule);
    logger.info("Applying inference rule " + rule + "...");
    long timestamp = System.currentTimeMillis();
    // Get a pipeline that turns individual matches into triples
    List<Bson> pipeline = null;
    try {
        int requireSourceLevel = 0;
        if (!usedBackup) {
            // If we can assume derivation levels are set properly, we can optimize by
            // pruning any derived fact whose sources are all old information. (i.e. we can
            // infer that the pruned fact would have already been derived in a previous
            // step.) But if the backup strategy has ever been used, the source triples aren't
            // guaranteed to have derivation level set.
            requireSourceLevel = requiredLevel;
        }
        pipeline = toPipeline(rule, requireSourceLevel, timestamp);
    } catch (ForwardChainException e) {
        logger.error(e);
    }
    if (pipeline == null) {
        if (backup == null) {
            logger.error("Couldn't convert " + rule + " to pipeline:");
            for (String line : rule.getQuery().toString().split("\n")) {
                logger.error("\t" + line);
            }
            throw new UnsupportedOperationException("Couldn't convert query to pipeline.");
        } else {
            logger.debug("Couldn't convert " + rule + " to pipeline:");
            for (String line : rule.getQuery().toString().split("\n")) {
                logger.debug("\t" + line);
            }
            logger.debug("Using fallback strategy.");
            usedBackup = true;
            return backup.executeConstructRule(rule, metadata);
        }
    }
    // Execute the pipeline
    for (Bson step : pipeline) {
        logger.debug("\t" + step.toString());
    }
    LongAdder count = new LongAdder();
    baseCollection.aggregate(pipeline).allowDiskUse(true).batchSize(PIPELINE_BATCH_SIZE)
            .forEach(new Block<Document>() {
                @Override
                public void apply(Document doc) {
                    final DBObject dbo = (DBObject) JSON.parse(doc.toJson());
                    RyaStatement rstmt = storageStrategy.deserializeDBObject(dbo);
                    if (!statementExists(rstmt)) {
                        count.increment();
                        doc.replace(SimpleMongoDBStorageStrategy.STATEMENT_METADATA, metadata.toString());
                        try {
                            batchWriter.addObjectToQueue(doc);
                        } catch (MongoDbBatchWriterException e) {
                            logger.error("Couldn't insert " + rstmt, e);
                        }
                    }
                }
            });
    try {
        batchWriter.flush();
    } catch (MongoDbBatchWriterException e) {
        throw new ForwardChainException("Error writing to Mongo", e);
    }
    logger.info("Added " + count + " new statements.");
    executionTimes.compute(rule, (r, previous) -> {
        if (previous != null && previous > timestamp) {
            return previous;
        } else {
            return timestamp;
        }
    });
    return count.longValue();
}