Example usage for com.mongodb.client.model Aggregates match

List of usage examples for com.mongodb.client.model Aggregates match

Introduction

This page lists example usages of com.mongodb.client.model.Aggregates.match, drawn from open-source projects.

Prototype

public static Bson match(final Bson filter) 

Document

Creates a $match pipeline stage for the specified filter.
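
A minimal, self-contained sketch of the call (the connection string, the test database, the users collection, and the status field are illustrative assumptions, not taken from the examples below):

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.Collections;

public class MatchExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = client.getDatabase("test").getCollection("users");

            // $match filters the documents entering the pipeline, so later stages only see matches.
            Bson matchStage = Aggregates.match(Filters.eq("status", "ACTIVE")); // hypothetical field and value

            for (Document doc : users.aggregate(Collections.singletonList(matchStage))) {
                System.out.println(doc.toJson());
            }
        }
    }
}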

Usage

From source file:com.epam.dlab.backendapi.dao.azure.AzureBillingDAO.java

License:Apache License

public Document getReport(UserInfo userInfo, AzureBillingFilter filter) {

    boolean isFullReport = UserRoles.checkAccess(userInfo, RoleType.PAGE,
            "/api/infrastructure_provision/billing");
    if (isFullReport) {
        if (filter.getUser() != null) {
            filter.getUser().replaceAll(String::toLowerCase);
        }
    } else {
        filter.setUser(Lists.newArrayList(userInfo.getName().toLowerCase()));
    }

    List<Bson> matchCriteria = matchCriteria(filter);
    List<Bson> pipeline = new ArrayList<>();
    if (!matchCriteria.isEmpty()) {
        pipeline.add(Aggregates.match(Filters.and(matchCriteria)));
    }
    pipeline.add(groupCriteria());
    pipeline.add(sortCriteria());

    return prepareReport(filter.getStatuses(), filter.getNodeSize() != null && !filter.getNodeSize().isEmpty(),
            getCollection(MongoKeyWords.BILLING_DETAILS).aggregate(pipeline), getShapes(filter.getNodeSize()))
                    .append(FULL_REPORT, isFullReport);
}

From source file:com.epam.dlab.billing.azure.AzureBillingDetailsService.java

License:Apache License

public void updateBillingDetails(String user) {
    log.debug("Updating billing details for user {}", user);

    try {
        AggregateIterable<Document> aggregateIterable = mongoDbBillingClient.getDatabase()
                .getCollection(MongoKeyWords.BILLING_DETAILS)
                .aggregate(Lists.newArrayList(
                        Aggregates.match(Filters.and(Filters.eq(MongoKeyWords.DLAB_USER, user),
                                Filters.in(MongoKeyWords.RESOURCE_TYPE, DlabResourceType.EXPLORATORY.toString(),
                                        DlabResourceType.COMPUTATIONAL.toString(),
                                        DlabResourceType.VOLUME.toString()))),

                        Aggregates.group(
                                getGroupingFields(MongoKeyWords.DLAB_ID, MongoKeyWords.DLAB_USER,
                                        MongoKeyWords.EXPLORATORY_ID, MongoKeyWords.RESOURCE_TYPE,
                                        MongoKeyWords.RESOURCE_NAME, MongoKeyWords.COMPUTATIONAL_ID,
                                        MongoKeyWords.METER_CATEGORY),
                                Accumulators.sum(MongoKeyWords.COST,
                                        MongoKeyWords.prepend$(MongoKeyWords.COST)),
                                Accumulators.min(MongoKeyWords.USAGE_FROM,
                                        MongoKeyWords.prepend$(MongoKeyWords.USAGE_DAY)),
                                Accumulators.max(MongoKeyWords.USAGE_TO,
                                        MongoKeyWords.prepend$(MongoKeyWords.USAGE_DAY))),

                        Aggregates.sort(Sorts.ascending(MongoKeyWords.prependId(MongoKeyWords.RESOURCE_NAME),
                                MongoKeyWords.prependId(MongoKeyWords.METER_CATEGORY)))));

        updateBillingDetails(user, mapToDetails(aggregateIterable));
    } catch (RuntimeException e) {
        log.error("Updating billing details for user {} is failed", user, e);
    }
}

From source file:documentation.ChangeStreamSamples.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // Select the MongoDB database.
    MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
    database.drop();
    sleep();

    // Select the collection to query.
    MongoCollection<Document> collection = database.getCollection("documents");

    /*
     * Example 1
     * Create a simple change stream against an existing collection.
     */
    System.out.println("1. Initial document from the Change Stream:");

    // Create the change stream cursor.
    MongoChangeStreamCursor<ChangeStreamDocument<Document>> cursor = collection.watch().cursor();

    // Insert a test document into the collection.
    collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
    ChangeStreamDocument<Document> next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 2
     * Create a change stream with 'lookup' option enabled.
     * The test document will be returned with a full version of the updated document.
     */
    System.out.println("2. Document from the Change Stream, with lookup enabled:");

    // Create the change stream cursor.
    cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Update the test document.
    collection.updateOne(Document.parse("{username: 'alice123'}"),
            Document.parse("{$set : { email: 'alice@example.com'}}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 3
     * Create a change stream with 'lookup' option using a $match and ($redact or $project) stage.
     */
    System.out.println(
            "3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");

    // Insert some dummy data.
    collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));

    // Create $match pipeline stage.
    List<Bson> pipeline = singletonList(
            Aggregates.match(Filters.or(Document.parse("{'fullDocument.username': 'alice123'}"),
                    Filters.in("operationType", asList("update", "replace", "delete")))));

    // Create the change stream cursor with $match.
    cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Forward to the end of the change stream
    next = cursor.tryNext();

    // Update the test document.
    collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
    next = cursor.next();
    System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));

    // Replace the test document.
    collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
    next = cursor.next();
    System.out.println(format("Replace operationType: %s", next));

    // Delete the test document.
    collection.deleteOne(Filters.eq("username", "alice123"));
    next = cursor.next();
    System.out.println(format("Delete operationType: %s", next));
    cursor.close();
    sleep();

    /**
     * Example 4
     * Resume a change stream using a resume token.
     */
    System.out.println("4. Document from the Change Stream including a resume token:");

    // Get the resume token from the last document we saw in the previous change stream cursor.
    BsonDocument resumeToken = cursor.getResumeToken();
    System.out.println(resumeToken);

    // Pass the resume token to the resume after function to continue the change stream cursor.
    cursor = collection.watch().resumeAfter(resumeToken).cursor();

    // Insert a test document.
    collection.insertOne(Document.parse("{test: 'd'}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
}

From source file:module.script.epilung.SearchSamples.java

License:Open Source License

public SearchSamples() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");
    MongoCollection<Document> collectionPlatforms = db.getCollection("platforms");

    Bson filters = Filters.and(
            Filters.in("exp_group.id_platform", new String[] { "GPL13534", "GPL8490", "GPL21145" }),
            Filters.eq("exp_group.id_tissue_status", 1), Filters.ne("exp_group.id_topology", null));

    /*
    List<Document> list = collectionSamples
    .find(filters)
    .into(new ArrayList<Document>());
    */

    List<Document> list = collectionSamples.aggregate(Arrays.asList(Aggregates.match(filters),
            Aggregates.group("$exp_group.topology", Accumulators.sum("total", 1)),
            Aggregates.sort(Sorts.orderBy(Sorts.descending("total"))))).into(new ArrayList<Document>());

    for (int i = 0; i < list.size(); i++) {
        System.out.println((i + 1) + " " + list.get(i));
    }

    collectionPlatforms.find(Filters.regex("title", ".*ethyl.*")).forEach(printBlock);

    mongoClient.close();

}

From source file:module.script.QueryAvailableData.java

License:Open Source License

public QueryAvailableData() {

    // ===== Service =====
    FormatService formatService = new FormatService();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Print block =====
    Block<Document> printBlock = new Block<Document>() {
        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };

    // ===== Group by topology =====
    // db.getCollection('samples').aggregate({ $group: { "_id" : "$exp_group.topology", "total" : {$sum : 1} }}, {$sort : {total : -1}} )
    /*
    List<Document> listDocuments = collectionSamples.aggregate(
    Arrays.asList(
          Aggregates.group("$exp_group.topology", Accumulators.sum("total", 1)),
          Aggregates.sort(Sorts.orderBy(Sorts.descending("total")))
          ))
    .into(new ArrayList<Document>());
     */

    // ===== Group by sample =====
    /*
    List<Document> listSeries = collectionSeries
    .find()
    .projection(Projections.fields(Projections.include("title")))
    .sort(Sorts.ascending("_id"))
    .into(new ArrayList<Document>());
            
    for (Document doc : listSeries) {
            
       String idSeries = doc.getString("_id");
       Long nbSamples = collectionSamples.count((Filters.eq("series", idSeries)));
       doc.append("nbSamples", nbSamples);
    } 
    display(listSeries);
    */

    // === Export Geo for a list of idSeries ===

    // String[] listIdSeries = {"GSE11092","GSE13309", "GSE13159"};

    /*
    List<Document> docExpGroup = collectionSamples
    .find(Filters.in("series", listIdSeries))
    .projection(Projections.fields(Projections.include("exp_group"), Projections.excludeId()))
    .into(new ArrayList<Document>());
    // display(docExpGroup);
            
    List<String> header = formatService.extractHeader(docExpGroup, "exp_group");
    List<Object> data = formatService.extractData(docExpGroup, header, "exp_group");
    System.out.println(header);
    displayMatrix(data);
            
    */
    // List<Object> listObjects = formatService.convertHeterogeneousMongoDocuments(docExpGroup, "exp_group");
    // displayMatrix(listObjects);

    // List<Object> listObjects = formatService.convertHomogeneousMongoDocuments(listDocuments);

    // === Find series ===

    String[] listIdSamples = { "GSM80908", "GSM274639", "GSM274638", "GSM280213" };
    List<Document> listDocuments = collectionSamples
            .aggregate(Arrays.asList(Aggregates.match(Filters.in("_id", listIdSamples)),
                    Aggregates.group("$main_gse_number"),
                    Aggregates.sort(Sorts.orderBy(Sorts.ascending("main_gse_numbe")))))
            .into(new ArrayList<Document>());
    List<Object> listObjects = formatService.convertHomogeneousMongoDocuments(listDocuments);
    displayMatrix(listObjects);

    mongoClient.close();
}

From source file:mongodb_teste.Jmongo.java

private void jBProfSearchActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jBProfSearchActionPerformed
    // TODO add your handling code here:
    jshowProfSearch.setText("");
    MongoClient mongoClient = new MongoClient();
    MongoDatabase db = mongoClient.getDatabase("BDprject");
    Iterable<Document> iterable = db.getCollection("Professores")
            .aggregate(Arrays.asList((Aggregates.match(new Document("nome", jSearchName.getText()))),
                    Aggregates.unwind("$materias"),
                    Aggregates.group("$materias.nota", Accumulators.sum("total", 1)),
                    Aggregates.sort(new Document("total", -1))));
    Iterator<Document> it = iterable.iterator();
    while (it.hasNext()) {
        jshowProfSearch.append(it.next().toString() + "\n");
    }
    mongoClient.close();
}

From source file:mongodb_teste.MongoDB_teste.java

/**
 * @param args the command line arguments
 */
public static void teste(String[] args) throws ParseException {
    // TODO code application logic here
    MongoClient mongoClient = new MongoClient();
    MongoDatabase db = mongoClient.getDatabase("BDprject");
    DateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ENGLISH);
    /*db.getCollection("Professores").insertOne(
    new Document("nome", "Lana")
        .append("email", "123@ufu.com")
        .append("materias", asList(
                new Document()
                        .append("nome", "EDG")
                        .append("nota", "B"),
                new Document()
                        .append("nome", "BD")
                        .append("nota", "B")))
        .append("telefone", "435341543"));*/
    Iterable<Document> iterable = db.getCollection("Professores")
            .aggregate(Arrays.asList((Aggregates.match(new Document("nome", "Miguel"))),
                    Aggregates.unwind("$materias"),
                    Aggregates.group("$materias.nota", Accumulators.sum("total", 1)),
                    Aggregates.sort(new Document("total", -1))));
    Iterator<Document> it = iterable.iterator();
    while (it.hasNext()) {
        System.out.println(it.next().get("_id"));
    }
}

From source file:org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode.java

License:Apache License

/**
 * Create a pipeline query node based on a StatementPattern.
 * @param collection The collection of triples to query.
 * @param baseSP The leaf node in the query tree.
 */
public AggregationPipelineQueryNode(final MongoCollection<Document> collection, final StatementPattern baseSP) {
    this.collection = Preconditions.checkNotNull(collection);
    Preconditions.checkNotNull(baseSP);
    this.varToOriginalName = HashBiMap.create();
    final StatementVarMapping mapping = new StatementVarMapping(baseSP, varToOriginalName);
    this.assuredBindingNames = new HashSet<>(mapping.varNames());
    this.bindingNames = new HashSet<>(mapping.varNames());
    this.pipeline = new LinkedList<>();
    this.pipeline.add(Aggregates.match(getMatchExpression(baseSP)));
    this.pipeline.add(Aggregates.project(mapping.getProjectExpression()));
}

From source file:org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode.java

License:Apache License

/**
 * Add a join with an individual {@link StatementPattern} to the pipeline.
 * @param sp The statement pattern to join with
 * @return true if the join was successfully added to the pipeline.
 */
public boolean joinWith(final StatementPattern sp) {
    Preconditions.checkNotNull(sp);
    // 1. Determine shared variables and new variables
    final StatementVarMapping spMap = new StatementVarMapping(sp, varToOriginalName);
    final NavigableSet<String> sharedVars = new ConcurrentSkipListSet<>(spMap.varNames());
    sharedVars.retainAll(assuredBindingNames);
    // 2. Join on one shared variable
    final String joinKey = sharedVars.pollFirst();
    final String collectionName = collection.getNamespace().getCollectionName();
    Bson join;
    if (joinKey == null) {
        return false;
    } else {
        join = Aggregates.lookup(collectionName, HASHES + "." + joinKey, spMap.hashField(joinKey),
                JOINED_TRIPLE);
    }
    pipeline.add(join);
    // 3. Unwind the joined triples so each document represents a binding
    //   set (solution) from the base branch and a triple that may match.
    pipeline.add(Aggregates.unwind("$" + JOINED_TRIPLE));
    // 4. (Optional) If there are any shared variables that weren't used as
    //   the join key, project all existing fields plus a new field that
    //   tests the equality of those shared variables.
    final BasicDBObject matchOpts = getMatchExpression(sp, JOINED_TRIPLE);
    if (!sharedVars.isEmpty()) {
        final List<Bson> eqTests = new LinkedList<>();
        for (final String varName : sharedVars) {
            final String oldField = valueFieldExpr(varName);
            final String newField = joinFieldExpr(spMap.valueField(varName));
            final Bson eqTest = new Document("$eq", Arrays.asList(oldField, newField));
            eqTests.add(eqTest);
        }
        final Bson eqProjectOpts = Projections.fields(Projections.computed(FIELDS_MATCH, Filters.and(eqTests)),
                Projections.include(JOINED_TRIPLE, VALUES, HASHES, TYPES, LEVEL, TIMESTAMP));
        pipeline.add(Aggregates.project(eqProjectOpts));
        matchOpts.put(FIELDS_MATCH, true);
    }
    // 5. Filter for solutions whose triples match the joined statement
    //  pattern, and, if applicable, whose additional shared variables
    //  match the current solution.
    pipeline.add(Aggregates.match(matchOpts));
    // 6. Project the results to include variables from the new SP (with
    // appropriate renaming) and variables referenced only in the base
    // pipeline (with previous names).
    final Bson finalProjectOpts = new StatementVarMapping(sp, varToOriginalName)
            .getProjectExpression(assuredBindingNames, str -> joinFieldExpr(str));
    assuredBindingNames.addAll(spMap.varNames());
    bindingNames.addAll(spMap.varNames());
    pipeline.add(Aggregates.project(finalProjectOpts));
    return true;
}

From source file:org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode.java

License:Apache License

/**
 * Add a SPARQL filter to the pipeline, if possible. A filter eliminates
 * results that don't satisfy a given condition. Not all conditional
 * expressions are supported. If unsupported expressions are used in the
 * filter, the pipeline will remain unchanged and this method will return
 * false. Currently only supports binary {@link Compare} conditions among
 * variables and/or literals.
 * @param condition The filter condition
 * @return True if the filter was successfully converted into a pipeline
 *  step, false otherwise.
 */
public boolean filter(final ValueExpr condition) {
    if (condition instanceof Compare) {
        final Compare compare = (Compare) condition;
        final Compare.CompareOp operator = compare.getOperator();
        final Object leftArg = valueFieldExpr(compare.getLeftArg());
        final Object rightArg = valueFieldExpr(compare.getRightArg());
        if (leftArg == null || rightArg == null) {
            // unsupported value expression, can't convert filter
            return false;
        }
        final String opFunc;
        switch (operator) {
        case EQ:
            opFunc = "$eq";
            break;
        case NE:
            opFunc = "$ne";
            break;
        case LT:
            opFunc = "$lt";
            break;
        case LE:
            opFunc = "$lte"; // MongoDB's aggregation comparison operator is $lte, not $le
            break;
        case GT:
            opFunc = "$gt";
            break;
        case GE:
            opFunc = "$gte"; // likewise $gte, not $ge
            break;
        default:
            // unrecognized comparison operator, can't convert filter
            return false;
        }
        final Document compareDoc = new Document(opFunc, Arrays.asList(leftArg, rightArg));
        pipeline.add(Aggregates.project(Projections.fields(Projections.computed("FILTER", compareDoc),
                Projections.include(VALUES, HASHES, TYPES, LEVEL, TIMESTAMP))));
        pipeline.add(Aggregates.match(new Document("FILTER", true)));
        pipeline.add(Aggregates
                .project(Projections.fields(Projections.include(VALUES, HASHES, TYPES, LEVEL, TIMESTAMP))));
        return true;
    }
    return false;
}