Example usage for com.mongodb.client.model Projections fields

Introduction

On this page you can find example usages of com.mongodb.client.model Projections#fields.

Prototype

public static Bson fields(final List<? extends Bson> projections) 

Document

Creates a projection that combines the list of projections into a single one. A varargs overload, fields(Bson... projections), also exists and appears in several of the examples below.
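
A minimal sketch of the list overload, assuming a local MongoDB instance and a hypothetical users collection with name and email fields:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Projections;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.Arrays;
import java.util.List;

public class ProjectionsFieldsExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create()) {
            MongoCollection<Document> users = client.getDatabase("test").getCollection("users");

            // Combine several projections into a single projection document:
            // {"name": 1, "email": 1, "_id": 0}
            List<Bson> parts = Arrays.asList(
                    Projections.include("name", "email"), // keep these fields
                    Projections.excludeId());             // drop _id
            Bson projection = Projections.fields(parts);

            Document first = users.find().projection(projection).first();
            System.out.println(first == null ? "no documents" : first.toJson());
        }
    }
}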

Usage

From source file: io.lumeer.storage.mongodb.MongoDbStorage.java

License: Open Source License

@Override
public List<DataDocument> search(String collectionName, DataFilter filter, final DataSort sort,
        List<String> attributes, final int skip, int limit) {
    MongoCollection<Document> collection = database.getCollection(collectionName);
    FindIterable<Document> documents = filter != null ? collection.find(filter.<Bson>get()) : collection.find();
    if (sort != null) {
        documents = documents.sort(sort.<Bson>get());
    }
    if (attributes != null && !attributes.isEmpty()) {
        documents = documents.projection(Projections.fields(Projections.include(attributes)));
    }
    if (skip > 0) {
        documents = documents.skip(skip);
    }
    if (limit > 0) {
        documents = documents.limit(limit);
    }

    return MongoUtils.convertIterableToList(documents);
}
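
This example passes a single include projection through the varargs overload of Projections.fields. A stripped-down sketch of the same projection/skip/limit pattern against the plain driver, with hypothetical attribute names supplied by the caller:

import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Projections;
import org.bson.Document;

import java.util.List;

static FindIterable<Document> searchProjected(MongoCollection<Document> collection,
        List<String> attributes, int skip, int limit) {
    // Return only the requested attributes; skip/limit mirror the example above.
    return collection.find()
            .projection(Projections.fields(Projections.include(attributes)))
            .skip(skip)
            .limit(limit);
}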

From source file: mongodb.clients.percunia.mongo.Criteria.java

License: Apache License

public Bson getProjections() {
    return Projections.fields(projections);
}
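
A minimal sketch of how such a class might accumulate individual projections and combine them on demand; the builder methods and field names here are hypothetical, not part of the original Criteria class:

import com.mongodb.client.model.Projections;
import org.bson.conversions.Bson;

import java.util.ArrayList;
import java.util.List;

class ProjectionBuilder {
    private final List<Bson> projections = new ArrayList<>();

    ProjectionBuilder include(String... fields) {
        projections.add(Projections.include(fields));
        return this;
    }

    ProjectionBuilder excludeId() {
        projections.add(Projections.excludeId());
        return this;
    }

    // Combine everything that was collected into a single projection document.
    Bson getProjections() {
        return Projections.fields(projections);
    }
}

A caller would then pass the result straight to the find call, e.g. collection.find().projection(builder.getProjections()).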

From source file: org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode.java

License: Apache License

/**
 * Given that the current state of the pipeline produces data that can be
 * interpreted as triples, add a project step to map each result from the
 * intermediate result structure to a structure that can be stored in the
 * triple store. Does not modify the internal pipeline, which will still
 * produce intermediate results suitable for query evaluation.
 * @param timestamp Attach this timestamp to the resulting triples.
 * @param requireNew If true, add an additional step to check constructed
 *  triples against existing triples and only include new ones in the
 *  result. Adds a potentially expensive $lookup step.
 * @throws IllegalStateException if the results produced by the current
 *  pipeline do not have variable names allowing them to be interpreted as
 *  triples (i.e. "subject", "predicate", and "object").
 */
public List<Bson> getTriplePipeline(final long timestamp, final boolean requireNew) {
    if (!assuredBindingNames.contains(SUBJECT) || !assuredBindingNames.contains(PREDICATE)
            || !assuredBindingNames.contains(OBJECT)) {
        throw new IllegalStateException("Current pipeline does not produce "
                + "records that can be converted into triples.\n" + "Required variable names: <" + SUBJECT
                + ", " + PREDICATE + ", " + OBJECT + ">\nCurrent variable names: " + assuredBindingNames);
    }
    final List<Bson> triplePipeline = new LinkedList<>(pipeline);
    final List<Bson> fields = new LinkedList<>();
    fields.add(Projections.computed(SUBJECT, valueFieldExpr(SUBJECT)));
    fields.add(Projections.computed(SUBJECT_HASH, hashFieldExpr(SUBJECT)));
    fields.add(Projections.computed(PREDICATE, valueFieldExpr(PREDICATE)));
    fields.add(Projections.computed(PREDICATE_HASH, hashFieldExpr(PREDICATE)));
    fields.add(Projections.computed(OBJECT, valueFieldExpr(OBJECT)));
    fields.add(Projections.computed(OBJECT_HASH, hashFieldExpr(OBJECT)));
    fields.add(Projections.computed(OBJECT_TYPE,
            ConditionalOperators.ifNull(typeFieldExpr(OBJECT), DEFAULT_TYPE)));
    fields.add(Projections.computed(OBJECT_LANGUAGE, hashFieldExpr(OBJECT)));
    fields.add(Projections.computed(CONTEXT, DEFAULT_CONTEXT));
    fields.add(Projections.computed(STATEMENT_METADATA, DEFAULT_METADATA));
    fields.add(DEFAULT_DV);
    fields.add(Projections.computed(TIMESTAMP, new Document("$literal", timestamp)));
    fields.add(Projections.computed(LEVEL, new Document("$add", Arrays.asList("$" + LEVEL, 1))));
    triplePipeline.add(Aggregates.project(Projections.fields(fields)));
    if (requireNew) {
        // Prune any triples that already exist in the data store
        final String collectionName = collection.getNamespace().getCollectionName();
        final Bson includeAll = Projections.include(SUBJECT, SUBJECT_HASH, PREDICATE, PREDICATE_HASH, OBJECT,
                OBJECT_HASH, OBJECT_TYPE, OBJECT_LANGUAGE, CONTEXT, STATEMENT_METADATA, DOCUMENT_VISIBILITY,
                TIMESTAMP, LEVEL);
        final List<Bson> eqTests = new LinkedList<>();
        eqTests.add(new Document("$eq", Arrays.asList("$$this." + PREDICATE_HASH, "$" + PREDICATE_HASH)));
        eqTests.add(new Document("$eq", Arrays.asList("$$this." + OBJECT_HASH, "$" + OBJECT_HASH)));
        final Bson redundantFilter = new Document("$filter", new Document("input", "$" + JOINED_TRIPLE)
                .append("as", "this").append("cond", new Document("$and", eqTests)));
        triplePipeline.add(Aggregates.lookup(collectionName, SUBJECT_HASH, SUBJECT_HASH, JOINED_TRIPLE));
        final String numRedundant = "REDUNDANT";
        triplePipeline.add(Aggregates.project(Projections.fields(includeAll,
                Projections.computed(numRedundant, new Document("$size", redundantFilter)))));
        triplePipeline.add(Aggregates.match(Filters.eq(numRedundant, 0)));
        triplePipeline.add(Aggregates.project(Projections.fields(includeAll)));
    }
    return triplePipeline;
}
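
The essential pattern here is a list of Projections.computed expressions fed into Projections.fields and wrapped in Aggregates.project. A reduced sketch of that single $project stage, with hypothetical field names standing in for the constants above:

import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Projections;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.Arrays;
import java.util.List;

static Bson buildProjectStage(long timestamp) {
    // Derive output fields from the intermediate pipeline results.
    List<Bson> fields = Arrays.asList(
            Projections.computed("subject", "$subjectValue"),                      // copy a field
            Projections.computed("timestamp", new Document("$literal", timestamp)), // constant value
            Projections.computed("level", new Document("$add", Arrays.asList("$level", 1)))); // increment
    return Aggregates.project(Projections.fields(fields));
}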

From source file: org.opencb.commons.datastore.mongodb.MongoDBQueryUtils.java

License: Apache License

protected static Bson getProjection(Bson projection, QueryOptions options) {
    Bson projectionResult = null;
    List<Bson> projections = new ArrayList<>();

    // It is too risky to merge projections; if a projection already exists we return it as is, otherwise we create a new one.
    if (projection != null) {
        //            projections.add(projection);
        return projection;
    }

    if (options != null) {
        // Select which fields are excluded and included in the query
        // Read and process 'include'/'exclude'/'elemMatch' field from 'options' object

        Bson include = null;
        if (options.containsKey(QueryOptions.INCLUDE)) {
            Object includeObject = options.get(QueryOptions.INCLUDE);
            if (includeObject != null) {
                if (includeObject instanceof Bson) {
                    include = (Bson) includeObject;
                } else {
                    List<String> includeStringList = options.getAsStringList(QueryOptions.INCLUDE, ",");
                    if (includeStringList != null && includeStringList.size() > 0) {
                        include = Projections.include(includeStringList);
                    }
                }
            }
        }

        Bson exclude = null;
        boolean excludeId = false;
        if (options.containsKey(QueryOptions.EXCLUDE)) {
            Object excludeObject = options.get(QueryOptions.EXCLUDE);
            if (excludeObject != null) {
                if (excludeObject instanceof Bson) {
                    exclude = (Bson) excludeObject;
                } else {
                    List<String> excludeStringList = options.getAsStringList(QueryOptions.EXCLUDE, ",");
                    if (excludeStringList != null && excludeStringList.size() > 0) {
                        exclude = Projections.exclude(excludeStringList);
                        excludeId = excludeStringList.contains("_id");
                    }
                }
            }
        }

        // If both include and exclude exist we only add include
        if (include != null) {
            projections.add(include);
            // MongoDB allows to exclude _id when include is present
            if (excludeId) {
                projections.add(Projections.excludeId());
            }
        } else {
            if (exclude != null) {
                projections.add(exclude);
            }
        }

        if (options.containsKey(MongoDBCollection.ELEM_MATCH)) {
            Object elemMatch = options.get(MongoDBCollection.ELEM_MATCH);
            if (elemMatch != null && elemMatch instanceof Bson) {
                projections.add((Bson) elemMatch);
            }
        }

        //            List<String> includeStringList = options.getAsStringList(MongoDBCollection.INCLUDE, ",");
        //            if (includeStringList != null && includeStringList.size() > 0) {
        //                projections.add(Projections.include(includeStringList));
        ////                for (Object field : includeStringList) {
        ////                    projection.put(field.toString(), 1);
        ////                }
        //            } else {
        //                List<String> excludeStringList = options.getAsStringList(MongoDBCollection.EXCLUDE, ",");
        //                if (excludeStringList != null && excludeStringList.size() > 0) {
        //                    projections.add(Projections.exclude(excludeStringList));
        ////                    for (Object field : excludeStringList) {
        ////                        projection.put(field.toString(), 0);
        ////                    }
        //                }
        //            }
    }

    if (projections.size() > 0) {
        projectionResult = Projections.fields(projections);
    }

    return projectionResult;
}
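
A compressed sketch of the include-plus-excludeId branch above, with hypothetical field names passed in by the caller:

import com.mongodb.client.model.Projections;
import org.bson.conversions.Bson;

import java.util.ArrayList;
import java.util.List;

static Bson buildProjection(List<String> includeFields, boolean excludeId) {
    List<Bson> projections = new ArrayList<>();
    if (includeFields != null && !includeFields.isEmpty()) {
        projections.add(Projections.include(includeFields));
        if (excludeId) {
            // _id is the only field MongoDB allows to be excluded alongside an inclusion projection.
            projections.add(Projections.excludeId());
        }
    }
    return projections.isEmpty() ? null : Projections.fields(projections);
}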

From source file: org.opencb.opencga.catalog.db.mongodb.MongoDBAdaptor.java

License: Apache License

protected QueryResult groupBy(MongoDBCollection collection, Bson query, List<String> groupByField,
        String idField, QueryOptions options) {
    if (groupByField == null || groupByField.isEmpty()) {
        return new QueryResult();
    }

    List<String> groupByFields = new ArrayList<>(groupByField);
    //        if (groupByField.size() == 1) {
    //            // if only one field then we call to simple groupBy
    //            return groupBy(collection, query, groupByField.get(0), idField, options);
    //        } else {
    Bson match = Aggregates.match(query);

    // add all group-by fields to the projection together with the aggregation field name
    List<String> includeGroupByFields = new ArrayList<>(groupByField);
    includeGroupByFields.add(idField);
    List<Bson> projections = new ArrayList<>();
    addDateProjection(projections, includeGroupByFields, groupByFields);
    projections.add(Projections.include(includeGroupByFields));
    Bson project = Aggregates.project(Projections.fields(projections));
    //            Bson project = Aggregates.project(Projections.include(groupByFields));

    // _id document creation to have the multiple id
    Document id = new Document();
    for (String s : groupByFields) {
        id.append(s, "$" + s);
    }
    Bson group;
    if (options.getBoolean("count", false)) {
        group = Aggregates.group(id, Accumulators.sum("count", 1));
    } else {
        group = Aggregates.group(id, Accumulators.addToSet("features", "$" + idField));
    }
    return collection.aggregate(Arrays.asList(match, project, group), options);
    //        }
}
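
A hedged, self-contained version of the match/project/group pipeline built above; the filter, field names, and compound _id are made up for illustration:

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Accumulators;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Projections;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

static List<Document> countByStatusAndType(MongoCollection<Document> collection) {
    Bson match = Aggregates.match(Filters.eq("active", true));
    // Keep only the group-by fields (plus any field the accumulator reads).
    Bson project = Aggregates.project(Projections.fields(
            Projections.include("status", "type", "name")));
    // Group on a compound _id and count the documents in each bucket.
    Document id = new Document("status", "$status").append("type", "$type");
    Bson group = Aggregates.group(id, Accumulators.sum("count", 1));
    return collection.aggregate(Arrays.asList(match, project, group)).into(new ArrayList<>());
}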