List of usage examples for com.mongodb.client.model.Filters.eq
public static <TItem> Bson eq(final String fieldName, @Nullable final TItem value)
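Before the project-specific snippets below, a minimal self-contained sketch of the method on its own: Filters.eq(fieldName, value) builds the BSON equality filter { fieldName : value }. Every identifier here (connection string, database, collection, and field names) is a placeholder for illustration and is not taken from any of the source files listed on this page.

import static com.mongodb.client.model.Filters.eq;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;

public class FiltersEqSketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = client.getDatabase("exampleDb").getCollection("users");

            // eq("status", "active") renders to the equality filter { "status" : "active" }.
            Bson byStatus = eq("status", "active");

            // Typical use: pass the filter to find() and take the first match, if any.
            Document first = users.find(byStatus).first();
            System.out.println(first == null ? "no match" : first.toJson());
        }
    }
}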
From source file:org.apache.rya.indexing.entity.storage.mongo.MongoEntityStorage.java
License:Apache License
private static Stream<Bson> makePropertyFilters(final RyaURI typeId, final Property property) {
    final String propertyName = property.getName().getData();
    final String encodedPropertyName = MongoDbSafeKey.encodeKey(propertyName);

    // Must match the property's data type.
    final String dataTypePath = Joiner.on(".").join(new String[] { EntityDocumentConverter.PROPERTIES,
            typeId.getData(), encodedPropertyName, RyaTypeDocumentConverter.DATA_TYPE });
    final String propertyDataType = property.getValue().getDataType().stringValue();
    final Bson dataTypeFilter = Filters.eq(dataTypePath, propertyDataType);

    // Must match the property's value.
    final String valuePath = Joiner.on(".").join(new String[] { EntityDocumentConverter.PROPERTIES,
            typeId.getData(), encodedPropertyName, RyaTypeDocumentConverter.VALUE });
    final String propertyValue = property.getValue().getData();
    final Bson valueFilter = Filters.eq(valuePath, propertyValue);

    return Stream.of(dataTypeFilter, valueFilter);
}
From source file:org.apache.rya.indexing.entity.storage.mongo.MongoTypeStorage.java
License:Apache License
@Override
public ConvertingCursor<Type> search(final RyaURI propertyName) throws TypeStorageException {
    requireNonNull(propertyName);
    try {
        // Create a Filter that finds Types who have the provided property names.
        final Bson byPropertyName = Filters.eq(TypeDocumentConverter.PROPERTY_NAMES, propertyName.getData());

        final MongoCursor<Document> cursor = mongo.getDatabase(ryaInstanceName).getCollection(COLLECTION_NAME)
                .find(byPropertyName).iterator();

        return new ConvertingCursor<Type>(document -> {
            try {
                return TYPE_CONVERTER.fromDocument(document);
            } catch (final Exception e) {
                throw new RuntimeException("Could not convert the Document '" + document + "' into a Type.", e);
            }
        }, cursor);
    } catch (final MongoException e) {
        throw new TypeStorageException(
                "Could not fetch Types that include the property '" + propertyName.getData() + "'.", e);
    }
}
From source file:org.apache.rya.indexing.entity.storage.mongo.MongoTypeStorage.java
License:Apache License
private static Bson makeIdFilter(final RyaURI typeId) {
    return Filters.eq(TypeDocumentConverter.ID, typeId.getData());
}
From source file:org.apache.rya.indexing.geotemporal.mongo.MongoEventStorage.java
License:Apache License
private static Bson makeSubjectFilter(final RyaURI subject) {
    return Filters.eq(EventDocumentConverter.SUBJECT, subject.getData());
}
From source file:org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode.java
License:Apache License
/**
 * Given that the current state of the pipeline produces data that can be
 * interpreted as triples, add a project step to map each result from the
 * intermediate result structure to a structure that can be stored in the
 * triple store. Does not modify the internal pipeline, which will still
 * produce intermediate results suitable for query evaluation.
 * @param timestamp Attach this timestamp to the resulting triples.
 * @param requireNew If true, add an additional step to check constructed
 *   triples against existing triples and only include new ones in the
 *   result. Adds a potentially expensive $lookup step.
 * @throws IllegalStateException if the results produced by the current
 *   pipeline do not have variable names allowing them to be interpreted as
 *   triples (i.e. "subject", "predicate", and "object").
 */
public List<Bson> getTriplePipeline(final long timestamp, final boolean requireNew) {
    if (!assuredBindingNames.contains(SUBJECT) || !assuredBindingNames.contains(PREDICATE)
            || !assuredBindingNames.contains(OBJECT)) {
        throw new IllegalStateException("Current pipeline does not produce "
                + "records that can be converted into triples.\n"
                + "Required variable names: <" + SUBJECT + ", " + PREDICATE + ", " + OBJECT
                + ">\nCurrent variable names: " + assuredBindingNames);
    }
    final List<Bson> triplePipeline = new LinkedList<>(pipeline);
    final List<Bson> fields = new LinkedList<>();
    fields.add(Projections.computed(SUBJECT, valueFieldExpr(SUBJECT)));
    fields.add(Projections.computed(SUBJECT_HASH, hashFieldExpr(SUBJECT)));
    fields.add(Projections.computed(PREDICATE, valueFieldExpr(PREDICATE)));
    fields.add(Projections.computed(PREDICATE_HASH, hashFieldExpr(PREDICATE)));
    fields.add(Projections.computed(OBJECT, valueFieldExpr(OBJECT)));
    fields.add(Projections.computed(OBJECT_HASH, hashFieldExpr(OBJECT)));
    fields.add(Projections.computed(OBJECT_TYPE,
            ConditionalOperators.ifNull(typeFieldExpr(OBJECT), DEFAULT_TYPE)));
    fields.add(Projections.computed(OBJECT_LANGUAGE, hashFieldExpr(OBJECT)));
    fields.add(Projections.computed(CONTEXT, DEFAULT_CONTEXT));
    fields.add(Projections.computed(STATEMENT_METADATA, DEFAULT_METADATA));
    fields.add(DEFAULT_DV);
    fields.add(Projections.computed(TIMESTAMP, new Document("$literal", timestamp)));
    fields.add(Projections.computed(LEVEL, new Document("$add", Arrays.asList("$" + LEVEL, 1))));
    triplePipeline.add(Aggregates.project(Projections.fields(fields)));
    if (requireNew) {
        // Prune any triples that already exist in the data store
        final String collectionName = collection.getNamespace().getCollectionName();
        final Bson includeAll = Projections.include(SUBJECT, SUBJECT_HASH, PREDICATE, PREDICATE_HASH, OBJECT,
                OBJECT_HASH, OBJECT_TYPE, OBJECT_LANGUAGE, CONTEXT, STATEMENT_METADATA, DOCUMENT_VISIBILITY,
                TIMESTAMP, LEVEL);
        final List<Bson> eqTests = new LinkedList<>();
        eqTests.add(new Document("$eq", Arrays.asList("$$this." + PREDICATE_HASH, "$" + PREDICATE_HASH)));
        eqTests.add(new Document("$eq", Arrays.asList("$$this." + OBJECT_HASH, "$" + OBJECT_HASH)));
        final Bson redundantFilter = new Document("$filter", new Document("input", "$" + JOINED_TRIPLE)
                .append("as", "this").append("cond", new Document("$and", eqTests)));
        triplePipeline.add(Aggregates.lookup(collectionName, SUBJECT_HASH, SUBJECT_HASH, JOINED_TRIPLE));
        final String numRedundant = "REDUNDANT";
        triplePipeline.add(Aggregates.project(Projections.fields(includeAll,
                Projections.computed(numRedundant, new Document("$size", redundantFilter)))));
        triplePipeline.add(Aggregates.match(Filters.eq(numRedundant, 0)));
        triplePipeline.add(Aggregates.project(Projections.fields(includeAll)));
    }
    return triplePipeline;
}
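As the Rya pipeline above shows, Filters.eq is not limited to find(): it can also supply the predicate for an aggregation $match stage via Aggregates.match. A minimal sketch of that pattern in isolation; the connection string, collection, field name, and value below are assumed placeholders, not taken from the Rya code.

import static com.mongodb.client.model.Aggregates.match;
import static com.mongodb.client.model.Filters.eq;

import java.util.Arrays;
import java.util.List;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;

public class MatchWithEqSketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> orders = client.getDatabase("exampleDb").getCollection("orders");

            // A one-stage pipeline: { $match: { "status": "shipped" } }
            List<Bson> pipeline = Arrays.asList(match(eq("status", "shipped")));

            for (Document doc : orders.aggregate(pipeline)) {
                System.out.println(doc.toJson());
            }
        }
    }
}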
From source file:org.apache.sling.nosql.mongodb.resourceprovider.impl.MongoDBNoSqlAdapter.java
License:Apache License
@Override
public NoSqlData get(String path) {
    Document envelope = collection.find(Filters.eq(PN_PATH, path)).first();
    if (envelope == null) {
        return null;
    } else {
        return new NoSqlData(path, envelope.get(PN_DATA, Document.class), MultiValueMode.LISTS);
    }
}
From source file:org.apache.sling.nosql.mongodb.resourceprovider.impl.MongoDBNoSqlAdapter.java
License:Apache License
@Override
public Iterator<NoSqlData> getChildren(String parentPath) {
    List<NoSqlData> children = new ArrayList<>();
    FindIterable<Document> result = collection.find(Filters.eq(PN_PARENT_PATH, parentPath));
    try (MongoCursor<Document> envelopes = result.iterator()) {
        while (envelopes.hasNext()) {
            Document envelope = envelopes.next();
            String path = envelope.get(PN_PATH, String.class);
            Document data = envelope.get(PN_DATA, Document.class);
            children.add(new NoSqlData(path, data, MultiValueMode.LISTS));
        }
    }
    return children.iterator();
}
From source file:org.apache.sling.nosql.mongodb.resourceprovider.impl.MongoDBNoSqlAdapter.java
License:Apache License
@Override
public boolean store(NoSqlData data) {
    Document envelope = new Document();
    envelope.put(PN_PATH, data.getPath());
    envelope.put(PN_DATA, new Document(data.getProperties(MultiValueMode.LISTS)));

    // for list-children query efficiency store parent path as well
    String parentPath = ResourceUtil.getParent(data.getPath());
    if (parentPath != null) {
        envelope.put(PN_PARENT_PATH, parentPath);
    }

    UpdateResult result = collection.replaceOne(Filters.eq(PN_PATH, data.getPath()), envelope,
            new UpdateOptions().upsert(true));

    // return true if a new entry was inserted, false if an existing was replaced
    return (result.getMatchedCount() == 0);
}
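The Sling adapter above uses Filters.eq as the key for an upsert: replaceOne inserts when the filter matches nothing and replaces otherwise. A stripped-down sketch of the same idiom, assuming a newer driver where replaceOne takes ReplaceOptions (the Sling code targets an older driver's UpdateOptions overload); all names and values below are placeholders.

import static com.mongodb.client.model.Filters.eq;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.result.UpdateResult;

public class UpsertByKeySketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> pages = client.getDatabase("exampleDb").getCollection("pages");

            Bson byPath = eq("path", "/content/home");
            Document replacement = new Document("path", "/content/home").append("title", "Home");

            // Insert when no document matches the filter, replace the matched document otherwise.
            UpdateResult result = pages.replaceOne(byPath, replacement, new ReplaceOptions().upsert(true));
            System.out.println("inserted new document: " + (result.getMatchedCount() == 0));
        }
    }
}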
From source file:org.apache.sling.nosql.mongodb.resourceprovider.impl.MongoDBNoSqlAdapter.java
License:Apache License
@Override
public void checkConnection() throws LoginException {
    // the query is not relevant, just the successful round-trip
    try {
        collection.find(Filters.eq(PN_PATH, "/")).first();
    } catch (MongoException e) {
        throw new LoginException(e);
    }
}
From source file:org.apache.storm.mongodb.common.SimpleQueryFilterCreator.java
License:Apache License
@Override
public Bson createFilter(ITuple tuple) {
    return Filters.eq(field, tuple.getValueByField(field));
}