Example usage for com.mongodb.client.model Filters gt

List of usage examples for com.mongodb.client.model Filters gt

Introduction

On this page you can find example usage for com.mongodb.client.model Filters gt.

Prototype

public static <TItem> Bson gt(final String fieldName, final TItem value) 

Source Link

Document

Creates a filter that matches all documents where the value of the given field is greater than the specified value.

Usage

From source file:com.avbravo.ejbjmoordb.mongodb.repository.Repository.java

/**
 * Returns {@code true} when no stored document both matches the supplied filter and
 * overlaps the given start/end date-hour range — i.e. the time slot is available.
 *
 * @param filter additional condition combined (AND) with the date-range overlap clauses
 * @param namefieldOfStart name of the document field holding the stored range start
 * @param valueStart start of the range being checked
 * @param namefieldOfEnd name of the document field holding the stored range end
 * @param valueEnd end of the range being checked
 * @return {@code true} when the slot is free; {@code false} when an overlapping
 *         document exists or an exception occurred
 */
public Boolean isAvailableBetweenDateHour(Bson filter, String namefieldOfStart, Date valueStart,
        String namefieldOfEnd, Date valueEnd) {
    try {
        // Empty collection: nothing can overlap, so the slot is trivially available.
        Integer count = count();
        if (count.equals(0)) {
            return true;
        }

        // Case b: stored range starts inside (valueStart, valueEnd) and ends after valueEnd.
        Bson b = Filters.and(Filters.gt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.gt(namefieldOfEnd, valueEnd));

        // Cases c/e/f/g/h/l: any stored endpoint coincides exactly with a requested endpoint.
        Bson c_e_f_g_h_l = Filters.or(Filters.eq(namefieldOfStart, valueStart),
                Filters.eq(namefieldOfStart, valueEnd), Filters.eq(namefieldOfEnd, valueStart),
                Filters.eq(namefieldOfEnd, valueEnd));

        // Case j: stored range starts before valueStart and ends exactly at valueEnd.
        Bson j = Filters.and(Filters.lt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.eq(namefieldOfEnd, valueEnd));

        // Case d: stored range lies strictly inside the requested range.
        Bson d = Filters.and(Filters.gt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.lt(namefieldOfEnd, valueEnd));
        // Case i: stored range starts before valueStart and ends after valueEnd (fully covers it).
        Bson i = Filters.and(Filters.lt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.gt(namefieldOfEnd, valueEnd));
        // Case k: stored range starts before valueStart and ends inside the requested range.
        Bson k = Filters.and(Filters.lt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.lt(namefieldOfEnd, valueEnd));

        // Any overlap case, further restricted by the caller-supplied filter.
        Bson _filter = Filters.and(filter, or(b, c_e_f_g_h_l, d, i, j, k));

        List<T> list = findBy(_filter);

        if (list.isEmpty()) {
            return true;
        }

    } catch (Exception e) {
        // Fixed: the message previously referred to isAvailableBetweenDate(), a different method.
        Logger.getLogger(Repository.class.getName() + "isAvailableBetweenDateHour()").log(Level.SEVERE, null, e);
        exception = new Exception("isAvailableBetweenDateHour() ", e);

    }
    return false;
}

From source file:com.avbravo.ejbjmoordb.mongodb.repository.Repository.java

/**
 * Devuelve una lista de los elementos que estan en ese rango de fechas y que cumplan la condicion del filtro que se
 * pasa como parametro/*w w w.  j  a v  a 2s  . c o m*/
 *
 * @param filter
 * @param namefieldOfStart
 * @param valueStart
 * @param namefieldOfEnd
 * @param valueEnd
 * @return Devuelve una lista de los elementos que estan en ese rango de fechas y que cumplan la condicion del filtro que se
 * pasa como parametro
 */
public List<T> notAvailableBetweenDateHour(Bson filter, String namefieldOfStart, Date valueStart,
        String namefieldOfEnd, Date valueEnd) {
    try {
        //Vehiculos en viajes
        list = new ArrayList<>();
        Integer count = count();
        if (count.equals(0)) {
            return list;
        }
        //inicio

        Bson b = Filters.and(Filters.gt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.gt(namefieldOfEnd, valueEnd));

        Bson c_e_f_g_h_l = Filters.or(Filters.eq(namefieldOfStart, valueStart),
                Filters.eq(namefieldOfStart, valueEnd), Filters.eq(namefieldOfEnd, valueStart),
                Filters.eq(namefieldOfEnd, valueEnd));

        Bson j = Filters.and(Filters.lt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.eq(namefieldOfEnd, valueEnd));

        Bson d = Filters.and(Filters.gt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.lt(namefieldOfEnd, valueEnd));
        Bson i = Filters.and(Filters.lt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.gt(namefieldOfEnd, valueEnd));
        Bson k = Filters.and(Filters.lt(namefieldOfStart, valueStart), Filters.lt(namefieldOfStart, valueEnd),
                Filters.gt(namefieldOfEnd, valueStart), Filters.lt(namefieldOfEnd, valueEnd));

        Bson _filter = Filters.and(filter, or(b, c_e_f_g_h_l, d, i, j, k));

        list = findBy(_filter);

    } catch (Exception e) {
        Logger.getLogger(Repository.class.getName() + "isAvailableBetweenDate()").log(Level.SEVERE, null, e);
        exception = new Exception("isAvailableBetweenDate() ", e);

    }
    return list;
}

From source file:com.egopulse.querydsl.mongodb.MongodbSerializer.java

License:Apache License

/**
 * Translates a querydsl {@code Operation} tree into a MongoDB {@link Bson} filter.
 * Dispatches on the operator and recurses (via {@code handle}/{@code visit}) into
 * sub-expressions for the boolean combinators AND/OR/NOT.
 *
 * NOTE(review): branch order matters — e.g. NOT inspects its child operator to
 * rewrite NOT(IN) as NOT_IN before falling back to a generic $not wrapper.
 * Throws UnsupportedOperationException for any operator with no mapping below.
 */
@SuppressWarnings("unchecked")
@Override
public Object visit(Operation<?> expr, Void context) {
    Operator op = expr.getOperator();

    /**
     * user.firstName.eq("test")
     * user.addresses.size().eq(20)
     */
    if (op == Ops.EQ) {
        return handleEqOperation(expr);
    }

    /**
     * user.firstName.ne("test")
     */
    else if (op == Ops.NE) {
        return Filters.ne(asDBKey(expr, 0), asDBValue(expr, 1));

    }

    /**
     * user.firstName.isEmpty()
      */
    else if (op == Ops.STRING_IS_EMPTY) {
        // Empty string is matched with an explicit BsonString equality, not $exists.
        return Filters.eq(asDBKey(expr, 0), new BsonString(""));
    }

    /**
     * user.firstName.eq("test").and(user.lastName.eq("blah"))
     */
    else if (op == Ops.AND) {
        Bson leftOperation = (Bson) handle(expr.getArg(0));
        Bson rightOperation = (Bson) handle(expr.getArg(1));

        return Filters.and(leftOperation, rightOperation);
    }

    /**
     * user.firstName.not[Operation]
     */
    else if (op == Ops.NOT) {
        //Handle the not's child
        Operation<?> subOperation = (Operation<?>) expr.getArg(0);
        Operator subOp = subOperation.getOperator();
        if (subOp == Ops.IN) {
            // NOT(IN) is rewritten to NOT_IN so it maps onto $nin instead of $not{$in}.
            return visit(ExpressionUtils.operation(Boolean.class, Ops.NOT_IN, subOperation.getArg(0),
                    subOperation.getArg(1)), context);
        } else {
            // Generic negation: wrap the translated child in $not.
            Bson arg = (Bson) handle(expr.getArg(0));
            return Filters.not(arg);
        }
    }

    /**
     * user.firstName.eq("test").or(user.firstName.eq("else"))
     */
    else if (op == Ops.OR) {
        Bson leftOperation = (Bson) handle(expr.getArg(0));
        Bson rightOperation = (Bson) handle(expr.getArg(1));
        return Filters.or(leftOperation, rightOperation);
    }

    /**
     * Text matching operations, all implemented as $regex filters.
     * regexValue presumably escapes the argument for literal matching — TODO confirm.
     */
    else if (op == Ops.STARTS_WITH) {
        return Filters.regex(asDBKey(expr, 0), Pattern.compile("^" + regexValue(expr, 1)));

    }

    else if (op == Ops.STARTS_WITH_IC) {
        return Filters.regex(asDBKey(expr, 0),
                Pattern.compile("^" + regexValue(expr, 1), Pattern.CASE_INSENSITIVE));

    }

    else if (op == Ops.ENDS_WITH) {
        return Filters.regex(asDBKey(expr, 0), Pattern.compile(regexValue(expr, 1) + "$"));

    }

    else if (op == Ops.ENDS_WITH_IC) {
        return Filters.regex(asDBKey(expr, 0),
                Pattern.compile(regexValue(expr, 1) + "$", Pattern.CASE_INSENSITIVE));

    }

    else if (op == Ops.EQ_IGNORE_CASE) {
        // Case-insensitive equality expressed as a fully anchored regex.
        return Filters.regex(asDBKey(expr, 0),
                Pattern.compile("^" + regexValue(expr, 1) + "$", Pattern.CASE_INSENSITIVE));

    }

    else if (op == Ops.STRING_CONTAINS) {
        return Filters.regex(asDBKey(expr, 0), Pattern.compile(".*" + regexValue(expr, 1) + ".*"));

    }

    else if (op == Ops.STRING_CONTAINS_IC) {
        return Filters.regex(asDBKey(expr, 0),
                Pattern.compile(".*" + regexValue(expr, 1) + ".*", Pattern.CASE_INSENSITIVE));

    }

    else if (op == Ops.MATCHES) {
        // MATCHES takes the raw (unescaped) pattern from the expression argument.
        return Filters.regex(asDBKey(expr, 0), Pattern.compile(asDBValue(expr, 1).toString()));
    }

    else if (op == Ops.MATCHES_IC) {
        return Filters.regex(asDBKey(expr, 0),
                Pattern.compile(asDBValue(expr, 1).toString(), Pattern.CASE_INSENSITIVE));

    }

    else if (op == Ops.LIKE) {
        // SQL LIKE pattern converted to a regex by querydsl's helper.
        String regex = ExpressionUtils.likeToRegex((Expression) expr.getArg(1)).toString();
        return Filters.regex(asDBKey(expr, 0), Pattern.compile(regex));

    }

    else if (op == Ops.BETWEEN) {
        // Inclusive on both ends: $gte lower bound AND $lte upper bound.
        return Filters.and(Filters.gte(asDBKey(expr, 0), asDBValue(expr, 1)),
                Filters.lte(asDBKey(expr, 0), asDBValue(expr, 2)));
    }

    else if (op == Ops.IN) {
        // The constant collection may appear as either argument; detect which.
        int constIndex = 0;
        int exprIndex = 1;
        if (expr.getArg(1) instanceof Constant<?>) {
            constIndex = 1;
            exprIndex = 0;
        }
        if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) {
            @SuppressWarnings("unchecked") //guarded by previous check
            Collection<?> values = ((Constant<? extends Collection<?>>) expr.getArg(constIndex)).getConstant();
            return Filters.in(asDBKey(expr, exprIndex), values);
        }

        /**
         * user.firstName.in(user.lastName) — path-in-path is not supported.
         */

        else {
            throw new UnsupportedOperationException();
            //                Path<?> path = (Path<?>) expr.getArg(exprIndex);
            //                Constant<?> constant = (Constant<?>) expr.getArg(constIndex);
            //                return asDBObject(asDBKey(expr, exprIndex), convert(path, constant));
        }

    }

    else if (op == Ops.NOT_IN) {
        // Mirror of the IN branch, mapped onto $nin.
        int constIndex = 0;
        int exprIndex = 1;
        if (expr.getArg(1) instanceof Constant<?>) {
            constIndex = 1;
            exprIndex = 0;
        }
        if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) {
            @SuppressWarnings("unchecked") //guarded by previous check
            Collection<?> values = ((Constant<? extends Collection<?>>) expr.getArg(constIndex)).getConstant();
            return Filters.nin(asDBKey(expr, exprIndex), values);
        } else {
            throw new UnsupportedOperationException();
            //                Path<?> path = (Path<?>) expr.getArg(exprIndex);
            //                Constant<?> constant = (Constant<?>) expr.getArg(constIndex);
            //                return asDBObject(asDBKey(expr, exprIndex), asDBObject("$ne", convert(path, constant)));
        }

    }

    else if (op == Ops.COL_IS_EMPTY) {
        // "Empty collection" means the field is missing OR an array of size 0.
        String field = asDBKey(expr, 0);
        return Filters.or(Filters.exists(field, false), Filters.size(field, 0));
    }

    // Simple comparison operators map one-to-one onto Mongo filters.
    else if (op == Ops.LT) {
        return Filters.lt(asDBKey(expr, 0), asDBValue(expr, 1));

    } else if (op == Ops.GT) {
        return Filters.gt(asDBKey(expr, 0), asDBValue(expr, 1));

    } else if (op == Ops.LOE) {
        return Filters.lte(asDBKey(expr, 0), asDBValue(expr, 1));

    } else if (op == Ops.GOE) {
        return Filters.gte(asDBKey(expr, 0), asDBValue(expr, 1));

    } else if (op == Ops.IS_NULL) {
        // Null-ness is modeled as field absence ($exists: false), not a stored null value.
        return Filters.exists(asDBKey(expr, 0), false);

    } else if (op == Ops.IS_NOT_NULL) {
        return Filters.exists(asDBKey(expr, 0), true);

    }

    else if (op == Ops.CONTAINS_KEY) {
        // Map key containment: check existence of the dotted sub-path "<path>.<key>".
        Path<?> path = (Path<?>) expr.getArg(0);
        Expression<?> key = expr.getArg(1);
        return Filters.exists(visit(path, context) + "." + key.toString(), true);

    }
    //        else if (op == MongodbOps.NEAR) {
    //            return asDBObject(asDBKey(expr, 0), asDBObject("$near", asDBValue(expr, 1)));
    //
    //        } else if (op == MongodbOps.NEAR_SPHERE) {
    //            return asDBObject(asDBKey(expr, 0), asDBObject("$nearSphere", asDBValue(expr, 1)));
    //
    //        }
    //        else if (op == MongodbOps.ELEM_MATCH) {
    //            return Filters.elemMatch(asDBKey(expr, 0), asDBValue(expr, 1));
    //        }

    throw new UnsupportedOperationException("Illegal operation " + expr);
}

From source file:com.erudika.para.persistence.MongoDBDAO.java

License:Apache License

/**
 * Reads one page of objects for the given app, using the pager's last key as an
 * exclusive lower bound on the object id (keyset pagination via $gt).
 *
 * @param appid the application id identifying the backing collection; blank returns empty
 * @param pager pagination state; a new default {@link Pager} is used when null.
 *              Updated in place: lastKey advances to the last row read, count accumulates.
 * @return the objects of this page; empty on blank appid or error
 */
@Override
public <P extends ParaObject> List<P> readPage(String appid, Pager pager) {
    LinkedList<P> results = new LinkedList<P>();
    if (StringUtils.isBlank(appid)) {
        return results;
    }
    if (pager == null) {
        pager = new Pager();
    }
    try {
        String lastKey = pager.getLastKey();
        MongoCursor<Document> cursor;
        if (lastKey == null) {
            // First page: no lower bound.
            cursor = getTable(appid).find().batchSize(pager.getLimit()).limit(pager.getLimit()).iterator();
        } else {
            // Subsequent pages: resume strictly after the last seen id.
            cursor = getTable(appid).find(Filters.gt(_OBJECT_ID, lastKey))
                    .batchSize(pager.getLimit()).limit(pager.getLimit()).iterator();
        }
        // Fixed: the cursor was previously never closed (resource leak).
        try {
            while (cursor.hasNext()) {
                Map<String, Object> row = documentToMap(cursor.next());
                P obj = fromRow(row);
                if (obj != null) {
                    results.add(obj);
                    pager.setLastKey((String) row.get(_OBJECT_ID));
                }
            }
        } finally {
            cursor.close();
        }
        if (!results.isEmpty()) {
            pager.setCount(pager.getCount() + results.size());
        }
    } catch (Exception e) {
        logger.error(null, e);
    }
    // Fixed: the second placeholder was missing, so results.size() was silently dropped.
    logger.debug("readPage() page: {}, results: {}", pager.getPage(), results.size());
    return results;
}

From source file:com.px100systems.data.plugin.storage.mongo.FilterQueryBuilder.java

License:Open Source License

/**
 * Translates the generic "greater than" predicate into a Mongo {@code $gt} filter.
 */
@Override
public Bson convert(gt predicate) {
    final String field = predicate.getMember();
    final Object value = predicate.getValue();
    return Filters.gt(field, value);
}

From source file:com.streamsets.pipeline.stage.origin.mongodb.MongoDBSource.java

License:Apache License

/**
 * Lazily opens the Mongo cursor used by this origin, resuming from the stored
 * offset when one exists. No-op when a cursor is already open.
 */
private void prepareCursor(int maxBatchSize, String offsetField, String lastSourceOffset) {
    createMongoClient();

    // A cursor already exists — nothing to do.
    if (null != cursor) {
        return;
    }

    // Resume from the saved offset, or from the configured initial id when none is saved.
    ObjectId offset = (null == lastSourceOffset || lastSourceOffset.isEmpty())
            ? initialObjectId
            : new ObjectId(lastSourceOffset);

    LOG.debug("Getting new cursor with params: {} {} {}", maxBatchSize, offsetField, lastSourceOffset);
    if (isCapped) {
        // Capped collections keep natural order; tail and await new documents.
        cursor = mongoCollection.find().filter(Filters.gt(offsetField, offset))
                .cursorType(CursorType.TailableAwait).batchSize(maxBatchSize).iterator();
    } else {
        // Regular collections need an explicit ascending sort on the offset field.
        cursor = mongoCollection.find().filter(Filters.gt(offsetField, offset))
                .sort(Sorts.ascending(offsetField)).cursorType(CursorType.NonTailable)
                .batchSize(maxBatchSize).iterator();
    }
}

From source file:com.streamsets.pipeline.stage.origin.mongodb.oplog.MongoDBOplogSource.java

License:Apache License

/**
 * Opens a tailable cursor over the oplog, optionally resuming after a stored
 * (timestamp, ordinal) offset and restricting to the requested operation types.
 */
private void prepareCursor(int timestampSeconds, int ordinal, List<OplogOpType> filterOplogTypes,
        int batchSize) {
    LOG.debug("Getting new cursor with offset - TimeStampInSeconds:'{}', Ordinal : '{}' and Batch Size : '{}'",
            timestampSeconds, ordinal, batchSize);
    // The oplog is capped, so a tailable cursor returns documents in natural order,
    // which here means ascending "ts". Plain Tailable (not TailableAwait) is used so
    // reads do not block indefinitely.
    FindIterable<Document> iterable = mongoCollection.find()
            .cursorType(CursorType.Tailable).batchSize(batchSize);

    List<Bson> filters = new ArrayList<>();

    // Only resume-filter when a real offset exists; both fields are -1 otherwise.
    if (timestampSeconds > 0 && ordinal >= 0) {
        filters.add(Filters.gt(TIMESTAMP_FIELD, new BsonTimestamp(timestampSeconds, ordinal)));
    }

    if (!filterOplogTypes.isEmpty()) {
        // Build one equality clause per distinct op type, preserving first-seen order.
        List<Bson> opTypeClauses = new ArrayList<>();
        Set<OplogOpType> seen = new HashSet<>();
        for (OplogOpType opType : filterOplogTypes) {
            if (seen.add(opType)) {
                opTypeClauses.add(Filters.eq(OP_TYPE_FIELD, opType.getOp()));
            }
        }
        // Any of the requested op types is acceptable.
        filters.add(Filters.or(opTypeClauses));
    }

    // Combine the timestamp and op-type conditions, if any were produced.
    if (!filters.isEmpty()) {
        iterable = iterable.filter(Filters.and(filters));
    }
    cursor = iterable.iterator();
}

From source file:es.omarall.mtc.TailingTask.java

License:Apache License

/**
 * Builds a tailable &amp; await-data cursor over the capped documents collection,
 * starting from the beginning when no document has been processed yet, or just
 * after the last tracked {@code _id} otherwise.
 *
 * @return a cursor positioned according to {@code lastTrackedId}
 */
public MongoCursor<Document> buildCursor() {
    if (lastTrackedId != null) {
        // A document with "_id" == lastTrackedId was already processed;
        // resume with the first document whose id is greater than it.
        return cappedCollection.find(Filters.gt("_id", lastTrackedId))
                .sort(new Document("$natural", 1))
                .cursorType(CursorType.TailableAwait)
                .iterator();
    }
    // No offset yet: read the capped collection from the start in insertion order.
    return cappedCollection.find()
            .sort(new Document("$natural", 1))
            .cursorType(CursorType.TailableAwait)
            .iterator();
}

From source file:io.debezium.connector.mongodb.Replicator.java

License:Apache License

/**
 * Use the given primary to read the oplog.
 * /*from   w ww .  ja v  a 2 s.  c  o m*/
 * @param primary the connection to the replica set's primary node; may not be null
 */
protected void readOplog(MongoClient primary) {
    BsonTimestamp oplogStart = source.lastOffsetTimestamp(replicaSet.replicaSetName());
    logger.info("Reading oplog for '{}' primary {} starting at {}", replicaSet, primary.getAddress(),
            oplogStart);

    // Include none of the cluster-internal operations and only those events since the previous timestamp ...
    MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs");
    Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position
            Filters.exists("fromMigrate", false)); // skip internal movements across shards
    FindIterable<Document> results = oplog.find(filter).sort(new Document("$natural", 1)) // force forwards collection scan
            .oplogReplay(true) // tells Mongo to not rely on indexes
            .noCursorTimeout(true) // don't timeout waiting for events
            .cursorType(CursorType.TailableAwait); // tail and await new data
    // Read as much of the oplog as we can ...
    ServerAddress primaryAddress = primary.getAddress();
    try (MongoCursor<Document> cursor = results.iterator()) {
        while (running.get() && cursor.hasNext()) {
            if (!handleOplogEvent(primaryAddress, cursor.next())) {
                // Something happened, and we're supposed to stop reading
                return;
            }
        }
    }
}

From source file:io.lumeer.storage.mongodb.dao.collection.MongoDataDao.java

License:Open Source License

/**
 * Maps a generic attribute condition onto the corresponding Mongo comparison filter.
 * Returns null for condition types with no mapping.
 */
private Bson attributeFilter(AttributeFilter filter) {
    final String attribute = filter.getAttributeName();
    final Object value = filter.getValue();
    switch (filter.getConditionType()) {
    case EQUALS:
        return Filters.eq(attribute, value);
    case NOT_EQUALS:
        return Filters.ne(attribute, value);
    case LOWER_THAN:
        return Filters.lt(attribute, value);
    case LOWER_THAN_EQUALS:
        return Filters.lte(attribute, value);
    case GREATER_THAN:
        return Filters.gt(attribute, value);
    case GREATER_THAN_EQUALS:
        return Filters.gte(attribute, value);
    default:
        // Unknown condition type: no filter can be produced.
        return null;
    }
}