List of usage examples for com.mongodb.client.model.Filters.lte
public static <TItem> Bson lte(final String fieldName, final TItem value)
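Filters.lte builds an { "$lte": value } comparison on the named field, i.e. "field is less than or equal to value". A minimal sketch of typical usage follows; the connection string, database name ("test"), collection name ("events"), and field name ("createdAt") are illustrative assumptions and are not taken from the examples below.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import org.bson.Document;
import org.bson.conversions.Bson;
import java.util.Date;

public class LteExample {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> events = client.getDatabase("test").getCollection("events");
            // { "createdAt": { "$lte": <now> } } -- documents created at or before the current time
            Bson notAfter = Filters.lte("createdAt", new Date());
            for (Document doc : events.find(notAfter)) {
                System.out.println(doc.toJson());
            }
        }
    }
}

As several of the examples below show, lte is commonly combined with gte inside Filters.and to express an inclusive range ("between") query.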
From source file:com.avbravo.ejbjmoordb.mongodb.repository.Repository.java
/**
 * Creates a paginated filter on a date combined with a secondary attribute.
 *
 * @param secondaryfield      name of the secondary field
 * @param secondaryfieldvalue value of the secondary field
 * @param fielddate           name of the date field
 * @param datevalue           date to match
 * @param pageNumber          page number
 * @param rowsForPage         rows per page
 * @param docSort             optional sort document
 * @return matching documents for the requested page
 */
public List<T> filterDayWithoutHourPagination(String secondaryfield, String secondaryfieldvalue,
        String fielddate, Date datevalue, Integer pageNumber, Integer rowsForPage, Document... docSort) {
    list = new ArrayList<>();
    try {
        Document sortQuery = new Document();
        if (docSort.length != 0) {
            sortQuery = docSort[0];
        }
        // gte and lte with the same value: only documents whose date equals datevalue exactly will match
        Bson filter = Filters.and(Filters.eq(secondaryfield, secondaryfieldvalue),
                Filters.gte(fielddate, datevalue), Filters.lte(fielddate, datevalue));
        list = filtersPagination(filter, pageNumber, rowsForPage, sortQuery);
    } catch (Exception e) {
        Logger.getLogger(Repository.class.getName() + "filterDayWithoutHourPagination()").log(Level.SEVERE, null, e);
        exception = new Exception("filterDayWithoutHourPagination ", e);
    }
    return list;
}
From source file:com.avbravo.ejbjmoordb.mongodb.repository.Repository.java
/**
 * Creates a paginated date filter combined with a caller-supplied filter.
 *
 * @param myfilter    additional filter to combine with the date condition
 * @param fielddate   name of the date field
 * @param datevalue   date to match
 * @param pageNumber  page number
 * @param rowsForPage rows per page
 * @param docSort     optional sort document
 * @return matching documents for the requested page
 */
public List<T> filterDayWithoutHourPagination(Bson myfilter, String fielddate, Date datevalue,
        Integer pageNumber, Integer rowsForPage, Document... docSort) {
    list = new ArrayList<>();
    try {
        Document sortQuery = new Document();
        if (docSort.length != 0) {
            sortQuery = docSort[0];
        }
        Bson filter = Filters.and(myfilter, Filters.gte(fielddate, datevalue), Filters.lte(fielddate, datevalue));
        list = filtersPagination(filter, pageNumber, rowsForPage, sortQuery);
    } catch (Exception e) {
        Logger.getLogger(Repository.class.getName() + "filterDayWithoutHourPagination()").log(Level.SEVERE, null, e);
        exception = new Exception("filterDayWithoutHourPagination ", e);
    }
    return list;
}
From source file:com.avbravo.ejbjmoordb.mongodb.repository.Repository.java
/**
 * Creates a paginated date filter combined with a caller-supplied filter (OR variant).
 *
 * @param myfilter    additional filter to combine with the date condition
 * @param fielddate   name of the date field
 * @param datevalue   date to match
 * @param pageNumber  page number
 * @param rowsForPage rows per page
 * @param docSort     optional sort document
 * @return matching documents for the requested page
 */
public List<T> filterDayWithoutHourPaginationOR(Bson myfilter, String fielddate, Date datevalue,
        Integer pageNumber, Integer rowsForPage, Document... docSort) {
    list = new ArrayList<>();
    try {
        Document sortQuery = new Document();
        if (docSort.length != 0) {
            sortQuery = docSort[0];
        }
        // note: despite the OR suffix, the conditions are combined with Filters.and in this implementation
        Bson filter = Filters.and(myfilter, Filters.gte(fielddate, datevalue), Filters.lte(fielddate, datevalue));
        list = filtersPagination(filter, pageNumber, rowsForPage, sortQuery);
    } catch (Exception e) {
        Logger.getLogger(Repository.class.getName() + "filterDayWithoutHourPaginationOR()").log(Level.SEVERE, null, e);
        exception = new Exception("filterDayWithoutHourPaginationOR ", e);
    }
    return list;
}
From source file:com.avbravo.ejbjmoordb.mongodb.repository.Repository.java
/**
 * Creates a paginated filter on a date combined with a secondary Integer attribute.
 *
 * @param secondaryfield      name of the secondary field
 * @param secondaryfieldvalue value of the secondary field
 * @param fielddate           name of the date field
 * @param datevalue           date to match
 * @param pageNumber          page number
 * @param rowsForPage         rows per page
 * @param docSort             optional sort document
 * @return matching documents for the requested page
 */
public List<T> filterDayWithoutHourPagination(String secondaryfield, Integer secondaryfieldvalue,
        String fielddate, Date datevalue, Integer pageNumber, Integer rowsForPage, Document... docSort) {
    list = new ArrayList<>();
    try {
        Document sortQuery = new Document();
        if (docSort.length != 0) {
            sortQuery = docSort[0];
        }
        Bson filter = Filters.and(Filters.eq(secondaryfield, secondaryfieldvalue),
                Filters.gte(fielddate, datevalue), Filters.lte(fielddate, datevalue));
        list = filtersPagination(filter, pageNumber, rowsForPage, sortQuery);
    } catch (Exception e) {
        Logger.getLogger(Repository.class.getName() + "filterDayWithoutHourPagination()").log(Level.SEVERE, null, e);
        exception = new Exception("filterDayWithoutHourPagination ", e);
    }
    return list;
}
From source file:com.egopulse.querydsl.mongodb.MongodbSerializer.java
License:Apache License
@SuppressWarnings("unchecked") @Override// w w w. j av a 2 s .co m public Object visit(Operation<?> expr, Void context) { Operator op = expr.getOperator(); /** * user.firstName.eq("test") * user.addresses.size().eq(20) */ if (op == Ops.EQ) { return handleEqOperation(expr); } /** * user.firstName.ne("test") */ else if (op == Ops.NE) { return Filters.ne(asDBKey(expr, 0), asDBValue(expr, 1)); } /** * user.firstName.isEmpty() */ else if (op == Ops.STRING_IS_EMPTY) { return Filters.eq(asDBKey(expr, 0), new BsonString("")); } /** * user.firstName.eq("test").and(user.lastName.eq("blah")) */ else if (op == Ops.AND) { Bson leftOperation = (Bson) handle(expr.getArg(0)); Bson rightOperation = (Bson) handle(expr.getArg(1)); return Filters.and(leftOperation, rightOperation); } /** * user.firstName.not[Operation] */ else if (op == Ops.NOT) { //Handle the not's child Operation<?> subOperation = (Operation<?>) expr.getArg(0); Operator subOp = subOperation.getOperator(); if (subOp == Ops.IN) { return visit(ExpressionUtils.operation(Boolean.class, Ops.NOT_IN, subOperation.getArg(0), subOperation.getArg(1)), context); } else { Bson arg = (Bson) handle(expr.getArg(0)); return Filters.not(arg); } } /** * user.firstName.eq("test").or(user.firstName.eq("else")) */ else if (op == Ops.OR) { Bson leftOperation = (Bson) handle(expr.getArg(0)); Bson rightOperation = (Bson) handle(expr.getArg(1)); return Filters.or(leftOperation, rightOperation); } /** * Text matching operations */ else if (op == Ops.STARTS_WITH) { return Filters.regex(asDBKey(expr, 0), Pattern.compile("^" + regexValue(expr, 1))); } else if (op == Ops.STARTS_WITH_IC) { return Filters.regex(asDBKey(expr, 0), Pattern.compile("^" + regexValue(expr, 1), Pattern.CASE_INSENSITIVE)); } else if (op == Ops.ENDS_WITH) { return Filters.regex(asDBKey(expr, 0), Pattern.compile(regexValue(expr, 1) + "$")); } else if (op == Ops.ENDS_WITH_IC) { return Filters.regex(asDBKey(expr, 0), Pattern.compile(regexValue(expr, 1) + "$", Pattern.CASE_INSENSITIVE)); } else if (op == Ops.EQ_IGNORE_CASE) { return Filters.regex(asDBKey(expr, 0), Pattern.compile("^" + regexValue(expr, 1) + "$", Pattern.CASE_INSENSITIVE)); } else if (op == Ops.STRING_CONTAINS) { return Filters.regex(asDBKey(expr, 0), Pattern.compile(".*" + regexValue(expr, 1) + ".*")); } else if (op == Ops.STRING_CONTAINS_IC) { return Filters.regex(asDBKey(expr, 0), Pattern.compile(".*" + regexValue(expr, 1) + ".*", Pattern.CASE_INSENSITIVE)); } else if (op == Ops.MATCHES) { return Filters.regex(asDBKey(expr, 0), Pattern.compile(asDBValue(expr, 1).toString())); } else if (op == Ops.MATCHES_IC) { return Filters.regex(asDBKey(expr, 0), Pattern.compile(asDBValue(expr, 1).toString(), Pattern.CASE_INSENSITIVE)); } else if (op == Ops.LIKE) { String regex = ExpressionUtils.likeToRegex((Expression) expr.getArg(1)).toString(); return Filters.regex(asDBKey(expr, 0), Pattern.compile(regex)); } else if (op == Ops.BETWEEN) { return Filters.and(Filters.gte(asDBKey(expr, 0), asDBValue(expr, 1)), Filters.lte(asDBKey(expr, 0), asDBValue(expr, 2))); } else if (op == Ops.IN) { int constIndex = 0; int exprIndex = 1; if (expr.getArg(1) instanceof Constant<?>) { constIndex = 1; exprIndex = 0; } if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) { @SuppressWarnings("unchecked") //guarded by previous check Collection<?> values = ((Constant<? 
extends Collection<?>>) expr.getArg(constIndex)).getConstant(); return Filters.in(asDBKey(expr, exprIndex), values); } /** * user.firstName.in(user.lastName) */ else { throw new UnsupportedOperationException(); // Path<?> path = (Path<?>) expr.getArg(exprIndex); // Constant<?> constant = (Constant<?>) expr.getArg(constIndex); // return asDBObject(asDBKey(expr, exprIndex), convert(path, constant)); } } else if (op == Ops.NOT_IN) { int constIndex = 0; int exprIndex = 1; if (expr.getArg(1) instanceof Constant<?>) { constIndex = 1; exprIndex = 0; } if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) { @SuppressWarnings("unchecked") //guarded by previous check Collection<?> values = ((Constant<? extends Collection<?>>) expr.getArg(constIndex)).getConstant(); return Filters.nin(asDBKey(expr, exprIndex), values); } else { throw new UnsupportedOperationException(); // Path<?> path = (Path<?>) expr.getArg(exprIndex); // Constant<?> constant = (Constant<?>) expr.getArg(constIndex); // return asDBObject(asDBKey(expr, exprIndex), asDBObject("$ne", convert(path, constant))); } } else if (op == Ops.COL_IS_EMPTY) { String field = asDBKey(expr, 0); return Filters.or(Filters.exists(field, false), Filters.size(field, 0)); } else if (op == Ops.LT) { return Filters.lt(asDBKey(expr, 0), asDBValue(expr, 1)); } else if (op == Ops.GT) { return Filters.gt(asDBKey(expr, 0), asDBValue(expr, 1)); } else if (op == Ops.LOE) { return Filters.lte(asDBKey(expr, 0), asDBValue(expr, 1)); } else if (op == Ops.GOE) { return Filters.gte(asDBKey(expr, 0), asDBValue(expr, 1)); } else if (op == Ops.IS_NULL) { return Filters.exists(asDBKey(expr, 0), false); } else if (op == Ops.IS_NOT_NULL) { return Filters.exists(asDBKey(expr, 0), true); } else if (op == Ops.CONTAINS_KEY) { Path<?> path = (Path<?>) expr.getArg(0); Expression<?> key = expr.getArg(1); return Filters.exists(visit(path, context) + "." + key.toString(), true); } // else if (op == MongodbOps.NEAR) { // return asDBObject(asDBKey(expr, 0), asDBObject("$near", asDBValue(expr, 1))); // // } else if (op == MongodbOps.NEAR_SPHERE) { // return asDBObject(asDBKey(expr, 0), asDBObject("$nearSphere", asDBValue(expr, 1))); // // } // else if (op == MongodbOps.ELEM_MATCH) { // return Filters.elemMatch(asDBKey(expr, 0), asDBValue(expr, 1)); // } throw new UnsupportedOperationException("Illegal operation " + expr); }
From source file:com.naryx.tagfusion.cfm.application.sessionstorage.SessionStorageMongoImpl.java
License:Open Source License
public void onExpireAll(cfApplicationData applicationData) { col.deleteMany(Filters.lte("et", new Date())); }
From source file:com.naryx.tagfusion.cfm.cache.impl.MongoCacheImpl.java
License:Open Source License
@Override public void clockEvent(int eventType) { col.deleteMany(Filters.lte("ct", new Date())); }
From source file:com.px100systems.data.plugin.storage.mongo.FilterQueryBuilder.java
License:Open Source License
@Override public Bson convert(le predicate) { return Filters.lte(predicate.getMember(), predicate.getValue()); }
From source file:com.px100systems.data.plugin.storage.mongo.FilterQueryBuilder.java
License:Open Source License
@Override public Bson convert(between predicate) { return Filters.and(Filters.gte(predicate.getMember(), predicate.getMin()), Filters.lte(predicate.getMember(), predicate.getMax())); }
From source file:info.bunji.mongodb.synces.OplogExtractor.java
License:Apache License
@Override
protected void execute() throws Exception {
    Set<String> includeFields = config.getIncludeFields();
    Set<String> excludeFields = config.getExcludeFields();
    String index = config.getDestDbName();
    String syncName = config.getSyncName();

    int checkPoint = 0;
    int retryCnt = 0;
    while (true) {
        try (MongoClient client = MongoClientService.getClient(config)) {
            retryCnt = 0;
            logger.info("[{}] starting oplog sync.", syncName);

            // check whether the stored oplog timestamp is outdated
            MongoCollection<Document> oplogCollection = client.getDatabase("local").getCollection("oplog.rs");
            FindIterable<Document> results;
            if (timestamp != null) {
                results = oplogCollection.find().filter(Filters.lte("ts", timestamp))
                        .sort(new Document("$natural", -1)).limit(1);
                if (results.first() == null) {
                    throw new IllegalStateException("[" + syncName + "] oplog outdated.["
                            + DocumentUtils.toDateStr(timestamp) + "(" + timestamp + ")]");
                }
                //logger.trace("[{}] start oplog timestamp = [{}]", config.getSyncName(), timestamp);
                //config.addSyncCount(-1);

                // resume from the newest oplog entry at or before the saved timestamp
                BsonTimestamp tmpTs = results.first().get("ts", BsonTimestamp.class);
                if (!tmpTs.equals(timestamp)) {
                    // the saved timestamp no longer exists in the oplog; fall back to the closest earlier entry
                    timestamp = tmpTs;
                    config.setStatus(Status.RUNNING);
                    config.setLastOpTime(timestamp);
                    append(SyncOperation.fromConfig(config));
                }
            }

            // start tailing the oplog
            targetDb = client.getDatabase(config.getMongoDbName());
            results = oplogCollection.find().filter(Filters.gte("ts", timestamp))
                    .sort(new Document("$natural", 1)).cursorType(CursorType.TailableAwait)
                    .noCursorTimeout(true).oplogReplay(true);

            logger.info("[{}] started oplog sync. [oplog {} ({})]", syncName,
                    DocumentUtils.toDateStr(timestamp), timestamp);

            // get documents from the oplog
            for (Document oplog : results) {
                // TODO
                SyncOperation op = null;
                timestamp = oplog.get("ts", BsonTimestamp.class);
                if (!"c".equals(oplog.get("op"))) {
                    //if (!Operation.COMMAND.equals(Operation.valueOf(oplog.get("op")))) {
                    // not a command: normal insert/update/delete entry
                    String ns = oplog.getString("ns");
                    String[] nsVals = ns.split("\\.", 2);
                    if (!config.getMongoDbName().equals(nsVals[0]) || !config.isTargetCollection(nsVals[1])) {
                        if (++checkPoint >= 10000) {
                            // periodically record the last optime while skipping non-target entries
                            config.setLastOpTime(timestamp);
                            op = SyncOperation.fromConfig(config);
                            checkPoint = 0; // clear check count
                            append(op);
                        }
                        continue;
                    } else {
                        op = new SyncOperation(oplog, index);
                        checkPoint = 0;
                    }
                } else {
                    // command operation: check whether it targets a synced database/collection
                    op = new SyncOperation(oplog, index);
                    if (!config.getMongoDbName().equals(op.getSrcDbName())
                            || !config.isTargetCollection(op.getCollection())) {
                        checkPoint++;
                        continue;
                    }
                }
                /*
                SyncOperation op = new SyncOperation(oplog, index);
                timestamp = op.getTimestamp();
                // check target database and collection
                if (!config.getMongoDbName().equals(op.getSrcDbName())
                        || !config.isTargetCollection(op.getCollection())) {
                    if (++checkPoint >= 10000) {
                        config.setLastOpTime(timestamp);
                        op = SyncOperation.fromConfig(config);
                        checkPoint = 0; // clear check count
                        append(op);
                    }
                    continue;
                } else {
                    checkPoint = 0;
                }
                */
                if (op.isPartialUpdate()) {
                    // get full document
                    MongoCollection<Document> collection = getMongoCollection(op.getCollection());
                    Document updateDoc = collection.find(oplog.get("o2", Document.class)).first();
                    if (updateDoc == null) {
                        checkPoint++;
                        continue; // deleted document
                    }
                    op.setDoc(updateDoc);
                }
                // filter document (insert or update)
                if (op.getDoc() != null) {
                    Document filteredDoc = DocumentUtils.applyFieldFilter(op.getDoc(), includeFields, excludeFields);
                    if (filteredDoc.isEmpty()) {
                        checkPoint++;
                        continue; // no change to sync fields
                    }
                    op.setDoc(filteredDoc);
                }
                // emit sync data
                append(op);
            }
        } catch (MongoClientException mce) {
            // do nothing.
        } catch (UnknownHostException | MongoSocketException mse) {
            retryCnt++;
            if (retryCnt >= MAX_RETRY) {
                logger.error(String.format("[%s] mongo connect failed. (RETRY=%d)", syncName, retryCnt), mse);
                throw mse;
            }
            long waitSec = (long) Math.min(60, Math.pow(2, retryCnt));
            logger.warn("[{}] waiting mongo connect retry. ({}/{}) [{}sec]", syncName, retryCnt, MAX_RETRY, waitSec);
            Thread.sleep(waitSec * 1000);
        } catch (MongoInterruptedException mie) {
            // interrupt oplog tailable process.
            break;
        } catch (Throwable t) {
            logger.error(String.format("[%s] error. [msg:%s](%s)", syncName, t.getMessage(),
                    t.getClass().getSimpleName()), t);
            throw t;
        }
    }
}