List of usage examples for org.springframework.data.mongodb.core.query Criteria where
public static Criteria where(String key)
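Criteria.where(key) opens a fluent criteria chain rooted at the given document field; the resulting Criteria is normally wrapped in a Query and handed to a MongoTemplate operation, as the examples below illustrate. A minimal self-contained sketch (the Person entity, its lastName field, and the findByLastName method are illustrative assumptions, not taken from any of the projects listed here):

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

public class CriteriaWhereSketch {

    // Hypothetical entity, for illustration only.
    static class Person {
        String lastName;
    }

    // where(key) starts the chain; is(value) adds an equality constraint on that field.
    public static List<Person> findByLastName(MongoTemplate mongoTemplate, String name) {
        Query query = new Query(Criteria.where("lastName").is(name));
        return mongoTemplate.find(query, Person.class);
    }
}

Further constraints can be chained with and(), as several of the examples below demonstrate.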
From source file:app.data.local.CollectionBindingRepositoryImpl.java
@Nullable
@Override
public CollectionBinding findByCollectionId(long colId) {
    Query query = new Query();
    query.addCriteria(Criteria.where("collectionId").is(colId));
    return mMongoTemplate.findOne(query, CollectionBinding.class);
}
From source file:org.cbioportal.session_service.domain.internal.SessionRepositoryImpl.java
public int deleteBySourceAndTypeAndId(String source, String type, String id) {
    return this.mongoTemplate
            .remove(new Query(Criteria.where("source").is(source).and("type").is(type).and("id").is(id)),
                    Session.class, type)
            .getN();
}
From source file:com.enitalk.opentok.OpenTokListener.java
@Override
@RabbitListener(queues = "tokbox")
public void onMessage(Message msg) {
    try {
        ObjectNode tree = (ObjectNode) jackson.readTree(msg.getBody());
        logger.info("Tokbox status came {}", tree);
        Query q = Query.query(Criteria.where("sessionId").is(tree.path("sessionId").asText()));
        HashMap event = mongo.findOne(q, HashMap.class, "events");
        if (event != null) {
            String status = tree.path("status").asText();
            ObjectNode evJson = jackson.convertValue(event, ObjectNode.class);
            HashMap item = jackson.convertValue(tree, HashMap.class);
            item.put("came", new Date());
            Update update = new Update().push("opentok", item);
            switch (status) {
            case "uploaded":
                logger.info("Video uploaded for event {} ", evJson.path("ii").asText());
                break;
            case "paused":
                logger.info("Paused event {}", evJson.path("ii").asText());
                break;
            default:
            }
            mongo.updateFirst(q, update, "events");
        }
    } catch (Exception e) {
        logger.info(ExceptionUtils.getFullStackTrace(e));
    }
}
From source file:no.nlf.dal.ParachutistController.java
public Parachutist getOneWithMelwinId(String melwinId) {
    Query query = new Query(Criteria.where("melwinId").is(melwinId));
    mongoParachutist = appContext.mongoOperation().findOne(query, MongoParachutist.class);
    if (mongoParachutist == null) {
        return new Parachutist();
    }
    Parachutist parachutist = mongoParachutist.toParachutist();
    for (Integer clubId : mongoParachutist.getMemberclubs()) {
        parachutist.getMemberclubs().add(clubController.getOne(clubId));
    }
    for (Integer licenseId : mongoParachutist.getLicenses()) {
        parachutist.getLicenses().add(licenseController.getOne(licenseId));
    }
    return parachutist;
}
From source file:quanlyhocvu.api.mongodb.DAO.LopHocDAO.java
public List<LopHocDTO> getLopHocTheoNamHocKhoiLop(String idNamHoc, String idKhoiLop) {
    Query query = Query.query(Criteria.where("khoiLop.$id").is(new ObjectId(idKhoiLop))
            .and("namHoc.$id").is(new ObjectId(idNamHoc)));
    return mongoOperation.find(query, LopHocDTO.class);
}
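The query above reaches into stored DBRefs through the $id suffix. Isolated into a minimal sketch (the DbRefQuerySketch class and the teacher field are hypothetical names, assuming the field is persisted as a DBRef):

import org.bson.types.ObjectId;

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

public class DbRefQuerySketch {

    // Hypothetical: match documents whose "teacher" DBRef points at the given id.
    // Only the raw $id component of a DBRef can be queried this way; fields of the
    // referenced document itself would require a separate lookup.
    public static Query byTeacherId(String teacherId) {
        return Query.query(Criteria.where("teacher.$id").is(new ObjectId(teacherId)));
    }
}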
From source file:com.sangupta.jerry.mongodb.MongoTemplateBasicOperations.java
@Override
public List<T> getForIdentifiers(Collection<X> ids) {
    if (AssertUtils.isEmpty(ids)) {
        return null;
    }
    if (this.idKey == null) {
        fetchMappingContextAndConversionService();
    }
    Query query = new Query(Criteria.where(this.idKey).in(ids));
    return this.mongoTemplate.find(query, this.entityClass);
}
From source file:eu.trentorise.smartcampus.profileservice.storage.ProfileStorage.java
public ExtendedProfile getObjectByEntityId(Long entityId, String profileId) {
    Criteria criteria = Criteria.where("content.socialId").is(entityId);
    if (profileId != null)
        criteria.and("content.profileId").is(profileId);
    criteria.and("type").is(ExtendedProfile.class.getCanonicalName());
    criteria.and("deleted").is(false);
    List<ExtendedProfile> profiles = find(Query.query(criteria), ExtendedProfile.class);
    return profiles == null || profiles.isEmpty() ? null : profiles.get(0);
}
From source file:org.maodian.flyingcat.im.repository.AccountRepositoryImpl.java
@Override
public void persistContact(String uid, SimpleUser su) {
    Query query = Query.query(Criteria.where(Account.USERNAME).is(uid));
    Update update = new Update().addToSet(Account.CONTACTS, su);
    getMongoTemplate().updateFirst(query, update, Account.class);
}
From source file:fr.cirad.mgdb.exporting.individualoriented.DARwinExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    GenotypingProject aProject = mongoTemplate.findOne(
            new Query(Criteria.where(GenotypingProject.FIELDNAME_PLOIDY_LEVEL).exists(true)),
            GenotypingProject.class);
    if (aProject == null)
        LOG.warn("Unable to find a project containing ploidy level information! Assuming ploidy level is 2.");

    int ploidy = aProject == null ? 2 : aProject.getPloidyLevel();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size() + "individuals";

    StringBuffer donFileContents = new StringBuffer("@DARwin 5.0 - DON -" + LINE_SEPARATOR
            + individualExportFiles.size() + "\t" + 1 + LINE_SEPARATOR + "N" + "\t" + "individual" + LINE_SEPARATOR);

    int count = 0;
    String missingGenotype = "";
    for (int j = 0; j < ploidy; j++)
        missingGenotype += "\tN";

    zos.putNextEntry(new ZipEntry(exportName + ".var"));
    zos.write(("@DARwin 5.0 - ALLELIC - " + ploidy + LINE_SEPARATOR + individualExportFiles.size() + "\t"
            + markerCount * ploidy + LINE_SEPARATOR + "N").getBytes());

    DBCursor markerCursorCopy = markerCursor.copy(); // dunno how expensive this is, but seems safer than keeping all IDs in memory at any time
    short nProgress = 0, nPreviousProgress = 0;
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    markerCursorCopy.batchSize(nChunkSize);

    int nMarkerIndex = 0;
    while (markerCursorCopy.hasNext()) {
        DBObject exportVariant = markerCursorCopy.next();
        Comparable markerId = (Comparable) exportVariant.get("_id");
        if (markerSynonyms != null) {
            Comparable syn = markerSynonyms.get(markerId);
            if (syn != null)
                markerId = syn;
        }
        for (int j = 0; j < ploidy; j++)
            zos.write(("\t" + markerId).getBytes());
    }

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    ArrayList<String> distinctAlleles = new ArrayList<String>(); // the index of each allele will be used as its code
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id
            if (line != null)
                individualId = line;
            else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            donFileContents.append(++count + "\t" + individualId + LINE_SEPARATOR);
            zos.write((LINE_SEPARATOR + count).getBytes());
            nMarkerIndex = 0;
            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant __" + nMarkerIndex
                            + "__, individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String codedGenotype = "";
                if (mostFrequentGenotype != null)
                    for (String allele : mostFrequentGenotype.split(" ")) {
                        if (!distinctAlleles.contains(allele))
                            distinctAlleles.add(allele);
                        codedGenotype += "\t" + distinctAlleles.indexOf(allele);
                    }
                else
                    codedGenotype = missingGenotype.replaceAll("N", "-1"); // missing data is coded as -1
                zos.write(codedGenotype.getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            // LOG.debug("============= doDARwinExport (" + i + "): " + nProgress + "% =============");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }

    zos.putNextEntry(new ZipEntry(exportName + ".don"));
    zos.write(donFileContents.toString().getBytes());

    // now read variant names for those that induced warnings
    nMarkerIndex = 0;
    markerCursor.batchSize(nChunkSize);
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) {
            Comparable markerId = (Comparable) exportVariant.get("_id");
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    markerId = syn;
            }
            problematicMarkerIndexToNameMap.put(nMarkerIndex, markerId);
        }
        nMarkerIndex++; // advance the index for every cursor entry, not just problematic ones
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}
From source file:br.com.ezequieljuliano.argos.template.StandardDAO.java
public List<DomainClass> findByFullText(String collectionName, String searchString, Criteria filterCriteria,
        long limit) {
    CommandResult commandResult = executeFullTextSearch(collectionName, searchString, filterCriteria, limit);
    Collection<ObjectId> searchResultIds = extractSearchResultIds(commandResult);
    Query mongoQuery = Query.query(Criteria.where("_id").in(searchResultIds));
    return getMongoOperations().find(mongoQuery, getDomainClass());
}