List of usage examples for com.mongodb.client.result UpdateResult getMatchedCount
public abstract long getMatchedCount();
From source file:module.ImportArrayExpressInit.java
License:Open Source License
/**
 * Imports ArrayExpress series listed in {@code listAccessions} into the
 * "epimed_experiments" Mongo database: upserts a "series" document per
 * accession, then parses the SDRF clinical-data file and upserts one
 * "sample" document per data line. A series whose secondary accession is
 * already present as a GEO GSE in the "series" collection is skipped.
 *
 * NOTE(review): exits the JVM (System.exit(0)) when a sample id cannot be
 * derived from a data line — presumably intentional fail-fast; confirm.
 */
public ImportArrayExpressInit() {
    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // ===== Pattern =====
    // Matches a bracketed token of printable/space characters, e.g. "[organism part]".
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    ; // NOTE(review): stray empty statement kept as-is
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====
    for (String accession : listAccessions) {
        // Fetch the IDF (series-level metadata) file from ArrayExpress.
        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);
        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));
        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE =====
        // If any secondary accession is a GSE already stored in "series", skip this import.
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession))
                        .first();
                isGseFound = gse != null;
                // System.out.println("GEO accession " + gseNumber + " found: " + isGseFound);
            }
        }

        if (!isGseFound) {
            // ===== Create Mongo series =====
            List<String> listSeriesAcc = new ArrayList<String>();
            listSeriesAcc.add(accession);
            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());
            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                listSeriesAcc.addAll(series.getListAccessions());
            }
            docSeries.put("accessions", listSeriesAcc);
            // Upsert by hand: update first, insert only when nothing matched.
            UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                    new Document("$set", docSeries));
            if (updateResult.getMatchedCount() == 0) {
                collectionSeries.insertOne(docSeries);
            }
            System.out.println(docSeries);

            // ===== Import clinical data =====
            // Fetch the SDRF (sample-level metadata) file referenced by the IDF.
            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);
            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Samples =====
            // First SDRF line is the header; remaining lines are one sample each.
            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);
            for (int i = 1; i < data.size(); i++) {
                Integer nbSamples = data.size() - 1;
                Map<String, Object> mapParameters = this.createParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);
                if (idSample == null) {
                    // Fail fast: without a sample id the import cannot continue.
                    System.err.println("idSample is not recongnized for " + mapParameters);
                    mongoClient.close();
                    System.exit(0);
                }
                // Defaults when the SDRF omits organism / library strategy.
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = "Homo sapiens";
                }
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = "rna-seq";
                }
                String layout = (String) mapParameters.get("LIBRARY_LAYOUT");
                if (layout != null && !layout.isEmpty()) {
                    layout = layout.toLowerCase().trim();
                }
                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;
                boolean analysed = false;
                if (docAlreadyExist) {
                    analysed = (Boolean) docSampleExist.get("analyzed");
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample
                            + ": already exists in the database, analyzed=" + analysed);
                } else {
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample);
                }

                // ===== Sample Document =====
                Document docSample = mongoService.createSample(idSample, (String) docSeries.get("_id"),
                        listSeriesAcc, organism, (Date) docSeries.get("submission_date"),
                        (Date) docSeries.get("last_update"), analysed);

                // ===== Mandatory parameters =====
                // Preserve "exp_group" if the document exists already
                Document expGroup = null;
                if (docAlreadyExist) {
                    expGroup = (Document) docSampleExist.get("exp_group");
                } else {
                    expGroup = mongoService.createExpGroup(docSample, platform,
                            (String) mapParameters.get("organism part"),
                            (String) mapParameters.get("Source Name"), organism);
                    if (layout != null) {
                        expGroup.append("layout", layout);
                        // run_name: first non-null value among the candidate parameter keys wins.
                        int j = 0;
                        boolean isFound = false;
                        String runName = null;
                        while (!isFound && j < listRunNameParameters.length) {
                            runName = (String) mapParameters.get(listRunNameParameters[j]);
                            isFound = runName != null;
                            j++;
                        }
                        if (runName != null) {
                            expGroup.append("run_name", runName);
                        }
                    }
                }
                docSample.append("exp_group", expGroup);

                // ===== Supplementary parameters =====
                Document parameters = mongoService.createParameters(docSample, mapParameters);
                docSample.append("parameters", parameters);

                // === Delete if already exist ===
                // Replace-by-delete-then-insert rather than an upsert.
                collectionSamples.deleteOne(eq("_id", idSample));

                // ===== Insert data =====
                collectionSamples.insertOne(docSample);

                // ===== Update series for platforms =====
                // Recompute the distinct platform ids of all samples of this series.
                List<String> listPlatforms = collectionSamples
                        .distinct("exp_group.id_platform", Filters.in("series", accession), String.class)
                        .into(new ArrayList<String>());
                docSeries.append("platforms", listPlatforms);
                collectionSeries.updateOne(Filters.eq("_id", accession), new Document("$set", docSeries));
            }
        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to " + accession
                    + " exists already. Skip import.");
        }
    }
    mongoClient.close();
}
From source file:module.ImportGeo.java
License:Open Source License
/**
 * Imports the GEO series listed in {@code listGseNumber} into the
 * "epimed_experiments" Mongo database: for each GSE it upserts a "series"
 * document, upserts one "platform" document per GPL, and rewrites one
 * "sample" document per GSM (delete-then-insert), preserving the existing
 * "exp_group" sub-document of samples that are already present.
 */
public ImportGeo() {
    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Insert data =====
    for (int k = 0; k < listGseNumber.length; k++) {
        String gseNumber = listGseNumber[k];
        System.out.println("------------------------------------------");
        System.out.println(k + " Import " + gseNumber);

        // ===== Load GSE =====
        NcbiGeoGse gse = new NcbiGeoGse(webService.loadGeo(gseNumber));
        System.out.println(gse);

        // ===== Series =====
        // Upsert by hand: update first, insert only when nothing matched.
        MongoCollection<Document> collectionSeries = db.getCollection("series");
        Document docSeries = mongoService.createSeries(gse.getGseNumber(), gse.getTitle(), gse.getListGpl(),
                gse.getSubmissionDate(), gse.getLastUpdate());
        UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", gse.getGseNumber()),
                new Document("$set", docSeries));
        if (updateResult.getMatchedCount() == 0) {
            collectionSeries.insertOne(docSeries);
        }

        // ===== Platforms =====
        MongoCollection<Document> collectionPlatforms = db.getCollection("platform");
        for (int i = 0; i < gse.getListGpl().size(); i++) {
            NcbiGeoGpl gpl = new NcbiGeoGpl(webService.loadGeo(gse.getListGpl().get(i)));
            System.out.println("\t Import platform " + gpl.getGplNumber());
            Document docPlatforms = mongoService.createPlatform(gpl.getGplNumber(), gpl.getTitle(),
                    gpl.getTaxid(), gpl.getOrganism(), gpl.getManufacturer(), gpl.getSubmissionDate(),
                    gpl.getLastUpdate(), gpl.getTechnology());
            UpdateResult res = collectionPlatforms.updateOne(Filters.eq("_id", gpl.getGplNumber()),
                    new Document("$set", docPlatforms));
            if (res.getMatchedCount() == 0) {
                collectionPlatforms.insertOne(docPlatforms);
            }
        }

        // ===== Samples ======
        MongoCollection<Document> collectionSamples = db.getCollection("sample");
        // for (int i=0; i<1; i++) {
        for (int i = 0; i < gse.getListGsm().size(); i++) {
            NcbiGeoGsm gsm = new NcbiGeoGsm(webService.loadGeo(gse.getListGsm().get(i)));
            Document docSampleExist = collectionSamples.find(Filters.eq("_id", gsm.getGsmNumber())).first();
            boolean docAlreadyExist = docSampleExist != null;
            boolean analysed = false;
            if (docAlreadyExist) {
                analysed = (Boolean) docSampleExist.get("analyzed");
                System.out.println(i + "/" + gse.getListGsm().size() + "\t " + gse.getGseNumber() + "\t "
                        + gsm.getGsmNumber() + ": already exists in the database, analyzed=" + analysed);
            } else {
                System.out.println(i + "/" + gse.getListGsm().size() + "\t " + gse.getGseNumber() + "\t "
                        + gsm.getGsmNumber());
            }

            // ===== Sample Document =====
            Document docSample = mongoService.createSample(gsm.getGsmNumber(), gse.getGseNumber(),
                    gsm.getListGse(), gsm.getOrganism(), gsm.getSubmissionDate(), gsm.getLastUpdate(),
                    analysed);

            // ===== Mandatory parameters =====
            // Preserve "exp_group" if the document exists already
            Document expGroup = null;
            if (docAlreadyExist) {
                expGroup = (Document) docSampleExist.get("exp_group");
            } else {
                expGroup = mongoService.createExpGroup(docSample, gsm.getGplNumber(), gsm.getTitle(),
                        gsm.getSourceName(), gsm.getOrganism());
            }
            docSample.append("exp_group", expGroup);

            // ===== Supplementary parameters =====
            Document parameters = generateParameters(gsm);
            docSample.append("parameters", parameters);

            // === Delete if already exist ===
            // Replace-by-delete-then-insert rather than an upsert.
            collectionSamples.deleteOne(eq("_id", gsm.getGsmNumber()));

            // ===== Insert data =====
            collectionSamples.insertOne(docSample);
        }
    }
    mongoClient.close();
}
From source file:module.script.emtab365.ImportSamplesEMTAB365.java
License:Open Source License
/**
 * Imports samples for the E-MTAB-365 series from a local SDRF Excel file
 * ("E-MTAB-365.sdrf.xlsx") into the "epimed_experiments" Mongo database.
 * Each non-pool data row produces one "samples" document (delete-then-insert);
 * platforms other than A-AFFY-44 (mapped to GPL570) are upserted into
 * "platforms".
 *
 * NOTE(review): assumes the series document "E-MTAB-365" already exists in
 * "series" (docSeries is dereferenced without a null check) — confirm.
 */
public ImportSamplesEMTAB365() {
    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Collections ======
    MongoCollection<Document> collectionPlatforms = db.getCollection("platforms");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====
    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "E-MTAB-365.sdrf.xlsx";
    System.out.println("LOADING \t " + inputfile);
    excelService.load(inputfile);

    // ===== Init values ======
    String idSeries = "E-MTAB-365";
    List<String> listSeries = new ArrayList<String>();
    listSeries.add(idSeries);
    Document docSeries = collectionSeries.find(Filters.eq("_id", idSeries)).first();
    String organism = "Homo sapiens";

    // ==== Header processing ====
    // Normalize header cells: keep the text inside "[...]" and replace
    // ':', '_' and '.' with spaces; map each column index to its clean name.
    Map<Integer, String> mapHeader = new HashMap<Integer, String>();
    for (int i = 0; i < excelService.getHeader().size(); i++) {
        String headerItem = (String) excelService.getHeader().get(i);
        if (headerItem != null && headerItem.contains("[")) {
            String[] parts = headerItem.split("[\\[\\]]");
            headerItem = parts[1];
            headerItem = headerItem.replaceAll("[:_\\.]", " ");
        }
        mapHeader.put(i, headerItem.trim());
    }
    System.out.println(mapHeader);

    for (int i = 0; i < excelService.getData().size(); i++) {
        // for (int i=0; i<1; i++) {
        List<Object> dataline = excelService.getData().get(i);
        String idSample = (String) dataline.get(0);
        // Skip pooled rows.
        if (!idSample.equals("pool XX")) {
            // Column 54 holds the platform accession; A-AFFY-44 maps to GEO's GPL570,
            // anything else is upserted as a new platform document.
            String idPlatform = ((String) dataline.get(54)).trim();
            if (idPlatform.contains("A-AFFY-44")) {
                idPlatform = "GPL570";
            } else {
                Document docPlatform = mongoService.createPlatform(idPlatform, null, "9606", "Homo sapiens",
                        null, null, null, null);
                UpdateResult res = collectionPlatforms.updateOne(
                        Filters.eq("_id", docPlatform.getString("_id")), new Document("$set", docPlatform));
                if (res.getMatchedCount() == 0) {
                    collectionPlatforms.insertOne(docPlatform);
                }
            }

            Document docSample = mongoService.createSample(idSample, idSeries, listSeries, organism,
                    (Date) docSeries.get("submission_date"), (Date) docSeries.get("last_update"), false);

            // === exp_group ===
            Document expgroup = mongoService.createExpGroup(docSample, idPlatform, null, null, organism);
            docSample.append("exp_group", expgroup);

            // === parameters ===
            // Keep only non-empty string cells that are not "NA"/"ND"; non-string
            // cells (numbers, dates) pass through unchanged.
            Map<String, Object> mapParameters = new HashMap<String, Object>();
            for (int j = 0; j < dataline.size(); j++) {
                String key = mapHeader.get(j);
                Object value = dataline.get(j);
                if (value instanceof String) {
                    String valueString = ((String) value).trim();
                    if (valueString != null && !valueString.isEmpty() && !valueString.equals("NA")
                            && !valueString.equals("ND")) {
                        value = valueString;
                    } else {
                        value = null;
                    }
                }
                if (key != null && value != null) {
                    mapParameters.put(key, value);
                    // System.out.println(key + "='" + value+"'");
                }
            }
            Document parameters = mongoService.createParameters(docSample, mapParameters);
            docSample.append("parameters", parameters);

            // === Delete if already exist ===
            // Replace-by-delete-then-insert rather than an upsert.
            collectionSamples.deleteOne(Filters.eq("_id", docSample.getString("_id")));

            // ===== Insert data =====
            collectionSamples.insertOne(docSample);
            System.out.println(docSample);
        }
    }
    mongoClient.close();
}
From source file:module.script.ImportArrayExpress1733.java
License:Open Source License
/**
 * Imports ArrayExpress series listed in {@code listAccessions}: fetches the
 * IDF and SDRF files, skips series already imported under a GEO GSE id, and
 * for samples already present in "samples" merges this accession into their
 * "series" list (rewrite guarded by the {@code commit} flag).
 *
 * NOTE(review): the series upsert is wrapped in {@code if (false)} — the
 * series write is deliberately disabled; confirm before re-enabling.
 * NOTE(review): {@code nbImportedSamples} is printed but never incremented,
 * so the final count is always 0.
 */
public ImportArrayExpress1733() {
    // ===== Connection =====
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Pattern =====
    // Matches a bracketed token of printable/space characters, e.g. "[organism part]".
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    ; // NOTE(review): stray empty statement kept as-is
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====
    for (String accession : listAccessions) {
        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);
        // Fetch the IDF (series-level metadata) file from ArrayExpress.
        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);
        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));
        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE =====
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession))
                        .first();
                isGseFound = gse != null;
            }
        }
        int nbImportedSamples = 0;
        if (!isGseFound) {
            // ===== Create Mongo series =====
            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());
            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }
            // Disabled series upsert (dead code by design? see class NOTE above).
            if (false) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }
            System.out.println(docSeries);

            // ===== Import clinical data =====
            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);
            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====
            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);
            for (int i = 1; i < data.size(); i++) {
                Integer nbSamples = data.size() - 1;
                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);
                if (idSample == null) {
                    // Fail fast: without a sample id the import cannot continue.
                    System.err.println("ERROR: idSample is not recongnized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(0);
                } else {
                    if (formatIdSample) {
                        // NOTE(review): hard-coded "E-MTAB-2836" prefix, not the current
                        // accession — looks copy-pasted; confirm intent.
                        idSample = "E-MTAB-2836" + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                // Keep only the first whitespace-separated token of the id.
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;
                System.out.println("docAlreadyExist " + docAlreadyExist);

                // === Delete old if already exist ===
                // Merge this accession into the sample's "series" list (deduplicated
                // via a HashSet), then rewrite the document when commit is enabled.
                if (docAlreadyExist) {
                    List<String> listSeries = (List<String>) docSampleExist.get("series");
                    Set<String> setSeries = new HashSet<String>();
                    listSeries.add(accession);
                    setSeries.addAll(listSeries);
                    listSeries.clear();
                    listSeries.addAll(setSeries);
                    docSampleExist.append("series", listSeries);
                    System.out.println(docSampleExist);
                    if (commit) {
                        collectionSamples.deleteOne(eq("_id", docSampleExist.get("_id")));
                        collectionSamples.insertOne(docSampleExist);
                    }
                }
            }
        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to " + accession
                    + " exists already. Skip import.");
        }
        System.out.println("Number of imported samples: " + nbImportedSamples);
    }
    mongoClient.close();
}
From source file:net.springfieldusa.storage.mongodb.comp.MongoStorageComponent.java
License:Open Source License
@Override public <T extends EntityObject> long update(String collection, T data) { MongoCollection<Document> mongoCollection = getCollection(collection); Document document = new Document(data.getAttributes()); document.put(ID, data.getId());//from w ww .j av a2 s. c om document.put(META, data.getMeta()); document.put(RELATIONSHIPS, createRelationships(data)); UpdateResult result = mongoCollection.replaceOne(eq(ID, data.getId()), document); return result.getMatchedCount(); }
From source file:net.springfieldusa.storage.mongodb.comp.MongoStorageComponent.java
License:Open Source License
@Override public <T extends EntityObject> long update(String collection, String query, T data) { Document jsonQuery = Document.parse(query); MongoCollection<Document> mongoCollection = getCollection(collection); Document document = new Document(data.getAttributes()); document.put(ID, data.getId());/* w w w.j av a 2s .c om*/ document.put(META, data.getMeta()); document.put(RELATIONSHIPS, createRelationships(data)); UpdateResult result = mongoCollection.updateMany(jsonQuery, document); return result.getMatchedCount(); }
From source file:net.springfieldusa.storage.mongodb.comp.MongoStorageComponent.java
License:Open Source License
@Override public <T extends EntityObject> long patch(String collection, T data) { MongoCollection<Document> mongoCollection = getCollection(collection); List<Bson> updates = new ArrayList<>(); if (data.getAttributes() != null) { for (Entry<String, Object> entry : data.getAttributes().entrySet()) updates.add(set(entry.getKey(), entry.getValue())); }//from w w w. j a v a2 s . co m if (data.getMeta() != null) { for (Entry<String, Object> entry : data.getMeta().entrySet()) updates.add(set(META + "." + entry.getKey(), entry.getValue())); } if (data.getRelationships() != null) { for (Relationship relationship : data.getRelationships()) { if (relationship.isMany()) { Collection<Document> dbReferences = new ArrayList<>(); updates.add(set(RELATIONSHIPS + "." + relationship.getType(), dbReferences)); relationship.getObjectReferences().forEach((reference) -> { if (reference.getId() != null) dbReferences.add(createReference(reference)); }); } else { if (relationship.getObjectReference().getId() != null) updates.add(set(RELATIONSHIPS + "." + relationship.getType(), createReference(relationship.getObjectReference()))); } } } UpdateResult result = mongoCollection.updateOne(eq(ID, data.getId()), combine(updates)); return result.getMatchedCount(); }
From source file:org.apache.sling.nosql.mongodb.resourceprovider.impl.MongoDBNoSqlAdapter.java
License:Apache License
@Override public boolean store(NoSqlData data) { Document envelope = new Document(); envelope.put(PN_PATH, data.getPath()); envelope.put(PN_DATA, new Document(data.getProperties(MultiValueMode.LISTS))); // for list-children query efficiency store parent path as well String parentPath = ResourceUtil.getParent(data.getPath()); if (parentPath != null) { envelope.put(PN_PARENT_PATH, parentPath); }//from ww w . j a v a 2 s . c o m UpdateResult result = collection.replaceOne(Filters.eq(PN_PATH, data.getPath()), envelope, new UpdateOptions().upsert(true)); // return true if a new entry was inserted, false if an existing was replaced return (result.getMatchedCount() == 0); }
From source file:org.axonframework.mongo.eventsourcing.tokenstore.MongoTokenStore.java
License:Apache License
@Override public void extendClaim(String processorName, int segment) throws UnableToClaimTokenException { UpdateResult updateResult = mongoTemplate.trackingTokensCollection().updateOne( and(eq("processorName", processorName), eq("segment", segment), eq("owner", nodeId)), set("timestamp", TokenEntry.clock.instant().toEpochMilli())); if (updateResult.getMatchedCount() == 0) { throw new UnableToClaimTokenException( format("Unable to extend claim on token token '%s[%s]'. It is owned " + "by another segment.", processorName, segment)); }/*from www. j a v a 2 s. c o m*/ }
From source file:org.axonframework.mongo.eventsourcing.tokenstore.MongoTokenStore.java
License:Apache License
/** * {@inheritDoc}/* www . ja v a 2s . c o m*/ */ @Override public void releaseClaim(String processorName, int segment) { UpdateResult updateResult = mongoTemplate.trackingTokensCollection().updateOne( and(eq("processorName", processorName), eq("segment", segment), eq("owner", nodeId)), set("owner", null)); if (updateResult.getMatchedCount() == 0) { logger.warn("Releasing claim of token {}/{} failed. It was owned by another node.", processorName, segment); } }