Example usage for com.mongodb.client.model Filters eq

List of usage examples for com.mongodb.client.model Filters eq

Introduction

On this page you can find example usage for com.mongodb.client.model Filters eq.

Prototype

public static <TItem> Bson eq(final String fieldName, @Nullable final TItem value) 

Document

Creates a filter that matches all documents where the value of the field name equals the specified value.
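
For orientation, here is a minimal, self-contained sketch of the filter in a query. It assumes a hypothetical "users" collection with a "name" field and a local MongoDB instance; none of these names come from the usage examples below.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import org.bson.Document;

public class FiltersEqExample {
    public static void main(String[] args) {
        // Placeholder connection string; adjust for your deployment.
        try (MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users = mongoClient.getDatabase("test").getCollection("users");
            // Matches all documents whose "name" field equals "alice".
            Document first = users.find(Filters.eq("name", "alice")).first();
            System.out.println(first);
        }
    }
}

The driver also offers a one-argument overload, eq(value), as shorthand for eq("_id", value); the examples below write the _id form out explicitly, e.g. Filters.eq("_id", id).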

Usage

From source file:io.mandrel.metrics.impl.MongoMetricsRepository.java

License:Apache License

@Override
public Timeserie serie(String name) {
    Set<Data> results = StreamSupport.stream(timeseries.find(Filters.eq("type", name))
            .sort(Sorts.ascending("timestamp_hour")).limit(3).map(doc -> {
                LocalDateTime hour = LocalDateTime
                        .ofEpochSecond(((Date) doc.get("timestamp_hour")).getTime() / 1000, 0, ZoneOffset.UTC);
                Map<String, Long> values = (Map<String, Long>) doc.get("values");

                List<Data> mapped = values.entrySet().stream()
                        .map(elt -> Data.of(hour.plusMinutes(Long.valueOf(elt.getKey())), elt.getValue()))
                        .collect(Collectors.toList());
                return mapped;
            }).spliterator(), true).flatMap(elts -> elts.stream())
            .collect(TreeSet::new, Set::add, (left, right) -> {
                left.addAll(right);
            });

    Timeserie timeserie = new Timeserie();
    timeserie.addAll(results);
    return timeserie;
}

From source file:io.mandrel.spider.impl.MongoSpiderRepository.java

License:Apache License

public void delete(long id) {
    collection.deleteOne(Filters.eq("_id", id));
}

From source file:io.mandrel.spider.impl.MongoSpiderRepository.java

License:Apache License

public Optional<Spider> get(long id) {
    Document doc = collection.find(Filters.eq("_id", id)).first();
    return doc == null ? Optional.empty() : Optional.of(JsonBsonCodec.fromBson(mapper, doc, Spider.class));
}

From source file:io.mandrel.spider.impl.MongoSpiderRepository.java

License:Apache License

protected Bson activeFilter() {
    return Filters.or(Filters.eq("status", SpiderStatuses.STARTED), Filters.eq("status", SpiderStatuses.PAUSED),
            Filters.eq("status", SpiderStatuses.INITIATED));
}
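
As an aside (not part of the original source), the same three-way status check can be written more compactly with Filters.in, which covers this or-of-equals pattern:

protected Bson activeFilter() {
    // Equivalent to the Filters.or(...) of three Filters.eq(...) calls above.
    return Filters.in("status", SpiderStatuses.STARTED, SpiderStatuses.PAUSED, SpiderStatuses.INITIATED);
}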

From source file:io.sip3.tapir.twig.mongo.query.SipSearchQuery.java

License:Apache License

private Bson filter(String field, String value) {
    if (isBlank(value)) {
        return null;
    }
    if (isRegex(value)) {
        return Filters.regex(field, value.replaceAll("\\*", "\\.\\*"));
    }
    return Filters.eq(field, value);
}

From source file:it.av.fac.webserver.handlers.WikiPageFetcher.java

/**
 * TODO: Add more query functionalities.
 *
 * @param page the _id of the page to fetch
 * @return a JSONArray containing the matching documents serialized as JSON objects
 */
public JSONArray fetchPage(String page) {
    JSONArray ret = new JSONArray();

    List<Bson> filters = new ArrayList<>();
    filters.add(Filters.eq("_id", page));

    FindIterable<Document> documents = this.collection.find(Filters.and(filters));

    documents.forEach(new Consumer<Document>() {
        @Override
        public void accept(Document doc) {
            ret.put(new JSONObject(doc.toJson()));
        }
    });
    return ret;
}

From source file:module.AnalyseGeo.java

License:Open Source License

public AnalyseGeo() {

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("sample");
    List<Document> listDocuments = collection.find(Filters.in("series", gseNumber))
            // .find(Filters.and(Filters.in("series", gseNumber), Filters.eq("analyzed", false)))
            .into(new ArrayList<Document>());

    // ===== Service =====
    OntologyService ontologyService = new OntologyService(session);
    DispatcherFactory dispatcherFactory = new DispatcherFactory(session);

    // ===== Begin transaction =====
    session.beginTransaction();

    // ===== Analyse ======

    for (int i = 0; i < listDocuments.size(); i++) {
        // for (int i=0; i<1; i++) {
        Document doc = listDocuments.get(i);
        Document expGroup = (Document) doc.get("exp_group");

        String gsmNumber = doc.getString("_id");

        List<String> listEntries = new ArrayList<String>();
        List<String> parameters = new ArrayList<String>();

        String title = (String) expGroup.get("sample_title");
        String source = (String) expGroup.get("sample_source");
        listEntries.add(title);
        listEntries.add(source);

        Map<String, Object> mapParameters = (Map<String, Object>) doc.get("parameters");
        parameters.addAll(mapParameters.keySet());
        parameters.remove("id_sample");
        parameters.remove("extract_protocol");

        // To remove
        parameters.remove("lab description");

        for (int j = 0; j < parameters.size(); j++) {
            listEntries.add(parameters.get(j) + ": " + mapParameters.get(parameters.get(j)));
        }

        // === Clear already filled fields (only if necessary) ===
        // this.clear(expGroup);

        Map<String, List<Object>> mapOntologyObjects = ontologyService.recognizeOntologyObjects(listEntries);
        // Map <ClOntologyCategory, Set<String>> mapOntologyCategories = ontologyService.getMapOntologyCategories();
        // this.generateSummary(ontologyService, mapOntologyCategories, mapOntologyObjects);

        System.out.println("------------------------------------------------------------");
        System.out.println(i + " " + gsmNumber + " " + listEntries);
        System.out.println(ontologyService.toString());

        // ===== Create mapping objects and making links =====

        try {

            // === Dispatcher ===
            for (int j = 0; j < categories.length; j++) {

                dispatcherFactory.getObject(expGroup, mapOntologyObjects, categories[j]);

                System.out.print(categories[j]);
                if (expGroup.getString(categories[j]) != null) {
                    System.out.print(" " + expGroup.getString(categories[j]) + "\n");
                } else {
                    System.out.print("\n");
                }

            }

            System.out.println(expGroup);

            // Update Mongo document
            doc.put("exp_group", expGroup);
            doc.put("analyzed", true);
            if (commit) {
                UpdateResult updateResult = collection.updateOne(Filters.eq("_id", gsmNumber),
                        new Document("$set", doc));

            }

        } catch (DispatcherException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

    }

    if (commit) {
        MongoCollection<Document> collectionSeries = db.getCollection("series");
        Document series = collectionSeries.find(Filters.eq("_id", gseNumber)).first();
        series.put("status", "analyzed");
        collectionSeries.updateOne(Filters.eq("_id", gseNumber), new Document("$set", series));
    }

    // === Commit transaction ===
    session.getTransaction().commit();
    // session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file:module.ClearGeoExpGroup.java

License:Open Source License

public ClearGeoExpGroup() {

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");
    List<Document> listDocuments = collection.find(Filters.in("series", gseNumber))
            .into(new ArrayList<Document>());

    // ===== Analyse ======

    for (int i = 0; i < listDocuments.size(); i++) {

        Document doc = listDocuments.get(i);
        String id = doc.getString("_id");
        Document expGroup = (Document) doc.get("exp_group");
        this.clear(expGroup);
        expGroup.remove("er");
        expGroup.remove("pr");
        expGroup.remove("her2");
        expGroup.remove("triple_negative");

        // Update Mongo document
        doc.put("exp_group", expGroup);
        doc.put("analyzed", false);
        if (commit) {
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", id), new Document("$set", doc));

        }

    }

    mongoClient.close();
}

From source file:module.CreateStudy.java

License:Open Source License

@SuppressWarnings("unchecked")
public CreateStudy() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // === Excel data loader ===

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "prolung2_expgrp4.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);
    List<Object> listCel = excelService.extractColumn(0);

    Integer indCel = excelService.getHeaderMap().get("gse8894_sample_cel");

    // ===  New Series === 
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    Document docSeries = new Document();
    docSeries.append("_id", "PROLUNG").append("title", "Lung cancerous and non-cancerous samples")
            .append("platforms", null).append("submission_date", today).append("last_update", today)
            .append("import_date", today);

    UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", docSeries.get("_id")),
            new Document("$set", docSeries));
    if (updateResult.getMatchedCount() == 0) {
        collectionSeries.insertOne(docSeries);
    }

    // === Add samples to new series ===
    MongoCollection<Document> collectionSamples = db.getCollection("samples");
    for (int i = 0; i < listCel.size(); i++) {

        String gsm = this.getGsm(listCel.get(i));

        Document docSample = collectionSamples.find(Filters.eq("_id", gsm)).first();

        if (docSample == null) {
            System.err.println("ERROR! Sample " + gsm + "doesn't exist. Try another column.");

            gsm = this.getGsm(excelService.getData().get(i).get(indCel));
            docSample = collectionSamples.find(Filters.eq("_id", gsm)).first();

            if (docSample == null) {
                System.err.println("ERROR! Sample " + gsm + " doesn't exist. Exit.");
                System.exit(0);
            } else {
                System.err.println("Found " + gsm);
            }
        }

        Document expGroup = (Document) docSample.get("exp_group");
        setGpl.add(expGroup.get("id_platform").toString());

        List<String> listSeries = (List<String>) docSample.get("series");
        listSeries.add(docSeries.getString("_id"));
        docSample.put("series", listSeries);

        System.out.println(docSample);
        // updateResult = collectionSamples.updateOne(Filters.eq("_id", docSample.get("_id")), new Document("$set", docSample));
    }

    // === Update platforms of the series ===

    System.out.println(setGpl);

    docSeries.put("platforms", setGpl);
    updateResult = collectionSeries.updateOne(Filters.eq("_id", docSeries.get("_id")),
            new Document("$set", docSeries));
    if (updateResult.getMatchedCount() == 0) {
        collectionSeries.insertOne(docSeries);
    }

}

From source file:module.ImportArrayExpress.java

License:Open Source License

public ImportArrayExpress() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("sample");

    // ===== Pattern =====
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====

    for (String accession : listAccessions) {

        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));

        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE ===== 
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;

            }
        }

        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series =====

            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());

            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }

            if (commit) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }

            System.out.println(docSeries);

            // ===== Import clinical data =====

            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);

            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====

            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {

                Integer nbSamples = data.size() - 1;

                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);

                if (idSample == null) {
                    System.err.println("ERROR: idSample is not recongnized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(0);
                } else {
                    if (formatIdSample) {
                        idSample = accession + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;

                boolean analysed = false;

                if (docAlreadyExist) {
                    analysed = (Boolean) docSampleExist.get("analyzed");
                }

                // ===== Sample Document =====

                Document docSample = mongoService.createSample(idSample, (String) docSeries.get("_id"),
                        accessionAsList, organism, (Date) docSeries.get("submission_date"),
                        (Date) docSeries.get("last_update"), analysed);

                Document expGroup = null;
                Document parameters = null;

                // System.out.println("------------------------------------------------------------------");

                if (docAlreadyExist) {
                    // === ID sample already exists ===
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample
                            + ":  already exists in the database, analyzed=" + analysed);
                    expGroup = docSampleExist.get("exp_group", Document.class);
                    parameters = mongoService.updateParameters(docSampleExist, mapParameters);
                } else {
                    // === New sample ===
                    System.out.println(i + "/" + nbSamples + "\t " + docSeries.get("_id") + "\t " + idSample);
                    expGroup = mongoService.createExpGroup(docSample, platform, null, null, organism);
                    parameters = mongoService.createParameters(docSample, mapParameters);
                    nbImportedSamples++;
                }

                // === Update sample_title, sample_source, layout ===
                expGroup.put("sample_title", parameters.getString("organism part"));
                expGroup.put("sample_source", parameters.getString("Source Name"));
                expGroup.put("layout", parameters.getString("LIBRARY_LAYOUT"));

                docSample.append("exp_group", expGroup);
                docSample.append("parameters", parameters);

                if (commit) {

                    // === Update old if already exist ===
                    if (docAlreadyExist) {
                        // collectionSamples.deleteOne(eq("_id", idSample));
                        collectionSamples.updateOne(Filters.eq("_id", idSample),
                                new Document("$set", docSample));
                    } else {
                        // ===== Insert data =====
                        collectionSamples.insertOne(docSample);
                    }

                    // ===== Update series for platforms =====
                    List<String> listPlatforms = collectionSamples
                            .distinct("exp_group.id_platform", Filters.in("series", accession), String.class)
                            .into(new ArrayList<String>());
                    docSeries.append("platforms", listPlatforms);
                    collectionSeries.updateOne(Filters.eq("_id", accession), new Document("$set", docSeries));
                }

            }

        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to  " + accession
                    + " exists already. Skip import.");
        }

        System.out.println("Number of imported samples: " + nbImportedSamples);

    }

    mongoClient.close();

}