Example usage for com.mongodb.client.model Filters eq

List of usage examples for com.mongodb.client.model Filters eq

Introduction

In this page you can find the example usage for com.mongodb.client.model Filters eq.

Prototype

public static <TItem> Bson eq(final String fieldName, @Nullable final TItem value) 

Source Link

Document

Creates a filter that matches all documents where the value of the field name equals the specified value.

Usage

From source file: module.script.epimed_ontology.UpdateFetalAdultOvary.java

License: Open Source License

public UpdateFetalAdultOvary() {

    // Rewrites the topology fields of adult-ovary samples in the Mongo "sample"
    // collection using the reference ClTopology terms stored in PostgreSQL.
    // NOTE(review): the update looks like a no-op unless the reference name differs,
    // since matched samples already carry id_topology "C56.9" — confirm intent.

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();
    ClTopologyDao topologyDao = new ClTopologyDao(session);
    ClTopology adultOvary = topologyDao.find("C56.9");
    // NOTE(review): fetalOvary is never used below, although the class name
    // ("FetalAdult") suggests it should be — TODO confirm intent.
    ClTopology fetalOvary = topologyDao.find("E56.9");

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSample = db.getCollection("sample");

    // Select adult ovary samples: topology C56.9 and tissue stage 1.
    Bson filters = Filters.and(Filters.eq("exp_group.id_topology", "C56.9"), // ovary
            Filters.eq("exp_group.id_tissue_stage", 1) // adult
    );

    List<Document> samples = collectionSample.find(filters).into(new ArrayList<Document>());

    for (Document sample : samples) {
        Document expgroup = sample.get("exp_group", Document.class);
        expgroup.append("id_topology", adultOvary.getIdTopology());
        expgroup.append("topology", adultOvary.getName());
        sample.append("exp_group", expgroup);
        // NOTE(review): _id is read with getString here while sibling scripts use
        // get("_id"); this breaks if _id is not stored as a String — TODO confirm.
        collectionSample.updateOne(Filters.eq("_id", sample.getString("_id")), new Document("$set", sample));
    }
    System.out.println(samples.size());

    // === Commit transaction ===
    // Relational side is read-only here, so the transaction is rolled back.
    // session.getTransaction().commit();
    session.getTransaction().rollback();

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file: module.script.ImportArrayExpress1733.java

License: Open Source License

public ImportArrayExpress1733() {

    // Imports ArrayExpress experiments (IDF + SDRF files fetched over HTTP from EBI)
    // into the Mongo "series"/"samples" collections, skipping accessions already
    // imported under a GEO (GSE) identifier.
    // NOTE(review): relies on fields declared elsewhere in this class and not visible
    // here: listAccessions, webService, lineSeparator, mongoService, formatIdSample,
    // defaultOrganism, defaultPlatform, commit.

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSeries = db.getCollection("series");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Pattern =====
    // Matches bracketed tokens such as "[Sample Name]" in SDRF column headers.
    String patternText = "\\[[\\p{Print}\\p{Space}]+\\]";
    ; // NOTE(review): stray empty statement (scraper artifact) — harmless.
    Pattern pattern = Pattern.compile(patternText);

    // ===== Series =====

    for (String accession : listAccessions) {

        // NOTE(review): accessionAsList is populated but never read afterwards.
        List<String> accessionAsList = new ArrayList<String>();
        accessionAsList.add(accession);

        // Fetch the IDF (investigation description) file for this accession.
        String urlString = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + accession
                + ".idf.txt";
        System.out.println(urlString);
        String text = webService.loadUrl(urlString);

        String[] parts = text.split(lineSeparator);
        List<String> dataSeries = new ArrayList<String>(Arrays.asList(parts));

        AESeries series = new AESeries(dataSeries);
        System.out.println(series);

        // ===== Check if already imported as a GSE =====
        boolean isGseFound = false;
        String gseNumber = null;
        for (String secondaryAccession : series.getListAccessions()) {
            if (secondaryAccession.startsWith("GSE")) {
                gseNumber = secondaryAccession;
                Document gse = db.getCollection("series").find(Filters.eq("_id", secondaryAccession)).first();
                isGseFound = gse != null;
                // NOTE(review): isGseFound is overwritten on each GSE accession, so only
                // the last one checked decides the outcome — TODO confirm intended.
            }
        }

        // NOTE(review): nbImportedSamples is never incremented; the report below
        // always prints 0.
        int nbImportedSamples = 0;

        if (!isGseFound) {

            // ===== Create Mongo series =====

            Document docSeries = mongoService.createSeries(accession, series.getTitle(), null,
                    series.getSubmissionDate(), series.getSubmissionDate());

            if (series.getListAccessions() != null && !series.getListAccessions().isEmpty()) {
                docSeries.put("secondary_accessions", series.getListAccessions());
            }

            // NOTE(review): dead code — the series upsert is disabled by 'if (false)'.
            // TODO confirm whether it should be gated on 'commit' like the sample write.
            if (false) {
                UpdateResult updateResult = collectionSeries.updateOne(Filters.eq("_id", accession),
                        new Document("$set", docSeries));
                if (updateResult.getMatchedCount() == 0) {
                    collectionSeries.insertOne(docSeries);
                }
            }

            System.out.println(docSeries);

            // ===== Import clinical data =====

            // Fetch the SDRF (sample/data relationship) file referenced by the IDF.
            String url = "https://www.ebi.ac.uk/arrayexpress/files/" + accession + "/" + series.getSdrf();
            System.out.println(url);
            String clindata = webService.loadUrl(url);

            String[] clinparts = clindata.split(lineSeparator);
            List<String> data = new ArrayList<String>(Arrays.asList(clinparts));

            // ===== Recognize samples =====

            // First SDRF line is the header row.
            List<String> header = this.createHeader(data.get(0), pattern);
            System.out.println(header);

            for (int i = 1; i < data.size(); i++) {

                // NOTE(review): recomputed every iteration and never used.
                Integer nbSamples = data.size() - 1;

                Map<String, Object> mapParameters = this.createMapParameters(data.get(i), header);
                String idSample = this.createIdSample(mapParameters);

                if (idSample == null) {
                    // Abort the whole run when a sample id cannot be derived.
                    // NOTE(review): exits with status 0 despite being an error path.
                    System.err.println("ERROR: idSample is not recongnized for " + accession);
                    System.out.println("Line " + i);
                    System.out.println(mapParameters);
                    mongoClient.close();
                    System.exit(0);
                } else {
                    if (formatIdSample) {
                        // NOTE(review): hard-coded "E-MTAB-2836" prefix even though the
                        // outer loop iterates over listAccessions — TODO confirm.
                        idSample = "E-MTAB-2836" + "-" + idSample;
                        idSample = idSample.trim().replaceAll(" ", "-");
                    }
                }
                idSample = idSample.split(" ")[0].trim();

                // === Organism ===
                String organism = (String) mapParameters.get("organism");
                if (organism == null || organism.isEmpty()) {
                    organism = defaultOrganism;
                }

                // === Platform ===
                String platform = (String) mapParameters.get("LIBRARY_STRATEGY");
                if (platform != null && !platform.isEmpty()) {
                    platform = platform.toLowerCase().trim();
                } else {
                    platform = defaultPlatform;
                }
                // NOTE(review): organism and platform are computed but never written.

                Document docSampleExist = collectionSamples.find(Filters.eq("_id", idSample)).first();
                boolean docAlreadyExist = docSampleExist != null;

                System.out.println("docAlreadyExist " + docAlreadyExist);

                // === Delete old if already exist ===
                if (docAlreadyExist) {
                    // Merge this accession into the sample's "series" list, de-duplicated.
                    List<String> listSeries = (List<String>) docSampleExist.get("series");
                    Set<String> setSeries = new HashSet<String>();
                    listSeries.add(accession);
                    setSeries.addAll(listSeries);
                    listSeries.clear();
                    listSeries.addAll(setSeries);
                    docSampleExist.append("series", listSeries);

                    System.out.println(docSampleExist);

                    if (commit) {
                        // Replace the existing document (delete + insert) rather than update.
                        collectionSamples.deleteOne(eq("_id", docSampleExist.get("_id")));
                        collectionSamples.insertOne(docSampleExist);
                    }

                }

            }

        } else {
            System.out.println("GEO accession " + gseNumber + " corresponding to  " + accession
                    + " exists already. Skip import.");
        }

        System.out.println("Number of imported samples: " + nbImportedSamples);

    }

    mongoClient.close();

}

From source file: module.script.ImportSupplementaryGSE20711.java

License: Open Source License

public ImportSupplementaryGSE20711() {

    // Cross-checks each row of the GSE20711 supplementary Excel sheet against the
    // matching breast-tumor expression sample stored in Mongo. Read-only: the
    // documents are only printed, never modified.

    // PostgreSQL session (opened like in sibling scripts; not queried here).
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // Mongo connection.
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collection = db.getCollection("samples");

    // Load the supplementary clinical table.
    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "GSE20711_emmm0003-0726-SD2.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    String gseNumber = "GSE20711";

    // Column 0 holds a patient code of the form "BC<number>".
    for (List<Object> row : excelService.getData()) {

        String patientCode = ((String) row.get(0)).replaceAll("BC", "");
        Integer patientNumber = Integer.parseInt(patientCode);

        // Match on series membership plus the exact sample title built from the code.
        Document docSample = collection
                .find(Filters.and(Filters.in("series", gseNumber),
                        Filters.eq("exp_group.sample_title",
                                "Breast tumor from patient P_" + patientNumber + " (expression data)")))
                .first();

        System.out.println("-------------------------------------------");
        System.out.println(row);
        System.out.println(docSample);

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file: module.script.ImportSupplementaryGSE25219.java

License: Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public ImportSupplementaryGSE25219() {

    // Reads a supplementary Excel file of per-brain clinical annotations and merges
    // them into the "parameters" sub-document of the matching GSE25219 Mongo samples.

    // ===== Session PostgreSQL =====
    // NOTE(review): the Hibernate session is opened and closed but never queried here.
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "NIHMS321722-supplement-7.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    // ===== Format raw data into data structures ======
    // Layout assumed from the loop below (TODO confirm against the file): column 0
    // holds a brain code on the first line of each brain's section; columns 1 and 2
    // hold key/value attribute pairs. listMap.get(i) is the map for headerMap.get(i).

    List<Map<String, String>> listMap = new ArrayList<Map<String, String>>();
    List<String> headerMap = new ArrayList<String>();
    Map<String, String> mapBrain = new HashMap<String, String>();

    for (int i = 0; i < excelService.getData().size(); i++) {
        List<Object> dataLine = excelService.getData().get(i);

        String brainCode = (String) dataLine.get(0);
        if (brainCode != null) {
            // A non-null first column starts a new brain section.
            mapBrain = new HashMap<String, String>();
        }

        // Attribute line for the current brain.
        if (dataLine != null && dataLine.size() > 2 && dataLine.get(1) != null && dataLine.get(2) != null) {
            mapBrain.put(dataLine.get(1).toString().trim(), dataLine.get(2).toString().trim());
        }

        if (brainCode != null) {
            // The map is registered now but keeps being filled by the following lines
            // through the shared mapBrain reference.
            headerMap.add(brainCode);
            listMap.add(mapBrain);
        }
    }

    // ===== Recognize data =====

    for (int i = 0; i < headerMap.size(); i++) {
        System.out.println("----------------------------");
        String code = headerMap.get(i);
        System.out.println(i + " " + code);
        Map<String, String> map = listMap.get(i);

        Map<String, String> updatedMap = new HashMap<String, String>();

        // Keep every attribute except "age" and entries whose value is "no data".
        for (Map.Entry<String, String> entry : map.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();

            if (!key.toLowerCase().equals("age")
                    // && !key.toLowerCase().equals("ethnicity")
                    // && !key.toLowerCase().equals("sex")
                    && !value.toLowerCase().equals("no data")) {
                updatedMap.put(key, value);
            }

        }

        // All samples of series GSE25219 annotated with this brain code.
        List<Document> listDocuments = collection
                .find(Filters.and(Filters.eq("exp_group.main_gse_number", "GSE25219"),
                        Filters.eq("parameters.brain code", code)))
                .into(new ArrayList<Document>());
        System.out.println("Number of corresponding Mongo documents " + listDocuments.size());
        System.out.println(updatedMap);

        for (int j = 0; j < listDocuments.size(); j++) {
            Document doc = listDocuments.get(j);

            Document parameters = (Document) doc.get("parameters");
            parameters.putAll(updatedMap);
            System.out.println("\t" + parameters);

            // Update Mongo document with the merged parameters and flag as analyzed.
            doc.put("parameters", parameters);
            doc.put("analyzed", true);
            UpdateResult updateResult = collection.updateOne(Filters.eq("_id", doc.get("_id")),
                    new Document("$set", doc));

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file: module.script.pro12.TransferPro12.java

License: Open Source License

@SuppressWarnings({ "unchecked" })
public TransferPro12() {

    // Pushes "PRO12" onto the "series" array of every Mongo sample that the
    // relational database links to the PRO12 series.

    // PostgreSQL session.
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // Mongo connection.
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collection = db.getCollection("samples");

    // Sample ids attached to series PRO12 on the relational side.
    String sql = "select id_sample from epimed_prod.om_sample join epimed_prod.om_sample_series using (id_sample) "
            + "join epimed_prod.om_series using (id_series) where id_series='PRO12'";
    List<String> sampleIds = session.createSQLQuery(sql).list();

    // $push payload shared by every update.
    Document seriesPush = new Document().append("series", "PRO12");

    for (String gsmNumber : sampleIds) {

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();

        System.out.println("-----------------------------");
        System.out.println(gsmNumber + " " + doc);

        if (doc == null) {
            continue; // sample absent from Mongo: nothing to push
        }
        collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$push", seriesPush));

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file: module.script.proallchen.ImportProallChenOriginal.java

License: Open Source License

public ImportProallChenOriginal() {

    // Imports PROALL_CHEN clinical annotations from an Excel sheet into the
    // "parameters" sub-document of the matching Mongo samples (id = "ESM<col0>").
    // FIX: the original dereferenced the result of find().first() unconditionally
    // and crashed with a NullPointerException on any row without a Mongo sample;
    // sibling scripts in this file all null-check. Unused locals removed.

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    // ===== Samples ======

    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Excel data loader =====

    String inputfile = this.getInputDirectory() + this.getDirSeparator() + "PROALL_CHEN_clinical.xlsx";
    System.out.println("LOADING \t " + inputfile);
    ExcelService excelService = new ExcelService();
    excelService.load(inputfile);

    System.out.println(excelService.getHeader());

    for (int i = 0; i < excelService.getData().size(); i++) {

        List<Object> line = excelService.getData().get(i);

        // Sample ids are the first column prefixed with "ESM".
        String idSample = "ESM" + line.get(0);

        System.out.println(idSample + " " + line);

        Document docSample = collectionSamples.find(Filters.eq("_id", idSample.trim())).first();

        System.out.println(docSample);

        if (docSample == null) {
            // Row has no Mongo counterpart: report and keep going instead of crashing.
            System.err.println("WARNING: no sample " + idSample.trim() + " in Mongo, line skipped");
            continue;
        }

        Document parameters = (Document) docSample.get("parameters");
        if (parameters == null) {
            // Sample imported without clinical data yet: start an empty sub-document.
            parameters = new Document();
        }

        // Copy every header/value pair of the row into the parameters.
        for (int j = 0; j < excelService.getHeader().size(); j++) {

            String header = (String) excelService.getHeader().get(j);
            Object value = line.get(j);

            parameters.append(header, value);

        }

        System.out.println(parameters);

        // Update Mongo document
        docSample.put("parameters", parameters);
        collectionSamples.updateOne(Filters.eq("_id", docSample.get("_id")),
                new Document("$set", docSample));
    }

    mongoClient.close();
}

From source file: module.script.probcp.UpdateSamplesProbcp.java

License: Open Source License

public UpdateSamplesProbcp() {

    // Copies TNM grade information from per-study PostgreSQL tables (st_bcp schema)
    // into the exp_group of the matching Mongo samples (id = "<STUDY>_<id_sample>").
    // FIX: the original dereferenced the result of find().first() unconditionally
    // and crashed with a NullPointerException when a relational sample had no Mongo
    // counterpart; a null guard now skips such rows.

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collectionSamples = db.getCollection("samples");

    // ===== Session PostgreSQL =====
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    String[] studies = { "tya16" };
    List<String> series = new ArrayList<String>();

    for (int l = 0; l < studies.length; l++) {

        String idStudy = studies[l];
        String studyName = idStudy.toUpperCase();

        // NOTE(review): 'series' is rebuilt for each study but never read — confirm.
        series.clear();
        series.add(studyName);
        series.add("PROBCP");

        // One row per sample: id, clinical classification, TNM stage, grade, type.
        String sql = "select * from st_bcp." + idStudy + "_sample order by id_sample";

        List<Object> listSamples = session.createSQLQuery(sql).list();

        for (int i = 0; i < listSamples.size(); i++) {

            Object[] lineSample = (Object[]) listSamples.get(i);

            String idSample = (String) lineSample[0];
            String clinicalClassification = (String) lineSample[1];
            String tnmStage = (String) lineSample[2];
            Integer grade = (Integer) lineSample[3];
            String type = (String) lineSample[4];

            System.out.println(Arrays.toString(lineSample));

            String id = studyName + "_" + idSample;

            Document docSample = collectionSamples.find(Filters.eq("_id", id)).first();
            if (docSample == null) {
                // No Mongo counterpart: report and continue instead of crashing.
                System.err.println("WARNING: no sample " + id + " in Mongo, line skipped");
                continue;
            }

            Document expgroup = (Document) docSample.get("exp_group");
            expgroup.append("tnm_grade", grade);
            // NOTE(review): tnm_stage is cleared here even though tnmStage was read
            // from the row above — TODO confirm this is intended.
            expgroup.append("tnm_stage", null);
            docSample.append("exp_group", expgroup);

            collectionSamples.updateOne(Filters.eq("_id", id),
                    new Document("$set", docSample));

            System.out.println(docSample);

        }

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();

}

From source file: module.script.TransferBrbAnnotations.java

License: Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public TransferBrbAnnotations() {

    // Copies the aggregated exposure substances of each exposed sample from
    // PostgreSQL into the exp_group of the corresponding Mongo sample and marks
    // the sample as analyzed.

    // PostgreSQL session.
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // Mongo connection.
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collection = db.getCollection("samples");

    // One row per exposed sample, substances aggregated into one comma-separated string.
    String sql = "select id_sample, main_gse_number, string_agg(id_substance, ', ') as list_substances from epimed_prod.om_sample "
            + "join epimed_prod.cl_biopatho using (id_biopatho) join epimed_prod.cl_patient using (id_patient) join epimed_prod.cl_exposure using (id_patient) "
            + "where exposed=true group by id_sample";

    // Each row comes back as a column-name -> value map.
    List<Object> rows = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list();

    for (Object row : rows) {

        Map<String, Object> fields = (HashMap<String, Object>) row;

        String gsmNumber = (String) fields.get("id_sample");
        String gseNumber = (String) fields.get("main_gse_number");

        System.out.println("-----------------------------");
        System.out.println(gseNumber + " " + gsmNumber);

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();
        if (doc == null) {
            continue; // sample not present in Mongo: nothing to update
        }

        Document expGroup = (Document) doc.get("exp_group");
        expGroup.put("exposure", fields.get("list_substances"));
        System.out.println(expGroup);

        // Persist the enriched exp_group and flag the sample as analyzed.
        doc.put("exp_group", expGroup);
        doc.put("analyzed", true);
        collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$set", doc));

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file: module.script.TransferBrbHistologyCodes.java

License: Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public TransferBrbHistologyCodes() {

    // Copies the histology-code columns of db_brb_lung.view_exp_group into the
    // exp_group of the corresponding Mongo samples and marks them as analyzed.

    // PostgreSQL session.
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // Mongo connection.
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collection = db.getCollection("samples");

    String sql = "select id_sample, main_gse_number, index_histology_code, histology_code, ordered_histology_code from db_brb_lung.view_exp_group";

    // Each row comes back as a column-name -> value map.
    List<Object> rows = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list();

    for (Object row : rows) {

        Map<String, Object> fields = (HashMap<String, Object>) row;

        String gsmNumber = (String) fields.get("id_sample");
        String gseNumber = (String) fields.get("main_gse_number");

        System.out.println("-----------------------------");
        System.out.println(gseNumber + " " + gsmNumber);

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();
        if (doc == null) {
            continue; // sample not present in Mongo: nothing to update
        }

        Document expGroup = (Document) doc.get("exp_group");
        expGroup.put("index_histology_code", fields.get("index_histology_code"));
        expGroup.put("histology_code", fields.get("histology_code"));
        expGroup.put("ordered_histology_code", fields.get("ordered_histology_code"));
        System.out.println(expGroup);

        // Persist the enriched exp_group and flag the sample as analyzed.
        doc.put("exp_group", expGroup);
        doc.put("analyzed", true);
        collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$set", doc));

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}

From source file: module.script.TransferExposure.java

License: Open Source License

@SuppressWarnings({ "unused", "unchecked" })
public TransferExposure() {

    // Copies the aggregated exposure substances of each exposed sample from
    // PostgreSQL into the exp_group of the corresponding Mongo sample and marks
    // the sample as analyzed. (Same query and transfer as TransferBrbAnnotations.)

    // PostgreSQL session.
    SessionFactory sessionFactory = HibernateUtil
            .buildSessionFactory("config/epimed_semantic.hibernate.cfg.xml");
    Session session = sessionFactory.openSession();

    // Mongo connection.
    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");
    MongoCollection<Document> collection = db.getCollection("samples");

    // One row per exposed sample, substances aggregated into one comma-separated string.
    String sql = "select id_sample, main_gse_number, string_agg(id_substance, ', ') as list_substances from epimed_prod.om_sample "
            + "join epimed_prod.cl_biopatho using (id_biopatho) join epimed_prod.cl_patient using (id_patient) join epimed_prod.cl_exposure using (id_patient) "
            + "where exposed=true group by id_sample";

    // Each row comes back as a column-name -> value map.
    List<Object> rows = session.createSQLQuery(sql).setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP).list();

    for (Object row : rows) {

        Map<String, Object> fields = (HashMap<String, Object>) row;

        String gsmNumber = (String) fields.get("id_sample");
        String gseNumber = (String) fields.get("main_gse_number");

        System.out.println("-----------------------------");
        System.out.println(gseNumber + " " + gsmNumber);

        Document doc = collection.find(Filters.eq("_id", gsmNumber)).first();
        if (doc == null) {
            continue; // sample not present in Mongo: nothing to update
        }

        Document expGroup = (Document) doc.get("exp_group");
        expGroup.put("exposure", fields.get("list_substances"));
        System.out.println(expGroup);

        // Persist the enriched exp_group and flag the sample as analyzed.
        doc.put("exp_group", expGroup);
        doc.put("analyzed", true);
        collection.updateOne(Filters.eq("_id", gsmNumber), new Document("$set", doc));

    }

    if (session.isOpen()) {
        session.close();
    }
    sessionFactory.close();

    mongoClient.close();
}