Example usage for com.mongodb MongoClientURI MongoClientURI

List of usage examples for com.mongodb MongoClientURI MongoClientURI

Introduction

This page lists example usages of the com.mongodb.MongoClientURI(String) constructor, collected from open source projects.

Prototype

public MongoClientURI(final String uri) 

Document

Creates a MongoClientURI from the given string.
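
The constructor takes a complete MongoDB connection string and parses it into hosts, credentials, database, and driver options. Below is a minimal self-contained sketch; the host, port, database name, and pool size are placeholder values, not taken from any of the examples on this page.

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;

public class MongoClientURIExample {
    public static void main(String[] args) {
        // Placeholder connection string; adjust host, port, and database for a real deployment.
        MongoClientURI uri = new MongoClientURI("mongodb://localhost:27017/testdb?maxPoolSize=20");
        MongoClient client = new MongoClient(uri);
        try {
            System.out.println(uri.getHosts());    // parsed host list, e.g. [localhost:27017]
            System.out.println(uri.getDatabase()); // parsed database name, e.g. testdb
        } finally {
            client.close();
        }
    }
}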

Usage

From source file:org.kurento.repository.internal.repoimpl.mongo.MongoConfiguration.java

License:Apache License

@Override
public Mongo mongo() throws UnknownHostException {
    return new MongoClient(new MongoClientURI(config.getMongoUrlConnection()));
}
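
The string returned by config.getMongoUrlConnection() is expected to be a standard MongoDB connection string; everything after the "?" is parsed by MongoClientURI into driver options. A hypothetical value (not taken from the Kurento sources) that this constructor would accept:

// Hypothetical connection string with credentials, a replica set, and a read preference,
// all of which MongoClientURI parses for the driver.
String mongoUrl = "mongodb://repoUser:secret@db1:27017,db2:27017/kurento?replicaSet=rs0&readPreference=secondaryPreferred";
MongoClient mongo = new MongoClient(new MongoClientURI(mongoUrl));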

From source file:org.lambda3.indra.core.impl.MongoVectorSpaceFactory.java

License:Open Source License

public MongoVectorSpaceFactory(String mongoURI) {
    if (mongoURI == null || mongoURI.isEmpty()) {
        throw new IllegalArgumentException("mongoURI can't be null nor empty");
    }
    this.mongoClient = new MongoClient(new MongoClientURI(mongoURI));

    availableModels = new HashSet<>();
    for (String s : mongoClient.listDatabaseNames()) {
        availableModels.add(s);
    }
}
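
Beyond the explicit null/empty guard, the constructor itself validates the string: a value that does not start with the mongodb:// scheme (or mongodb+srv:// on recent 3.x drivers) is rejected with an IllegalArgumentException, so a misconfigured URI fails fast here rather than at the first query. A small illustrative check, separate from the factory:

// Illustrative only: MongoClientURI rejects strings that are not valid connection strings.
try {
    new MongoClientURI("localhost:27017"); // missing the mongodb:// scheme
} catch (IllegalArgumentException e) {
    System.err.println("Invalid MongoDB URI: " + e.getMessage());
}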

From source file:org.lov.cli.Mongo2RDF.java

License:Creative Commons License

@Override
protected void exec() {
    try {
        //bootstrap connection to MongoDB and create model
        String uriString = "mongodb://" + lovConfig.getProperty("MONGO_DB_USER_PASSWORD") + "@"
                + lovConfig.getProperty("MONGO_DB_HOST") + ":"
                + Integer.parseInt(lovConfig.getProperty("MONGO_DB_PORT")) + "/?authSource=admin";
        MongoClientURI uri = new MongoClientURI(uriString);
        MongoClient mongoClient = new MongoClient(uri);
        @SuppressWarnings("deprecation")
        Jongo jongo = new Jongo(mongoClient.getDB(dbName));
        langCollection = jongo.getCollection("languages");
        //trillos
        pilotCollection = jongo.getCollection("pilots");
        agentCollection = jongo.getCollection("agents");
        vocabCollection = jongo.getCollection("vocabularies");
        elementCollection = jongo.getCollection("elements");
        Model lovModel = ModelFactory.createDefaultModel();
        lovModel.setNsPrefixes(LovConstants.getPrefixes());//allowed in n3 file
        StatementHelper sthlp = new StatementHelper(lovModel);
        int cpt;
        long startTime, estimatedTime;

        File file = new File(lovNQDumpFile + ".temp");
        if (file.exists())
            file.delete();
        file.createNewFile();
        File filen3 = new File(lovN3DumpFile + ".temp");
        if (filen3.exists())
            filen3.delete();
        filen3.createNewFile();

        // Process languages
        startTime = System.currentTimeMillis();
        log.info("Processing Languages");
        MongoCursor<Language> langs = langCollection.find().as(Language.class);
        cpt = 0;
        for (Language lang : langs) {
            cpt++;
            sthlp.addResourceStatement(lang.getUri(), LovConstants.RDF_FULL_TYPE,
                    LovConstants.LEXVO_FULL_LANGUAGE);
            sthlp.addLiteralStatement(lang.getUri(), LovConstants.RDFS_FULL_LABEL, lang.getLabel(), null, "en");
            sthlp.addLiteralStatement(lang.getUri(), LovConstants.LEXVO_FULL_ISO639P3PCODE,
                    lang.getIso639P3PCode(), null, null);
            sthlp.addLiteralStatement(lang.getUri(), LovConstants.LEXVO_FULL_ISO639P1CODE,
                    lang.getIso639P1Code(), null, null);
        }
        estimatedTime = System.currentTimeMillis() - startTime;
        log.info("=> " + cpt + " Languages processed in "
                + String.format("%d sec, %d ms", TimeUnit.MILLISECONDS.toSeconds(estimatedTime),
                        estimatedTime - TimeUnit.MILLISECONDS.toSeconds(estimatedTime)));

        //trillos
        // Process pilots
        startTime = System.currentTimeMillis();
        log.info("Processing Pilots");
        MongoCursor<Pilot> pilots = pilotCollection.find().as(Pilot.class);
        cpt = 0;
        for (Pilot pilot : pilots) {
            cpt++;
            sthlp.addResourceStatement(pilot.getName(), LovConstants.RDF_FULL_TYPE,
                    LovConstants.PILOTS_FULL_NAME);
            sthlp.addLiteralStatement(pilot.getName(), LovConstants.DC_TERMS_FULL_TITLE, pilot.getName(), null,
                    null);
            sthlp.addLiteralStatement(pilot.getName(), LovConstants.RDFS_COMMENT, pilot.getDescription(),
                    XSDDatatype.XSDstring, null);
            sthlp.addLiteralStatement(pilot.getName(), LovConstants.VOAF_FULL_OCCURRENCES,
                    "" + pilot.getNbOccurrences(), XSDDatatype.XSDint, null);

        }
        estimatedTime = System.currentTimeMillis() - startTime;
        log.info("=> " + cpt + " Pilots processed in "
                + String.format("%d sec, %d ms", TimeUnit.MILLISECONDS.toSeconds(estimatedTime),
                        estimatedTime - TimeUnit.MILLISECONDS.toSeconds(estimatedTime)));

        // Process Agents
        startTime = System.currentTimeMillis();
        log.info("Processing Agents");
        MongoCursor<Agent> agents = agentCollection.find().as(Agent.class);
        cpt = 0;
        for (Agent agent : agents) {
            cpt++;
            sthlp.addResourceStatement(agent.getPrefUri(), LovConstants.RDF_FULL_TYPE,
                    (agent.getType().equals("person")) ? LovConstants.FOAF_FULL_PERSON
                            : (agent.getType().equals("organization")) ? LovConstants.FOAF_FULL_ORGANIZATION
                                    : LovConstants.FOAF_FULL_AGENT);
            sthlp.addLiteralStatement(agent.getPrefUri(), LovConstants.FOAF_FULL_NAME, agent.getName(), null,
                    null);
            for (String altURI : agent.getAltUris()) {
                sthlp.addResourceStatement(agent.getPrefUri(), LovConstants.OWL_FULL_SAMEAS, altURI);
            }
        }
        estimatedTime = System.currentTimeMillis() - startTime;
        log.info("=> " + cpt + " Agents processed in "
                + String.format("%d sec, %d ms", TimeUnit.MILLISECONDS.toSeconds(estimatedTime),
                        estimatedTime - TimeUnit.MILLISECONDS.toSeconds(estimatedTime)));

        // Process Elements metrics
        startTime = System.currentTimeMillis();
        log.info("Processing Elements Metrics");
        MongoCursor<Element> elements = elementCollection.find().as(Element.class);
        cpt = 0;
        for (Element element : elements) {
            cpt++;
            sthlp.addLiteralStatement(element.getUri(), LovConstants.VOAF_FULL_OCCURRENCES_IN_DATASETS,
                    "" + element.getOccurrencesInDatasets(), XSDDatatype.XSDint, null);
            sthlp.addLiteralStatement(element.getUri(), LovConstants.VOAF_FULL_REUSED_BY_DATASETS,
                    "" + element.getReusedByDatasets(), XSDDatatype.XSDint, null);
        }
        estimatedTime = System.currentTimeMillis() - startTime;
        log.info("=> " + cpt + " Elements Metrics processed in "
                + String.format("%d sec, %d ms", TimeUnit.MILLISECONDS.toSeconds(estimatedTime),
                        estimatedTime - TimeUnit.MILLISECONDS.toSeconds(estimatedTime)));

        // Process Vocabularies
        startTime = System.currentTimeMillis();
        log.info("Processing Vocabularies");
        MongoCursor<Vocabulary> vocabs = vocabCollection.find().as(Vocabulary.class);
        cpt = 0;
        // add metadata on the LOV Catalog
        sthlp.addResourceStatement(lovDatasetURI, LovConstants.RDF_FULL_TYPE, LovConstants.DCAT_FULL_CATALOG);
        sthlp.addResourceStatement(lovDatasetURI, LovConstants.DC_TERMS_FULL_LICENSE,
                "https://creativecommons.org/licenses/by/4.0/");
        sthlp.addLiteralStatement(lovDatasetURI, LovConstants.DC_TERMS_FULL_MODIFIED, DateYMD(new Date()),
                XSDDatatype.XSDdate, null);
        sthlp.addLiteralStatement(lovDatasetURI, LovConstants.DC_TERMS_FULL_TITLE,
                "The Linked Open Vocabularies (LOV) Catalog", null, "en");
        sthlp.addLiteralStatement(lovDatasetURI, LovConstants.DC_TERMS_FULL_DESCRIPTION,
                "The LOV Catalog is a collection of RDFS and OWL ontologies designed to be reused to describe Data on the Web.",
                null, "en");
        for (Vocabulary vocab : vocabs) {
            log.info("Now processing: " + vocab.getPrefix());
            cpt++;
            String vocabUriLov = lovDatasetURI + "/vocabs/" + vocab.getPrefix();
            sthlp.addResourceStatement(vocabUriLov, LovConstants.RDF_FULL_TYPE,
                    LovConstants.DCAT_FULL_CATALOG_RECORD);
            sthlp.addResourceStatement(lovDatasetURI, LovConstants.DCAT_FULL_RECORD, vocabUriLov);
            sthlp.addLiteralStatement(vocabUriLov, LovConstants.DC_TERMS_FULL_ISSUED,
                    DateYMD(vocab.getCreatedInLOVAt()), XSDDatatype.XSDdate, null);
            sthlp.addLiteralStatement(vocabUriLov, LovConstants.DC_TERMS_FULL_MODIFIED,
                    DateYMD(vocab.getLastModifiedInLOVAt()), XSDDatatype.XSDdate, null);
            if (vocab.getTitles() != null) {
                for (LangValue lv : vocab.getTitles()) {
                    sthlp.addLiteralStatement(vocabUriLov, LovConstants.DC_TERMS_FULL_TITLE, lv.getValue(),
                            null, lv.getLang());
                }
            }
            sthlp.addResourceStatement(vocabUriLov, LovConstants.FOAF_FULL_PRIMARY_TOPIC, vocab.getUri());

            sthlp.addResourceStatement(vocab.getUri(), LovConstants.RDF_FULL_TYPE,
                    LovConstants.VOAF_FULL_VOCABULARY);
            sthlp.addLiteralStatement(vocab.getUri(), LovConstants.VANN_FULL_PREFERRED_NAMESPACE_URI,
                    vocab.getNsp(), null, null);
            sthlp.addLiteralStatement(vocab.getUri(), LovConstants.VANN_FULL_PREFERRED_NAMESPACE_PREFIX,
                    vocab.getPrefix(), null, null);
            sthlp.addLiteralStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_ISSUED,
                    DateYMD(vocab.getIssuedAt()), XSDDatatype.XSDdate, null);

            if (vocab.getTitles() != null) {
                for (LangValue lv : vocab.getTitles()) {
                    sthlp.addLiteralStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_TITLE, lv.getValue(),
                            null, lv.getLang());
                }
            }
            if (vocab.getDescriptions() != null) {
                for (LangValue lv : vocab.getDescriptions()) {
                    sthlp.addLiteralStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_DESCRIPTION,
                            lv.getValue(), null, lv.getLang());
                }
            }
            if (vocab.getPilots() != null) {
                for (String pilot : vocab.getPilots()) {
                    sthlp.addLiteralStatement(vocab.getUri(), LovConstants.LOV_PILOT, pilot, null, null);
                }
            }
            if (vocab.getTags() != null) {
                for (String tag : vocab.getTags()) {
                    sthlp.addLiteralStatement(vocab.getUri(), LovConstants.DCAT_FULL_KEYWORD, tag, null, null);
                }
            }

            sthlp.addLiteralStatement(vocab.getUri(), LovConstants.FOAF_FULL_HOMEPAGE, vocab.getHomepage(),
                    null, null);
            sthlp.addResourceStatement(vocab.getUri(), LovConstants.RDFS_FULL_IS_DEFINED_BY,
                    vocab.getIsDefinedBy());

            if (vocab.getCreatorIds() != null) {
                for (String agentId : vocab.getCreatorIds()) {
                    Agent agent = agentCollection.findOne("{_id:#}", new ObjectId(agentId)).as(Agent.class);
                    sthlp.addResourceStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_CREATOR,
                            agent.getPrefUri());
                }
            }
            if (vocab.getContributorIds() != null) {
                for (String agentId : vocab.getContributorIds()) {
                    Agent agent = agentCollection.findOne("{_id:#}", new ObjectId(agentId)).as(Agent.class);
                    sthlp.addResourceStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_CONTRIBUTOR,
                            agent.getPrefUri());
                }
            }
            if (vocab.getPublisherIds() != null) {
                for (String agentId : vocab.getPublisherIds()) {
                    Agent agent = agentCollection.findOne("{_id:#}", new ObjectId(agentId)).as(Agent.class);
                    sthlp.addResourceStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_PUBLISHER,
                            agent.getPrefUri());
                }
            }

            if (vocab.getReviews() != null) {
                for (Comment rev : vocab.getReviews()) {
                    Resource bn = ResourceFactory.createResource();
                    sthlp.addResourceStatement(vocab.getUri(), LovConstants.REV_FULL_HAS_REVIEW, bn);
                    sthlp.addResourceStatement(bn, LovConstants.RDF_FULL_TYPE, LovConstants.REV_FULL_REVIEW);
                    sthlp.addLiteralStatement(bn, LovConstants.REV_FULL_TEXT, rev.getBody(), null, null);
                    sthlp.addLiteralStatement(bn, LovConstants.DC_TERMS_DATE, DateYMD(rev.getCreatedAt()),
                            XSDDatatype.XSDdate, null);

                    Agent agent = agentCollection.findOne("{_id:#}", new ObjectId(rev.getAgentId()))
                            .as(Agent.class);
                    sthlp.addResourceStatement(bn, LovConstants.DC_TERMS_CREATOR, agent.getPrefUri());
                }
            }

            if (vocab.getDatasets() != null) {
                int sumOccurences = 0;
                for (org.lov.objects.Dataset dataset : vocab.getDatasets()) {
                    Resource bn_datasetOcc = ResourceFactory.createResource();
                    sthlp.addResourceStatement(vocab.getUri(), LovConstants.VOAF_FULL_USAGE_IN_DATASET,
                            bn_datasetOcc);
                    sthlp.addResourceStatement(bn_datasetOcc, LovConstants.RDF_FULL_TYPE,
                            LovConstants.VOAF_FULL_DATASET_OCCURRENCES);
                    sthlp.addResourceStatement(bn_datasetOcc, LovConstants.VOAF_FULL_IN_DATASET,
                            dataset.getUri());
                    sthlp.addLiteralStatement(bn_datasetOcc, LovConstants.VOAF_FULL_OCCURRENCES,
                            "" + dataset.getOccurrences(), XSDDatatype.XSDint, null);
                    sthlp.addLiteralStatement(dataset.getUri(), LovConstants.RDFS_LABEL, dataset.getLabel(),
                            null, "en");
                    sthlp.addLiteralStatement(bn_datasetOcc, LovConstants.DC_TERMS_DATE,
                            DateYMD(dataset.getCreatedAt()), XSDDatatype.XSDdate, null);
                    sumOccurences += dataset.getOccurrences();
                }
                //attach aggregation metrics
                sthlp.addLiteralStatement(vocab.getUri(), LovConstants.VOAF_FULL_OCCURRENCES_IN_DATASETS,
                        "" + sumOccurences, XSDDatatype.XSDint, null);
                sthlp.addLiteralStatement(vocab.getUri(), LovConstants.VOAF_FULL_REUSED_BY_DATASETS,
                        "" + vocab.getDatasets().size(), XSDDatatype.XSDint, null);

                //TODO occurrences in vocabularies ?? OR not ...
                // reused by vocabularies
                long reusedByVocabs = vocabCollection.count(
                        "{$or: [" + "{'versions.relMetadata':#}, " + "{'versions.relSpecializes':#}, "
                                + "{'versions.relGeneralizes':#}, " + "{'versions.relExtends':#}, "
                                + "{'versions.relEquivalent':#}, " + "{'versions.relDisjunc':#}, "
                                + "{'versions.relImports':#} ]}",
                        vocab.getNsp(), vocab.getNsp(), vocab.getNsp(), vocab.getNsp(), vocab.getNsp(),
                        vocab.getNsp(), vocab.getUri());
                sthlp.addLiteralStatement(vocab.getUri(), LovConstants.VOAF_FULL_REUSED_BY_VOCABULARIES,
                        "" + reusedByVocabs, XSDDatatype.XSDint, null);
            }

            if (vocab.getVersions() != null) {
                Collections.sort(vocab.getVersions());
                for (int i = 0; i < vocab.getVersions().size(); i++) {
                    VocabularyVersionWrapper version = vocab.getVersions().get(i);
                    // handle the case we don't have any file for the version --> blank node as subject
                    Resource versionURI = (version.getFileURL() != null)
                            ? ResourceFactory.createResource(version.getFileURL())
                            : ResourceFactory.createResource();
                    if (i == 0 && version.getFileURL() != null) { // <-- this is the latest version
                        //add the version in the dataset in its own namedgraph
                        Dataset dataset = DatasetFactory.createMem();
                        try {
                            dataset.addNamedModel(vocab.getUri(),
                                    RDFDataMgr.loadModel(version.getFileURL(), Lang.N3));
                        } catch (Exception e) {
                            log.error("Error accessing and or parsing vocabulary version :" + vocab.getUri()
                                    + " - " + version.getFileURL());
                            log.error(e.getMessage());
                        }

                        OutputStream fop = new BufferedOutputStream(new FileOutputStream(file, true));
                        RDFDataMgr.write(fop, dataset, Lang.NQ);
                        fop.close();
                        dataset.close();

                        sthlp.addLiteralStatement(vocab.getUri(), LovConstants.DC_TERMS_FULL_MODIFIED,
                                DateYMD(version.getIssued()), XSDDatatype.XSDdate, null);
                        if (version.getLanguageIds() != null) {
                            for (String langId : version.getLanguageIds()) {
                                if (langId != null && !langId.contains("null")) {
                                    try {
                                        Language lang = langCollection.findOne("{_id:#}", new ObjectId(langId))
                                                .as(Language.class);
                                        sthlp.addResourceStatement(vocab.getUri(),
                                                LovConstants.DC_TERMS_FULL_LANGUAGE, lang.getUri());
                                    } catch (Exception e) {
                                        log.error("problem with the lang: " + e.getMessage());
                                    }
                                }
                            }
                        }
                    }
                    sthlp.addResourceStatement(versionURI, LovConstants.RDF_FULL_TYPE,
                            LovConstants.DCAT_FULL_DISTRIBUTION_CLASS);
                    sthlp.addResourceStatement(vocab.getUri(), LovConstants.DCAT_FULL_DISTRIBUTION_PROP,
                            versionURI);
                    sthlp.addLiteralStatement(versionURI, LovConstants.DC_TERMS_FULL_TITLE, version.getName(),
                            null, null);
                    sthlp.addLiteralStatement(versionURI, LovConstants.DC_TERMS_FULL_ISSUED,
                            DateYMD(version.getIssued()), XSDDatatype.XSDdate, null);

                    if (version.getFileURL() != null) {
                        sthlp.addLiteralStatement(versionURI, LovConstants.VOAF_FULL_CLASS_NUMBER,
                                "" + version.getClassNumber(), XSDDatatype.XSDint, null);
                        sthlp.addLiteralStatement(versionURI, LovConstants.VOAF_FULL_PROPERTY_NUMBER,
                                "" + version.getPropertyNumber(), XSDDatatype.XSDint, null);
                        sthlp.addLiteralStatement(versionURI, LovConstants.VOAF_FULL_DATATYPE_NUMBER,
                                "" + version.getDatatypeNumber(), XSDDatatype.XSDint, null);
                        sthlp.addLiteralStatement(versionURI, LovConstants.VOAF_FULL_INSTANCE_NUMBER,
                                "" + version.getInstanceNumber(), XSDDatatype.XSDint, null);
                    }
                    if (version.getLanguageIds() != null) {
                        for (String langId : version.getLanguageIds()) {
                            if (langId != null && !langId.contains("null")) {
                                try {
                                    Language lang = langCollection.findOne("{_id:#}", new ObjectId(langId))
                                            .as(Language.class);
                                    sthlp.addResourceStatement(versionURI, LovConstants.DC_TERMS_FULL_LANGUAGE,
                                            lang.getUri());
                                } catch (Exception e) {
                                    log.error("problem with the lang: " + e.getMessage());
                                }
                            }

                        }
                    }
                    if (version.getRelDisjunc() != null) {
                        for (String relNsp : version.getRelDisjunc()) {
                            Vocabulary relVocab = vocabCollection.findOne("{nsp:#}", relNsp)
                                    .as(Vocabulary.class);
                            sthlp.addResourceStatement(versionURI, LovConstants.VOAF_FULL_HAS_DISJUNCTIONS_WITH,
                                    (relVocab != null ? relVocab.getUri() : relNsp));
                        }
                    }
                    if (version.getRelEquivalent() != null) {
                        for (String relNsp : version.getRelEquivalent()) {
                            Vocabulary relVocab = vocabCollection.findOne("{nsp:#}", relNsp)
                                    .as(Vocabulary.class);
                            sthlp.addResourceStatement(versionURI, LovConstants.VOAF_FULL_HAS_EQUIVALENCES_WITH,
                                    (relVocab != null ? relVocab.getUri() : relNsp));
                        }
                    }
                    if (version.getRelExtends() != null) {
                        for (String relNsp : version.getRelExtends()) {
                            Vocabulary relVocab = vocabCollection.findOne("{nsp:#}", relNsp)
                                    .as(Vocabulary.class);
                            sthlp.addResourceStatement(versionURI, LovConstants.VOAF_FULL_EXTENDS,
                                    (relVocab != null ? relVocab.getUri() : relNsp));
                        }
                    }
                    if (version.getRelGeneralizes() != null) {
                        for (String relNsp : version.getRelGeneralizes()) {
                            Vocabulary relVocab = vocabCollection.findOne("{nsp:#}", relNsp)
                                    .as(Vocabulary.class);
                            sthlp.addResourceStatement(versionURI, LovConstants.VOAF_FULL_GENERALIZES,
                                    (relVocab != null ? relVocab.getUri() : relNsp));
                        }
                    }
                    if (version.getRelImports() != null) {
                        for (String relURI : version.getRelImports()) {
                            sthlp.addResourceStatement(versionURI, LovConstants.OWL_FULL_IMPORTS, relURI);
                        }
                    }
                    if (version.getRelMetadata() != null) {
                        for (String relNsp : version.getRelMetadata()) {
                            Vocabulary relVocab = vocabCollection.findOne("{nsp:#}", relNsp)
                                    .as(Vocabulary.class);
                            sthlp.addResourceStatement(versionURI, LovConstants.VOAF_FULL_METADATA_VOC,
                                    (relVocab != null ? relVocab.getUri() : relNsp));
                        }
                    }
                    if (version.getRelSpecializes() != null) {
                        for (String relNsp : version.getRelSpecializes()) {
                            Vocabulary relVocab = vocabCollection.findOne("{nsp:#}", relNsp)
                                    .as(Vocabulary.class);
                            sthlp.addResourceStatement(versionURI, LovConstants.VOAF_FULL_SPECIALIZES,
                                    (relVocab != null ? relVocab.getUri() : relNsp));
                        }
                    }
                }
            }
            if (cpt % 100 == 0) {
                estimatedTime = System.currentTimeMillis() - startTime;
                log.info("=> " + cpt + " Vocabularies processed in " + String.format("%d min, %d sec",
                        TimeUnit.MILLISECONDS.toMinutes(estimatedTime),
                        TimeUnit.MILLISECONDS.toSeconds(estimatedTime)
                                - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(estimatedTime))));
            }
        }
        estimatedTime = System.currentTimeMillis() - startTime;
        log.info("=> " + cpt + " Vocabularies processed in "
                + String.format("%d min, %d sec", TimeUnit.MILLISECONDS.toMinutes(estimatedTime),
                        TimeUnit.MILLISECONDS.toSeconds(estimatedTime)
                                - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(estimatedTime))));

        //Add LOV namedgraph to Dataset and write to file
        Dataset lovDataset = DatasetFactory.createMem();
        lovDataset.addNamedModel(lovDatasetURI, lovModel);
        OutputStream fop = new BufferedOutputStream(new FileOutputStream(file, true));
        RDFDataMgr.write(fop, lovDataset, Lang.NQ);
        OutputStream fopn3 = new BufferedOutputStream(new FileOutputStream(filen3));
        RDFDataMgr.write(fopn3, lovModel, Lang.N3);
        fop.close();
        fopn3.close();
        lovDataset.close();

        //when here, it means there has been no blocking error, we can then replace the existing files
        File filenqProd = new File(lovNQDumpFile);
        if (filenqProd.exists())
            filenqProd.delete();
        File filen3Prod = new File(lovN3DumpFile);
        if (filen3Prod.exists())
            filen3Prod.delete();
        boolean success = file.renameTo(filenqProd);
        if (!success) {
            log.error("Not been able to replace the nq file");
        }
        success = filen3.renameTo(filen3Prod);
        if (!success) {
            log.error("Not been able to replace the n3 file");
        }

        gzipIt(filenqProd);
        gzipIt(filen3Prod);

        log.info("---Done---");

        //mongoClient.close();

    } catch (UnknownHostException e) {
        log.error(e.getMessage());
    } catch (NotFoundException ex) {
        log.error("Not found: " + ex.getMessage());
    } catch (LOVException ex) {
        log.error(ex.getMessage());
    } catch (IOException e) {
        log.error(e.getMessage());
    } catch (Exception e) {
        log.error(e.getMessage());
    }
}
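
One caveat with the way this example builds its URI: the credentials from MONGO_DB_USER_PASSWORD are concatenated directly into the string, so reserved characters in the password (such as ':', '@', '/' or '%') would break parsing; the driver expects them to be percent-encoded. The sketch below shows one way to assemble the same kind of credentialed URI with encoding; the values are illustrative, not the LOV configuration.

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;

public class CredentialedUriSketch {
    public static void main(String[] args) throws UnsupportedEncodingException {
        // Illustrative values; in Mongo2RDF these come from lovConfig properties.
        String user = "lov";
        String password = "p@ss/word";
        String host = "localhost";
        String port = "27017";

        // URLEncoder performs form-style encoding; it covers ':', '@', '/' and '%',
        // which is usually sufficient for passwords that contain no spaces.
        String uriString = "mongodb://" + URLEncoder.encode(user, "UTF-8") + ":"
                + URLEncoder.encode(password, "UTF-8") + "@" + host + ":" + port + "/?authSource=admin";

        MongoClient mongoClient = new MongoClient(new MongoClientURI(uriString));
        mongoClient.close();
    }
}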

From source file:org.mongodb.tse.tests.RunQueryThreadPool.java

public static void main(String[] args) {
    Option help = Option.builder("help").argName("help").desc("get help").build();
    Option ouri = Option.builder("uri").argName("uri").desc("mongodb uri, required").hasArg().type(String.class)
            .build();
    Option odatabase = Option.builder("database").argName("database")
            .desc("mongodb database, default productpersistdb").hasArg().type(String.class).build();
    Option ocollection = Option.builder("collection").argName("collection")
            .desc("mongodb collection, default product").hasArg().type(String.class).build();
    Option osleep = Option.builder("sleep").argName("sleep").desc("sleep between runs, default 10 seconds")
            .hasArg().type(Integer.class).build();
    Option othreads = Option.builder("threads").argName("threads").desc("number of threads to run, default 5")
            .hasArg().type(Integer.class).build();
    Option readPreference = Option.builder("readPreference").argName("readPreference")
            .desc("read preference, default is secondaryPreferred").hasArg().type(String.class).build();
    Option oids = Option.builder("ids").argName("ids").desc("list of comma separated ids").hasArg()
            .type(String.class).build();
    Option oidFile = Option.builder("idFile").argName("idFile").desc("file containing ids per line").hasArg()
            .type(String.class).build();
    Option oincludeslow = Option.builder("includeslow").argName("includeslow")
            .desc("run slow query that will pause 1 second for every document in collection").build();
    Option oincreasethreads = Option.builder("increasethreads").argName("increasethreads")
            .desc("increase thread count every second until this number").hasArg().type(Integer.class).build();

    Options options = new Options();
    options.addOption(help);
    options.addOption(ouri);
    options.addOption(odatabase);
    options.addOption(ocollection);
    options.addOption(osleep);
    options.addOption(othreads);
    options.addOption(readPreference);
    options.addOption(oids);
    options.addOption(oidFile);
    options.addOption(oincludeslow);
    options.addOption(oincreasethreads);

    CommandLineParser parser = new DefaultParser();
    CommandLine cline = null;
    try {
        // parse the command line arguments
        cline = parser.parse(options, args);
    } catch (ParseException exp) {
        // oops, something went wrong
        System.err.println("Parsing failed.  Reason: " + exp.getMessage());
    }

    if (args.length == 0 || cline.hasOption("help") || !cline.hasOption("uri")) {
        printHelp(options);
    }

    final String[] ids = parseIdFile(cline);

    String uriParameter = cline.getOptionValue("uri");
    String databaseParameter = cline.getOptionValue("database", "productpersistdb");
    String collectionParameter = cline.getOptionValue("collection", "product");
    System.out.println("Using database: " + databaseParameter + " and collection: " + collectionParameter);

    MongoClientURI uri = new MongoClientURI(uriParameter);
    MongoClient mongoClient = new MongoClient(uri);

    MongoDatabase database = mongoClient.getDatabase(databaseParameter);
    final MongoCollection<Document> collection = getCollection(cline, database, collectionParameter);

    long tsleep = 10000;
    if (cline.hasOption("sleep"))
        tsleep = Integer.parseInt(cline.getOptionValue("sleep")) * 1000;
    final long sleep = tsleep;
    int threads = 5;
    if (cline.hasOption("threads"))
        threads = Integer.parseInt(cline.getOptionValue("threads"));

    int max = ids.length;
    boolean includeslow = cline.hasOption("includeslow");
    ExecutorService pool = Executors.newCachedThreadPool();
    for (int i = 0; i < threads; i++) {
        pool.execute(new Runnable() {
            public void run() {
                int count = 0;
                for (;;) {
                    String id = ids[(count % max)];
                    Document doc = null;
                    RawBsonDocument raw = null;

                    Date date = new Date();
                    long end = 0L;
                    long start = System.nanoTime();

                    try {
                        if (includeslow
                        //&& ( count % 2 ) == 0 
                        ) {
                            FindIterable<Document> fit = collection
                                    .find(where("function() { "
                                            + "var d = new Date((new Date()).getTime() + 1*1000); "
                                            + "while ( d > (new Date())) { }; " + "return true;" + "}"))
                                    .limit(100);
                            int dcount = 0;
                            for (Document d : fit) {
                                dcount++;
                            }
                            end = System.nanoTime(); // record the end time so the elapsed value below is meaningful
                            System.out.println(
                                    String.format("%s - slow query, count:%s, start: %s, elapsed: %s ns",
                                            Thread.currentThread().getName(), dcount, date, (end - start)));
                        } else {
                            doc = collection
                                    .find(and(eq("_id", id), where("function() { "
                                            + "var d = new Date((new Date()).getTime() + 1*1000); "
                                            + "while ( d > (new Date())) { }; " + "return true;" + "}")))
                                    .first();
                            end = System.nanoTime();
                            if (doc == null)
                                System.out.println("Could not find " + id);
                            System.out.println(String.format("%s - id: %s, start: %s, elasped: %s ns",
                                    Thread.currentThread().getName(), id, date, (end - start)));
                        }
                    } catch (Exception e) {
                        System.out.println("Got an exception: " + e.getMessage());
                        e.printStackTrace();
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException e2) {
                        }
                    }

                    //try { Thread.sleep(sleep); } catch ( InterruptedException e ) {}
                    count++;

                }

            }
        });
    }
    if (cline.hasOption("increasethreads")) {
        int increaseThreads = Integer.parseInt(cline.getOptionValue("increasethreads"));
        for (int i = threads; i < increaseThreads; i++) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
            }
            pool.execute(new Runnable() {
                public void run() {
                    int count = 0;
                    for (;;) {
                        String id = ids[(count % max)];
                        Document doc = null;
                        RawBsonDocument raw = null;

                        Date date = new Date();
                        long end = 0L;
                        long start = System.nanoTime();

                        try {
                            if (includeslow
                            //&& ( count % 2 == 0 ) 
                            ) {
                                FindIterable<Document> fit = collection
                                        .find(where("function() { "
                                                + "var d = new Date((new Date()).getTime() + 1*1000); "
                                                + "while ( d > (new Date())) { }; " + "return true;" + "}"))
                                        .limit(100);
                                int dcount = 0;
                                for (Document d : fit) {
                                    dcount++;
                                }
                                end = System.nanoTime(); // record the end time so the elapsed value below is meaningful
                                System.out.println(
                                        String.format("%s - slow query, count:%s, start: %s, elapsed: %s ns",
                                                Thread.currentThread().getName(), dcount, date, (end - start)));
                            } else {
                                doc = collection
                                        .find(and(eq("_id", id), where("function() { "
                                                + "var d = new Date((new Date()).getTime() + 1*1000); "
                                                + "while ( d > (new Date())) { }; " + "return true;" + "}")))
                                        .first();
                                end = System.nanoTime();
                                if (doc == null)
                                    System.out.println("Could not find " + id);
                                System.out.println(String.format("%s - id: %s, start: %s, elasped: %s ns",
                                        Thread.currentThread().getName(), id, date, (end - start)));
                            }
                        } catch (Exception e) {
                            System.out.println("Got an exception: " + e.getMessage());
                            e.printStackTrace();
                            try {
                                Thread.sleep(1000);
                            } catch (InterruptedException e2) {
                            }
                        }

                        //try { Thread.sleep(sleep); } catch ( InterruptedException e ) {}
                        count++;

                    }

                }
            });
        }
    }
}

From source file:org.mongodb.tse.tests.Timings.java

public static void main(String[] args) throws ParseException {

    Option help = Option.builder("help").argName("help").desc("get help").build();
    Option test = Option.builder("test").argName("test").desc("quick test").build();
    Option ouri = Option.builder("uri").argName("uri").desc("mongodb uri, required").hasArg().type(String.class)
            .build();
    Option odatabase = Option.builder("database").argName("database")
            .desc("mongodb database, default productpersistdb").hasArg().type(String.class).build();
    Option ocollection = Option.builder("collection").argName("collection")
            .desc("mongodb collection, default product").hasArg().type(String.class).build();
    Option osleep = Option.builder("sleep").argName("sleep").desc("sleep between runs, default 10 seconds")
            .hasArg().type(Integer.class).build();
    Option otimes = Option.builder("times").argName("times").desc("number of times to run, default 100")
            .hasArg().type(Integer.class).build();
    Option readPreference = Option.builder("readPreference").argName("readPreference")
            .desc("read preference, default is secondaryPreferred").hasArg().type(String.class).build();
    Option oids = Option.builder("ids").argName("ids").desc("list of comma separated ids").hasArg()
            .type(String.class).build();
    Option oidFile = Option.builder("idFile").argName("idFile").desc("file containing ids per line").hasArg()
            .type(String.class).build();
    Option odoc = Option.builder("doc").argName("doc")
            .desc("get a Document instead of RawBsonDocument, no size output with this option").build();

    Options options = new Options();
    options.addOption(help);
    options.addOption(test);
    options.addOption(ouri);
    options.addOption(odatabase);
    options.addOption(ocollection);
    options.addOption(osleep);
    options.addOption(otimes);
    options.addOption(readPreference);
    options.addOption(oids);
    options.addOption(oidFile);
    options.addOption(odoc);

    CommandLineParser parser = new DefaultParser();
    CommandLine cline = null;
    try {
        // parse the command line arguments
        cline = parser.parse(options, args);
    } catch (ParseException exp) {
        // oops, something went wrong
        System.err.println("Parsing failed.  Reason: " + exp.getMessage());
    }

    if (args.length == 0 || cline.hasOption("help") || !cline.hasOption("uri")) {
        printHelp(options);
    }

    if (cline.hasOption("test")) {
        List<Double> testList = new ArrayList<Double>();
        for (int i = 0; i < 100; i++) {
            testList.add(new Double(i));
        }
        Collections.sort(testList);
        System.out.println(String.format("P50: %.2f, P75: %.2f, P90: %.2f, P95: %.2f, P99: %.2f",
                percentile(testList, 0.50), percentile(testList, 0.75), percentile(testList, 0.90),
                percentile(testList, 0.95), percentile(testList, 0.99)));
        System.exit(0);
    }

    String[] ids = null;
    if (cline.hasOption("idFile")) {
        ids = parseIdFile(new File(cline.getOptionValue("idFile")));
    } else
        ids = cline.getOptionValue("ids", "517886481000").split(",");

    List<Double> timeList = new ArrayList<>();

    String uriParameter = cline.getOptionValue("uri");
    String databaseParameter = cline.getOptionValue("database", "productpersistdb");
    String collectionParameter = cline.getOptionValue("collection", "product");
    System.out.println("Using database: " + databaseParameter + " and collection: " + collectionParameter);

    MongoClientURI uri = new MongoClientURI(uriParameter);
    MongoClient mongoClient = new MongoClient(uri);

    MongoDatabase database = mongoClient.getDatabase(databaseParameter);
    MongoCollection<Document> collection = null;
    MongoCollection<RawBsonDocument> rawCollection = null;

    boolean doDoc = cline.hasOption("doc");
    if (doDoc) {
        if (cline.hasOption("readPreference")) {
            String p = cline.getOptionValue("readPreference");
            collection = database.getCollection(collectionParameter)
                    .withReadPreference(ReadPreference.valueOf(p));
        } else
            collection = database.getCollection(collectionParameter)
                    .withReadPreference(ReadPreference.secondaryPreferred());
    } else {
        if (cline.hasOption("readPreference")) {
            String p = cline.getOptionValue("readPreference");
            rawCollection = database.getCollection(collectionParameter, RawBsonDocument.class)
                    .withReadPreference(ReadPreference.valueOf(p));
        } else
            rawCollection = database.getCollection(collectionParameter, RawBsonDocument.class)
                    .withReadPreference(ReadPreference.secondaryPreferred());
    }

    long sleep = 10000;
    if (cline.hasOption("sleep"))
        sleep = Integer.parseInt(cline.getOptionValue("sleep")) * 1000;
    int times = 100;
    if (cline.hasOption("times"))
        times = Integer.parseInt(cline.getOptionValue("times"));

    int count = 0;
    int max = ids.length;
    while (count < times) {
        String id = ids[(count % max)];
        Document doc = null;
        RawBsonDocument raw = null;

        Date date = new Date();
        long end = 0L;
        long start = System.nanoTime();
        if (doDoc) {
            doc = collection.find(eq("_id", id)).first();
            end = System.nanoTime();
            if (doc == null)
                System.out.println("Could not find " + id);
        } else {
            raw = rawCollection.find(eq("_id", id)).first();
            end = System.nanoTime();
            if (raw == null)
                System.out.println("Could not find " + id);
        }

        int size = 0;
        if (raw != null)
            size = raw.getByteBuffer().capacity();

        if (raw != null) {
            System.out.println(String.format("id: %s, start: %s, elasped: %s ns, docSize: %s", id, date,
                    (end - start), size));
        } else {
            System.out.println(String.format("id: %s, start: %s, elasped: %s ns", id, date, (end - start)));
        }
        timeList.add(new Double(end - start));
        try {
            Thread.sleep(sleep);
        } catch (InterruptedException e) {
        }
        count++;
    }

    Collections.sort(timeList);

    System.out.println(String.format("P50: %.2f, P75: %.2f, P90: %.2f, P95: %.2f, P99: %.2f",
            percentile(timeList, 0.50), percentile(timeList, 0.75), percentile(timeList, 0.90),
            percentile(timeList, 0.95), percentile(timeList, 0.99)));
}

From source file:org.netbeans.modules.mongodb.api.connections.ConnectionInfo.java

License:Open Source License

public MongoClientURI getMongoURI() {
    return new MongoClientURI(uri);
}
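
Code that receives the MongoClientURI built here can read the parsed components back instead of re-parsing the raw string. A generic consumption sketch follows; neither the identifiers nor the URI value come from the NetBeans module.

// Generic consumption sketch; the URI value below is a placeholder.
MongoClientURI parsed = new MongoClientURI("mongodb://user:pwd@host1:27017,host2:27017/admin?replicaSet=rs0");
java.util.List<String> hosts = parsed.getHosts();             // [host1:27017, host2:27017]
String database = parsed.getDatabase();                       // admin
String userName = parsed.getUsername();                       // user
com.mongodb.MongoClientOptions options = parsed.getOptions(); // connection options parsed from the query string
System.out.println(hosts + " " + database + " " + userName + " " + options.getRequiredReplicaSetName());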

From source file:org.netbeans.modules.mongodb.ConnectionInfo.java

License:Open Source License

public MongoClientURI getMongoURI() {
    return new MongoClientURI(node.get(PREFS_KEY_URI, DEFAULT_URI));
}

From source file:org.netbeans.modules.mongodb.properties.MongoClientURIPropertyEditor.java

License:Open Source License

@Override
public void setAsText(String uri) throws IllegalArgumentException {
    setValue(new MongoClientURI(uri.trim()));
}

From source file:org.netbeans.modules.mongodb.ui.components.MongoURIEditorPanel.java

License:Open Source License

public MongoURIEditorPanel(MongoClientURI uri) {
    initComponents();
    final DocumentListener hostValidationListener = new AbstractDocumentListener() {

        @Override
        protected void onChange(DocumentEvent e) {
            addHostButton.setEnabled(hostField.getText().isEmpty() == false);
        }
    };
    hostField.getDocument().addDocumentListener(hostValidationListener);
    final DocumentListener optionValidationListener = new AbstractDocumentListener() {

        @Override
        protected void onChange(DocumentEvent e) {
            final String optionName = optionNameField.getText();
            if (e.getDocument() == optionNameField.getDocument()) {
                if (optionName.isEmpty() == false && MongoUtil.isSupportedOption(optionName) == false) {
                    warn("unsupported option");
                } else {
                    updateURIField();
                }
            }
            addOptionButton
                    .setEnabled(optionName.isEmpty() == false && optionValueField.getText().isEmpty() == false);
        }
    };
    optionNameField.getDocument().addDocumentListener(optionValidationListener);
    optionValueField.getDocument().addDocumentListener(optionValidationListener);
    final DocumentListener fireChangeDocumentListener = new AbstractDocumentListener() {

        @Override
        protected void onChange(DocumentEvent e) {
            updateURIField();
        }

    };
    usernameField.getDocument().addDocumentListener(fireChangeDocumentListener);
    passwordField.getDocument().addDocumentListener(fireChangeDocumentListener);
    databaseField.getDocument().addDocumentListener(fireChangeDocumentListener);
    uriField.getDocument().addDocumentListener(new AbstractDocumentListener() {

        @Override
        protected void onChange(DocumentEvent e) {
            synchronized (lock) {
                if (uriUserInput || uriFieldUpdate) {
                    return;
                }
                uriUserInput = true;
            }
            try {
                setMongoURI(new MongoClientURI(uriField.getText().trim()));
            } catch (IllegalArgumentException ex) {
                error(ex.getLocalizedMessage());
            }
            uriUserInput = false;

        }
    });
    if (uri != null) {
        setMongoURI(uri);
    }
}

From source file:org.netbeans.modules.mongodb.ui.components.MongoURIEditorPanel.java

License:Open Source License

private void updateURIField() {
    synchronized (lock) {
        if (uriUserInput || uriFieldUpdate) {
            return;
        }
        uriFieldUpdate = true;
    }
    final String uri = computeMongoURIString();
    try {
        uriField.setText(new MongoClientURI(uri).getURI());
        clearNotificationLineSupport();
    } catch (IllegalArgumentException ex) {
        error(ex.getLocalizedMessage());
    }
    changeSupport.fireChange();
    uriFieldUpdate = false;
}