Example usage for com.mongodb.client MongoDatabase drop

Introduction

This page collects example usages of com.mongodb.client MongoDatabase.drop() from open source projects.

Prototype

void drop();

Document

Drops this database.
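
For orientation, here is a minimal, self-contained sketch of calling drop() with the modern com.mongodb.client driver. It is an illustrative example, not taken from the projects below; the connection string and the "example" database name are assumptions.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

public class DropDatabaseExample {
    public static void main(String[] args) {
        // Assumes a mongod instance listening on localhost:27017.
        try (MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017")) {
            MongoDatabase database = mongoClient.getDatabase("example");
            // Drops the database and all of its collections.
            database.drop();
        }
    }
}

Because drop() removes the database and every collection in it, the examples below typically call it either to reset state at the start of a run or to clean up at the end.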

Usage

From source file:bariopendatalab.db.DBAccess.java

public void dropDB() {
    MongoDatabase database = client.getDatabase(DBNAME);
    database.drop();
}

From source file:br.ufg.inf.es.saep.sandbox.persistencia.view.StartSaep.java

License:Creative Commons License

/**
 * System initialization method.
 *
 * @param args the command line arguments
 */
public static void main(String[] args) {
    MongoClient cliente = new MongoClient();
    MongoDatabase mongoDatabase = cliente.getDatabase("Saep");

    InterfaceRadocDAO radocDAO = new BasicRadocDAO(mongoDatabase, "Radoc");
    InterfaceTipoDAO tipoDAO = new BasicTipoDAO(mongoDatabase, "Tipo");
    InterfaceResolucaoDAO resolucaoDAO = new BasicResolucaoDAO(mongoDatabase, "Resolucao");
    InterfaceParecerDAO parecerDAO = new BasicParecerDAO(mongoDatabase, "Parecer");

    mongoDatabase.drop();
    System.out.println("Fim do programa");
}

From source file:com.bluedragon.mongo.MongoDatabaseDrop.java

License:Open Source License

public cfData execute(cfSession _session, cfArgStructData argStruct) throws cfmRunTimeException {
    MongoDatabase db = getMongoDatabase(_session, argStruct);

    try {
        db.drop();
        return cfBooleanData.TRUE;
    } catch (MongoException me) {
        throwException(_session, me.getMessage());
        return null;
    }
}

From source file:documentation.ChangeStreamSamples.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create("mongodb://localhost:27017,localhost:27018,localhost:27019");
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // Select the MongoDB database.
    MongoDatabase database = mongoClient.getDatabase("testChangeStreams");
    database.drop();
    sleep();

    // Select the collection to query.
    MongoCollection<Document> collection = database.getCollection("documents");

    /*
     * Example 1
     * Create a simple change stream against an existing collection.
     */
    System.out.println("1. Initial document from the Change Stream:");

    // Create the change stream cursor.
    MongoChangeStreamCursor<ChangeStreamDocument<Document>> cursor = collection.watch().cursor();

    // Insert a test document into the collection.
    collection.insertOne(Document.parse("{username: 'alice123', name: 'Alice'}"));
    ChangeStreamDocument<Document> next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 2
     * Create a change stream with 'lookup' option enabled.
     * The test document will be returned with a full version of the updated document.
     */
    System.out.println("2. Document from the Change Stream, with lookup enabled:");

    // Create the change stream cursor.
    cursor = collection.watch().fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Update the test document.
    collection.updateOne(Document.parse("{username: 'alice123'}"),
            Document.parse("{$set : { email: 'alice@example.com'}}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
    sleep();

    /*
     * Example 3
     * Create a change stream with the 'lookup' option enabled, using a $match pipeline stage.
     */
    System.out.println(
            "3. Document from the Change Stream, with lookup enabled, matching `update` operations only: ");

    // Insert some dummy data.
    collection.insertMany(asList(Document.parse("{updateMe: 1}"), Document.parse("{replaceMe: 1}")));

    // Create $match pipeline stage.
    List<Bson> pipeline = singletonList(
            Aggregates.match(Filters.or(Document.parse("{'fullDocument.username': 'alice123'}"),
                    Filters.in("operationType", asList("update", "replace", "delete")))));

    // Create the change stream cursor with $match.
    cursor = collection.watch(pipeline).fullDocument(FullDocument.UPDATE_LOOKUP).cursor();

    // Forward to the end of the change stream
    next = cursor.tryNext();

    // Update the test document.
    collection.updateOne(Filters.eq("updateMe", 1), Updates.set("updated", true));
    next = cursor.next();
    System.out.println(format("Update operationType: %s %n %s", next.getUpdateDescription(), next));

    // Replace the test document.
    collection.replaceOne(Filters.eq("replaceMe", 1), Document.parse("{replaced: true}"));
    next = cursor.next();
    System.out.println(format("Replace operationType: %s", next));

    // Delete the test document.
    collection.deleteOne(Filters.eq("username", "alice123"));
    next = cursor.next();
    System.out.println(format("Delete operationType: %s", next));
    cursor.close();
    sleep();

    /*
     * Example 4
     * Resume a change stream using a resume token.
     */
    System.out.println("4. Document from the Change Stream including a resume token:");

    // Get the resume token from the last document we saw in the previous change stream cursor.
    BsonDocument resumeToken = cursor.getResumeToken();
    System.out.println(resumeToken);

    // Pass the resume token to the resume after function to continue the change stream cursor.
    cursor = collection.watch().resumeAfter(resumeToken).cursor();

    // Insert a test document.
    collection.insertOne(Document.parse("{test: 'd'}"));

    // Block until the next result is returned
    next = cursor.next();
    System.out.println(next);
    cursor.close();
}

From source file:eu.project.ttc.models.occstore.MongoDBOccurrenceStore.java

License:Apache License

public MongoDBOccurrenceStore(String mongoDbUri, State state) {
    super();

    Preconditions.checkNotNull(mongoDbUri, "MongoDB database's URI must not be null");
    Preconditions.checkState(state != State.INDEXING, "Invalid occ store state for constructor. Only "
            + State.COLLECTING + " and " + State.INDEXED + " allowed");

    this.mongoDBUri = getMongoDBUri(mongoDbUri);
    this.state = state;

    initThreadExecutor();

    MongoClientURI connectionString = new MongoClientURI(mongoDbUri);
    this.mongoClient = new MongoClient(connectionString);
    MongoDatabase db = mongoClient.getDatabase(this.mongoDBUri.getDatabase())
            .withWriteConcern(WriteConcern.ACKNOWLEDGED);
    db.runCommand(new org.bson.Document("profile", 1));

    if (state == State.COLLECTING)
        db.drop();

    this.termCollection = db.getCollection("terms");
    this.occurrenceCollection = db.getCollection("occurrences");
    this.documentUrlCollection = db.getCollection("documents");

    resetBuffers();
}

From source file:examples.tour.QuickTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // make a document and insert it
    Document doc = new Document("name", "MongoDB").append("type", "database").append("count", 1).append("info",
            new Document("x", 203).append("y", 102));

    collection.insertOne(doc);

    // get it (it's the only document in the collection, since we dropped the rest earlier)
    Document myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // now, let's add lots of little documents to the collection so we can explore queries and cursors
    List<Document> documents = new ArrayList<Document>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents);
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + collection.count());

    // find first
    myDoc = collection.find().first();
    System.out.println(myDoc.toJson());

    // let's get all the documents in the collection and print them out
    MongoCursor<Document> cursor = collection.find().iterator();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    for (Document cur : collection.find()) {
        System.out.println(cur.toJson());
    }

    // now use a query to get 1 document out
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    cursor = collection.find(gt("i", 50)).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    cursor = collection.find(and(gt("i", 50), lte("i", 100))).iterator();

    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next().toJson());
        }
    } finally {
        cursor.close();
    }

    // Query Filters
    myDoc = collection.find(eq("i", 71)).first();
    System.out.println(myDoc.toJson());

    // now use a range query to get a larger subset
    Block<Document> printBlock = new Block<Document>() {

        public void apply(final Document document) {
            System.out.println(document.toJson());
        }
    };
    collection.find(gt("i", 50)).forEach(printBlock);

    // filter where 50 < i <= 100
    collection.find(and(gt("i", 50), lte("i", 100))).forEach(printBlock);

    // Sorting
    myDoc = collection.find(exists("i")).sort(descending("i")).first();
    System.out.println(myDoc.toJson());

    // Projection
    myDoc = collection.find().projection(excludeId()).first();
    System.out.println(myDoc.toJson());

    // Aggregation
    collection
            .aggregate(
                    asList(match(gt("i", 0)), project(Document.parse("{ITimes10: {$multiply: ['$i', 10]}}"))))
            .forEach(printBlock);

    myDoc = collection.aggregate(singletonList(group(null, sum("total", "$i")))).first();
    System.out.println(myDoc.toJson());

    // Update One
    collection.updateOne(eq("i", 10), set("i", 110));

    // Update Many
    UpdateResult updateResult = collection.updateMany(lt("i", 100), inc("i", 100));
    System.out.println(updateResult.getModifiedCount());

    // Delete One
    collection.deleteOne(eq("i", 110));

    // Delete Many
    DeleteResult deleteResult = collection.deleteMany(gte("i", 100));
    System.out.println(deleteResult.getDeletedCount());

    collection.drop();

    // ordered bulk writes
    List<WriteModel<Document>> writes = new ArrayList<WriteModel<Document>>();
    writes.add(new InsertOneModel<Document>(new Document("_id", 4)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 5)));
    writes.add(new InsertOneModel<Document>(new Document("_id", 6)));
    writes.add(
            new UpdateOneModel<Document>(new Document("_id", 1), new Document("$set", new Document("x", 2))));
    writes.add(new DeleteOneModel<Document>(new Document("_id", 2)));
    writes.add(new ReplaceOneModel<Document>(new Document("_id", 3), new Document("_id", 3).append("x", 4)));

    collection.bulkWrite(writes);

    collection.drop();

    collection.bulkWrite(writes, new BulkWriteOptions().ordered(false));
    //collection.find().forEach(printBlock);

    // Clean up
    database.drop();

    // release resources
    mongoClient.close();
}

From source file:gridfs.GridFSTour.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 * @throws FileNotFoundException if the sample file cannot be found
 * @throws IOException if there was an exception closing an input stream
 */
public static void main(final String[] args) throws FileNotFoundException, IOException {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = new MongoClient();
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");
    database.drop();

    GridFSBucket gridFSBucket = GridFSBuckets.create(database);

    /*
     * UploadFromStream Example
     */
    // Get the input stream
    InputStream streamToUploadFrom = new ByteArrayInputStream("Hello World".getBytes(StandardCharsets.UTF_8));

    // Create some custom options
    GridFSUploadOptions options = new GridFSUploadOptions().chunkSizeBytes(1024)
            .metadata(new Document("type", "presentation"));

    ObjectId fileId = gridFSBucket.uploadFromStream("mongodb-tutorial", streamToUploadFrom, options);
    streamToUploadFrom.close();
    System.out.println("The fileId of the uploaded file is: " + fileId.toHexString());

    /*
     * OpenUploadStream Example
     */

    // Get some data to write
    byte[] data = "Data to upload into GridFS".getBytes(StandardCharsets.UTF_8);

    GridFSUploadStream uploadStream = gridFSBucket.openUploadStream("sampleData");
    uploadStream.write(data);
    uploadStream.close();
    System.out.println("The fileId of the uploaded file is: " + uploadStream.getFileId().toHexString());

    /*
     * Find documents
     */
    gridFSBucket.find().forEach(new Block<GridFSFile>() {
        @Override
        public void apply(final GridFSFile gridFSFile) {
            System.out.println(gridFSFile.getFilename());
        }
    });

    /*
     * Find documents with a filter
     */
    gridFSBucket.find(eq("metadata.contentType", "image/png")).forEach(new Block<GridFSFile>() {
        @Override
        public void apply(final GridFSFile gridFSFile) {
            System.out.println(gridFSFile.getFilename());
        }
    });

    /*
     * DownloadToStream
     */
    FileOutputStream streamToDownloadTo = new FileOutputStream("/tmp/mongodb-tutorial.txt");
    gridFSBucket.downloadToStream(fileId, streamToDownloadTo);
    streamToDownloadTo.close();

    /*
     * DownloadToStreamByName
     */
    streamToDownloadTo = new FileOutputStream("/tmp/mongodb-tutorial.txt");
    GridFSDownloadByNameOptions downloadOptions = new GridFSDownloadByNameOptions().revision(0);
    gridFSBucket.downloadToStreamByName("mongodb-tutorial", streamToDownloadTo, downloadOptions);
    streamToDownloadTo.close();

    /*
     * OpenDownloadStream
     */
    GridFSDownloadStream downloadStream = gridFSBucket.openDownloadStream(fileId);
    int fileLength = (int) downloadStream.getGridFSFile().getLength();
    byte[] bytesToWriteTo = new byte[fileLength];
    downloadStream.read(bytesToWriteTo);
    downloadStream.close();

    System.out.println(new String(bytesToWriteTo, StandardCharsets.UTF_8));

    /*
     * OpenDownloadStreamByName
     */

    downloadStream = gridFSBucket.openDownloadStreamByName("sampleData");
    fileLength = (int) downloadStream.getGridFSFile().getLength();
    bytesToWriteTo = new byte[fileLength];
    downloadStream.read(bytesToWriteTo);
    downloadStream.close();

    System.out.println(new String(bytesToWriteTo, StandardCharsets.UTF_8));

    /*
     * Rename
     */
    gridFSBucket.rename(fileId, "mongodbTutorial");

    /*
     * Delete
     */
    gridFSBucket.delete(fileId);

    database.drop();
}

From source file:mongodb.QuickTourAdmin.java

License:Apache License

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args
 *            takes an optional single argument for the connection string
 */
public static void main(final String[] args) {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the specified database server
        mongoClient = new MongoClient("10.9.17.105", 27017);
    } else {
        mongoClient = new MongoClient(new MongoClientURI(args[0]));
    }

    // get handle to "test" database
    MongoDatabase database = mongoClient.getDatabase("test");

    database.drop();

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    // drop all the data in it
    collection.drop();

    // getting a list of databases
    for (String name : mongoClient.listDatabaseNames()) {
        System.out.println(name);
    }

    // drop a database
    mongoClient.getDatabase("databaseToBeDropped").drop();

    // create a collection
    database.createCollection("cappedCollection",
            new CreateCollectionOptions().capped(true).sizeInBytes(0x100000));

    for (String name : database.listCollectionNames()) {
        System.out.println(name);
    }

    // drop a collection:
    collection.drop();

    // create an ascending index on the "i" field
    // 1 for ascending, -1 for descending
    collection.createIndex(new Document("i", 1));

    // list the indexes on the collection
    for (final Document index : collection.listIndexes()) {
        System.out.println(index.toJson());
    }

    // create a text index on the "content" field
    // text indexes support text search of string content
    collection.createIndex(new Document("content", "text"));

    collection.insertOne(new Document("_id", 0).append("content", "textual content"));
    collection.insertOne(new Document("_id", 1).append("content", "additional content"));
    collection.insertOne(new Document("_id", 2).append("content", "irrelevant content"));

    // Find using the text index
    long matchCount = collection.count(text("textual content -irrelevant"));
    System.out.println("Text search matches: " + matchCount);

    // Find using the $language operator
    Bson textSearch = text("textual content -irrelevant", "english");
    matchCount = collection.count(textSearch);
    System.out.println("Text search matches (english): " + matchCount);

    // Find the highest scoring match
    Document projection = new Document("score", new Document("$meta", "textScore"));
    Document myDoc = collection.find(textSearch).projection(projection).first();
    System.out.println("Highest scoring document: " + myDoc.toJson());

    // Run a command
    Document buildInfo = database.runCommand(new Document("buildInfo", 1));
    System.out.println(buildInfo);

    // release resources
    database.drop();
    mongoClient.close();
}

From source file:MultiSources.Fusionner.java

public void exportAllCandsMongoDB(ArrayList<NodeCandidate> allCands) {
    MongoClient mongoClient = new MongoClient();
    MongoDatabase db = mongoClient.getDatabase("evals_Triticum");
    db.drop();
    MongoCollection<Document> collection = db.getCollection("triticumCandidate");
    MongoCollection<Document> collectionRel = db.getCollection("triticumICHR");
    MongoCollection<Document> collectionType = db.getCollection("triticumTypeCandidate");
    MongoCollection<Document> collectionLabel = db.getCollection("triticumLabelCandidate");
    for (NodeCandidate nc : allCands) {
        if (nc.isIndividual()) {
            collection.insertOne(new Document(nc.toDBObject()));
            IndividualCandidate ic = (IndividualCandidate) nc;
            for (ArcCandidate ac : ic.getAllArcCandidates()) {
                String prop = ac.getDataProp();
                if (prop.startsWith("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) {
                    collectionType.insertOne(new Document(ac.toDBObject()));
                } else if (prop.startsWith("http://ontology.irstea.fr/AgronomicTaxon#hasHigherRank")) {
                    collectionRel.insertOne(new Document(ac.toDBObject()));
                }
            }
            // insert each label candidate once per individual, not once per arc candidate
            for (LabelCandidate lc : ic.getLabelCandidates()) {
                collectionLabel.insertOne(new Document(lc.toDBObject()));
            }
        }

    }
}

From source file:mypackage.CreateDB.java

protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");
    try (PrintWriter out = response.getWriter()) {
        String getDBName = request.getParameter("dbname");
        MongoClient mongoClient = new MongoClient("localhost", 27017);
        HttpSession httpSession = request.getSession(false);
        DB db = mongoClient.getDB("mydb");
        DBCollection dBCollection = db.getCollection(httpSession.getAttribute("uname") + "DB");
        BasicDBObject dBObject = new BasicDBObject();
        dBObject.put("kapil", getDBName);
        WriteResult writeResult = dBCollection.insert(dBObject);

        if (writeResult.getN() == 0) {
            out.print("true");
            MongoDatabase mongoDatabase = mongoClient.getDatabase(getDBName);
            mongoDatabase.createCollection("Welcome");
            mongoDatabase.drop();
        } else {
            out.print("false");
        }

    }
}