Example usage for com.mongodb WriteResult getN

List of usage examples for com.mongodb WriteResult getN

Introduction

On this page you can find example usage for com.mongodb WriteResult getN.

Prototype

public int getN() 

Document

Gets the "n" field, which contains the number of documents affected in the write operation.
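
For orientation, here is a minimal sketch of reading this count, assuming a legacy-driver DBCollection named "collection" with an acknowledged write concern (the class, field names and query below are illustrative assumptions, not taken from the examples that follow):

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.WriteResult;

public class GetNSketch {
    // Hypothetical helper: flips "pending" documents to "done" and returns how many
    // documents the write affected, as reported by the "n" field of the WriteResult.
    public static int markPendingAsDone(DBCollection collection) {
        BasicDBObject query = new BasicDBObject("status", "pending");
        BasicDBObject change = new BasicDBObject("$set", new BasicDBObject("status", "done"));
        WriteResult result = collection.updateMulti(query, change);
        return result.getN();
    }
}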

Usage

From source file: fr.eolya.utils.nosql.mongodb.MongoDBCollection.java

License: Apache License

public int update(BasicDBObject docsearch, BasicDBObject doc) {
    try {
        WriteResult wr = coll.update(docsearch, doc);
        return wr.getN();
    } catch (Exception e) {
        //e.printStackTrace();
        return -1;
    }
}

From source file: fr.eolya.utils.nosql.mongodb.MongoDBCollection.java

License: Apache License

public int remove(BasicDBObject doc) {
    WriteResult wr = coll.remove(doc);
    return wr.getN();
}

From source file: fr.eolya.utils.nosql.mongodb.MongoDBCollection.java

License: Apache License

public int removeAll() {
    WriteResult wr = coll.remove(new BasicDBObject());
    return wr.getN();
}
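
A hypothetical call site for the wrappers above (the mongoCollection instance, field names and values are assumptions for illustration; the wrapper returns WriteResult.getN() on success and -1 when the write throws):

import com.mongodb.BasicDBObject;

import fr.eolya.utils.nosql.mongodb.MongoDBCollection;

public class WrapperCallSketch {
    // Hypothetical caller: run an update through the wrapper and report the count.
    static void markReviewed(MongoDBCollection mongoCollection) {
        BasicDBObject search = new BasicDBObject("city", "Paris");
        BasicDBObject change = new BasicDBObject("$set", new BasicDBObject("reviewed", true));
        int affected = mongoCollection.update(search, change);
        if (affected < 0) {
            System.err.println("update failed");
        } else {
            System.out.println(affected + " document(s) affected");
        }
    }
}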

From source file: fr.gouv.vitam.mdbes.MainIngestMDBESFromFile.java

License: Open Source License

@Override
public void run() {
    if (file == null) {
        // ES
        //Thread.sleep(1000);
        try {
            for (int i = 0; i < files.length - 1; i++) {
                System.out.println("ESFile: " + files[i]);
                final HashMap<String, String> esIndex = new HashMap<>();
                final FileInputStream fstream = new FileInputStream(files[i]);
                final DataInputStream in = new DataInputStream(fstream);
                final BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String strLine;
                // Read File Line By Line
                while ((strLine = br.readLine()) != null) {
                    final BSONObject bson = (BSONObject) JSON.parse(strLine);
                    ElasticSearchAccess.addEsIndex(original, model, esIndex, bson);
                }
                // Close the input stream
                br.close();
                in.close();
                fstream.close();
                if (!esIndex.isEmpty()) {
                    System.out.println("Last bulk ES");
                    original.addEsEntryIndex(true, esIndex, model);
                    esIndex.clear();
                }
            }
            // the last file might contain DAip entries that were already inserted and now need to be updated
            int i = files.length - 1;
            System.out.println("ESFile: " + files[i]);
            final FileInputStream fstream = new FileInputStream(files[i]);
            final DataInputStream in = new DataInputStream(fstream);
            final BufferedReader br = new BufferedReader(new InputStreamReader(in));
            String strLine;
            // Read File Line By Line
            while ((strLine = br.readLine()) != null) {
                final BSONObject bson = (BSONObject) JSON.parse(strLine);
                ElasticSearchAccess.addEsIndex(original, model, bson);
            }
            // Close the input stream
            br.close();
            in.close();
            fstream.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return;
    }
    MongoDbAccess dbvitam = null;
    FileInputStream fstream = null;
    DataInputStream in = null;
    final BufferedReader br;
    try {
        System.out.println("MDFile: " + file);
        fstream = new FileInputStream(file);
        in = new DataInputStream(fstream);
        br = new BufferedReader(new InputStreamReader(in));
        dbvitam = new MongoDbAccess(mongoClient, database, esbase, unicast, false);
        // now ingest metaaip/metafield/data
        final long date11 = System.currentTimeMillis();
        String strLine;
        int nb = 0;

        if (false) {
            // Tokumx
            List<DBObject> inserts = new ArrayList<DBObject>(GlobalDatas.LIMIT_MDB_NEW_INDEX);
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                inserts.add(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    WriteResult result = dbvitam.daips.collection.insert(inserts);
                    if (result.getN() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    inserts.clear();
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                WriteResult result = dbvitam.daips.collection.insert(inserts);
                if (result.getN() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                inserts.clear();
                nb = 0;
            }
        } else {
            BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                bulk.insert(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    BulkWriteResult result = bulk.execute();
                    bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                    if (result.getInsertedCount() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                BulkWriteResult result = bulk.execute();
                if (result.getInsertedCount() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                nb = 0;
            }
        }
        final long date12 = System.currentTimeMillis();
        loadt.addAndGet(date12 - date11);
        return;
    } catch (final InvalidUuidOperationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        // release resources (the streams may still be null if opening the file failed)
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (fstream != null) {
            try {
                fstream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (dbvitam != null) {
            dbvitam.close();
        }
    }
}

From source file: fr.gouv.vitam.mdbes.MainIngestMDBFromFile.java

License: Open Source License

@Override
public void run() {
    MongoDbAccess dbvitam = null;
    FileInputStream fstream = null;
    DataInputStream in = null;
    final BufferedReader br;
    try {
        fstream = new FileInputStream(file);
        in = new DataInputStream(fstream);
        br = new BufferedReader(new InputStreamReader(in));
        dbvitam = new MongoDbAccess(mongoClient, database, esbase, unicast, false);
        // now ingest metaaip/metafield/data
        final long date11 = System.currentTimeMillis();
        String strLine;
        int nb = 0;

        if (false) {
            // Tokumx
            List<DBObject> inserts = new ArrayList<DBObject>(GlobalDatas.LIMIT_MDB_NEW_INDEX);
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                inserts.add(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    WriteResult result = dbvitam.daips.collection.insert(inserts);
                    if (result.getN() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    inserts.clear();
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                WriteResult result = dbvitam.daips.collection.insert(inserts);
                if (result.getN() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                inserts.clear();
                nb = 0;
            }
        } else {
            BulkWriteOperation bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
            while ((strLine = br.readLine()) != null) {
                final DBObject bson = (DBObject) JSON.parse(strLine);
                bulk.insert(bson);
                nb++;
                if (nb % GlobalDatas.LIMIT_MDB_NEW_INDEX == 0) {
                    BulkWriteResult result = bulk.execute();
                    bulk = dbvitam.daips.collection.initializeUnorderedBulkOperation();
                    if (result.getInsertedCount() != nb) {
                        LOGGER.error("Wrong bulk op: " + result);
                    }
                    MainIngestFile.cptMaip.addAndGet(nb);
                    nb = 0;
                    System.out.print(".");
                }
            }
            if (nb != 0) {
                BulkWriteResult result = bulk.execute();
                if (result.getInsertedCount() != nb) {
                    LOGGER.error("Wrong bulk op: " + result);
                }
                MainIngestFile.cptMaip.addAndGet(nb);
                nb = 0;
            }
        }
        final long date12 = System.currentTimeMillis();
        loadt.addAndGet(date12 - date11);
        return;
    } catch (final InvalidUuidOperationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        // release resources (the streams may still be null if opening the file failed)
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (fstream != null) {
            try {
                fstream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (dbvitam != null) {
            dbvitam.close();
        }
    }
}

From source file: fr.xebia.cocktail.CocktailRepository.java

License: Apache License

public boolean delete(Cocktail cocktail) {
    Preconditions.checkNotNull(cocktail.getObjectId(), "Given objectId must not be null in %s", cocktail);
    try {
        // SOLR
        UpdateResponse solrResponse = solrServer.deleteByQuery("id:" + cocktail.getId());
        logger.trace("solr.delete for {}: {}", cocktail, solrResponse);

        // MONGODB
        WriteResult mongoResult = cocktails.remove(
                BasicDBObjectBuilder.start().add("_id", cocktail.getObjectId()).get(), WriteConcern.SAFE);
        logger.trace("mongo.remove for {}: {}", cocktail, mongoResult);

        return mongoResult.getN() > 0;
    } catch (Exception e) {
        throw new RuntimeException("Exception deleting " + cocktail, e);
    }
}

From source file: HAL.libraries.blackboard_client.BlackboardClient.java

License: Open Source License

/**
 * Removes all documents matching the provided query from the currently selected collection.
 *
 * @param query DBObject representing the query used for deleting documents.
 * @return The number of records that have been removed.
 * @throws InvalidDBNamespaceException No collection has been selected.
 * @throws GeneralMongoException A MongoException occurred.
 **/
public int removeDocuments(DBObject query) throws InvalidDBNamespaceException, GeneralMongoException {
    if (currentCollection == null) {
        throw new InvalidDBNamespaceException("No collection has been selected.");
    }

    try {
        WriteResult res = currentCollection.remove(query);
        return res.getN();
    } catch (MongoException mongoException) {
        throw new GeneralMongoException("An error occurred attempting to remove.", mongoException);
    }

}
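
A hedged usage sketch for removeDocuments above (the client variable, collection selection and query values are assumptions; the returned count is the WriteResult.getN() value):

import com.mongodb.BasicDBObject;

import HAL.libraries.blackboard_client.BlackboardClient;

public class RemoveDocumentsSketch {
    // Hypothetical caller: delete all finished entries and report how many were removed.
    static void purgeFinished(BlackboardClient client) throws Exception {
        int removed = client.removeDocuments(new BasicDBObject("state", "FINISHED"));
        System.out.println("Removed " + removed + " document(s)");
    }
}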

From source file: HAL.libraries.blackboard_client.BlackboardClient.java

License: Open Source License

/**
 * Updates all documents matching the provided search query within the currently selected collection.
 * Documents are updated according to the query specified in updateQuery.
 *
 * @param searchQuery The query that should be used to select the target documents.
 * @param updateQuery The query that should be used to update the target documents.
 * @return The number of documents that have been updated.
 * @throws InvalidDBNamespaceException No collection has been selected.
 * @throws GeneralMongoException A MongoException occurred.
 **/
public int updateDocuments(DBObject searchQuery, DBObject updateQuery)
        throws InvalidDBNamespaceException, GeneralMongoException {
    if (currentCollection == null) {
        throw new InvalidDBNamespaceException("No collection has been selected.");
    }

    try {
        WriteResult res = currentCollection.updateMulti(searchQuery, updateQuery);
        return res.getN();
    } catch (MongoException mongoException) {
        throw new GeneralMongoException("An error occurred attempting to update.", mongoException);
    }
}
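
Similarly, a hypothetical call site for updateDocuments (names and values are again assumptions, not part of the library):

import com.mongodb.BasicDBObject;

import HAL.libraries.blackboard_client.BlackboardClient;

public class UpdateDocumentsSketch {
    // Hypothetical caller: multi-update queued entries and report the affected count.
    static void startQueued(BlackboardClient client) throws Exception {
        BasicDBObject search = new BasicDBObject("state", "QUEUED");
        BasicDBObject update = new BasicDBObject("$set", new BasicDBObject("state", "RUNNING"));
        int updated = client.updateDocuments(search, update);
        System.out.println("Updated " + updated + " document(s)");
    }
}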

From source file: io.lumeer.storage.mongodb.dao.organization.MorphiaProjectDao.java

License: Open Source License

@Override
public void deleteProject(final String projectId) {
    WriteResult writeResult = datastore.delete(databaseCollection(), MorphiaProject.class,
            new ObjectId(projectId));
    if (writeResult.getN() != 1) {
        throw new WriteFailedException(writeResult);
    }
}

From source file: io.lumeer.storage.mongodb.dao.project.MorphiaCollectionDao.java

License: Open Source License

@Override
public void deleteCollection(final String id) {
    WriteResult writeResult = datastore.delete(databaseCollection(), MorphiaCollection.class, new ObjectId(id));
    if (writeResult.getN() != 1) {
        throw new WriteFailedException(writeResult);
    }
}