Example usage for com.mongodb.bulk BulkWriteResult getUpserts

List of usage examples for com.mongodb.bulk BulkWriteResult getUpserts

Introduction

On this page you can find example usage of com.mongodb.bulk BulkWriteResult getUpserts.

Prototype

public abstract List<BulkWriteUpsert> getUpserts();

Document

Gets an unmodifiable list of upserted items, or the empty list if there were none.
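
Before the project examples, here is a minimal sketch of reading getUpserts() after a bulk write with the sync MongoDB Java driver; the connection string, database, collection, filter fields, and values are placeholder assumptions and do not come from the examples below:

import java.util.Arrays;
import java.util.List;

import org.bson.Document;

import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.bulk.BulkWriteUpsert;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.Updates;

public class GetUpsertsExample {
    public static void main(String[] args) {
        // Placeholder connection string and namespace.
        MongoCollection<Document> collection = MongoClients.create("mongodb://localhost:27017")
                .getDatabase("test").getCollection("items");

        // Two upserting updates: requests that match nothing insert a new document.
        List<UpdateOneModel<Document>> requests = Arrays.asList(
                new UpdateOneModel<>(Filters.eq("_id", "a"), Updates.set("value", 1),
                        new UpdateOptions().upsert(true)),
                new UpdateOneModel<>(Filters.eq("_id", "b"), Updates.set("value", 2),
                        new UpdateOptions().upsert(true)));

        BulkWriteResult result = collection.bulkWrite(requests);

        // getUpserts() is an unmodifiable list (possibly empty) describing which
        // requests inserted a new document instead of updating an existing one.
        for (BulkWriteUpsert upsert : result.getUpserts()) {
            System.out.println("request #" + upsert.getIndex() + " upserted _id=" + upsert.getId());
        }
    }
}

Each BulkWriteUpsert exposes the index of the originating request and the _id of the inserted document, which is how the examples below count and link upserted documents.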

Usage

From source file:org.eclipse.ditto.services.thingsearch.persistence.write.streaming.SearchUpdaterStream.java

License:Open Source License

private static String logResult(final BulkWriteResult bulkWriteResult) {
    return String.format("BulkWriteResult[matched=%d,upserts=%d,inserted=%d,modified=%d,deleted=%d]",
            bulkWriteResult.getMatchedCount(), bulkWriteResult.getUpserts().size(),
            bulkWriteResult.getInsertedCount(), bulkWriteResult.getModifiedCount(),
            bulkWriteResult.getDeletedCount());
}

From source file:org.opencb.cellbase.lib.db.variation.VariationMongoDBAdaptor.java

License:Apache License

private int updatePopulationFrequencies(List<Document> variantDocumentList) {

    List<Bson> queries = new ArrayList<>(variantDocumentList.size());
    List<Bson> updates = new ArrayList<>(variantDocumentList.size());

    for (Document variantDBObject : variantDocumentList) {
        Document annotationDBObject = (Document) variantDBObject.get("annotation");
        Document push = new Document(POP_FREQUENCIES_FIELD, annotationDBObject.get("populationFrequencies"));

        // Remove annotation object from the DBObject so that push and setOnInsert do not update the same fields:
        // i.e. annotation.populationFrequencies and annotation
        variantDBObject.remove("annotation");
        addChunkId(variantDBObject);

        Document update = new Document().append("$pushAll", push).append("$setOnInsert", variantDBObject);

        updates.add(update);

        String chunkId = getChunkIdPrefix((String) variantDBObject.get("chromosome"),
                (int) variantDBObject.get("start"), variationChunkSize);
        queries.add(new Document("_chunkIds", chunkId).append("chromosome", variantDBObject.get("chromosome"))
                .append("start", variantDBObject.get("start"))
                .append("reference", variantDBObject.get("reference"))
                .append("alternate", variantDBObject.get("alternate")));
    }

    BulkWriteResult bulkWriteResult;
    if (!queries.isEmpty()) {
        logger.info("updating object");
        QueryOptions options = new QueryOptions("upsert", true);
        options.put("multi", false);
        try {
            bulkWriteResult = mongoDBCollection.update(queries, updates, options).first();
        } catch (BulkWriteException e) {
            throw e;
        }
        logger.info("{} object updated",
                bulkWriteResult.getUpserts().size() + bulkWriteResult.getModifiedCount());
        return bulkWriteResult.getUpserts().size() + bulkWriteResult.getModifiedCount();
    }
    logger.info("no object updated");
    return 0;

}
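
Note how this example reports the result: the upsert count (bulkWriteResult.getUpserts().size(), documents newly inserted by the upsert) is added to getModifiedCount() (documents that already existed and were modified), so the returned total covers both cases.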

From source file:org.opencb.opencga.storage.mongodb.variant.adaptors.VariantMongoDBAdaptor.java

License:Apache License

/**
 * Two-step insertion:
 * First, check whether the variant and study already exist by making an update.
 * For those that do not exist, push a study with the file and genotype information.
 * <p>
 * The documents that throw a "dup key" exception are those variants that exist and have the study.
 * Then, only for those variants, make a second update.
 * <p>
 * An interesting idea would be to invert these actions depending on the number of already inserted variants.
 *
 * @param data                        Variants to insert
 * @param fileId                      File ID
 * @param variantConverter            Variant converter to be used
 * @param variantSourceEntryConverter Variant source converter to be used
 * @param studyConfiguration          Configuration for the study
 * @param loadedSampleIds             Other loaded sampleIds EXCEPT those that are going to be loaded
 * @return QueryResult object
 */
QueryResult<MongoDBVariantWriteResult> insert(List<Variant> data, int fileId,
        DocumentToVariantConverter variantConverter,
        DocumentToStudyVariantEntryConverter variantSourceEntryConverter, StudyConfiguration studyConfiguration,
        List<Integer> loadedSampleIds) {

    MongoDBVariantWriteResult writeResult = new MongoDBVariantWriteResult();
    long startTime = System.currentTimeMillis();
    if (data.isEmpty()) {
        return new QueryResult<>("insertVariants", 0, 1, 1, "", "", Collections.singletonList(writeResult));
    }
    List<Bson> queries = new ArrayList<>(data.size());
    List<Bson> updates = new ArrayList<>(data.size());
    // Use a multiset instead of a normal set, to keep tracking of duplicated variants
    Multiset<String> nonInsertedVariants = HashMultiset.create();
    String fileIdStr = Integer.toString(fileId);

    //        List<String> extraFields = studyConfiguration.getAttributes().getAsStringList(VariantStorageEngine.Options.EXTRA_GENOTYPE_FIELDS
    //                .key());
    boolean excludeGenotypes = studyConfiguration.getAttributes().getBoolean(
            VariantStorageEngine.Options.EXCLUDE_GENOTYPES.key(),
            VariantStorageEngine.Options.EXCLUDE_GENOTYPES.defaultValue());

    long nanoTime = System.nanoTime();
    Map missingSamples = Collections.emptyMap();
    String defaultGenotype = studyConfiguration.getAttributes().getString(DEFAULT_GENOTYPE.key(), "");
    if (defaultGenotype.equals(DocumentToSamplesConverter.UNKNOWN_GENOTYPE)) {
        logger.debug("Do not need fill gaps. DefaultGenotype is UNKNOWN_GENOTYPE({}).",
                DocumentToSamplesConverter.UNKNOWN_GENOTYPE);
    } else if (excludeGenotypes) {
        logger.debug("Do not need fill gaps. Excluding genotypes.");
    } else if (!loadedSampleIds.isEmpty()) {
        missingSamples = new Document(DocumentToSamplesConverter.UNKNOWN_GENOTYPE, loadedSampleIds); // ?/?
    }
    //            List<Object> missingOtherValues = new ArrayList<>(loadedSampleIds.size());
    //            for (int i = 0; i < loadedSampleIds.size(); i++) {
    //                missingOtherValues.add(DBObjectToSamplesConverter.UNKNOWN_FIELD);
    //            }
    for (Variant variant : data) {
        if (variant.getType().equals(VariantType.NO_VARIATION)) {
            //Storage-MongoDB is not able to store NON VARIANTS
            writeResult.setSkippedVariants(writeResult.getSkippedVariants() + 1);
            continue;
        } else if (variant.getType().equals(VariantType.SYMBOLIC)) {
            logger.warn("Skip symbolic variant " + variant.toString());
            writeResult.setSkippedVariants(writeResult.getSkippedVariants() + 1);
            continue;
        }
        String id = variantConverter.buildStorageId(variant);
        for (StudyEntry studyEntry : variant.getStudies()) {
            if (studyEntry.getFiles().size() == 0
                    || !studyEntry.getFiles().get(0).getFileId().equals(fileIdStr)) {
                continue;
            }
            int studyId = studyConfiguration.getStudyId();
            Document study = variantSourceEntryConverter.convertToStorageType(variant, studyEntry);
            Document genotypes = study.get(DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD,
                    Document.class);
            if (genotypes != null) { //If genotypes is null, genotypes are not supposed to be loaded
                genotypes.putAll(missingSamples); //Add missing samples
                //                        for (String extraField : extraFields) {
                //                            List<Object> otherFieldValues = (List<Object>) study.get(extraField.toLowerCase());
                //                            otherFieldValues.addAll(0, missingOtherValues);
                //                        }
            }
            Document push = new Document(DocumentToVariantConverter.STUDIES_FIELD, study);
            Document update = new Document().append("$push", push).append("$setOnInsert",
                    variantConverter.convertToStorageType(variant));
            if (variant.getIds() != null && !variant.getIds().isEmpty()
                    && !variant.getIds().iterator().next().isEmpty()) {
                update.put("$addToSet", new Document(DocumentToVariantConverter.IDS_FIELD,
                        new Document("$each", variant.getIds())));
            }
            // { _id: <variant_id>, "studies.sid": {$ne: <studyId> } }
            //If the variant exists and contains the study, this find will fail, will try to do the upsert, and throw a
            // duplicated key exception.
            queries.add(new Document("_id", id).append(
                    DocumentToVariantConverter.STUDIES_FIELD + "."
                            + DocumentToStudyVariantEntryConverter.STUDYID_FIELD,
                    new Document("$ne", studyId)));
            updates.add(update);
        }
    }

    //
    if (!queries.isEmpty()) {
        QueryOptions options = new QueryOptions(UPSERT, true);
        options.put(MULTI, false);
        int newDocuments;
        int updatedObjects;

        try {
            BulkWriteResult bulkWriteResult;
            bulkWriteResult = variantsCollection.update(queries, updates, options).first();
            newDocuments = bulkWriteResult.getUpserts().size();
            updatedObjects = bulkWriteResult.getModifiedCount();
        } catch (MongoBulkWriteException e) {
            BulkWriteResult bulkWriteResult;
            bulkWriteResult = e.getWriteResult();
            newDocuments = bulkWriteResult.getUpserts().size();
            updatedObjects = bulkWriteResult.getModifiedCount();
            for (BulkWriteError writeError : e.getWriteErrors()) {
                if (writeError.getCode() == 11000) { //Dup Key error code
                    Matcher matcher = writeResultErrorPattern.matcher(writeError.getMessage());
                    if (matcher.find()) {
                        String id = matcher.group(1);
                        nonInsertedVariants.add(id);
                    } else {
                        throw e;
                    }
                } else {
                    throw e;
                }
            }
        }

        writeResult.setNewVariants(newDocuments);
        writeResult.setUpdatedVariants(updatedObjects);
        //                writeResult.setNewDocuments(data.size() - nonInsertedVariants.size() - writeResult.getSkippedVariants());
        queries.clear();
        updates.clear();
    }
    writeResult.setNewVariantsNanoTime(System.nanoTime() - nanoTime);
    nanoTime = System.nanoTime();

    for (Variant variant : data) {
        variant.setAnnotation(null);
        String id = variantConverter.buildStorageId(variant);

        if (nonInsertedVariants != null && !nonInsertedVariants.contains(id)) {
            continue; //Already inserted variant
        }

        for (StudyEntry studyEntry : variant.getStudies()) {
            if (studyEntry.getFiles().size() == 0
                    || !studyEntry.getFiles().get(0).getFileId().equals(fileIdStr)) {
                continue;
            }

            Document studyObject = variantSourceEntryConverter.convertToStorageType(variant, studyEntry);
            Document genotypes = studyObject.get(DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD,
                    Document.class);
            Document push = new Document();

            if (!excludeGenotypes) {
                if (genotypes != null) { //If genotypes is null, genotypes are not supposed to be loaded
                    for (String genotype : genotypes.keySet()) {
                        push.put(
                                DocumentToVariantConverter.STUDIES_FIELD + ".$."
                                        + DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD + "." + genotype,
                                new Document("$each", genotypes.get(genotype)));
                    }
                    //                    for (String extraField : extraFields) {
                    //                        List values = (List) studyObject.get(extraField.toLowerCase());
                    //                        push.put(DBObjectToVariantConverter.STUDIES_FIELD + ".$." + extraField.toLowerCase(),
                    //                                new Document("$each", values).append("$position", loadedSampleIds.size()));
                    //                    }
                } else {
                    push.put(
                            DocumentToVariantConverter.STUDIES_FIELD + ".$."
                                    + DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD,
                            Collections.emptyMap());
                }
            }
            push.put(
                    DocumentToVariantConverter.STUDIES_FIELD + ".$."
                            + DocumentToStudyVariantEntryConverter.FILES_FIELD,
                    ((List) studyObject.get(DocumentToStudyVariantEntryConverter.FILES_FIELD)).get(0));
            Document update = new Document(new Document("$push", push));

            queries.add(new Document("_id", id)
                    .append(DocumentToVariantConverter.STUDIES_FIELD + '.'
                            + DocumentToStudyVariantEntryConverter.STUDYID_FIELD,
                            studyConfiguration.getStudyId())
                    .append(DocumentToVariantConverter.STUDIES_FIELD + '.'
                            + DocumentToStudyVariantEntryConverter.FILES_FIELD + '.'
                            + DocumentToStudyVariantEntryConverter.FILEID_FIELD, new Document("$ne", fileId)));
            updates.add(update);

        }
    }
    writeResult.setExistingVariantsNanoTime(System.nanoTime() - nanoTime);

    if (!queries.isEmpty()) {
        QueryOptions options = new QueryOptions(UPSERT, false);
        options.put(MULTI, false);
        QueryResult<BulkWriteResult> update = variantsCollection.update(queries, updates, options);
        // Can happen that nonInsertedVariantsNum != queries.size() != nonInsertedVariants.size() if there was
        // a duplicated variant.
        writeResult.setNonInsertedVariants(nonInsertedVariants.size() - update.first().getMatchedCount());
        writeResult.setUpdatedVariants(writeResult.getUpdatedVariants() + update.first().getModifiedCount());
    }

    return new QueryResult<>("insertVariants", ((int) (System.currentTimeMillis() - startTime)), 1, 1, "", "",
            Collections.singletonList(writeResult));
}
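
The duplicate-key handling above can be condensed into the following sketch against the plain MongoDB Java driver; the collection, the "studies.sid" and "study" field names, and the helper method are illustrative assumptions and do not reproduce the OpenCGA converters or its QueryOptions wrapper:

import java.util.ArrayList;
import java.util.List;

import org.bson.Document;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.bulk.BulkWriteError;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;

public class TwoStepUpsertSketch {

    /** Returns {newDocuments, updatedObjects} for a batch of per-variant upserts. */
    static int[] upsertVariants(MongoCollection<Document> collection, List<Document> variants, int studyId) {
        List<UpdateOneModel<Document>> requests = new ArrayList<>();
        for (Document variant : variants) {
            // Copy the document and split off the (assumed) "study" sub-document so that
            // $push and $setOnInsert do not touch overlapping fields, as in the code above.
            Document onInsert = new Document(variant);
            Object study = onInsert.remove("study");
            // Match only documents that do NOT yet contain this study; existing
            // (variant, study) pairs then fail the upsert with a duplicate key error.
            requests.add(new UpdateOneModel<>(
                    Filters.and(Filters.eq("_id", variant.getString("_id")),
                            Filters.ne("studies.sid", studyId)),
                    new Document("$push", new Document("studies", study))
                            .append("$setOnInsert", onInsert),
                    new UpdateOptions().upsert(true)));
        }
        try {
            BulkWriteResult result = collection.bulkWrite(requests, new BulkWriteOptions().ordered(false));
            return new int[]{result.getUpserts().size(), result.getModifiedCount()};
        } catch (MongoBulkWriteException e) {
            // A failed bulk write still carries the result of the writes that succeeded.
            BulkWriteResult result = e.getWriteResult();
            for (BulkWriteError error : e.getWriteErrors()) {
                if (error.getCode() != 11000) { // anything other than "duplicate key" is fatal
                    throw e;
                }
                // Code 11000: the variant already contains this study; the original code
                // collects its id and updates it in a second pass.
            }
            return new int[]{result.getUpserts().size(), result.getModifiedCount()};
        }
    }
}

The important detail for getUpserts() is that a MongoBulkWriteException still carries a partial BulkWriteResult, so the number of upserted documents remains available even when some requests fail with duplicate keys.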

From source file:org.restheart.handlers.bulk.BulkResultRepresentationFactory.java

License:Open Source License

private void addBulkResult(final BulkOperationResult result, final RequestContext context,
        final Representation rep, final String requestPath) {
    Representation nrep = new Representation();

    BulkWriteResult wr = result.getBulkResult();

    if (wr.wasAcknowledged()) {
        if (wr.getUpserts() != null) {
            nrep.addProperty("inserted", new BsonInt32(wr.getUpserts().size()));

            // add links to new, upserted documents
            wr.getUpserts().stream().forEach(update -> {
                nrep.addLink(
                        new Link("rh:newdoc", URLUtils.getReferenceLink(context, requestPath, update.getId())),
                        true);
            });
        }

        nrep.addProperty("deleted", new BsonInt32(wr.getDeletedCount()));

        if (wr.isModifiedCountAvailable()) {
            nrep.addProperty("modified", new BsonInt32(wr.getModifiedCount()));
        }

        nrep.addProperty("matched", new BsonInt32(wr.getMatchedCount()));

        rep.addRepresentation("rh:result", nrep);
    }
}

From source file:org.restheart.handlers.bulk.BulkResultRepresentationFactory.java

License:Open Source License

private void addWriteResult(final BulkWriteResult wr, final Representation rep, final String requestPath) {
    Representation nrep = new Representation();

    if (wr.wasAcknowledged()) {
        if (wr.getUpserts() != null) {
            nrep.addProperty("inserted", new BsonInt32(wr.getUpserts().size()));

            // add links to new, upserted documents
            wr.getUpserts().stream().forEach(update -> {
                nrep.addLink(new Link("rh:newdoc", URLUtils.getReferenceLink(requestPath, update.getId())),
                        true);
            });
        }

        nrep.addProperty("deleted", new BsonInt32(wr.getDeletedCount()));

        if (wr.isModifiedCountAvailable()) {
            nrep.addProperty("modified", new BsonInt32(wr.getModifiedCount()));
        }

        nrep.addProperty("matched", new BsonInt32(wr.getMatchedCount()));

        rep.addRepresentation("rh:result", nrep);
    }
}
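
In both RESTHeart handlers, getUpserts() doubles as the list of newly created documents: its size is reported as the "inserted" property, and each BulkWriteUpsert.getId() is used to build an rh:newdoc link to the corresponding resource.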