Example usage for com.mongodb WriteConcern ACKNOWLEDGED

Introduction

This page collects example usages of com.mongodb.WriteConcern.ACKNOWLEDGED from open-source projects.

Prototype

public static final WriteConcern ACKNOWLEDGED

Document

Write operations that use this write concern will wait for acknowledgement, using the default write concern configured on the server.
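
For orientation, here is a minimal sketch of the two usual ways to apply this constant with the modern driver: as the client-wide default, or per collection via withWriteConcern. The connection URI, database, and collection names are placeholders, not values taken from the examples below.

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.WriteConcern;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

public class AcknowledgedWriteExample {
    public static void main(String[] args) {
        // Client-wide default: every write waits for server acknowledgement
        MongoClientSettings settings = MongoClientSettings.builder()
                .applyConnectionString(new ConnectionString("mongodb://localhost:27017")) // placeholder URI
                .writeConcern(WriteConcern.ACKNOWLEDGED)
                .build();
        try (MongoClient client = MongoClients.create(settings)) {
            // Per-collection override: returns a new collection instance using the given write concern
            MongoCollection<Document> coll = client.getDatabase("test") // placeholder names
                    .getCollection("example")
                    .withWriteConcern(WriteConcern.ACKNOWLEDGED);
            coll.insertOne(new Document("status", "ok")); // blocks until the server acknowledges the write
        }
    }
}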

Usage

From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java

License: Open Source License

/**
 * This method returns the number of variants that match provided parameters.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the proj id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param processID the process id
 * @return the number of matching variants
 * @throws Exception the exception
 */
@RequestMapping(variantCountURL)
protected @ResponseBody long countVariants(HttpServletRequest request, @RequestParam("module") String sModule,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") Integer genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") Integer readDepthThreshold,
        @RequestParam("missingData") Double missingData,
        @RequestParam(value = "minmaf", required = false) Float minmaf,
        @RequestParam(value = "maxmaf", required = false) Float maxmaf,
        @RequestParam("minposition") Long minposition, @RequestParam("maxposition") Long maxposition,
        @RequestParam("alleleCount") String alleleCount, @RequestParam("geneName") String geneName,
        @RequestParam("variantEffects") String variantEffects,
        @RequestParam("processID") final String processID) throws Exception {
    final ProgressIndicator progress = new ProgressIndicator(processID.substring(1 + processID.indexOf('|')),
            new String[0]);
    ProgressIndicator.registerProgressIndicator(progress);

    DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, progress.getProcessId(),
            true /*empty it*/);
    try {
        String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
                selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData,
                minmaf, maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);

        final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        DBCollection cachedCountcollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
        //         cachedCountcollection.drop();
        DBCursor countCursor = cachedCountcollection.find(new BasicDBObject("_id", queryKey));
        Long count = null;
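        // a cached count is stored as an array of per-chunk partial counts (see the save further down): sum them up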
        if (countCursor.hasNext()) {
            count = 0L;
            for (Object aPartialCount : ((BasicDBList) countCursor.next()
                    .get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray())
                count += (Long) aPartialCount;
        }
        LOG.debug((count == null ? "new" : "existing") + " queryKey hash: " + queryKey);
        if (count == null) {
            long before = System.currentTimeMillis();

            progress.addStep("Counting matching variants");
            String sRegexOrAggregationOperator = GenotypingDataQueryBuilder.getGenotypePatternToQueryMap()
                    .get(gtPattern);

            List<String> alleleCountList = alleleCount.length() == 0 ? null
                    : Arrays.asList(alleleCount.split(";"));

            GenotypingProject genotypingProject = mongoTemplate.findById(projId, GenotypingProject.class);
            if (genotypingProject.getAlleleCounts().size() != 1
                    || genotypingProject.getAlleleCounts().iterator().next() != 2) { // Project does not only have bi-allelic data: make sure we can apply MAF filter on selection
                boolean fExactlyOneNumberOfAllelesSelected = alleleCountList != null
                        && alleleCountList.size() == 1;
                boolean fBiAllelicSelected = fExactlyOneNumberOfAllelesSelected
                        && "2".equals(alleleCountList.get(0));
                boolean fMafRequested = (maxmaf != null && maxmaf < 50) || (minmaf != null && minmaf > 0);
                if (fMafRequested && !fBiAllelicSelected) {
                    progress.setError("MAF is only supported on biallelic data!");
                    return 0L;
                }
            }

            String actualSequenceSelection = selectedSequences;
            if (actualSequenceSelection.length() == 0) {
                ArrayList<String> externallySelectedSeqs = getSequenceIDsBeingFilteredOn(request, sModule);
                if (externallySelectedSeqs != null)
                    actualSequenceSelection = StringUtils.join(externallySelectedSeqs, ";");
            }

            boolean fNeedToFilterOnGenotypingData = needToFilterOnGenotypingData(sModule, projId,
                    sRegexOrAggregationOperator, genotypeQualityThreshold, readDepthThreshold, missingData,
                    minmaf, maxmaf, geneName, variantEffects);

            BasicDBList variantQueryDBList = buildVariantDataQuery(sModule, projId,
                    selectedVariantTypes.length() == 0 ? null : Arrays.asList(selectedVariantTypes.split(";")),
                    actualSequenceSelection.length() == 0 ? null
                            : Arrays.asList(actualSequenceSelection.split(";")),
                    minposition, maxposition, alleleCountList);
            if (variantQueryDBList.isEmpty()) {
                if (!fNeedToFilterOnGenotypingData && mongoTemplate.count(null, GenotypingProject.class) == 1)
                    count = mongoTemplate.count(new Query(), VariantData.class); // no filter whatsoever
            } else {
                if (!fNeedToFilterOnGenotypingData) { // filtering on variant features only: we just need a count
                    count = mongoTemplate.getCollection(mongoTemplate.getCollectionName(VariantData.class))
                            .count(new BasicDBObject("$and", variantQueryDBList));
                } else { // filtering on variant features and genotyping data: we need a list of variant IDs to restrict the genotyping data search to
                    long beforeAggQuery = System.currentTimeMillis();
                    progress.setProgressDescription("Filtering variants for count...");

                    DBCollection variantColl = mongoTemplate
                            .getCollection(mongoTemplate.getCollectionName(VariantData.class));
                    List<DBObject> pipeline = new ArrayList<DBObject>();
                    pipeline.add(new BasicDBObject("$match", new BasicDBObject("$and", variantQueryDBList)));
                    BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
                    projectObject.put(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_SEQUENCE,
                            "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_SEQUENCE);
                    projectObject.put(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_START_SITE,
                            "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                                    + ReferencePosition.FIELDNAME_START_SITE);
                    projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
                    projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                            "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
                    pipeline.add(new BasicDBObject("$project", projectObject));
                    pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
                    variantColl.aggregate(pipeline);

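                    // ensure subsequent writes on this DB (temp collection updates, cached counts) are acknowledged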
                    mongoTemplate.getDb().setWriteConcern(WriteConcern.ACKNOWLEDGED);
                    LOG.debug("Variant preliminary query found " + tmpVarColl.count() + " results in "
                            + (System.currentTimeMillis() - beforeAggQuery) / 1000f + "s");

                    progress.setProgressDescription(null);
                    if (tmpVarColl.count() == 0)
                        count = 0L; // no need to search any further
                }
            }

            if (count != null) {
                BasicDBObject dbo = new BasicDBObject("_id", queryKey);
                dbo.append(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE, new Long[] { count });
                cachedCountcollection.save(dbo);
            } else { // now filter on genotyping data
                List<String> selectedIndividualList = selectedIndividuals.length() == 0 ? null
                        : Arrays.asList(selectedIndividuals.split(";"));
                if (selectedIndividualList == null)
                    selectedIndividualList = getIndividualsInDbOrder(sModule, projId);

                GigwaSearchVariantsExportRequest gsvr = new GigwaSearchVariantsExportRequest();
                gsvr.setAlleleCount(alleleCount);
                if (minposition != null)
                    gsvr.setStart(minposition);
                if (maxposition != null)
                    gsvr.setEnd(maxposition);
                gsvr.setGeneName(geneName);
                gsvr.setReferenceName(selectedSequences);
                gsvr.setSelectedVariantTypes(selectedVariantTypes);
                gsvr.setVariantEffect(variantEffects);
                gsvr.setVariantSetId(sModule + ServiceInterface.ID_SEPARATOR + projId);

                gsvr.setMissingData(missingData);
                gsvr.setMinmaf(minmaf);
                gsvr.setMaxmaf(maxmaf);
                gsvr.setGtPattern(gtPattern);

                HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
                annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
                annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
                gsvr.setAnnotationFieldThresholds(annotationFieldThresholds);
                gsvr.setCallSetIds(selectedIndividualList);

                GenotypingDataQueryBuilder genotypingDataQueryBuilder = new GenotypingDataQueryBuilder(gsvr,
                        tmpVarColl);
                try {
                    final int nChunkCount = genotypingDataQueryBuilder.getNumberOfQueries();
                    if (nChunkCount > 1)
                        LOG.debug("Query split into " + nChunkCount);

                    final Long[] partialCountArray = new Long[nChunkCount];
                    final Builder aggOpts = AggregationOptions.builder().allowDiskUse(false);
                    final ArrayList<Thread> threadsToWaitFor = new ArrayList<Thread>();
                    final AtomicInteger finishedThreadCount = new AtomicInteger(0);

                    ArrayList<List<DBObject>> genotypingDataPipelines = new ArrayList<>();
                    while (genotypingDataQueryBuilder.hasNext())
                        genotypingDataPipelines.add(genotypingDataQueryBuilder.next());

                    ArrayList<Integer> chunkIndices = new ArrayList<Integer>();
                    for (int i = 0; i < genotypingDataPipelines.size(); i++)
                        chunkIndices.add(i);
                    Collections.shuffle(chunkIndices);

                    for (int i = 0; i < chunkIndices.size()/*/2*/; i++) {
                        final List<DBObject> genotypingDataPipeline = genotypingDataPipelines
                                .get(chunkIndices.get(i));

                        // Now the $group operation, used for counting
                        DBObject groupFields = new BasicDBObject("_id", null);
                        groupFields.put("count", new BasicDBObject("$sum", 1));
                        genotypingDataPipeline.add(new BasicDBObject("$group", groupFields));

                        if (i == 0 && tmpVarColl.count() <= 5)
                            LOG.debug(genotypingDataPipeline);

                        if (progress.hasAborted()) {
                            genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                            return 0L;
                        }

                        final int chunkIndex = i;

                        Thread t = new Thread() {
                            public void run() {
                                //                               long b4 = System.currentTimeMillis();
                                Cursor it = mongoTemplate
                                        .getCollection(MongoTemplateManager
                                                .getMongoCollectionName(VariantRunData.class))
                                        .aggregate(genotypingDataPipeline, aggOpts.build());
                                partialCountArray[chunkIndex] = it.hasNext()
                                        ? ((Number) it.next().get("count")).longValue()
                                        : 0;
                                progress.setCurrentStepProgress(
                                        (short) (finishedThreadCount.incrementAndGet() * 100 / nChunkCount));
                                //                           System.out.println("chunk " + chunkIndex + " took " + (System.currentTimeMillis() - b4));
                                genotypingDataPipeline.clear(); // release memory (VERY IMPORTANT)
                            }
                        };

                        if (i % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS
                                - 1)) {
                            t.run(); // run synchronously
                        } else {
                            threadsToWaitFor.add(t);
                            t.start(); // run asynchronously for better speed
                        }
                    }

                    for (Thread t : threadsToWaitFor) // wait for all threads before moving to next phase
                        t.join();

                    progress.setCurrentStepProgress(100);

                    count = 0L;
                    for (Long partialCount : partialCountArray)
                        count += partialCount;

                    BasicDBObject dbo = new BasicDBObject("_id", queryKey);
                    dbo.append(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE, partialCountArray);
                    cachedCountcollection.save(dbo);
                } catch (Exception e) {
                    genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                    throw e;
                }
            }
            LOG.info("countVariants found " + count + " results in "
                    + (System.currentTimeMillis() - before) / 1000d + "s");
        }

        progress.markAsComplete();
        if (progress.hasAborted())
            return 0L;

        return count;
    } finally {
        //         getTemporaryVariantCollection(sModule, progress.getProcessId(), true);   // always empty it
    }
}

From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java

License: Open Source License

/**
 * Find variants.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the proj id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the genotype pattern
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param wantedFields the wanted fields
 * @param page the page
 * @param size the size
 * @param sortBy the sort by
 * @param sortDir the sort dir
 * @param processID the process id
 * @return true, if successful
 * @throws Exception the exception
 */
@RequestMapping(variantFindURL)
/**
 *  This method builds a list of variants in a temporary collection, which may be used later for browsing or exporting results
 */
protected @ResponseBody boolean findVariants(HttpServletRequest request, @RequestParam("module") String sModule,
        @RequestParam("project") int projId, @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData, @RequestParam("minmaf") Float minmaf,
        @RequestParam("maxmaf") Float maxmaf, @RequestParam("minposition") Long minposition,
        @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount,
        @RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects,
        @RequestParam("wantedFields") String wantedFields, @RequestParam("page") int page,
        @RequestParam("size") int size, @RequestParam("sortBy") String sortBy,
        @RequestParam("sortDir") String sortDir, @RequestParam("processID") String processID) throws Exception {
    long before = System.currentTimeMillis();

    String token = processID.substring(1 + processID.indexOf('|'));

    final ProgressIndicator progress = new ProgressIndicator(token, new String[0]);
    ProgressIndicator.registerProgressIndicator(progress);
    progress.addStep("Loading results");

    String actualSequenceSelection = selectedSequences;
    if (actualSequenceSelection.length() == 0) {
        ArrayList<String> externallySelectedSeqs = getSequenceIDsBeingFilteredOn(request, sModule);
        if (externallySelectedSeqs != null)
            actualSequenceSelection = StringUtils.join(externallySelectedSeqs, ";");
    }

    List<String> selectedSequenceList = actualSequenceSelection.length() == 0 ? null
            : Arrays.asList(actualSequenceSelection.split(";"));
    String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);

    final MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    DBCollection cachedCountCollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
    DBCursor countCursor = cachedCountCollection.find(new BasicDBObject("_id", queryKey));

    final DBCollection variantColl = mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantData.class));
    final Object[] partialCountArray = !countCursor.hasNext() ? null
            : ((BasicDBList) countCursor.next().get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray();

    final DBCollection tmpVarColl = getTemporaryVariantCollection(sModule, progress.getProcessId(), false);

    String sRegexOrAggregationOperator = GenotypingDataQueryBuilder.getGenotypePatternToQueryMap()
            .get(gtPattern);
    boolean fNeedToFilterOnGenotypingData = needToFilterOnGenotypingData(sModule, projId,
            sRegexOrAggregationOperator, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, geneName, variantEffects);
    final BasicDBList variantQueryDBList = buildVariantDataQuery(sModule, projId,
            selectedVariantTypes.length() == 0 ? null : Arrays.asList(selectedVariantTypes.split(";")),
            selectedSequenceList, minposition, maxposition,
            alleleCount.length() == 0 ? null : Arrays.asList(alleleCount.split(";")));

    if (!variantQueryDBList.isEmpty()
            && tmpVarColl.count() == 0 /* otherwise we kept the preliminary list from the count procedure */) { // apply filter on variant features
        progress.setProgressDescription("Filtering variants for display...");
        long beforeAggQuery = System.currentTimeMillis();
        List<DBObject> pipeline = new ArrayList<DBObject>();
        pipeline.add(new BasicDBObject("$match", new BasicDBObject("$and", variantQueryDBList)));
        BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE);
        projectObject.put(
                VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE);
        projectObject.put(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_END_SITE,
                "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_END_SITE);
        projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
        projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
        pipeline.add(new BasicDBObject("$project", projectObject));

        pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
        variantColl.aggregate(pipeline);

        LOG.debug("Variant preliminary query found " + tmpVarColl.count() + " results in "
                + (System.currentTimeMillis() - beforeAggQuery) / 1000f + "s");
        progress.setProgressDescription(null);
    } else if (fNeedToFilterOnGenotypingData && tmpVarColl.count() > 0)
        LOG.debug(
                "Re-using " + tmpVarColl.count() + " results from count procedure's variant preliminary query");

    if (progress.hasAborted())
        return false;

    if (fNeedToFilterOnGenotypingData) { // now filter on genotyping data
        final ConcurrentLinkedQueue<Thread> queryThreadsToWaitFor = new ConcurrentLinkedQueue<Thread>(),
                removalThreadsToWaitFor = new ConcurrentLinkedQueue<Thread>();
        final AtomicInteger finishedThreadCount = new AtomicInteger(0);
        final ConcurrentSkipListSet<Comparable> allVariantsThatPassRunFilter = new ConcurrentSkipListSet<Comparable>();

        GigwaSearchVariantsExportRequest gsvr = new GigwaSearchVariantsExportRequest();
        gsvr.setAlleleCount(alleleCount);
        if (minposition != null)
            gsvr.setStart(minposition);
        if (maxposition != null)
            gsvr.setEnd(maxposition);
        gsvr.setGeneName(geneName);
        gsvr.setReferenceName(selectedSequences);
        gsvr.setSelectedVariantTypes(selectedVariantTypes);
        gsvr.setVariantEffect(variantEffects);
        gsvr.setVariantSetId(sModule + ServiceInterface.ID_SEPARATOR + projId);

        gsvr.setMissingData(missingData);
        gsvr.setMinmaf(minmaf);
        gsvr.setMaxmaf(maxmaf);
        gsvr.setGtPattern(gtPattern);
        HashMap<String, Integer> annotationFieldThresholds = new HashMap<String, Integer>();
        annotationFieldThresholds.put(VCFConstants.GENOTYPE_QUALITY_KEY, genotypeQualityThreshold);
        annotationFieldThresholds.put(VCFConstants.DEPTH_KEY, readDepthThreshold);
        gsvr.setAnnotationFieldThresholds(annotationFieldThresholds);
        gsvr.setCallSetIds(selectedIndividuals == null || selectedIndividuals.length() == 0
                ? getIndividualsInDbOrder(sModule, projId)
                : Arrays.asList(selectedIndividuals.split(";")));

        final GenotypingDataQueryBuilder genotypingDataQueryBuilder = new GenotypingDataQueryBuilder(gsvr,
                tmpVarColl);
        genotypingDataQueryBuilder.keepTrackOfPreFilters(!variantQueryDBList.isEmpty());
        try {
            final int nChunkCount = genotypingDataQueryBuilder.getNumberOfQueries();
            if (partialCountArray == null || nChunkCount != partialCountArray.length) { // partialCountArray is null when no cached count exists, which would otherwise NPE here
                LOG.error("Different number of chunks between counting and listing variant rows!");
                progress.setError("Different number of chunks between counting and listing variant rows!");
                return false;
            }
            if (nChunkCount > 1)
                LOG.debug("Query split into " + nChunkCount);

            ArrayList<List<DBObject>> genotypingDataPipelines = new ArrayList<>();
            while (genotypingDataQueryBuilder.hasNext())
                genotypingDataPipelines.add(genotypingDataQueryBuilder.next());

            ArrayList<Integer> chunkIndices = new ArrayList<Integer>();
            for (int i = 0; i < genotypingDataPipelines.size(); i++)
                chunkIndices.add(i);
            Collections.shuffle(chunkIndices);

            for (int i = 0; i < chunkIndices.size(); i++) {
                final int chunkIndex = chunkIndices.get(i);
                final List<DBObject> genotypingDataPipeline = genotypingDataPipelines.get(chunkIndex);

                if (progress.hasAborted()) {
                    genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
                    return false;
                }

                Thread t = new Thread() {
                    public void run() {
                        Cursor genotypingDataCursor = mongoTemplate
                                .getCollection(
                                        MongoTemplateManager.getMongoCollectionName(VariantRunData.class))
                                .aggregate(genotypingDataPipeline,
                                        AggregationOptions.builder().allowDiskUse(true).build());
                        final ArrayList<Comparable> variantsThatPassedRunFilter = new ArrayList<Comparable>();
                        while (genotypingDataCursor.hasNext())
                            variantsThatPassedRunFilter
                                    .add((Comparable) genotypingDataCursor.next().get("_id"));

                        if (variantQueryDBList.isEmpty()) // otherwise we won't need it
                            allVariantsThatPassRunFilter.addAll(variantsThatPassedRunFilter);
                        else { // mark the results we want to keep
                            final List<Comparable> lastUsedPreFilter = genotypingDataQueryBuilder
                                    .getPreFilteredIDsForChunk(chunkIndex);

                            Thread removalThread = new Thread() {
                                public void run() {
                                    genotypingDataPipeline.clear(); // release memory (VERY IMPORTANT)

                                    long beforeTempCollUpdate = System.currentTimeMillis();
                                    if (variantsThatPassedRunFilter.size() == lastUsedPreFilter.size())
                                        return; // none to remove

                                    Collection<Comparable> filteredOutVariants = variantsThatPassedRunFilter
                                            .size() == 0 ? lastUsedPreFilter
                                                    : CollectionUtils.subtract(lastUsedPreFilter,
                                                            variantsThatPassedRunFilter);
                                    BasicDBObject removalQuery = GenotypingDataQueryBuilder
                                            .tryAndShrinkIdList("_id", filteredOutVariants, 4);
                                    WriteResult wr = tmpVarColl.remove(removalQuery);
                                    LOG.debug("Chunk N." + (chunkIndex) + ": " + wr.getN()
                                            + " filtered-out temp records removed in "
                                            + (System.currentTimeMillis() - beforeTempCollUpdate) / 1000d
                                            + "s");

                                    progress.setCurrentStepProgress(
                                            (short) (finishedThreadCount.incrementAndGet() * 100
                                                    / nChunkCount));
                                }
                            };
                            removalThreadsToWaitFor.add(removalThread);
                            removalThread.start();
                        }
                    }
                };

                if (i % NUMBER_OF_SIMULTANEOUS_QUERY_THREADS == (NUMBER_OF_SIMULTANEOUS_QUERY_THREADS - 1))
                    t.run(); // sometimes run synchronously so that all queries are not sent at the same time (also helps smooth progress display)
                else {
                    queryThreadsToWaitFor.add(t);
                    t.start(); // run asynchronously for better speed
                }
            }

            // wait for all threads before moving to next phase
            for (Thread t : queryThreadsToWaitFor)
                t.join();
            for (Thread t : removalThreadsToWaitFor)
                t.join();
        } catch (Exception e) {
            genotypingDataQueryBuilder.cleanup(); // otherwise a pending db-cursor will remain
            throw e;
        }

        if (progress.hasAborted())
            return false;

        progress.addStep("Updating temporary results");
        progress.moveToNextStep();
        final long beforeTempCollUpdate = System.currentTimeMillis();
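        // require acknowledged writes while the temporary result collection is rebuilt below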
        mongoTemplate.getDb().setWriteConcern(WriteConcern.ACKNOWLEDGED);
        if (variantQueryDBList.isEmpty()) { // we filtered on runs only: keep track of the final dataset
            List<BasicDBObject> pipeline = new ArrayList<>();
            pipeline.add(new BasicDBObject("$match",
                    GenotypingDataQueryBuilder.tryAndShrinkIdList("_id", allVariantsThatPassRunFilter, 4)));
            BasicDBObject projectObject = new BasicDBObject("_id", "$_id");
            projectObject.put(
                    VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE,
                    "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                            + ReferencePosition.FIELDNAME_SEQUENCE);
            projectObject.put(
                    VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE,
                    "$" + VariantData.FIELDNAME_REFERENCE_POSITION + "."
                            + ReferencePosition.FIELDNAME_START_SITE);
            projectObject.put(VariantData.FIELDNAME_TYPE, "$" + VariantData.FIELDNAME_TYPE);
            projectObject.put(VariantData.FIELDNAME_KNOWN_ALLELE_LIST,
                    "$" + VariantData.FIELDNAME_KNOWN_ALLELE_LIST);
            projectObject.put(VariantData.FIELDNAME_VERSION, "$" + VariantData.FIELDNAME_VERSION);
            pipeline.add(new BasicDBObject("$project", projectObject));
            pipeline.add(new BasicDBObject("$out", tmpVarColl.getName()));
            variantColl.aggregate(pipeline);
            LOG.debug(tmpVarColl.count() + " temp records created in "
                    + (System.currentTimeMillis() - beforeTempCollUpdate) / 1000d + "s");
        }
    }

    progress.markAsComplete();
    LOG.info("findVariants took " + (System.currentTimeMillis() - before) / 1000d + "s");
    return true;
}

From source file: geojson.sof20181050.ConvertToGeoJSON.java

License: Apache License

/**
 * Performs the document updates using the legacy driver.
 * <p>
 * The main draw back here (other than those discussed in
 * {@link #doSynchronously()}) is the difficulty creating the GeoJSON
 * documents.
 * </p>
 * 
 * @throws UnknownHostException
 *             On an invalid URI.
 */
protected static void doLegacy() throws UnknownHostException {
    // Execute the query to find all of the documents and then
    // update them.
    final com.mongodb.MongoClient legacyClient = new com.mongodb.MongoClient(new MongoClientURI(URI));
    final com.mongodb.DBCollection legacyCollection = legacyClient.getDB(theCollection.getDatabaseName())
            .getCollection(theCollection.getName());
    try {
        int count = 0;
        for (final DBObject doc : legacyCollection.find()) {
            final Object id = doc.get("_id");
            final Number lat = (Number) doc.get("latitude_deg");
            final Number lon = (Number) doc.get("longitude_deg");

            final BasicDBObject query = new BasicDBObject();
            query.append("_id", id);

            final ArrayList<Double> coordinates = new ArrayList<>();
            coordinates.add(lon.doubleValue());
            coordinates.add(lat.doubleValue());
            final BasicDBObject geojson = new BasicDBObject("type", "Point");
            geojson.append("coordinates", coordinates);
            final BasicDBObject set = new BasicDBObject("loc", geojson);
            final BasicDBObject update = new BasicDBObject("$set", set);

            legacyCollection.update(query, update, /* upsert= */false, /* multi= */false,
                    WriteConcern.ACKNOWLEDGED);

            count += 1;
        }
        System.out.printf("Updated %d documents via the legacy driver.%n", count);
    } finally {
        // Always close the client.
        legacyClient.close();
    }
}
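
Note that the legacy DBCollection API accepts a WriteConcern argument per operation, as in the update call above; this overrides whatever default the client was constructed with.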

From source file: io.github.microcks.config.MongoConfiguration.java

License: Apache License

@Bean
public WriteConcernResolver writeConcernResolver() {
    return action -> {
        log.info("Using Write Concern of Acknowledged");
        return WriteConcern.ACKNOWLEDGED;
    };
}
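
Spring Data consults this WriteConcernResolver for each MongoAction it performs, so returning WriteConcern.ACKNOWLEDGED unconditionally pins every write issued through the template to acknowledged mode.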

From source file: io.gravitee.am.repository.mongodb.common.MongoFactory.java

License: Apache License

@Override
public MongoClient getObject() throws Exception {
    // Client settings
    MongoClientSettings.Builder builder = MongoClientSettings.builder();
    builder.writeConcern(WriteConcern.ACKNOWLEDGED);

    // codec configuration for pojo mapping
    CodecRegistry pojoCodecRegistry = fromRegistries(MongoClients.getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder().automatic(true).build()));
    builder.codecRegistry(pojoCodecRegistry);

    // Trying to get the MongoClientURI if uri property is defined
    String uri = readPropertyValue(propertyPrefix + "uri");
    if (uri != null && !uri.isEmpty()) {
        // The builder can be configured with default options, which may be overridden by options specified in
        // the URI string.
        MongoClientSettings settings = builder.codecRegistry(pojoCodecRegistry)
                .applyConnectionString(new ConnectionString(uri)).build();

        return MongoClients.create(settings);
    } else {
        // Advanced configuration
        SocketSettings.Builder socketBuilder = SocketSettings.builder();
        ClusterSettings.Builder clusterBuilder = ClusterSettings.builder();
        ConnectionPoolSettings.Builder connectionPoolBuilder = ConnectionPoolSettings.builder();
        ServerSettings.Builder serverBuilder = ServerSettings.builder();
        SslSettings.Builder sslBuilder = SslSettings.builder();

        Integer connectTimeout = readPropertyValue(propertyPrefix + "connectTimeout", Integer.class, 1000);
        Integer maxWaitTime = readPropertyValue(propertyPrefix + "maxWaitTime", Integer.class);
        Integer socketTimeout = readPropertyValue(propertyPrefix + "socketTimeout", Integer.class, 1000);
        Boolean socketKeepAlive = readPropertyValue(propertyPrefix + "socketKeepAlive", Boolean.class, true);
        Integer maxConnectionLifeTime = readPropertyValue(propertyPrefix + "maxConnectionLifeTime",
                Integer.class);
        Integer maxConnectionIdleTime = readPropertyValue(propertyPrefix + "maxConnectionIdleTime",
                Integer.class);

        // We do not want to wait for a server
        Integer serverSelectionTimeout = readPropertyValue(propertyPrefix + "serverSelectionTimeout",
                Integer.class, 1000);
        Integer minHeartbeatFrequency = readPropertyValue(propertyPrefix + "minHeartbeatFrequency",
                Integer.class);
        String description = readPropertyValue(propertyPrefix + "description", String.class, "gravitee.io");
        Integer heartbeatFrequency = readPropertyValue(propertyPrefix + "heartbeatFrequency", Integer.class);
        Boolean sslEnabled = readPropertyValue(propertyPrefix + "sslEnabled", Boolean.class);

        if (maxWaitTime != null)
            connectionPoolBuilder.maxWaitTime(maxWaitTime, TimeUnit.MILLISECONDS);
        if (connectTimeout != null)
            socketBuilder.connectTimeout(connectTimeout, TimeUnit.MILLISECONDS);
        if (socketTimeout != null)
            socketBuilder.readTimeout(socketTimeout, TimeUnit.MILLISECONDS);
        if (socketKeepAlive != null)
            socketBuilder.keepAlive(socketKeepAlive);
        if (maxConnectionLifeTime != null)
            connectionPoolBuilder.maxConnectionLifeTime(maxConnectionLifeTime, TimeUnit.MILLISECONDS);
        if (maxConnectionIdleTime != null)
            connectionPoolBuilder.maxConnectionIdleTime(maxConnectionIdleTime, TimeUnit.MILLISECONDS);
        if (minHeartbeatFrequency != null)
            serverBuilder.minHeartbeatFrequency(minHeartbeatFrequency, TimeUnit.MILLISECONDS);
        if (description != null)
            clusterBuilder.description(description);
        if (heartbeatFrequency != null)
            serverBuilder.heartbeatFrequency(heartbeatFrequency, TimeUnit.MILLISECONDS);
        if (sslEnabled != null)
            sslBuilder.enabled(sslEnabled);
        if (serverSelectionTimeout != null)
            clusterBuilder.serverSelectionTimeout(serverSelectionTimeout, TimeUnit.MILLISECONDS);

        // credentials option
        String username = readPropertyValue(propertyPrefix + "username");
        String password = readPropertyValue(propertyPrefix + "password");
        MongoCredential credentials = null;
        if (username != null || password != null) {
            String authSource = readPropertyValue(propertyPrefix + "authSource", String.class, "gravitee-am");
            credentials = MongoCredential.createCredential(username, authSource, password.toCharArray());
            builder.credential(credentials);
        }

        // clustering option
        List<ServerAddress> seeds;
        int serversCount = getServersCount();
        if (serversCount == 0) {
            String host = readPropertyValue(propertyPrefix + "host", String.class, "localhost");
            int port = readPropertyValue(propertyPrefix + "port", int.class, 27017);
            seeds = Collections.singletonList(new ServerAddress(host, port));
        } else {
            seeds = new ArrayList<>(serversCount);
            for (int i = 0; i < serversCount; i++) {
                seeds.add(buildServerAddress(i));
            }
        }
        clusterBuilder.hosts(seeds);

        SocketSettings socketSettings = socketBuilder.build();
        ClusterSettings clusterSettings = clusterBuilder.build();
        ConnectionPoolSettings connectionPoolSettings = connectionPoolBuilder.build();
        ServerSettings serverSettings = serverBuilder.build();
        SslSettings sslSettings = sslBuilder.build();
        MongoClientSettings settings = builder
                .applyToClusterSettings(builder1 -> builder1.applySettings(clusterSettings))
                .applyToSocketSettings(builder1 -> builder1.applySettings(socketSettings))
                .applyToConnectionPoolSettings(builder1 -> builder1.applySettings(connectionPoolSettings))
                .applyToServerSettings(builder1 -> builder1.applySettings(serverSettings))
                .applyToSslSettings(builder1 -> builder1.applySettings(sslSettings)).build();

        return MongoClients.create(settings);
    }
}

From source file: io.gravitee.am.repository.mongodb.management.ManagementRepositoryTestConfiguration.java

License: Apache License

@Bean(name = "managementMongo")
public MongoClient mongo() {
    // cluster configuration
    ClusterSettings clusterSettings = ClusterSettings.builder()
            .hosts(Collections.singletonList(new ServerAddress("localhost", 12345))).build();
    // codec configuration
    CodecRegistry pojoCodecRegistry = fromRegistries(MongoClients.getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder().automatic(true).build()));

    MongoClientSettings settings = MongoClientSettings.builder().clusterSettings(clusterSettings)
            .codecRegistry(pojoCodecRegistry).writeConcern(WriteConcern.ACKNOWLEDGED).build();
    return MongoClients.create(settings);
}

From source file: io.gravitee.am.repository.mongodb.oauth2.OAuth2RepositoryTestConfiguration.java

License: Apache License

@Bean(name = "oauth2Mongo")
public MongoClient mongo() {
    // cluster configuration
    ClusterSettings clusterSettings = ClusterSettings.builder()
            .hosts(Collections.singletonList(new ServerAddress("localhost", 12346))).build();
    // codec configuration
    CodecRegistry pojoCodecRegistry = fromRegistries(MongoClients.getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder().automatic(true).build()));

    MongoClientSettings settings = MongoClientSettings.builder().clusterSettings(clusterSettings)
            .codecRegistry(pojoCodecRegistry).writeConcern(WriteConcern.ACKNOWLEDGED).build();
    return MongoClients.create(settings);
}

From source file: net.acesinc.nifi.processors.mongodb.PartialUpdateMongo.java

protected WriteConcern getWriteConcern(final ProcessContext context) {
    final String writeConcernProperty = context.getProperty(WRITE_CONCERN).getValue();
    WriteConcern writeConcern = null;
    switch (writeConcernProperty) {
    case WRITE_CONCERN_ACKNOWLEDGED:
        writeConcern = WriteConcern.ACKNOWLEDGED;
        break;
    case WRITE_CONCERN_UNACKNOWLEDGED:
        writeConcern = WriteConcern.UNACKNOWLEDGED;
        break;
    case WRITE_CONCERN_FSYNCED:
        writeConcern = WriteConcern.FSYNCED;
        break;
    case WRITE_CONCERN_JOURNALED:
        writeConcern = WriteConcern.JOURNALED;
        break;
    case WRITE_CONCERN_REPLICA_ACKNOWLEDGED:
        writeConcern = WriteConcern.REPLICA_ACKNOWLEDGED;
        break;
    case WRITE_CONCERN_MAJORITY:
        writeConcern = WriteConcern.MAJORITY;
        break;
    default:
        writeConcern = WriteConcern.ACKNOWLEDGED;
    }
    return writeConcern;
}

From source file: org.alfresco.cacheserver.dao.mongo.MongoWebSocketDAO.java

License: Open Source License

private void init() {
    if (db == null) {
        throw new RuntimeException("Mongo DB must not be null");
    }

    this.registrationData = getCollection(db, registrationDataCollectionName, WriteConcern.ACKNOWLEDGED);

    {
        DBObject keys = BasicDBObjectBuilder.start("u", 1).get();
        this.registrationData.ensureIndex(keys, "byUserName", false);
    }
}

From source file: org.alfresco.contentstore.dao.mongo.MongoContentDAO.java

License: Open Source License

private void init() {
    if (db == null) {
        throw new RuntimeException("Mongo DB must not be null");
    }

    this.contentData = getCollection(db, contentCollectionName, WriteConcern.ACKNOWLEDGED);

    {
        DBObject keys = BasicDBObjectBuilder.start("e", 1).add("n", 1).add("v", 1).add("pri", 1).get();
        this.contentData.ensureIndex(keys, "byNodeId", false);
    }

    {
        DBObject keys = BasicDBObjectBuilder.start("e", 1).add("n", 1).add("p", 1).get();
        this.contentData.ensureIndex(keys, "byNodePath", false);
    }
}