Example usage for com.mongodb DBObject put

List of usage examples for com.mongodb DBObject put

Introduction

This page collects usage examples for com.mongodb DBObject.put.

Prototype

Object put(String key, Object v);

Document

Sets a name/value pair in this object.
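
A minimal sketch of the call, assuming the legacy mongo-java-driver in which BasicDBObject implements DBObject and put follows Map semantics:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class PutExample {
    public static void main(String[] args) {
        DBObject doc = new BasicDBObject();
        doc.put("name", "alice");                  // sets a new field
        Object previous = doc.put("name", "bob");  // overwrites; with BasicDBObject the old value ("alice") is returned
        doc.put("age", 30);                        // any BSON-encodable value is accepted
        System.out.println(doc);                   // { "name" : "bob" , "age" : 30}
    }
}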

Usage

From source file:com.icoin.trading.tradeengine.query.tradeexecuted.repositories.TradeExecutedQueryRepositoryImpl.java

License:Apache License

@Override
public List<OpenHighLowCloseVolume> ohlc(String orderBookIdentifier, Date start, Date end, Pageable pageable) {
    hasLength(orderBookIdentifier);
    notNull(pageable);

    DBObject command = BasicDBObjectBuilder.start("aggregate", TRADE_EXECUTED_ENTRY_COLLECTION).get();

    final DBObject criteriaObject = Criteria.where("orderBookIdentifier").is(orderBookIdentifier)
            .and("tradeTime").gte(start).lt(end).getCriteriaObject();
    DBObject match = BasicDBObjectBuilder.start("$match", criteriaObject).get();

    DBObject projection = BasicDBObjectBuilder
            .start("year", BasicDBObjectBuilder.start("$year", "$tradeTime").get())
            .append("month", BasicDBObjectBuilder.start("$month", "$tradeTime").get())
            .append("day", BasicDBObjectBuilder.start("$dayOfMonth", "$tradeTime").get())
            .append("hour", BasicDBObjectBuilder.start("$hour", "$tradeTime").get())
            .append("minute", BasicDBObjectBuilder.start("$minute", "$tradeTime").get())
            .append("tradedPrice", 1).append("tradedAmount", 1).append("_id", 0).get();

    DBObject project = BasicDBObjectBuilder.start("$project", projection).get();

    //{ "aggregate" : "tradeExecutedEntry" ,
    // "pipeline" : [ { "$match" : { "orderBookIdentifier" : "f830f7e3-9f99-4688-92e7-6dbafc7220a8" ,
    // "tradeTime" : { "$gte" : { "$date" : "2007-12-12T04:12:12.120Z"} ,
    // "$lt" : { "$date" : "2012-12-12T04:12:04.120Z"}}}} ,
    // { "$project" : { "tradeTime" : 1 , "tradedPrice.amount" : 1 , "tradedAmount.amount" : 1 , "year" :
    // { "$year" : [ "$tradeTime"]} , "month" : { "$month" : [ "$tradeTime"]} , "week" : { "$week" : [ "$tradeTime"]}}} ,
    // { "$group" : { "_id" : "$year" , "open" : { "$first" : "$tradedPrice"} , "high" : { "$max" : "$tradedPrice"} ,
    // "low" : { "$min" : "$tradedPrice"} , "close" : { "$last" : "$tradedPrice"} , "volume" : { "$sum" : "$tradedAmount"}}} ,
    // { "$skip" : 0} , { "$limit" : 100}]}

    //        {"$project": {
    //            "year":       {"$year": "$dt"},
    //            "month":      {"$month": "$dt"},
    //            "day":        {"$dayOfMonth": "$dt"},
    //            "hour":       {"$hour": "$dt"},
    //            "minute":     {"$minute": "$dt"},
    //            "second":     {"$second": "$dt"},
    //            "dt": 1,
    //                    "p": 1 }},
    //        {"_id" : {"year": "$year", "month": "$month", "day": "$day", "hour": "$hour", "minute": "$minute" },
    //            "open":  {"$first": "$p"},
    //            "high":  {"$max": "$p"},
    //            "low":   {"$min": "$p"},
    //            "close": {"$last": "$p"} }} ] )

    //        02:41:22.649 [main] DEBUG c.i.t.t.q.t.r.TradeExecutedQueryRepositoryImpl - aggregation { "aggregate" : "tradeExecutedEntry" , "pipeline" : [ { "$match" : { "orderBookIdentifier" : "c623022b-9baa-437a-a70f-b59adead3ecf" , "tradeTime" : { "$gte" : { "$date" : "2007-12-12T04:12:12.120Z"} , "$lt" : { "$date" : "2012-12-12T04:12:04.120Z"}}}} , { "$project" : { "year" : { "$year" : "$tradeTime"} , "month" : { "$month" : "$tradeTime"} , "day" : { "$dayOfMonth" : "$tradeTime"} , "hour" : { "$hour" : "$tradeTime"} , "minute" : { "$minute" : "$tradeTime"} , "tradedPrice" : 1 , "tradedAmount" : 1 , "_id" : 0}} , { "$group" : { "_id" : { "year" : "$year" , "priceCcy" : "$tradedPrice.currency" , "amountCcy" : "$tradedAmount.currency"} , "open" : { "$first" : "$tradedPrice.amount"} , "high" : { "$max" : "$tradedPrice.amount"} , "low" : { "$min" : "$tradedPrice.amount"} , "close" : { "$last" : "$tradedPrice.amount"} , "volume" : { "$sum" : "$tradedAmount.amount"}}}]} found :[ { "_id" : { "year" : 2012 , "priceCcy" : "CNY" , "amountCcy" : "BTC"} , "open" : 10500 , "high" : 10500 , "low" : 10500 , "close" : 10500 , "volume" : 11550000000} , { "_id" : { "year" : 2010 , "priceCcy" : "CNY" , "amountCcy" : "BTC"} , "open" : 10500 , "high" : 10500 , "low" : 10500 , "close" : 10500 , "volume" : 2100000000} , { "_id" : { "year" : 2011 , "priceCcy" : "CNY" , "amountCcy" : "BTC"} , "open" : 10500 , "high" : 10500 , "low" : 10500 , "close" : 10500 , "volume" : 1050000000}]

    final DBObject groupId = BasicDBObjectBuilder.start("year", "$year")
            //                .append("month", "$month")
            //                .append("day", "$dayOfMonth")
            //                .append("hour", "$hour")
            //                .append("minute", "$minute")
            .append("priceCcy", "$tradedPrice.currency").append("amountCcy", "$tradedAmount.currency").get();
    DBObject groupOp = BasicDBObjectBuilder.start("_id", groupId)
            .append("open", BasicDBObjectBuilder.start("$first", "$tradedPrice.amount").get())
            .append("high", BasicDBObjectBuilder.start("$max", "$tradedPrice.amount").get())
            .append("low", BasicDBObjectBuilder.start("$min", "$tradedPrice.amount").get())
            .append("close", BasicDBObjectBuilder.start("$last", "$tradedPrice.amount").get())
            .append("volume", BasicDBObjectBuilder.start("$sum", "$tradedAmount.amount").get()).get();

    DBObject group = BasicDBObjectBuilder.start("$group", groupOp).get();

    final BasicDBList pipeline = new BasicDBList();
    pipeline.add(match);
    pipeline.add(project);
    pipeline.add(group);
    command.put("pipeline", pipeline);

    CommandResult commandResult = mongoTemplate.executeCommand(command);
    handleCommandError(commandResult, command);

    // map results
    @SuppressWarnings("unchecked")
    Iterable<DBObject> resultSet = (Iterable<DBObject>) commandResult.get("result");
    List<OpenHighLowCloseVolume> mappedResults = Lists.newLinkedList();

    if (logger.isDebugEnabled()) {
        logger.debug("aggregation {} found :{}", command, resultSet);
    }

    // NOTE: mapping of resultSet into OpenHighLowCloseVolume is not implemented here;
    // an empty list is returned rather than null
    return mappedResults;
}
}
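
For comparison, the same pipeline can be expressed with Spring Data MongoDB's typed aggregation API instead of a hand-built command. This is only a sketch, not the repository's actual code; it assumes the same collection and field names and the grouping keys (year plus the two currencies) seen in the debug log above:

// assumes org.springframework.data.mongodb.core.aggregation.* on the classpath
Aggregation agg = Aggregation.newAggregation(
        Aggregation.match(Criteria.where("orderBookIdentifier").is(orderBookIdentifier)
                .and("tradeTime").gte(start).lt(end)),
        Aggregation.project("tradedPrice", "tradedAmount")
                .andExpression("year(tradeTime)").as("year"),
        Aggregation.group("year", "tradedPrice.currency", "tradedAmount.currency")
                .first("tradedPrice.amount").as("open")
                .max("tradedPrice.amount").as("high")
                .min("tradedPrice.amount").as("low")
                .last("tradedPrice.amount").as("close")
                .sum("tradedAmount.amount").as("volume"));
AggregationResults<OpenHighLowCloseVolume> results = mongoTemplate.aggregate(
        agg, "tradeExecutedEntry", OpenHighLowCloseVolume.class);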

From source file:com.ikanow.aleph2.shared.crud.mongodb.services.MongoDbCrudService.java

License:Apache License

/** creates a new object and inserts an _id field if needed
 * @param bean the object to convert
 * @return the converted BSON (possibly with _id inserted)
 */
protected DBObject convertToBson(final O bean) {
    final DBObject dbo = Patterns.match().<DBObject>andReturn()
            .when(() -> JsonNode.class != _state.bean_clazz, () -> _state.coll.convertToDbObject(bean))
            .otherwise(() -> {
                try (BsonObjectGenerator generator = new BsonObjectGenerator()) {
                    _object_mapper.writeTree(generator, (JsonNode) bean);
                    return generator.getDBObject();
                } catch (Exception e) {
                    return new BasicDBObject();
                }
            });
    if (_state.insert_string_id_if_missing) {
        if (!dbo.containsField(_ID))
            dbo.put(_ID, new ObjectId().toString());
    }
    return dbo;
}

From source file:com.ikanow.aleph2.shared.crud.mongodb.utils.MongoDbUtils.java

License:Apache License

/** Inserts an object into field1.field2, creating objects along the way
 * @param mutable the mutable object into which the nested field is inserted
 * @param parent the top level fieldname
 * @param nested the nested fieldname
 * @param to_insert the object to insert
 */
protected static void nestedPut(final BasicDBObject mutable, final String parent, final String nested,
        final Object to_insert) {
    final DBObject dbo = (DBObject) mutable.get(parent);
    if (null != dbo) {
        dbo.put(nested, to_insert);
    } else {
        BasicDBObject new_dbo = new BasicDBObject();
        new_dbo.put(nested, to_insert);
        mutable.put(parent, new_dbo);
    }
}
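
A quick demonstration of the effect, with hypothetical field names and values:

BasicDBObject update = new BasicDBObject();
nestedPut(update, "address", "city", "Berlin");
// update is now { "address" : { "city" : "Berlin"}}
nestedPut(update, "address", "zip", "10115");
// the existing "address" sub-object is reused:
// { "address" : { "city" : "Berlin" , "zip" : "10115"}}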

From source file:com.ikanow.infinit.e.core.mapreduce.HadoopJobRunner.java

License:Open Source License

/**
 * Moves the output of a job from output_tmp to output and deletes
 * the tmp collection.
 * 
 * @param cmr
 * @throws IOException 
 * @throws ParserConfigurationException 
 * @throws SAXException 
 */
private void moveTempOutput(CustomMapReduceJobPojo cmr)
        throws IOException, SAXException, ParserConfigurationException {
    // If we are an export job then move files:
    bringTempOutputToFront(cmr);
    // (the rest of this will just do nothing) 

    /**
     * Atomic plan:
     * If not append, move customlookup pointer to tmp collection, drop old collection.
     * If append, set sync flag (find/mod), move results from tmp to old, unset sync flag.
     * 
     */
    //step1 build out any of the post proc arguments
    DBObject postProcObject = null;
    boolean limitAllData = true;
    boolean hasSort = false;
    int limit = 0;
    BasicDBObject sort = new BasicDBObject();
    try {
        postProcObject = (DBObject) com.mongodb.util.JSON
                .parse(getQueryOrProcessing(cmr.query, QuerySpec.POSTPROC));
        if (postProcObject != null) {
            if (postProcObject.containsField("limitAllData")) {
                limitAllData = (Boolean) postProcObject.get("limitAllData");
            }
            if (postProcObject.containsField("limit")) {
                limit = (Integer) postProcObject.get("limit");
                if (postProcObject.containsField("sortField")) {
                    String sfield = (String) postProcObject.get("sortField");
                    int sortDir = 1;
                    if (postProcObject.containsField("sortDirection")) {
                        sortDir = (Integer) postProcObject.get("sortDirection");
                    }
                    sort.put(sfield, sortDir);
                    hasSort = true;
                } else if (limit > 0) {
                    //set a default sort because the user posted a limit
                    sort.put("_id", -1);
                    hasSort = true;
                }
            }
        }
    } catch (Exception ex) {
        _logger.info(
                "job_error_post_proc_title=" + cmr.jobtitle + " job_error_post_proc_id=" + cmr._id.toString()
                        + " job_error_post_proc_message=" + HarvestExceptionUtils.createExceptionMessage(ex));
    }

    //step 2a if not appending results then work on temp collection and swap to main
    if ((null == cmr.appendResults) || !cmr.appendResults) //format temp then change lookup pointer to temp collection
    {
        //transform all the results into necessary format:         
        DBCursor dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                .find(new BasicDBObject("key", null)).sort(sort).limit(limit);
        while (dbc_tmp.hasNext()) {
            DBObject dbo = dbc_tmp.next();
            Object key = dbo.get("_id");
            dbo.put("key", key);
            dbo.removeField("_id");
            DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp).insert(dbo);
        }
        DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                .remove(new BasicDBObject("key", null));

        //swap the output collections
        BasicDBObject notappendupdates = new BasicDBObject(CustomMapReduceJobPojo.outputCollection_,
                cmr.outputCollectionTemp);
        notappendupdates.append(CustomMapReduceJobPojo.outputCollectionTemp_, cmr.outputCollection);
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, notappendupdates));
        String temp = cmr.outputCollectionTemp;
        cmr.outputCollectionTemp = cmr.outputCollection;
        cmr.outputCollection = temp;
    } else //step 2b if appending results then drop modified results in output collection
    {
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, new BasicDBObject("isUpdatingOutput", true)));
        //remove any aged out results
        if ((null != cmr.appendAgeOutInDays) && cmr.appendAgeOutInDays > 0) {
            //remove any results that have aged out
            long ageOutMS = (long) (cmr.appendAgeOutInDays * MS_IN_DAY);
            Date lastAgeOut = new Date(((new Date()).getTime() - ageOutMS));
            DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection).remove(
                    new BasicDBObject("_id", new BasicDBObject(MongoDbManager.lt_, new ObjectId(lastAgeOut))));
        }
        DBCursor dbc_tmp;
        if (!limitAllData) {
            //sort and limit the temp data set because we only want to process it
            dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                    .find(new BasicDBObject("key", null)).sort(sort).limit(limit);
            limit = 0; //reset limit so we get everything in a few steps (we only want to limit the new data)
        } else {
            dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                    .find(new BasicDBObject("key", null));
        }

        DBCollection dbc = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection);
        //transform temp results and dump into output collection
        while (dbc_tmp.hasNext()) {
            DBObject dbo = dbc_tmp.next();
            //transform the dbo to format {_id:ObjectId, key:(prev_id), value:value}
            Object key = dbo.get("_id");
            dbo.put("key", key);
            dbo.removeField("_id");
            //_id field should be automatically set to objectid when inserting now
            dbc.insert(dbo);
        }
        //if there is a sort, we need to apply it to all the data now
        if (hasSort) {
            ObjectId OID = new ObjectId();
            BasicDBObject query = new BasicDBObject("_id", new BasicDBObject(MongoDbManager.lt_, OID));
            //find everything inserted before now and sort/limit the data
            DBCursor dbc_sort = dbc.find(query).sort(sort).limit(limit);
            while (dbc_sort.hasNext()) {
                //reinsert the data into db (it should be in sorted order naturally now)
                DBObject dbo = dbc_sort.next();
                dbo.removeField("_id");
                dbc.insert(dbo);
            }
            //remove everything inserted before we reorganized everything (should leave only the new results in natural order)
            dbc.remove(query);
        }
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, new BasicDBObject("isUpdatingOutput", false)));
    }
    //step3 clean up temp output collection so we can use it again
    // (drop it, removing chunks)
    try {
        DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp).drop();
    } catch (Exception e) {
    } // That's fine, it probably just doesn't exist yet...
}

From source file:com.ikanow.infinit.e.data_model.custom.InfiniteMongoSplitter.java

License:Apache License

@SuppressWarnings("unchecked")
public static List<InputSplit> calculateSplits_phase2(InfiniteMongoConfig conf, BasicDBObject confQuery,
        boolean alwaysUseChunks, boolean newShardScheme, Integer splitDocCount) {
    alwaysUseChunks &= (conf.getMaxSplits() != MAX_SPLITS);
    // (in standalone mode, never use chunks)

    MongoURI uri = conf.getInputURI();
    DBCollection coll = InfiniteMongoConfigUtil.getCollection(uri);
    if (conf.getLimit() > 0) {
        return calculateManualSplits(conf, confQuery, 1, conf.getLimit(), coll);
    } else {
        if (!alwaysUseChunks) {
            int nMaxCount = 1 + conf.getMaxDocsPerSplit() * conf.getMaxSplits();
            int count = 0;
            if (null == splitDocCount) {
                if (nMaxCount <= 1) {
                    nMaxCount = 0;
                } else {
                    //DEBUG
                    //System.out.println(coll.find(confQuery).limit(1).explain());

                    count = (int) coll.getCount(confQuery, null, nMaxCount, 0);
                    if (0 == count) {
                        return new ArrayList<InputSplit>();
                    }
                } //TESTED
            } else {
                count = splitDocCount;
            }

            //if maxDocsPerSplit and maxSplits are set and there are fewer documents than splits*docsPerSplit then use the new splitter
            //otherwise use the old splitter
            if (conf.getMaxDocsPerSplit() > 0 && conf.getMaxSplits() > 0 && (count < nMaxCount)) {
                _logger.debug("Calculating splits manually");
                int splits_needed = (count / conf.getMaxDocsPerSplit()) + 1;

                return calculateManualSplits(conf, confQuery, splits_needed, conf.getMaxDocsPerSplit(), coll);
            } //TESTED
        }
        if (newShardScheme && !confQuery.containsField(DocumentPojo.sourceKey_)) {
            // OK if we're going to do the sharded version then we will want to calculate
            splitPrecalculations_newShardScheme(confQuery, null); // (modifies confQuery if returns true)            
        } //TESTED: checked did nothing when had sourceKey, added sourceKey when necessary (eg entities.index case)

        if (!newShardScheme) { // unlike new sharding scheme, in this case the query is fixed, so overwrite now:
            conf.setQuery(confQuery);
        }

        List<InputSplit> splits = MongoSplitter.calculateSplits(conf);
        // (unless manually set, like above, runs with the _original_ query)
        int initialSplitSize = splits.size();

        // We have the MongoDB-calculated splits, now calculate their intersection vs the query
        @SuppressWarnings("rawtypes")
        Map<String, TreeSet<Comparable>> orderedArraySet = new HashMap<String, TreeSet<Comparable>>();
        @SuppressWarnings("rawtypes")
        Map<String, NavigableSet<Comparable>> orderedArraySet_afterMin = new HashMap<String, NavigableSet<Comparable>>();
        BasicDBObject originalQuery = confQuery;

        ArrayList<InputSplit> newsplits = new ArrayList<InputSplit>(splits.size());
        Iterator<InputSplit> splitIt = splits.iterator();
        while (splitIt.hasNext()) {
            try {
                orderedArraySet_afterMin.clear();

                MongoInputSplit mongoSplit = (MongoInputSplit) splitIt.next();
                BasicDBObject min = (BasicDBObject) mongoSplit.getQuerySpec().get("$min");
                BasicDBObject max = (BasicDBObject) mongoSplit.getQuerySpec().get("$max");

                //DEBUG
                //_logger.info("+----------------- NEW SPLIT ----------------: " + min + " /" + max);
                //System.out.println("+----------------- NEW SPLIT ----------------: " + min + " /" + max);

                if (null != min) { // How does the min fit in with the general query
                    try {
                        if (compareFields(-1, originalQuery, min, max, orderedArraySet,
                                orderedArraySet_afterMin) < 0) {
                            splitIt.remove();
                            continue;
                        }
                    } catch (Exception e) {
                    } // do nothing probably just some comparable issue
                } //TESTED

                if (null != max) { // How does the max fit in with the general query
                    try {
                        if (compareFields(1, originalQuery, max, min, orderedArraySet,
                                orderedArraySet_afterMin) > 0) {
                            splitIt.remove();
                            continue;
                        }
                    } catch (Exception e) {
                    } // do nothing probably just some comparable issue
                } //TESTED

                //DEBUG
                //_logger.info("(retained split)");
                //System.out.println("(retained split)");

                // (don't worry about edge cases, won't happen very often and will just result in a spurious empty mapper)

                ////////////////////////////////

                // Now some infinit.e specific processing...

                if (newShardScheme) {
                    @SuppressWarnings("rawtypes")
                    TreeSet<Comparable> sourceKeyOrderedArray = orderedArraySet.get(DocumentPojo.sourceKey_);
                    if ((null != sourceKeyOrderedArray) && !sourceKeyOrderedArray.isEmpty()) {
                        @SuppressWarnings("rawtypes")
                        Comparable minSourceKey = null;
                        Object minSourceKeyObj = (null == min) ? null : min.get(DocumentPojo.sourceKey_);
                        if (minSourceKeyObj instanceof String) {
                            minSourceKey = (String) minSourceKeyObj;
                        }
                        if (null == minSourceKey) {
                            minSourceKey = sourceKeyOrderedArray.first();
                        } //TESTED
                        @SuppressWarnings("rawtypes")
                        Comparable maxSourceKey = null;
                        Object maxSourceKeyObj = (null == max) ? null : max.get(DocumentPojo.sourceKey_);
                        if (maxSourceKeyObj instanceof String) {
                            maxSourceKey = (String) maxSourceKeyObj;
                        }
                        if (null == maxSourceKey) {
                            maxSourceKey = sourceKeyOrderedArray.last();
                        } //TESTED

                        DBObject splitQuery = mongoSplit.getQuerySpec();
                        BasicDBObject splitQueryQuery = new BasicDBObject(
                                (BasicBSONObject) splitQuery.get("$query"));
                        if (0 == minSourceKey.compareTo(maxSourceKey)) { // single matching sourceKey
                            splitQueryQuery.put(DocumentPojo.sourceKey_, maxSourceKey);
                        } //TESTED (array of sources, only one matches)
                        else { // multiple matching source keys
                            splitQueryQuery.put(DocumentPojo.sourceKey_, new BasicDBObject(DbManager.in_,
                                    sourceKeyOrderedArray.subSet(minSourceKey, true, maxSourceKey, true)));
                        } //TESTED (array of sources, multiple match)               
                        newsplits.add(
                                new InfiniteMongoInputSplit(mongoSplit, splitQueryQuery, conf.isNoTimeout()));
                    } else { // original query is of sufficient simplicity
                        newsplits.add(
                                new InfiniteMongoInputSplit(mongoSplit, originalQuery, conf.isNoTimeout()));
                    } //TESTED (no change to existing source)

                } //TESTED
                else { // old sharding scheme, remove min/max and replace with normal _id based query where possible

                    DBObject splitQuery = mongoSplit.getQuerySpec();
                    // Step 1: create a query range for _id:
                    BasicDBObject idRange = null;
                    Object idMin = (min == null) ? null : min.get(DocumentPojo._id_);
                    Object idMax = (max == null) ? null : max.get(DocumentPojo._id_);
                    if (!(idMin instanceof ObjectId))
                        idMin = null;
                    if (!(idMax instanceof ObjectId))
                        idMax = null;

                    if ((null != idMin) || (null != idMax)) {
                        idRange = new BasicDBObject();
                        if (null != idMin) {
                            idRange.put(DbManager.gte_, idMin);
                        }
                        if (null != idMax) {
                            idRange.put(DbManager.lt_, idMax);
                        }
                    } //TESTED                  

                    // Step 2: merge with whatever we have at the moment:
                    if (null != idRange) {
                        BasicDBObject splitQueryQuery = new BasicDBObject(
                                (BasicBSONObject) splitQuery.get("$query"));
                        Object idQueryElement = splitQueryQuery.get(DocumentPojo._id_);
                        boolean convertedAwayFromMinMax = false;
                        if (null == idQueryElement) { // nice and easy, add _id range
                            splitQueryQuery.put(DocumentPojo._id_, idRange);
                            convertedAwayFromMinMax = true;
                        } //TESTED
                        else if (!splitQueryQuery.containsField(DbManager.and_)) { // OK we're going to just going to make life easy
                            splitQueryQuery.remove(DocumentPojo._id_);
                            splitQueryQuery.put(DbManager.and_,
                                    Arrays.asList(new BasicDBObject(DocumentPojo._id_, idQueryElement),
                                            new BasicDBObject(DocumentPojo._id_, idRange)));
                            convertedAwayFromMinMax = true;
                        } //TESTED
                          // (else stick with min/max)

                        if (convertedAwayFromMinMax) { // can construct an _id query
                            splitQuery.removeField("$min");
                            splitQuery.removeField("$max");
                        } //TESTED
                        splitQuery.put("$query", splitQueryQuery);
                    }
                    newsplits.add(new InfiniteMongoInputSplit(mongoSplit, conf.isNoTimeout()));
                } //TESTED         
            } catch (Exception e) {
                //DEBUG
                //e.printStackTrace();
            } // do nothing must be some other type of input split
        } //TESTED

        //DEBUG
        //System.out.println("Calculating splits via mongo-hadoop: " + initialSplitSize + " reduced to " + splits.size());

        _logger.info("Calculating (converted) splits via mongo-hadoop: " + initialSplitSize + " reduced to "
                + newsplits.size());
        return newsplits;
    }
}
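
The _id range construction in step 1 of the old-sharding branch reduces to the following pattern. This standalone sketch uses the plain operator strings in place of the DbManager constants, with hypothetical bounds:

ObjectId idMin = new ObjectId(new Date(0L)); // hypothetical lower bound (epoch)
ObjectId idMax = new ObjectId();             // hypothetical upper bound (now)

BasicDBObject idRange = new BasicDBObject();
idRange.put("$gte", idMin);
idRange.put("$lt", idMax);
DBObject query = new BasicDBObject("_id", idRange);
// query is { "_id" : { "$gte" : <idMin> , "$lt" : <idMax>}}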

From source file:com.ikanow.infinit.e.data_model.store.MongoDbUtil.java

License:Apache License

public static DBObject convert(BSONWritable dbo) {
    DBObject out = new BasicDBObject();
    for (Object entryIt : dbo.toMap().entrySet()) {
        @SuppressWarnings("unchecked")
        Map.Entry<String, Object> entry = (Map.Entry<String, Object>) entryIt;
        out.put(entry.getKey(), entry.getValue());
    }
    return out;
}

From source file:com.imaginea.mongodb.requestdispatchers.DocumentRequestDispatcher.java

License:Apache License

/**
 * Maps GET Request to get list of documents inside a collection inside a
 * database present in mongo db to a service function that returns the list.
 * Also forms the JSON response for this request and sends it to the client.
 * In case of any exception from the service files, an error object is formed.
 * 
 * @param dbName
 *            Name of Database
 * @param collectionName
 *            Name of Collection
 * @param dbInfo
 *            Mongo Db Configuration provided by user to connect to.
 * @param request
 *            Get the HTTP request context to extract session parameters
 * @return A String of JSON format with list of All Documents in a
 *         collection.
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public String getQueriedDocsList(@PathParam("dbName") final String dbName,
        @PathParam("collectionName") final String collectionName, @QueryParam("query") final String query,
        @QueryParam("dbInfo") final String dbInfo, @QueryParam("fields") String keys,
        @QueryParam("limit") final String limit, @QueryParam("skip") final String skip,
        @Context final HttpServletRequest request) throws JSONException {

    // Get all fields with "_id" in case of keys = null
    if (keys == null) {
        keys = "";
    }
    final String fields = keys;

    String response = new ResponseTemplate().execute(logger, dbInfo, request, new ResponseCallback() {
        public Object execute() throws Exception {

            DocumentService documentService = new DocumentServiceImpl(dbInfo);
            // Get query
            DBObject queryObj = (DBObject) JSON.parse(query);
            StringTokenizer strtok = new StringTokenizer(fields, ",");
            DBObject keyObj = new BasicDBObject();
            while (strtok.hasMoreElements()) {
                keyObj.put(strtok.nextToken(), 1);
            }
            int docsLimit = Integer.parseInt(limit);
            int docsSkip = Integer.parseInt(skip);
            ArrayList<DBObject> documentList = documentService.getQueriedDocsList(dbName, collectionName,
                    queryObj, keyObj, docsLimit, docsSkip);
            return documentList;
        }
    });

    return response;
}

From source file:com.imaginea.mongodb.services.CollectionServiceImpl.java

License:Apache License

/**
 * Creates a collection inside a database in mongo to which the user is
 * connected.
 * 
 * @param dbName
 *            Name of Database in which to insert a collection
 * @param collectionName
 *            Name of Collection to be inserted
 * @param capped
 *            Specify if the collection is capped
 * @param size
 *            Specify the size of collection
 * @param maxDocs
 *            specify maximum no of documents in the collection
 * @return Success if Insertion is successful else throw exception
 * @exception EmptyDatabaseNameException
 *                if dbName is null
 * @exception EmptyCollectionNameException
 *                if collectionName is null
 * @exception UndefinedDatabaseException
 *                if database is not present
 * @exception DuplicateCollectionException
 *                if collection is already present
 * @exception InsertCollectionException
 *                exception while inserting collection
 * @exception DatabaseException
 *                throw super type of UndefinedDatabaseException
 * @exception ValidationException
 *                throw super type of
 *                EmptyDatabaseNameException,EmptyCollectionNameException
 * @exception CollectionException
 *                throw super type of
 *                DuplicateCollectionException,InsertCollectionException
 */
public String insertCollection(String dbName, String collectionName, boolean capped, int size, int maxDocs)
        throws DatabaseException, CollectionException, ValidationException {

    mongoInstance = mongoInstanceProvider.getMongoInstance();
    if (dbName == null) {
        throw new EmptyDatabaseNameException("Database name is null");

    }
    if (dbName.equals("")) {
        throw new EmptyDatabaseNameException("Database Name Empty");
    }

    if (collectionName == null) {
        throw new EmptyCollectionNameException("Collection name is null");
    }
    if (collectionName.equals("")) {
        throw new EmptyCollectionNameException("Collection Name Empty");
    }
    try {
        if (!mongoInstance.getDatabaseNames().contains(dbName)) {
            throw new UndefinedDatabaseException("Db with name [" + dbName + "] doesn't exist.");
        }
        if (mongoInstance.getDB(dbName).getCollectionNames().contains(collectionName)) {
            throw new DuplicateCollectionException(
                    "Collection [" + collectionName + "] Already exists in Database [" + dbName + "]");
        }

        DBObject options = new BasicDBObject();
        options.put("capped", capped);
        if (capped) {
            options.put("size", size);
            options.put("max", maxDocs);
        }
        mongoInstance.getDB(dbName).createCollection(collectionName, options);
    } catch (MongoException m) {
        throw new InsertCollectionException("COLLECTION_CREATION_EXCEPTION", m.getCause());
    }
    String result = "Created Collection [" + collectionName + "] in Database [" + dbName + "]";

    return result;
}
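
Built outside the service, the same capped-collection options object is three put calls; a sketch assuming a connected Mongo instance named mongo:

DBObject options = new BasicDBObject();
options.put("capped", true);
options.put("size", 1048576); // pre-allocated size in bytes
options.put("max", 1000);     // maximum number of documents
mongo.getDB("test").createCollection("events", options);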

From source file:com.imaginea.mongodb.services.DocumentServiceImpl.java

License:Apache License

/**
 * Gets the list of documents inside a collection in a database in mongo to
 * which the user is connected.
 * 
 * @param dbName
 *            Name of Database
 * @param collectionName
 *            Name of Collection from which to get all Documents
 * 
 * @param query
 *            query to be performed. In case of empty query {} return all
 *            docs.
 * 
 * @param keys
 *            Keys to be present in the resulted docs.
 * 
 * @param limit
 *            Number of docs to show.
 * 
 * @param skip
 *            Docs to skip from the front.
 * 
 * @return List of all documents.
 * @exception EmptyDatabaseNameException
 *                If database name is null
 * @exception EmptyCollectionNameException
 *                If Collection name is null
 * @exception UndefinedDatabaseException
 *                If database is not present
 * @exception UndefinedCollectionException
 *                If Collection is not present
 * @exception DatabaseException
 *                throw super type of UndefinedDatabaseException
 * @exception ValidationException
 *                throw super type of
 *                EmptyDatabaseNameException,EmptyCollectionNameException
 * @exception CollectionException
 *                throw super type of UndefinedCollectionException
 * @exception DocumentException
 *                exception while performing get doc list
 * 
 */

public ArrayList<DBObject> getQueriedDocsList(String dbName, String collectionName, DBObject query,
        DBObject keys, int limit, int skip)
        throws DatabaseException, CollectionException, DocumentException, ValidationException {

    mongoInstance = mongoInstanceProvider.getMongoInstance();

    if (dbName == null) {
        throw new EmptyDatabaseNameException("Database name is null");

    }
    if (dbName.equals("")) {
        throw new EmptyDatabaseNameException("Database Name Empty");
    }

    if (collectionName == null) {
        throw new EmptyCollectionNameException("Collection name is null");
    }
    if (collectionName.equals("")) {
        throw new EmptyCollectionNameException("Collection Name Empty");
    }

    ArrayList<DBObject> dataList = new ArrayList<DBObject>();
    try {
        if (!mongoInstance.getDatabaseNames().contains(dbName)) {
            throw new UndefinedDatabaseException("DB with name [" + dbName + "] DOES NOT EXIST");
        }

        if (!mongoInstance.getDB(dbName).getCollectionNames().contains(collectionName)) {
            throw new UndefinedCollectionException("Collection with name [" + collectionName
                    + "] DOES NOT EXIST in Database [" + dbName + "]");
        }
        if (keys.keySet().isEmpty()) {
            keys.put("_id", 1); // For empty keys return all _id of all docs
        }

        // Return Queried Documents
        DBCursor cursor = mongoInstance.getDB(dbName).getCollection(collectionName).find(query, keys);
        cursor.limit(limit);
        cursor.skip(skip);

        while (cursor.hasNext()) {
            dataList.add(cursor.next());
        }
    } catch (MongoException e) {
        throw new DocumentException(ErrorCodes.GET_DOCUMENT_LIST_EXCEPTION, "GET_DOCUMENT_LIST_EXCEPTION",
                e.getCause());
    }
    return dataList;

}

From source file:com.imaginea.mongodb.services.DocumentServiceImpl.java

License:Apache License

/**
 * Inserts a document inside a collection in a database in mongo to which
 * the user is connected.
 * 
 * @param dbName
 *            Name of Database
 * @param collectionName
 *            Name of Collection from which to get all Documents
 * 
 * @param document
 *            : Document data to be inserted
 * @return : Insertion Status
 * @exception EmptyDatabaseNameException
 *                If database name is null
 * @exception EmptyCollectionNameException
 *                If Collection name is null
 * @exception EmptyDocumentDataException
 *                If Document data is null
 * @exception UndefinedDatabaseException
 *                If database is not present
 * @exception UndefinedCollectionException
 *                If Collection is not present
 * @exception InsertDocumentException
 *                Any exception while inserting document
 * @exception DatabaseException
 *                throw super type of UndefinedDatabaseException
 * @exception ValidationException
 *                throw super type of
 *                EmptyDatabaseNameException,EmptyCollectionNameException
 *                ,EmptyDocumentDataException
 * @exception CollectionException
 *                throw super type of UndefinedCollectionException
 * @exception DocumentException
 *                throw super type of InsertDocumentException
 * 
 */

public String insertDocument(String dbName, String collectionName, DBObject document)
        throws DatabaseException, CollectionException, DocumentException, ValidationException {
    mongoInstance = mongoInstanceProvider.getMongoInstance();
    if (dbName == null) {
        throw new EmptyDatabaseNameException("Database name is null");

    }
    if (dbName.equals("")) {
        throw new EmptyDatabaseNameException("Database Name Empty");
    }

    if (collectionName == null) {
        throw new EmptyCollectionNameException("Collection name is null");
    }
    if (collectionName.equals("")) {
        throw new EmptyCollectionNameException("Collection Name Empty");
    }

    String result = null;
    try {
        if (!mongoInstance.getDatabaseNames().contains(dbName)) {
            throw new UndefinedDatabaseException("DB [" + dbName + "] DOES NOT EXIST");
        }

        if (!mongoInstance.getDB(dbName).getCollectionNames().contains(collectionName)) {
            throw new UndefinedCollectionException(
                    "COLLECTION [" + collectionName + "] DOES NOT EXIST in Db [" + dbName + "]");
        }

        // _id also provided by user
        if (document.get("_id") != null) {
            String temp = (String) document.get("_id");
            ObjectId id = new ObjectId(temp);
            document.put("_id", id); // Putting object id instead of string
            // which is there
        }
        // MongoDb permits Duplicate document Insert

        mongoInstance.getDB(dbName).getCollection(collectionName).insert(document);
        result = "Inserted Document with Data : [" + document + "]";
    } catch (IllegalArgumentException e) {
        // When error converting object Id
        throw new DocumentException(ErrorCodes.INVALID_OBJECT_ID, "INVALID_OBJECT_ID");
    } catch (MongoException e) {
        throw new InsertDocumentException("DOCUMENT_CREATION_EXCEPTION", e.getCause());
    }
    return result;
}
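
A caller's-eye sketch, with a hypothetical service instance, database, collection, and _id:

DBObject document = (DBObject) JSON.parse(
        "{ \"_id\" : \"507f1f77bcf86cd799439011\" , \"name\" : \"test\" }");
String status = documentService.insertDocument("mydb", "mycoll", document);
// insertDocument swaps the string _id for new ObjectId("507f1f77bcf86cd799439011")
// via document.put("_id", id) before inserting, so the stored document keys on a true ObjectId.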