Example usage for com.mongodb DBObject removeField

List of usage examples for com.mongodb DBObject removeField

Introduction

On this page you can find example usage for com.mongodb DBObject removeField.

Prototype

Object removeField(String key);

Document

Removes a field with a given name from this object, returning the value that was removed (or null if no field with that name exists).
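
Because the removed value is returned, removeField can also be used to "pop" a field and consume its value in one step. The following is a minimal sketch of both behaviors, assuming the legacy mongo-java-driver is on the classpath; the class name and field names are purely illustrative:

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

public class RemoveFieldDemo {
    public static void main(String[] args) {
        DBObject doc = new BasicDBObject("name", "alice")
                .append("password", "secret");

        // removeField deletes the key and hands back the removed value
        // (null if the key was not present)
        Object removed = doc.removeField("password");

        System.out.println(removed);                        // secret
        System.out.println(doc.containsField("password"));  // false
    }
}

Several of the examples below rely on this pop behavior, for instance passing the result of queryObject.removeField(COLLECTION_NAME_KEY) straight into another call.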

Usage

From source file: com.google.code.morphia.mapping.Mapper.java

License: Open Source License

/**
 * <p>
 * Converts a java object to a mongo-compatible object (possibly a DBObject
 * for complex mappings). Very similar to {@link Mapper#toDBObject}
 * </p>
 * <p>
 * Used (mainly) by query/update operations
 * </p>
 */
Object toMongoObject(Object javaObj, boolean includeClassName) {
    if (javaObj == null) {
        return null;
    }
    Class origClass = javaObj.getClass();

    if (origClass.isAnonymousClass() && origClass.getSuperclass().isEnum())
        origClass = origClass.getSuperclass();

    Object newObj = converters.encode(origClass, javaObj);
    if (newObj == null) {
        log.warning("converted " + javaObj + " to null");
        return newObj;
    }
    Class type = newObj.getClass();
    boolean bSameType = origClass.equals(type);

    //TODO: think about this logic a bit more. 
    //Even if the converter changed it, should it still be processed?
    if (!bSameType && !(Map.class.isAssignableFrom(type) || Iterable.class.isAssignableFrom(type)))
        return newObj;
    else { //The converter ran, and produced another type, or it is a list/map

        boolean isSingleValue = true;
        boolean isMap = false;
        Class subType = null;

        if (type.isArray() || Map.class.isAssignableFrom(type) || Iterable.class.isAssignableFrom(type)) {
            isSingleValue = false;
            isMap = ReflectionUtils.implementsInterface(type, Map.class);
            // the subtype of Long[] or List<Long> is Long
            subType = (type.isArray()) ? type.getComponentType()
                    : ReflectionUtils.getParameterizedClass(type, (isMap) ? 1 : 0);
        }

        if (isSingleValue && !ReflectionUtils.isPropertyType(type)) {
            DBObject dbObj = toDBObject(newObj);
            if (!includeClassName)
                dbObj.removeField(CLASS_NAME_FIELDNAME);
            return dbObj;
        } else if (newObj instanceof DBObject) {
            return newObj;
        } else if (isMap) {
            if (ReflectionUtils.isPropertyType(subType))
                return toDBObject(newObj);
            else {
                HashMap m = new HashMap();
                for (Map.Entry e : (Iterable<Map.Entry>) ((Map) newObj).entrySet())
                    m.put(e.getKey(), toMongoObject(e.getValue(), includeClassName));

                return m;
            }
            //Set/List but needs elements converted
        } else if (!isSingleValue && !ReflectionUtils.isPropertyType(subType)) {
            ArrayList<Object> vals = new ArrayList<Object>();
            if (type.isArray())
                for (Object obj : (Object[]) newObj)
                    vals.add(toMongoObject(obj, includeClassName));
            else
                for (Object obj : (Iterable) newObj)
                    vals.add(toMongoObject(obj, includeClassName));

            return vals;
        } else {
            return newObj;
        }
    }
}

From source file: com.ikanow.infinit.e.api.social.sharing.ShareHandler.java

License: Open Source License

/**
 * updateBinary
 * @param ownerIdStr
 * @param shareIdStr
 * @param type
 * @param title
 * @param description
 * @param mediatype
 * @param bytes
 * @return
 */
public ResponsePojo updateBinary(String ownerIdStr, String shareIdStr, String type, String title,
        String description, String mediatype, byte[] bytes) {
    ResponsePojo rp = new ResponsePojo();
    try {
        //get old share
        BasicDBObject query = new BasicDBObject("_id", new ObjectId(shareIdStr));
        DBObject dboshare = DbManager.getSocial().getShare().findOne(query);
        if (dboshare != null) {
            //write everything but binary
            dboshare.removeField("binaryData");
            SharePojo share = SharePojo.fromDb(dboshare, SharePojo.class);
            // Check ... am I the owner?
            ObjectId ownerId = new ObjectId(ownerIdStr);
            boolean bAdminOrModOfAllCommunities = RESTTools.adminLookup(ownerIdStr);
            if (!share.getOwner().get_id().equals(ownerId)) { // Then I have to be admin (except for one special case)
                if (!bAdminOrModOfAllCommunities) {
                    // Special case: I am also community admin/moderator of every community to which this share belongs
                    bAdminOrModOfAllCommunities = true;
                    for (ShareCommunityPojo comm : share.getCommunities()) {
                        if (!SocialUtils.isOwnerOrModerator(comm.get_id().toString(), ownerIdStr)) {
                            bAdminOrModOfAllCommunities = false;
                        }
                    } //TESTED

                    if (!bAdminOrModOfAllCommunities) {
                        rp.setResponse(new ResponseObject("Update Share", false,
                                "Unable to update share: you are not owner or admin"));
                        return rp;
                    }
                }
            } //end if not owner

            // Check: am I trying to update a reference or json?
            if (null == share.getBinaryId()) {
                rp.setResponse(new ResponseObject("Update Share", false,
                        "Unable to update share: this is not a binary share"));
                return rp;
            }

            if (!bAdminOrModOfAllCommunities) { // quick check whether I'm admin on-request - if so can endorse
                bAdminOrModOfAllCommunities = RESTTools.adminLookup(ownerIdStr, false);
            } //TESTED

            // Remove endorsements unless I'm admin (if I'm not admin I must be owner...)
            if (!bAdminOrModOfAllCommunities) { // Now need to check if I'm admin/mod/content publisher for each community..
                if (null == share.getEndorsed()) { // fill this with all allowed communities
                    share.setEndorsed(new HashSet<ObjectId>());
                    share.getEndorsed().add(share.getOwner().get_id()); // (will be added later)
                    for (ShareCommunityPojo comm : share.getCommunities()) {
                        if (SocialUtils.isOwnerOrModeratorOrContentPublisher(comm.get_id().toString(),
                                ownerIdStr)) {
                            share.getEndorsed().add(comm.get_id());
                        }
                    }
                } //TESTED
                else {
                    for (ShareCommunityPojo comm : share.getCommunities()) {
                        // (leave it as is except remove anything that I can't endorse)
                        if (!SocialUtils.isOwnerOrModeratorOrContentPublisher(comm.get_id().toString(),
                                ownerIdStr)) {
                            share.getEndorsed().remove(comm.get_id());
                        }
                    }
                } //TESTED   
            } //TESTED
            else {
                if (null == share.getEndorsed()) { // fill this with all communities
                    share.setEndorsed(new HashSet<ObjectId>());
                    share.getEndorsed().add(share.getOwner().get_id());
                    for (ShareCommunityPojo comm : share.getCommunities()) {
                        share.getEndorsed().add(comm.get_id());
                    }
                }
                //(else just leave with the same set of endorsements as before)
            } //TESTED

            share.setModified(new Date());
            share.setType(type);
            share.setTitle(title);
            share.setDescription(description);
            share.setMediaType(mediatype);
            share.setBinaryData(null);
            share.setBinaryId(updateGridFile(share.getBinaryId(), bytes));

            DbManager.getSocial().getShare().update(query, share.toDb());

            rp.setResponse(new ResponseObject("Update Share", true, "Binary share updated successfully"));
        } else {
            rp.setResponse(new ResponseObject("Update Share", false,
                    "Shareid does not exist or you are not owner or admin"));
        }
    } catch (Exception e) {
        logger.error("Exception Message: " + e.getMessage(), e);
        rp.setResponse(new ResponseObject("Update Share", false, "Unable to update share: " + e.getMessage()));
    }
    return rp;
}

From source file: com.ikanow.infinit.e.core.mapreduce.HadoopJobRunner.java

License: Open Source License

/**
 * Moves the output of a job from output_tmp to output and deletes
 * the tmp collection.
 * 
 * @param cmr
 * @throws IOException 
 * @throws ParserConfigurationException 
 * @throws SAXException 
 */
private void moveTempOutput(CustomMapReduceJobPojo cmr)
        throws IOException, SAXException, ParserConfigurationException {
    // If we are an export job then move files:
    bringTempOutputToFront(cmr);
    // (the rest of this will just do nothing) 

    /**
     * Atomic plan:
     * If not append, move customlookup pointer to tmp collection, drop old collection.
     * If append, set sync flag (find/mod), move results from tmp to old, unset sync flag.
     * 
     */
    //step1 build out any of the post proc arguments
    DBObject postProcObject = null;
    boolean limitAllData = true;
    boolean hasSort = false;
    int limit = 0;
    BasicDBObject sort = new BasicDBObject();
    try {
        postProcObject = (DBObject) com.mongodb.util.JSON
                .parse(getQueryOrProcessing(cmr.query, QuerySpec.POSTPROC));
        if (postProcObject != null) {
            if (postProcObject.containsField("limitAllData")) {
                limitAllData = (Boolean) postProcObject.get("limitAllData");
            }
            if (postProcObject.containsField("limit")) {
                limit = (Integer) postProcObject.get("limit");
                if (postProcObject.containsField("sortField")) {
                    String sfield = (String) postProcObject.get("sortField");
                    int sortDir = 1;
                    if (postProcObject.containsField("sortDirection")) {
                        sortDir = (Integer) postProcObject.get("sortDirection");
                    }
                    sort.put(sfield, sortDir);
                    hasSort = true;
                } else if (limit > 0) {
                    //set a default sort because the user posted a limit
                    sort.put("_id", -1);
                    hasSort = true;
                }
            }
        }
    } catch (Exception ex) {
        _logger.info(
                "job_error_post_proc_title=" + cmr.jobtitle + " job_error_post_proc_id=" + cmr._id.toString()
                        + " job_error_post_proc_message=" + HarvestExceptionUtils.createExceptionMessage(ex));
    }

    //step 2a if not appending results then work on temp collection and swap to main
    if ((null == cmr.appendResults) || !cmr.appendResults) //format temp then change lookup pointer to temp collection
    {
        //transform all the results into necessary format:         
        DBCursor dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                .find(new BasicDBObject("key", null)).sort(sort).limit(limit);
        while (dbc_tmp.hasNext()) {
            DBObject dbo = dbc_tmp.next();
            Object key = dbo.get("_id");
            dbo.put("key", key);
            dbo.removeField("_id");
            DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp).insert(dbo);
        }
        DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                .remove(new BasicDBObject("key", null));

        //swap the output collections
        BasicDBObject notappendupdates = new BasicDBObject(CustomMapReduceJobPojo.outputCollection_,
                cmr.outputCollectionTemp);
        notappendupdates.append(CustomMapReduceJobPojo.outputCollectionTemp_, cmr.outputCollection);
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, notappendupdates));
        String temp = cmr.outputCollectionTemp;
        cmr.outputCollectionTemp = cmr.outputCollection;
        cmr.outputCollection = temp;
    } else //step 2b if appending results then drop modified results in output collection
    {
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, new BasicDBObject("isUpdatingOutput", true)));
        //remove any aged out results
        if ((null != cmr.appendAgeOutInDays) && cmr.appendAgeOutInDays > 0) {
            //remove any results that have aged out
            long ageOutMS = (long) (cmr.appendAgeOutInDays * MS_IN_DAY);
            Date lastAgeOut = new Date(((new Date()).getTime() - ageOutMS));
            DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection).remove(
                    new BasicDBObject("_id", new BasicDBObject(MongoDbManager.lt_, new ObjectId(lastAgeOut))));
        }
        DBCursor dbc_tmp;
        if (!limitAllData) {
            //sort and limit the temp data set because we only want to process it
            dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                    .find(new BasicDBObject("key", null)).sort(sort).limit(limit);
            limit = 0; //reset limit so we get everything in a few steps (we only want to limit the new data)
        } else {
            dbc_tmp = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp)
                    .find(new BasicDBObject("key", null));
        }

        DBCollection dbc = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection);
        //transform temp results and dump into output collection
        while (dbc_tmp.hasNext()) {
            DBObject dbo = dbc_tmp.next();
            //transform the dbo to format {_id:ObjectId, key:(prev_id), value:value}
            Object key = dbo.get("_id");
            dbo.put("key", key);
            dbo.removeField("_id");
            //_id field should be automatically set to objectid when inserting now
            dbc.insert(dbo);
        }
        //if there is a sort, we need to apply it to all the data now
        if (hasSort) {
            ObjectId OID = new ObjectId();
            BasicDBObject query = new BasicDBObject("_id", new BasicDBObject(MongoDbManager.lt_, OID));
            //find everything inserted before now and sort/limit the data
            DBCursor dbc_sort = dbc.find(query).sort(sort).limit(limit);
            while (dbc_sort.hasNext()) {
                //reinsert the data into db (it should be in sorted order naturally now)
                DBObject dbo = dbc_sort.next();
                dbo.removeField("_id");
                dbc.insert(dbo);
            }
            //remove everything inserted before we reorganized everything (should leave only the new results in natural order)
            dbc.remove(query);
        }
        DbManager.getCustom().getLookup().findAndModify(new BasicDBObject(CustomMapReduceJobPojo._id_, cmr._id),
                new BasicDBObject(MongoDbManager.set_, new BasicDBObject("isUpdatingOutput", false)));
    }
    //step3 clean up temp output collection so we can use it again
    // (drop it, removing chunks)
    try {
        DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollectionTemp).drop();
    } catch (Exception e) {
    } // That's fine, it probably just doesn't exist yet...
}

From source file: com.ikanow.infinit.e.data_model.custom.InfiniteMongoSplitter.java

License: Apache License

@SuppressWarnings("unchecked")
public static List<InputSplit> calculateSplits_phase2(InfiniteMongoConfig conf, BasicDBObject confQuery,
        boolean alwaysUseChunks, boolean newShardScheme, Integer splitDocCount) {
    alwaysUseChunks &= (conf.getMaxSplits() != MAX_SPLITS);
    // (in standalone mode, never use chunks)

    MongoURI uri = conf.getInputURI();
    DBCollection coll = InfiniteMongoConfigUtil.getCollection(uri);
    if (conf.getLimit() > 0) {
        return calculateManualSplits(conf, confQuery, 1, conf.getLimit(), coll);
    } else {
        if (!alwaysUseChunks) {
            int nMaxCount = 1 + conf.getMaxDocsPerSplit() * conf.getMaxSplits();
            int count = 0;
            if (null == splitDocCount) {
                if (nMaxCount <= 1) {
                    nMaxCount = 0;
                } else {
                    //DEBUG
                    //System.out.println(coll.find(confQuery).limit(1).explain());

                    count = (int) coll.getCount(confQuery, null, nMaxCount, 0);
                    if (0 == count) {
                        return new ArrayList<InputSplit>();
                    }
                } //TESTED
            } else {
                count = splitDocCount;
            }

            //if maxdocssplit and maxsplits are set and there are fewer documents than splits*docspersplit then use the new splitter
            //otherwise use the old splitter
            if (conf.getMaxDocsPerSplit() > 0 && conf.getMaxSplits() > 0 && (count < nMaxCount)) {
                _logger.debug("Calculating splits manually");
                int splits_needed = (count / conf.getMaxDocsPerSplit()) + 1;

                return calculateManualSplits(conf, confQuery, splits_needed, conf.getMaxDocsPerSplit(), coll);
            } //TESTED
        }
        if (newShardScheme && !confQuery.containsField(DocumentPojo.sourceKey_)) {
            // OK, if we're going to do the sharded version then we want to precalculate the split points
            splitPrecalculations_newShardScheme(confQuery, null); // (modifies confQuery if returns true)            
        } //TESTED: checked did nothing when had sourceKey, added sourceKey when necessary (eg entities.index case)

        if (!newShardScheme) { // unlike new sharding scheme, in this case the query is fixed, so overwrite now:
            conf.setQuery(confQuery);
        }

        List<InputSplit> splits = MongoSplitter.calculateSplits(conf);
        // (unless manually set, like above, runs with the _original_ query)
        int initialSplitSize = splits.size();

        // We have the MongoDB-calculated splits, now calculate their intersection vs the query
        @SuppressWarnings("rawtypes")
        Map<String, TreeSet<Comparable>> orderedArraySet = new HashMap<String, TreeSet<Comparable>>();
        @SuppressWarnings("rawtypes")
        Map<String, NavigableSet<Comparable>> orderedArraySet_afterMin = new HashMap<String, NavigableSet<Comparable>>();
        BasicDBObject originalQuery = confQuery;

        ArrayList<InputSplit> newsplits = new ArrayList<InputSplit>(splits.size());
        Iterator<InputSplit> splitIt = splits.iterator();
        while (splitIt.hasNext()) {
            try {
                orderedArraySet_afterMin.clear();

                MongoInputSplit mongoSplit = (MongoInputSplit) splitIt.next();
                BasicDBObject min = (BasicDBObject) mongoSplit.getQuerySpec().get("$min");
                BasicDBObject max = (BasicDBObject) mongoSplit.getQuerySpec().get("$max");

                //DEBUG
                //_logger.info("+----------------- NEW SPLIT ----------------: " + min + " /" + max);
                //System.out.println("+----------------- NEW SPLIT ----------------: " + min + " /" + max);

                if (null != min) { // How does the min fit in with the general query
                    try {
                        if (compareFields(-1, originalQuery, min, max, orderedArraySet,
                                orderedArraySet_afterMin) < 0) {
                            splitIt.remove();
                            continue;
                        }
                    } catch (Exception e) {
                    } // do nothing, probably just some comparable issue
                } //TESTED

                if (null != max) { // How does the max fit in with the general query
                    try {
                        if (compareFields(1, originalQuery, max, min, orderedArraySet,
                                orderedArraySet_afterMin) > 0) {
                            splitIt.remove();
                            continue;
                        }
                    } catch (Exception e) {
                    } // do nothing, probably just some comparable issue
                } //TESTED

                //DEBUG
                //_logger.info("(retained split)");
                //System.out.println("(retained split)");

                // (don't worry about edge cases, won't happen very often and will just result in a spurious empty mapper)

                ////////////////////////////////

                // Now some infinit.e specific processing...

                if (newShardScheme) {
                    @SuppressWarnings("rawtypes")
                    TreeSet<Comparable> sourceKeyOrderedArray = orderedArraySet.get(DocumentPojo.sourceKey_);
                    if ((null != sourceKeyOrderedArray) && !sourceKeyOrderedArray.isEmpty()) {
                        @SuppressWarnings("rawtypes")
                        Comparable minSourceKey = null;
                        Object minSourceKeyObj = (null == min) ? null : min.get(DocumentPojo.sourceKey_);
                        if (minSourceKeyObj instanceof String) {
                            minSourceKey = (String) minSourceKeyObj;
                        }
                        if (null == minSourceKey) {
                            minSourceKey = sourceKeyOrderedArray.first();
                        } //TESTED
                        @SuppressWarnings("rawtypes")
                        Comparable maxSourceKey = null;
                        Object maxSourceKeyObj = (null == max) ? null : max.get(DocumentPojo.sourceKey_);
                        if (maxSourceKeyObj instanceof String) {
                            maxSourceKey = (String) maxSourceKeyObj;
                        }
                        if (null == maxSourceKey) {
                            maxSourceKey = sourceKeyOrderedArray.last();
                        } //TESTED

                        DBObject splitQuery = mongoSplit.getQuerySpec();
                        BasicDBObject splitQueryQuery = new BasicDBObject(
                                (BasicBSONObject) splitQuery.get("$query"));
                        if (0 == minSourceKey.compareTo(maxSourceKey)) { // single matching sourceKey
                            splitQueryQuery.put(DocumentPojo.sourceKey_, maxSourceKey);
                        } //TESTED (array of sources, only one matches)
                        else { // multiple matching source keys
                            splitQueryQuery.put(DocumentPojo.sourceKey_, new BasicDBObject(DbManager.in_,
                                    sourceKeyOrderedArray.subSet(minSourceKey, true, maxSourceKey, true)));
                        } //TESTED (array of sources, multiple match)               
                        newsplits.add(
                                new InfiniteMongoInputSplit(mongoSplit, splitQueryQuery, conf.isNoTimeout()));
                    } else { // original query is of sufficient simplicity
                        newsplits.add(
                                new InfiniteMongoInputSplit(mongoSplit, originalQuery, conf.isNoTimeout()));
                    } //TESTED (no change to existing source)

                } //TESTED
                else { // old sharding scheme, remove min/max and replace with normal _id based query where possible

                    DBObject splitQuery = mongoSplit.getQuerySpec();
                    // Step 1: create a query range for _id:
                    BasicDBObject idRange = null;
                    Object idMin = (min == null) ? null : min.get(DocumentPojo._id_);
                    Object idMax = (max == null) ? null : max.get(DocumentPojo._id_);
                    if (!(idMin instanceof ObjectId))
                        idMin = null;
                    if (!(idMax instanceof ObjectId))
                        idMax = null;

                    if ((null != idMin) || (null != idMax)) {
                        idRange = new BasicDBObject();
                        if (null != idMin) {
                            idRange.put(DbManager.gte_, idMin);
                        }
                        if (null != idMax) {
                            idRange.put(DbManager.lt_, idMax);
                        }
                    } //TESTED                  

                    // Step 2: merge with whatever we have at the moment:
                    if (null != idRange) {
                        BasicDBObject splitQueryQuery = new BasicDBObject(
                                (BasicBSONObject) splitQuery.get("$query"));
                        Object idQueryElement = splitQueryQuery.get(DocumentPojo._id_);
                        boolean convertedAwayFromMinMax = false;
                        if (null == idQueryElement) { // nice and easy, add _id range
                            splitQueryQuery.put(DocumentPojo._id_, idRange);
                            convertedAwayFromMinMax = true;
                        } //TESTED
                        else if (!splitQueryQuery.containsField(DbManager.and_)) { // OK, we're just going to make life easy
                            splitQueryQuery.remove(DocumentPojo._id_);
                            splitQueryQuery.put(DbManager.and_,
                                    Arrays.asList(new BasicDBObject(DocumentPojo._id_, idQueryElement),
                                            new BasicDBObject(DocumentPojo._id_, idRange)));
                            convertedAwayFromMinMax = true;
                        } //TESTED
                          // (else stick with min/max)

                        if (convertedAwayFromMinMax) { // can construct an _id query
                            splitQuery.removeField("$min");
                            splitQuery.removeField("$max");
                        } //TESTED
                        splitQuery.put("$query", splitQueryQuery);
                    }
                    newsplits.add(new InfiniteMongoInputSplit(mongoSplit, conf.isNoTimeout()));
                } //TESTED         
            } catch (Exception e) {
                //DEBUG
                //e.printStackTrace();
            } // do nothing, must be some other type of input split
        } //TESTED

        //DEBUG
        //System.out.println("Calculating splits via mongo-hadoop: " + initialSplitSize + " reduced to " + splits.size());

        _logger.info("Calculating (converted) splits via mongo-hadoop: " + initialSplitSize + " reduced to "
                + newsplits.size());
        return newsplits;
    }
}

From source file: com.jaspersoft.mongodb.query.MongoDbQueryWrapper.java

License: Open Source License

private void createIterator() throws JRException {
    if (!queryObject.containsField(COLLECTION_NAME_KEY)) {
        throw new JRException("\"" + COLLECTION_NAME_KEY + "\" must be part of the query object");
    }
    DBObject findQueryObject = (DBObject) queryObject.get(FIND_QUERY_KEY);
    if (findQueryObject == null) {
        findQueryObject = new BasicDBObject();
    }
    if (queryObject.containsField(FIND_QUERY_REGEXP_KEY)) {
        DBObject regExpObject = (DBObject) queryObject.get(FIND_QUERY_REGEXP_KEY);
        String value, flags;
        int index;
        for (String key : regExpObject.keySet()) {
            value = (String) regExpObject.get(key);
            if (value.startsWith("/")) {
                value = value.substring(1, value.length());
            } else {
                throw new JRException("Regular expressions must start with: /");
            }
            if (!value.contains("/")) {
                throw new JRException("No ending symbol found: /");
            }
            index = value.lastIndexOf("/");
            flags = null;
            if (index == value.length() - 1) {
                value = value.substring(0, index);
            } else {
                flags = value.substring(index + 1, value.length());
                value = value.substring(0, index);
            }
            findQueryObject.put(key, Pattern.compile((flags != null ? "(?" + flags + ")" : "") + value));
        }
    }

    DBCollection collection = connection.getMongoDatabase()
            .getCollectionFromString((String) queryObject.removeField(COLLECTION_NAME_KEY));
    if (queryObject.containsField(MAP_REDUCE_KEY)) {
        Object value = queryObject.removeField(MAP_REDUCE_KEY);
        if (!(value instanceof DBObject)) {
            logger.error("MapReduce value must be a valid JSON object");
        } else {
            DBObject mapReduceObject = (DBObject) value;
            String map = validateProperty(mapReduceObject, MAP_KEY);
            String reduce = validateProperty(mapReduceObject, REDUCE_KEY);
            Object outObject = mapReduceObject.get(OUT_KEY);
            if (outObject == null) {
                throw new JRException("\"out\" cannot be null");
            }
            String collectionName = null;
            Object outDb = null;
            OutputType outputType = null;
            boolean hasOutputType = false;
            if (logger.isDebugEnabled()) {
                logger.debug("Out object: " + outObject + ". Type: " + outObject.getClass().getName());
            }
            if (outObject instanceof String) {
                collectionName = String.valueOf(outObject);
            } else if (outObject instanceof DBObject) {
                DBObject outDbObject = (DBObject) outObject;
                outDb = outDbObject.removeField(OUT_DB_KEY);
                Iterator<String> keysIterator = outDbObject.keySet().iterator();
                String type = null;
                if (keysIterator.hasNext()) {
                    type = keysIterator.next();
                    collectionName = String.valueOf(outDbObject.get(type));
                } else {
                    throw new JRException("\"out\" object cannot be empty");
                }
                type = type.toUpperCase();
                outputType = OutputType.valueOf(type);
                if (outputType == null) {
                    throw new JRException("Unknow output type: " + type);
                }
                hasOutputType = true;
                if (logger.isDebugEnabled()) {
                    logger.debug("outobject: " + outDbObject);
                    logger.debug("collectionName: " + collectionName);
                    logger.debug("outputType: " + outputType);
                }
            } else {
                throw new JRException("Unsupported type for \"out\": " + outObject.getClass().getName());
            }
            MapReduceCommand mapReduceCommand = new MapReduceCommand(collection, map, reduce, collectionName,
                    hasOutputType ? outputType : OutputType.REPLACE, null);
            if (outDb != null) {
                mapReduceCommand.setOutputDB(String.valueOf(outDb));
            }
            Object finalizeObject = mapReduceObject.removeField(FINALIZE_KEY);
            if (finalizeObject != null) {
                mapReduceCommand.setFinalize(String.valueOf(finalizeObject));
            }
            MapReduceOutput mapReduceOutput = collection.mapReduce(mapReduceCommand);
            DBCollection mapReduceCollection = mapReduceOutput.getOutputCollection();
            if (mapReduceCollection != null) {
                collection = mapReduceCollection;
            }
        }
    }

    iterator = collection.find(findQueryObject, (DBObject) queryObject.get(FIND_FIELDS_KEY));
    if (queryObject.containsField(SORT_KEY)) {
        iterator = iterator.sort((DBObject) queryObject.get(SORT_KEY));
    }
    if (queryObject.containsField(LIMIT_KEY)) {
        Integer value = processInteger(queryObject.get(LIMIT_KEY));
        if (value != null) {
            iterator = iterator.limit(value.intValue());
        }
    }
}

From source file: com.mebigfatguy.mongobrowser.actions.DeleteAction.java

License: Apache License

@Override
public void actionPerformed(ActionEvent e) {
    MongoTreeNode node = context.getSelectedNode();
    switch (node.getType()) {
    case Collection: {
        DBCollection collection = (DBCollection) node.getUserObject();
        collection.drop();
        MongoTreeNode dbNode = (MongoTreeNode) node.getParent();
        dbNode.remove(node);
        DefaultTreeModel model = (DefaultTreeModel) context.getTree().getModel();
        model.nodeStructureChanged(dbNode);
    }
        break;

    case Object: {
        DBObject object = (DBObject) node.getUserObject();
        MongoTreeNode collectionNode = TreeUtils.findCollectionNode(node);
        DBCollection collection = (DBCollection) collectionNode.getUserObject();
        collection.remove(object);
        collectionNode.remove(node);
        DefaultTreeModel model = (DefaultTreeModel) context.getTree().getModel();
        model.nodeStructureChanged(collectionNode);
    }
        break;

    case KeyValue: {
        MongoTreeNode.KV kv = (MongoTreeNode.KV) node.getUserObject();
        String key = kv.getKey();
        if (!key.startsWith("_")) {
            MongoTreeNode objectNode = (MongoTreeNode) node.getParent();
            DBObject object = (DBObject) objectNode.getUserObject();
            object.removeField(key);
            MongoTreeNode collectionNode = TreeUtils.findCollectionNode(objectNode);
            DBCollection collection = (DBCollection) collectionNode.getUserObject();
            collection.save(object);
            objectNode.remove(node);
            DefaultTreeModel model = (DefaultTreeModel) context.getTree().getModel();
            model.nodeStructureChanged(objectNode);
        }
    }
        break;
    }
}

From source file: com.mobileman.kuravis.core.services.treatment_review.impl.TreatmentReviewServiceImpl.java

License: Apache License

/**
 * @param treatmentReview
 * @param user
 */
private void processUserOnCreateOrUpdate(DBObject treatmentReview, DBObject user) {
    DBObject newUserData = new BasicDBObject();
    for (String userProperty : new String[] { User.ATTR_YEAR_OF_BIRTH, User.ATTR_GENDER }) {
        if (treatmentReview.containsField(userProperty)) {
            newUserData.put(userProperty, treatmentReview.get(userProperty));
        }
    }

    if (newUserData.toMap().size() > 0) {
        this.userService.updateUser((String) user.get(EntityUtils.ID), newUserData);
    }

    treatmentReview.removeField(User.ATTR_YEAR_OF_BIRTH);
    treatmentReview.removeField(User.ATTR_GENDER);
}

From source file: com.redhat.lightblue.mongo.crud.DocTranslator.java

License: Open Source License

public static void populateCaseInsensitiveField(Object doc, Path field) {
    if (doc == null) {
        return;
    } else if (field.numSegments() == 1) {
        DBObject docObj = (DBObject) doc;
        if (docObj.get(field.head(0)) == null) {
            // no value, so nothing to populate
            DBObject dbo = (DBObject) docObj.get(HIDDEN_SUB_PATH.toString());
            if (dbo != null && dbo.get(field.head(0)) != null) {
                dbo.removeField(field.head(0));
            }
            return;
        } else if (docObj.get(field.head(0)) instanceof List) {
            // primitive list - add hidden field to doc and populate list
            List<String> objList = (List<String>) docObj.get(field.head(0));
            BasicDBList hiddenList = new BasicDBList();
            objList.forEach(s -> hiddenList.add(s.toUpperCase()));
            DBObject dbo = (DBObject) docObj.get(HIDDEN_SUB_PATH.toString());
            if (dbo == null) {
                docObj.put(HIDDEN_SUB_PATH.toString(), new BasicDBObject(field.head(0), hiddenList));
            } else {
                dbo.put(field.head(0), hiddenList);
            }
        } else {
            // add hidden field to doc, populate field
            DBObject dbo = (DBObject) docObj.get(HIDDEN_SUB_PATH.toString());
            if (dbo == null) {
                docObj.put(HIDDEN_SUB_PATH.toString(),
                        new BasicDBObject(field.head(0), docObj.get(field.head(0)).toString().toUpperCase()));
            } else {
                dbo.put(field.head(0), docObj.get(field.head(0)).toString().toUpperCase());
            }
        }
    } else if (field.head(0).equals(Path.ANY)) {
        // doc is a list
        List<?> docList = ((List<?>) doc);
        docList.forEach(key -> populateCaseInsensitiveField(key, field.suffix(-1)));
    } else {
        DBObject docObj = (DBObject) doc;
        populateCaseInsensitiveField(docObj.get(field.head(0)), field.suffix(-1));
    }
}

From source file: com.redhat.thermostat.gateway.common.mongodb.executor.MongoExecutor.java

License: Open Source License

public MongoDataResultContainer execPostRequest(MongoCollection<DBObject> collection, String body,
        Set<String> realms) {
    MongoDataResultContainer metaDataContainer = new MongoDataResultContainer();

    if (body.length() > 0) {
        List<DBObject> inputList = (List<DBObject>) JSON.parse(body);

        for (DBObject object : inputList) {
            object.removeField(KeycloakFields.REALMS_KEY);
            if (realms != null && !realms.isEmpty()) {
                object.put(KeycloakFields.REALMS_KEY, realms);
            }

        }

        collection.insertMany(inputList);
    }

    return metaDataContainer;
}

From source file: com.restfeel.controller.rest.EntityDataController.java

License: Apache License

@RequestMapping(value = "/api/{projectId}/entities/{name}/list", method = RequestMethod.GET, headers = "Accept=application/json")
public @ResponseBody String getEntityDataList(@PathVariable("projectId") String projectId,
        @PathVariable("name") String entityName, @RequestParam(value = "page", required = false) Integer page,
        @RequestParam(value = "limit", required = false) Integer limit,
        @RequestParam(value = "sort", required = false) String sort,
        @RequestParam(value = "query", required = false) String query,
        @RequestHeader(value = "authToken", required = false) String authToken) {

    JSONObject authRes = authService.authorize(projectId, authToken, "USER");
    if (!authRes.getBoolean(SUCCESS)) {
        return authRes.toString(4);
    }

    DBCollection dbCollection = mongoTemplate.getCollection(projectId + "_" + entityName);
    DBCursor cursor;
    if (query != null && !query.isEmpty()) {
        Object queryObject = JSON.parse(query);
        cursor = dbCollection.find((BasicDBObject) queryObject);
    } else {
        cursor = dbCollection.find();
    }

    if (sort != null && !sort.isEmpty()) {
        Object sortObject = JSON.parse(sort);
        cursor.sort((BasicDBObject) sortObject);
    }

    if (limit != null && limit > 0) {
        if (page != null && page > 0) {
            cursor.skip((page - 1) * limit);
        }
        cursor.limit(limit);
    }
    List<DBObject> array = cursor.toArray();

    if (entityName.equals("User")) {
        for (DBObject dbObject : array) {
            dbObject.removeField(PASSWORD);
        }
    }

    for (DBObject dbObject : array) {
        dbRefToRelation(dbObject);
    }
    String json = JSON.serialize(array);

    // Indentation
    JSONArray jsonArr = new JSONArray(json);
    return jsonArr.toString(4);
}