Example usage for com.mongodb DBCollection getDB

List of usage examples for com.mongodb DBCollection getDB

Introduction

On this page you can find example usages for com.mongodb DBCollection getDB.

Prototype

public DB getDB() 

Source Link

Document

Returns the database this collection is a member of.

Usage

From source file:CollectionConfigurationExample.java

License:Apache License

@Inject
CollectionConfigurationExample(@MongoCollection(Collections.Data) DBCollection collection) {
    // Log which database the injected collection belongs to, plus the collection itself.
    String database = String.valueOf(collection.getDB());
    System.out.println(database + " : " + collection);
}

From source file:com.bosscs.spark.mongodb.extractor.MongoNativeExtractor.java

License:Apache License

/**
 * Gets shards.//  ww  w . jav a 2s.c o  m
 *
 * @param collection the collection
 * @return the shards
 */
/**
 * Builds a map from shard id to the hosts of that shard's replica set.
 *
 * <p>Reads the cluster's {@code config.shards} collection. Each shard document's
 * {@code host} field has the form {@code "rsName/host1:port,host2:port"}; only
 * entries containing a "/" separator (replica-set shards) are included, so
 * standalone shard hosts are skipped.
 *
 * @param collection any collection on the target cluster, used to reach the config DB
 * @return map of shard id ({@code _id}) to its array of {@code host:port} strings
 */
private Map<String, String[]> getShards(DBCollection collection) {
    DB config = collection.getDB().getSisterDB("config");
    DBCollection configShards = config.getCollection("shards");

    Map<String, String[]> map = new HashMap<>();
    DBCursor cursorShards = configShards.find();
    try {
        while (cursorShards.hasNext()) {
            DBObject currentShard = cursorShards.next();
            String currentHost = (String) currentShard.get("host");
            int slashIndex = currentHost.indexOf("/");
            if (slashIndex > 0) {
                map.put((String) currentShard.get(MONGO_DEFAULT_ID),
                        currentHost.substring(slashIndex + 1).split(","));
            }
        }
    } finally {
        // A DBCursor holds a server-side cursor; close it to avoid leaking it
        // (the original never closed the cursor).
        cursorShards.close();
    }
    return map;
}

From source file:com.bosscs.spark.mongodb.extractor.MongoNativeExtractor.java

License:Apache License

/**
 * Gets chunks./*from  w  w w  .j av  a 2  s .c  o  m*/
 *
 * @param collection the collection
 * @return the chunks
 */
/**
 * Returns a cursor over the {@code config.chunks} documents whose namespace
 * matches the given collection's full name.
 *
 * @param collection the collection whose chunk metadata is wanted
 * @return cursor over the matching chunk documents
 */
private DBCursor getChunks(DBCollection collection) {
    DBObject namespaceFilter = new BasicDBObject("ns", collection.getFullName());
    DB configDb = collection.getDB().getSisterDB("config");
    return configDb.getCollection("chunks").find(namespaceFilter);
}

From source file:com.bosscs.spark.mongodb.extractor.MongoNativeExtractor.java

License:Apache License

/**
 * Calculate splits./*from   w  ww . j  a  v  a  2s .  c o m*/
 *
 * @param collection the collection
 * @return the deep partition [ ]
 */
/**
 * Computes the partitions for this RDD from the collection's split points.
 *
 * <p>Split keys come from {@code splitVector} (see {@link #getSplitData}); when
 * that yields nothing (e.g. a sharded environment), the shard/chunk metadata is
 * used instead. N split keys produce N+1 partitions: each split key closes the
 * previous range and opens the next, and a final open-ended range covers keys
 * greater than or equal to the last split point.
 *
 * @param collection the collection to partition
 * @return one {@link MongoPartition} per key range
 */
private HadoopPartition[] calculateSplits(DBCollection collection) {

    BasicDBList splitData = getSplitData(collection);
    List<ServerAddress> serverAddressList = collection.getDB().getMongo().getServerAddressList();

    if (splitData == null) {
        // Fall back to shard/chunk metadata when splitVector returns no keys.
        Pair<BasicDBList, List<ServerAddress>> pair = getSplitDataCollectionShardEnviroment(
                getShards(collection), collection.getDB().getName(), collection.getName());
        splitData = pair.left;
        serverAddressList = pair.right;
    }

    Object lastKey = null; // Lower boundary of the first min split

    List<String> stringHosts = new ArrayList<>();

    for (ServerAddress serverAddress : serverAddressList) {
        stringHosts.add(serverAddress.toString());
    }
    int i = 0;

    MongoPartition[] partitions = new MongoPartition[splitData.size() + 1];

    for (Object aSplitData : splitData) {

        BasicDBObject currentKey = (BasicDBObject) aSplitData;

        Object currentO = currentKey.get(MONGO_DEFAULT_ID);

        partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i,
                new TokenRange(lastKey, currentO, stringHosts), MONGO_DEFAULT_ID);

        lastKey = currentO;
        i++;
    }
    // Final partition: everything from the last split key upwards (upper bound null).
    // Fix: use the configured RDD id here too — the original hard-coded 0, which was
    // inconsistent with the partitions created in the loop above. The unused
    // QueryBuilder the original built here has been removed (dead code).
    partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i,
            new TokenRange(lastKey, null, stringHosts), MONGO_DEFAULT_ID);
    return partitions;
}

From source file:com.bosscs.spark.mongodb.extractor.MongoNativeExtractor.java

License:Apache License

/**
 * Gets split data./* www  .ja  v a  2  s  . c o m*/
 *
 * @param collection the collection
 * @return the split data
 */
/**
 * Asks the server for this collection's split points via the {@code splitVector}
 * command, keyed on {@code _id} and bounded by the configured chunk size.
 *
 * @param collection the collection to compute split points for
 * @return the {@code splitKeys} list from the command result (may be null)
 */
private BasicDBList getSplitData(DBCollection collection) {

    BasicDBObject keyPattern = new BasicDBObject(MONGO_DEFAULT_ID, 1);
    final DBObject splitVectorCmd = BasicDBObjectBuilder.start()
            .add("splitVector", collection.getFullName())
            .add("keyPattern", keyPattern)
            .add("force", false)
            .add("maxChunkSize", splitSize)
            .get();

    // splitVector is an admin-database command.
    DB adminDb = collection.getDB().getSisterDB("admin");
    CommandResult splitVectorResult = adminDb.command(splitVectorCmd);
    return (BasicDBList) splitVectorResult.get(SPLIT_KEYS);

}

From source file:com.edgytech.umongo.CmdField.java

License:Apache License

/** Runs this field's command against the given collection's database. */
void updateFromCmd(DBCollection col) {
    DBObject command = new BasicDBObject(cmd, col.getName());
    updateFromCmd(col.getDB(), command);
}

From source file:com.edgytech.umongo.CollectionPanel.java

License:Apache License

/** Renames the current collection using the values entered in the panel. */
public void rename(ButtonBase button) {
    final CollectionNode colNode = getCollectionNode();
    final DBCollection col = colNode.getCollection();
    // Show the parent DB node, since the node being viewed is about to be renamed.
    UMongo.instance.displayNode(colNode.getDbNode());

    final String name = getStringFieldValue(Item.newName);
    final boolean dropTarget = getBooleanFieldValue(Item.dropTarget);

    String targetNamespace = col.getDB().getName() + "." + name;
    DBObject renameCmd = BasicDBObjectBuilder.start()
            .add("renameCollection", col.getFullName())
            .add("to", targetNamespace)
            .add("dropTarget", dropTarget)
            .get();
    // renameCollection must be issued against the admin database.
    new DbJobCmd(col.getDB().getSisterDB("admin"), renameCmd, null, null).addJob();
}

From source file:com.edgytech.umongo.CollectionPanel.java

License:Apache License

/** Runs a "group" command built from the panel's form fields. */
public void group(final ButtonBase button) {
    final DBCollection col = getCollectionNode().getCollection();

    DBObject keys = ((DocBuilderField) getBoundUnit(Item.grpKeys)).getDBObject();
    DBObject initial = ((DocBuilderField) getBoundUnit(Item.grpInitialValue)).getDBObject();
    DBObject query = ((DocBuilderField) getBoundUnit(Item.grpQuery)).getDBObject();
    String reduce = getStringFieldValue(Item.grpReduce);
    String finalize = getStringFieldValue(Item.grpFinalize);

    final GroupCommand groupCmd = new GroupCommand(col, keys, query, initial, reduce, finalize);
    new DbJobCmd(col.getDB(), groupCmd.toDBObject(), null, button).addJob();
}

From source file:com.edgytech.umongo.CollectionPanel.java

License:Apache License

/** Runs a "distinct" command for the key/query entered in the panel. */
public void distinct(final ButtonBase button) {
    final DBCollection col = getCollectionNode().getCollection();
    final BasicDBObject distinctCmd = new BasicDBObject("distinct", col.getName());
    distinctCmd.put("key", getStringFieldValue(Item.distinctKey));

    DBObject query = ((DocBuilderField) getBoundUnit(Item.distinctQuery)).getDBObject();
    if (query != null) {
        distinctCmd.put("query", query);
    }
    new DbJobCmd(col.getDB(), distinctCmd, null, button).addJob();
}

From source file:com.edgytech.umongo.CollectionPanel.java

License:Apache License

/**
 * Builds and runs a mapReduce command from the panel's form fields, as an
 * asynchronous {@link DbJob}.
 *
 * <p>Validates that non-inline output modes have a target collection name,
 * assembles the command (sort, output DB, finalize, limit, jsMode, sharded and
 * nonAtomic output flags), then submits a job that executes it and, for
 * collection output, spawns a find on the output collection when it completes.
 */
public void mapReduce(final ButtonBase button) {
    final DBCollection col = getCollectionNode().getCollection();
    String map = getStringFieldValue(Item.mrMap);
    String reduce = getStringFieldValue(Item.mrReduce);
    String finalize = getStringFieldValue(Item.mrFinalize);
    String stype = getStringFieldValue(Item.mrType);
    final OutputType type = OutputType.valueOf(stype.toUpperCase());
    String out = getStringFieldValue(Item.mrOut);
    if (type != OutputType.INLINE && (out.isEmpty())) {
        new InfoDialog(id, null, null, "Output collection cannot be empty if type is not inline.").show();
        return; // non-inline output modes require a target collection name
    }

    String outDB = getStringFieldValue(Item.mrOutDB);
    DBObject query = ((DocBuilderField) getBoundUnit(Item.mrQuery)).getDBObject();
    int limit = getIntFieldValue(Item.mrLimit);
    final MapReduceCommand cmd = new MapReduceCommand(col, map, reduce, out, type, query);
    DBObject sort = ((DocBuilderField) getBoundUnit(Item.mrSort)).getDBObject();
    // Optional settings: only applied when the corresponding field was filled in.
    if (sort != null) {
        cmd.setSort(sort);
    }
    if (!outDB.isEmpty()) {
        cmd.setOutputDB(outDB);
    }
    if (!finalize.isEmpty()) {
        cmd.setFinalize(finalize);
    }
    if (limit > 0) {
        cmd.setLimit(limit);
    }

    if (getBooleanFieldValue(Item.mrJSMode)) {
        cmd.addExtraOption("jsMode", true);
    }

    // The "out" sub-document must be patched directly: MapReduceCommand has no
    // setters for the sharded/nonAtomic output flags.
    final BasicDBObject cmdobj = (BasicDBObject) cmd.toDBObject();
    if (getBooleanFieldValue(Item.mrOutSharded)) {
        ((BasicDBObject) cmdobj.get("out")).put("sharded", true);
    }
    if (getBooleanFieldValue(Item.mrNonAtomic)) {
        ((BasicDBObject) cmdobj.get("out")).put("nonAtomic", true);
    }

    new DbJob() {
        // Set only for non-inline runs; wrapUp() uses it to decide what to display.
        MapReduceOutput output;

        @Override
        public Object doRun() {
            // Inline output writes nothing server-side, so the collection's read
            // options (e.g. slaveOk) can be passed along; return the raw result.
            CommandResult res = null;
            if (type == MapReduceCommand.OutputType.INLINE) {
                res = col.getDB().command(cmdobj, col.getOptions());
                return res;
            }

            res = col.getDB().command(cmdobj);
            res.throwOnError();
            output = new MapReduceOutput(col, cmdobj, res);
            return output;
        }

        @Override
        public void wrapUp(Object res) {
            if (output != null) {
                if (cmd.getOutputType() == OutputType.INLINE) {
                    res = output.results();
                } else {
                    // Collection output: open a find on the output collection and
                    // show the raw command result.
                    doFind(output.getOutputCollection(), null);
                    res = output.getRaw();
                }
            }
            super.wrapUp(res);
        }

        @Override
        public String getNS() {
            return col.getFullName();
        }

        @Override
        public String getShortName() {
            return "MR";
        }

        @Override
        public DBObject getRoot(Object result) {
            return cmdobj;
        }

        @Override
        public ButtonBase getButton() {
            return button;
        }

        @Override
        DBObject getCommand() {
            return cmdobj;
        }

        @Override
        DB getDB() {
            return col.getDB();
        }
    }.addJob();
}