Example usage for com.mongodb.client.gridfs GridFSBucket find

List of usage examples for com.mongodb.client.gridfs GridFSBucket find

Introduction

On this page you can find example usage of com.mongodb.client.gridfs GridFSBucket find.

Prototype

GridFSFindIterable find(Bson filter);

Document

Finds all documents in the files collection that match the given filter.
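
Before the per-project examples in the Usage section below, here is a minimal, self-contained sketch of calling GridFSBucket.find directly. It is only an illustration: the connection string, the database name "test", the default bucket, and the filename "example.txt" are assumed values, not taken from the examples on this page.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;
import com.mongodb.client.gridfs.model.GridFSFile;
import com.mongodb.client.model.Filters;

public class GridFSFindExample {
    public static void main(String[] args) {
        // Assumed connection string and database name; adjust for your deployment.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoDatabase db = client.getDatabase("test");
            GridFSBucket bucket = GridFSBuckets.create(db); // default "fs" bucket

            // List every document in the bucket's files collection.
            for (GridFSFile file : bucket.find()) {
                System.out.println(file.getFilename() + " (" + file.getLength() + " bytes)");
            }

            // Restrict the results with a filter on the filename field.
            GridFSFile first = bucket.find(Filters.eq("filename", "example.txt")).first();
            if (first != null) {
                System.out.println("Matched file id: " + first.getObjectId());
            }
        }
    }
}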

Usage

From source file:com.imaginea.mongodb.services.impl.GridFSServiceImpl.java

License:Apache License

private JSONObject executeFind(GridFSBucket gridFS, String query, String sortBy, String limit, String skip)
        throws JSONException {
    Document queryObj = Document.parse(query);
    Document sortObj = Document.parse(sortBy);
    int filesLimit = Integer.parseInt(limit);
    int filesSkip = Integer.parseInt(skip);
    // Partial keys can't be fetched for a file

    MongoCursor<GridFSFile> it = gridFS.find(queryObj).sort(sortObj).skip(filesSkip).limit(filesLimit)
            .iterator();

    JSONArray fileList = new JSONArray();
    while (it.hasNext()) {
        GridFSFile fsFile = it.next();

        JSONObject file = new JSONObject();

        file.put("_id", fsFile.getId().asObjectId().getValue());
        file.put("fileName", fsFile.getFilename());
        file.put("length", fsFile.getLength());
        file.put("chunkSize", fsFile.getChunkSize());
        file.put("uploadDate", fsFile.getUploadDate());
        file.put("md5", fsFile.getMD5());
        if (fsFile.getMetadata() != null) {
            file.put("metadata", fsFile.getMetadata());
        }

        fileList.put(file);

    }
    JSONObject result = new JSONObject();
    long count = fileList.length();
    result.put("documents", fileList);
    result.put("editable", true);
    result.put("count", count);
    return result;
}

From source file:com.imaginea.mongodb.services.impl.GridFSServiceImpl.java

License:Apache License

/**
 * Service implementation for retrieving the specified file stored in GridFS.
 *
 * @param dbName Name of Database
 * @param bucketName Name of GridFS Bucket
 * @param _id ObjectId of the file to be retrieved
 * @return The requested file, written to a temporary location for viewing or download.
 */
public File getFile(String dbName, String bucketName, String _id)
        throws ValidationException, DatabaseException, CollectionException {
    if (dbName == null) {
        throw new DatabaseException(ErrorCodes.DB_NAME_EMPTY, "Database Name Is Null");
    }
    if (dbName.equals("")) {
        throw new DatabaseException(ErrorCodes.DB_NAME_EMPTY, "Database Name Empty");
    }
    File tempFile = null;
    try {
        // if (!databaseService.getDbList().contains(dbName)) {
        //   throw new DatabaseException(ErrorCodes.DB_DOES_NOT_EXISTS,

        //       "Database with dbName [ " + dbName + "] does not exist");
        // }

        ObjectId objectId = new ObjectId(_id);

        MongoDatabase db = mongoInstance.getDatabase(dbName);

        GridFSBucket gridFS = GridFSBuckets.create(db, bucketName);

        Document id = new Document();

        id.put("_id", objectId);

        GridFSFile fsFile = gridFS.find(id).first();

        if (fsFile != null) {

            String tempDir = System.getProperty("java.io.tmpdir");

            tempFile = new File(tempDir + "/" + fsFile.getFilename());

            FileOutputStream streamToDownloadTo = new FileOutputStream(tempFile);

            gridFS.downloadToStream(objectId, streamToDownloadTo);

            streamToDownloadTo.close();

        }

    } catch (MongoException m) {
        throw new CollectionException(ErrorCodes.GET_COLLECTION_LIST_EXCEPTION, m.getMessage());
    } catch (IOException e) {
        throw new CollectionException(ErrorCodes.GET_COLLECTION_LIST_EXCEPTION, e.getMessage());
    }
    return tempFile;
}

From source file:com.imaginea.mongodb.services.impl.GridFSServiceImpl.java

License:Apache License

/**
 * Service implementation for dropping a file from GridFS.
 *
 * @param dbName Name of Database
 * @param bucketName Name of GridFS Bucket
 * @param _id Object id of file to be deleted
 * @return Status message.
 */
public String deleteFile(String dbName, String bucketName, String _id)
        throws DatabaseException, DocumentException, CollectionException, ValidationException {
    if (dbName == null) {
        throw new DatabaseException(ErrorCodes.DB_NAME_EMPTY, "Database name is null");

    }
    if (dbName.equals("")) {
        throw new DatabaseException(ErrorCodes.DB_NAME_EMPTY, "Database Name Empty");
    }

    if (bucketName == null) {
        throw new CollectionException(ErrorCodes.COLLECTION_NAME_EMPTY, "Bucket name is null");
    }
    if (bucketName.equals("")) {
        throw new CollectionException(ErrorCodes.COLLECTION_NAME_EMPTY, "Bucket Name Empty");
    }

    String result = null;
    GridFSFile gridFSFile = null;
    try {
        // if (!databaseService.getDbList().contains(dbName)) {
        //   throw new DatabaseException(ErrorCodes.DB_DOES_NOT_EXISTS,
        //       "DB [" + dbName + "] DOES NOT EXIST");
        // }
        if (_id == null) {
            throw new DocumentException(ErrorCodes.DOCUMENT_EMPTY, "File is empty");
        }

        ObjectId objectId = new ObjectId(_id);

        MongoDatabase db = mongoInstance.getDatabase(dbName);

        GridFSBucket gridFS = GridFSBuckets.create(db, bucketName);

        Document id = new Document();

        id.put("_id", objectId);

        gridFSFile = gridFS.find(id).first();
        if (gridFSFile == null) {
            throw new DocumentException(ErrorCodes.DOCUMENT_DOES_NOT_EXIST, "Document does not exist !");
        }

        gridFS.delete(objectId);

    } catch (MongoException e) {
        throw new DocumentException(ErrorCodes.DOCUMENT_DELETION_EXCEPTION, e.getMessage());
    }
    result = "File [" + gridFSFile.getFilename() + "] has been deleted.";
    return result;
}

From source file:net.liaocy.ml4j.nlp.word2vec.Train.java

private void saveModel(String modelName, Word2Vec vec) throws IOException {
    MongoDatabase db = Mongo.getDB();
    GridFSBucket gridFSBucket = GridFSBuckets.create(db, "word2vecmodels");

    GridFSFile gfsfi = gridFSBucket.find(new Document("filename", modelName)).first();
    if (gfsfi != null) {
        ObjectId id = gfsfi.getObjectId();
        gridFSBucket.delete(id);
    }

    try (GridFSUploadStream uploadStream = gridFSBucket.openUploadStream(modelName)) {
        WordVectorSerializer.writeWord2VecModel(vec, uploadStream);
        System.out.println("Save Model Successed!");
    }
}

From source file:net.liaocy.ml4j.tfidf.tfidf.java

public void save() throws IOException {
    MongoDatabase db = Mongo.getDB();
    GridFSBucket gridFSBucket = GridFSBuckets.create(db, "tfidfmodels");

    GridFSFile gfsfi = gridFSBucket.find(new Document("filename", this.modelName)).first();
    if (gfsfi != null) {
        ObjectId id = gfsfi.getObjectId();
        gridFSBucket.delete(id);
    }

    for (Entry<Integer, Integer> word : wordCount.entrySet()) {
        double idf = this.getIdf(word.getValue());
        this.idfMax = Math.max(this.idfMax, idf);
        this.idfMin = Math.min(this.idfMin, idf);
    }

    try (GridFSUploadStream uploadStream = gridFSBucket.openUploadStream(this.modelName)) {
        try (ObjectOutputStream o = new ObjectOutputStream(uploadStream)) {
            o.writeObject(this.wordDocCount);
            o.writeObject(this.docCount);
            o.writeObject(this.wordCount);
            o.writeDouble(this.idfMax.doubleValue());
            o.writeDouble(this.idfMin.doubleValue());
        }
    }

    System.out.println("Save Model Successed!");
}

From source file:org.apache.nifi.processors.mongodb.gridfs.DeleteGridFS.java

License:Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    final String deleteQuery = getQuery(context, input);
    final String queryAttribute = context.getProperty(QUERY_ATTRIBUTE).isSet()
            ? context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue()
            : null;
    GridFSBucket bucket = getBucket(input, context);

    try {
        Document query = Document.parse(deleteQuery);
        MongoCursor cursor = bucket.find(query).iterator();
        if (cursor.hasNext()) {
            GridFSFile file = (GridFSFile) cursor.next();
            bucket.delete(file.getObjectId());

            if (!StringUtils.isEmpty(queryAttribute)) {
                input = session.putAttribute(input, queryAttribute, deleteQuery);
            }

            session.transfer(input, REL_SUCCESS);
        } else {
            getLogger().error(String.format("Query %s did not delete anything in %s", deleteQuery,
                    bucket.getBucketName()));
            session.transfer(input, REL_FAILURE);
        }

        cursor.close();
    } catch (Exception ex) {
        getLogger().error(String.format("Error deleting using query: %s", deleteQuery), ex);
        session.transfer(input, REL_FAILURE);
    }
}

From source file:org.apache.nifi.processors.mongodb.gridfs.FetchGridFS.java

License:Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }

    final String operatingMode = context.getProperty(OPERATION_MODE).getValue();
    final Map<String, String> originalAttributes = input.getAttributes();

    String queryStr;
    try {
        queryStr = getQuery(session, context, input);
        if (StringUtils.isEmpty(queryStr)) {
            getLogger().error("No query could be found or built from the supplied input.");
            session.transfer(input, REL_FAILURE);
            return;
        }
    } catch (IOException ex) {
        getLogger().error("No query could be found from supplied input", ex);
        session.transfer(input, REL_FAILURE);
        return;
    }

    Document query = Document.parse(queryStr);

    try {
        final GridFSBucket bucket = getBucket(input, context);
        final String queryPtr = queryStr;
        final FlowFile parent = operatingMode.equals(MODE_ONE_COMMIT.getValue()) ? input : null;

        MongoCursor it = bucket.find(query).iterator();
        if (operatingMode.equals(MODE_MANY_COMMITS.getValue())) {
            session.transfer(input, REL_ORIGINAL);
            input = null;
        }

        while (it.hasNext()) {
            GridFSFile gridFSFile = (GridFSFile) it.next();
            handleFile(bucket, session, context, parent, gridFSFile, queryPtr);

            if (operatingMode.equals(MODE_MANY_COMMITS.getValue())) {
                session.commit();
            }
        }

        if (input != null) {
            session.transfer(input, REL_ORIGINAL);
        }
    } catch (Exception ex) {
        getLogger().error("An error occurred wile trying to run the query.", ex);
        if (input != null && operatingMode.equals(MODE_ONE_COMMIT.getValue())) {
            session.transfer(input, REL_FAILURE);
        } else if (input != null && operatingMode.equals(MODE_MANY_COMMITS.getValue())) {
            final String queryPtr = queryStr;
            FlowFile cloned = session.create();
            cloned = session.putAllAttributes(cloned, originalAttributes);
            cloned = session.write(cloned, out -> out.write(queryPtr.getBytes()));
            session.transfer(cloned, REL_FAILURE);
        }
    }
}

From source file:org.apache.nifi.processors.mongodb.gridfs.GridFSITTestBase.java

License:Apache License

public boolean fileExists(String name, String bucketName) {
    GridFSBucket bucket = GridFSBuckets.create(client.getDatabase(DB), bucketName);
    MongoCursor it = bucket.find(Document.parse(String.format("{ \"filename\": \"%s\" }", name))).iterator();
    boolean retVal = it.hasNext();
    it.close();

    return retVal;
}

From source file:org.apache.nifi.processors.mongodb.gridfs.GridFSITTestBase.java

License:Apache License

public boolean fileHasProperties(String name, String bucketName, Map<String, String> attrs) {
    GridFSBucket bucket = GridFSBuckets.create(client.getDatabase(DB), bucketName);
    MongoCursor it = bucket.find(Document.parse(String.format("{ \"filename\": \"%s\" }", name))).iterator();
    boolean retVal = false;

    if (it.hasNext()) {
        GridFSFile file = (GridFSFile) it.next();
        Document metadata = file.getMetadata();
        if (metadata != null && metadata.size() == attrs.size()) {
            retVal = true;
            for (Map.Entry<String, Object> entry : metadata.entrySet()) {
                Object val = attrs.get(entry.getKey());
                if (val == null || !entry.getValue().equals(val)) {
                    retVal = false;
                    break;
                }
            }
        }
    }

    it.close();

    return retVal;
}

From source file:org.hibernate.ogm.datastore.mongodb.binarystorage.GridFSStorageManager.java

License:LGPL

private void deleteExistingContent(String fieldName, Object documentId, GridFSBucket gridFSFilesBucket) {
    GridFSFindIterable results = gridFSFilesBucket
            .find(Filters.and(Filters.eq("filename", fileName(fieldName, documentId))));
    try (MongoCursor<GridFSFile> iterator = results.iterator()) {
        while (iterator.hasNext()) {
            GridFSFile next = iterator.next();
            gridFSFilesBucket.delete(next.getId());
        }
    }
}