List of usage examples for com.mongodb.client.gridfs.model.GridFSFile.getObjectId()
public ObjectId getObjectId()
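getObjectId() returns the ObjectId under which a file is stored in the GridFS files collection; that id is what GridFSBucket operations such as delete and downloadToStream expect. A minimal sketch, assuming a local MongoDB instance and hypothetical database, bucket, and file names ("test", "fs", "report.pdf"):

// Minimal sketch: look up a GridFS file by filename, then act on its ObjectId.
// The connection string, database, bucket, and filename below are placeholders.
MongoClient client = MongoClients.create("mongodb://localhost:27017");
MongoDatabase db = client.getDatabase("test");
GridFSBucket bucket = GridFSBuckets.create(db, "fs");

GridFSFile file = bucket.find(Filters.eq("filename", "report.pdf")).first();
if (file != null) {
    ObjectId id = file.getObjectId();          // id of the stored file
    bucket.downloadToStream(id, System.out);   // read the file back by id
    bucket.delete(id);                         // or remove it by id
}

The examples below follow the same pattern: find a GridFSFile, then pass file.getObjectId() to a bucket operation such as delete or downloadToStream.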
From source file:com.jaeksoft.searchlib.crawler.cache.MongoDbCrawlCache.java
License:Open Source License
@Override
public long flush(long expiration) throws IOException {
    rwl.r.lock();
    try {
        // An expiration of 0 matches everything; otherwise match documents whose
        // ObjectId timestamp is older than the expiration date.
        final Bson filter = expiration == 0 ? Filters.exists("uri")
                : Filters.lt("_id", new ObjectId(new Date(expiration)));
        indexedCollection.deleteMany(filter);
        for (GridFSFile f : contentGrid.find(filter))
            contentGrid.delete(f.getObjectId());
        return metaCollection.deleteMany(filter).getDeletedCount();
    } finally {
        rwl.r.unlock();
    }
}
From source file:net.liaocy.ml4j.nlp.word2vec.Train.java
private void saveModel(String modelName, Word2Vec vec) throws IOException {
    MongoDatabase db = Mongo.getDB();
    GridFSBucket gridFSBucket = GridFSBuckets.create(db, "word2vecmodels");
    GridFSFile gfsfi = gridFSBucket.find(new Document("filename", modelName)).first();
    if (gfsfi != null) {
        ObjectId id = gfsfi.getObjectId();
        gridFSBucket.delete(id);
    }
    try (GridFSUploadStream uploadStream = gridFSBucket.openUploadStream(modelName)) {
        WordVectorSerializer.writeWord2VecModel(vec, uploadStream);
        System.out.println("Save Model Successed!");
    }
}
From source file:net.liaocy.ml4j.tfidf.tfidf.java
public void save() throws IOException {
    MongoDatabase db = Mongo.getDB();
    GridFSBucket gridFSBucket = GridFSBuckets.create(db, "tfidfmodels");
    // Delete any previously stored model with the same filename before uploading.
    GridFSFile gfsfi = gridFSBucket.find(new Document("filename", this.modelName)).first();
    if (gfsfi != null) {
        ObjectId id = gfsfi.getObjectId();
        gridFSBucket.delete(id);
    }
    // Track the minimum and maximum IDF values over the vocabulary.
    for (Entry<Integer, Integer> word : wordCount.entrySet()) {
        double idf = this.getIdf(word.getValue());
        this.idfMax = Math.max(this.idfMax, idf);
        this.idfMin = Math.min(this.idfMin, idf);
    }
    try (GridFSUploadStream uploadStream = gridFSBucket.openUploadStream(this.modelName)) {
        try (ObjectOutputStream o = new ObjectOutputStream(uploadStream)) {
            o.writeObject(this.wordDocCount);
            o.writeObject(this.docCount);
            o.writeObject(this.wordCount);
            o.writeDouble(this.idfMax.doubleValue());
            o.writeDouble(this.idfMin.doubleValue());
        }
    }
    System.out.println("Save Model Successed!");
}
From source file:org.apache.nifi.processors.mongodb.gridfs.DeleteGridFS.java
License:Apache License
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile input = session.get();
    if (input == null) {
        return;
    }
    final String deleteQuery = getQuery(context, input);
    final String queryAttribute = context.getProperty(QUERY_ATTRIBUTE).isSet()
            ? context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue()
            : null;
    GridFSBucket bucket = getBucket(input, context);
    try {
        Document query = Document.parse(deleteQuery);
        MongoCursor cursor = bucket.find(query).iterator();
        if (cursor.hasNext()) {
            GridFSFile file = (GridFSFile) cursor.next();
            bucket.delete(file.getObjectId());
            if (!StringUtils.isEmpty(queryAttribute)) {
                input = session.putAttribute(input, queryAttribute, deleteQuery);
            }
            session.transfer(input, REL_SUCCESS);
        } else {
            getLogger().error(String.format("Query %s did not delete anything in %s", deleteQuery,
                    bucket.getBucketName()));
            session.transfer(input, REL_FAILURE);
        }
        cursor.close();
    } catch (Exception ex) {
        getLogger().error(String.format("Error deleting using query: %s", deleteQuery), ex);
        session.transfer(input, REL_FAILURE);
    }
}
From source file:org.apache.nifi.processors.mongodb.gridfs.FetchGridFS.java
License:Apache License
private void handleFile(GridFSBucket bucket, ProcessSession session, ProcessContext context, FlowFile parent,
        GridFSFile input, String query) {
    Map<String, String> attrs = new HashMap<>();
    attrs.put(METADATA_ATTRIBUTE, input.getMetadata() != null ? input.getMetadata().toJson() : "{}");
    if (context.getProperty(QUERY_ATTRIBUTE).isSet()) {
        String key = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(parent).getValue();
        attrs.put(key, query);
    }
    attrs.put(CoreAttributes.FILENAME.key(), input.getFilename());
    FlowFile output = parent != null ? session.create(parent) : session.create();
    output = session.write(output, out -> bucket.downloadToStream(input.getObjectId(), out));
    output = session.putAllAttributes(output, attrs);
    session.transfer(output, REL_SUCCESS);
    session.getProvenanceReporter().receive(output, getTransitUri(input.getObjectId(), output, context));
}