Usage examples for the com.mongodb.gridfs.GridFS constructor:
public GridFS(final DB db, final String bucket)
From source file:net.ymate.platform.persistence.mongodb.impl.MongoGridFSSession.java
License:Apache License
public MongoGridFSSession(IMongoDataSourceAdapter dataSourceAdapter, String bucketName) throws Exception { this.__id = UUIDUtils.UUID(); this.__dataSourceHolder = dataSourceAdapter; this.__bucketName = StringUtils.defaultIfBlank(bucketName, GridFS.DEFAULT_BUCKET); ///*from ww w . j av a 2s. c o m*/ __gridFS = new GridFS(new DB(dataSourceAdapter.getMongoClient(), dataSourceAdapter.getDataSourceCfgMeta().getDatabaseName()), __bucketName); __dbCollection = __gridFS.getDB().getCollection(__bucketName.concat(".files")); }
From source file:org.apache.camel.component.gridfs.GridFsEndpoint.java
License:Apache License
/**
 * Validates the endpoint configuration and creates the GridFS handle.
 *
 * @throws IllegalStateException if no database is configured or it does not exist
 */
@SuppressWarnings("deprecation")
public void initializeConnection() throws Exception {
    LOG.info("Initialize GridFS endpoint: {}", this.toString());

    if (database == null) {
        throw new IllegalStateException("Missing required endpoint configuration: database");
    }
    db = mongoConnection.getDB(database);
    if (db == null) {
        throw new IllegalStateException(
                "Could not initialize GridFsComponent. Database " + database + " does not exist.");
    }
    // NOTE(review): anonymous subclass + instance initializer — presumably used
    // because getFilesCollection() is not publicly reachable on a plain GridFS
    // instance in this driver version; confirm before simplifying.
    gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
        {
            filesCollection = getFilesCollection();
        }
    };
}
From source file:org.apache.manifoldcf.crawler.connectors.gridfs.GridFSRepositoryConnector.java
License:Apache License
/** Process a set of documents. * This is the method that should cause each document to be fetched, processed, and the results either added * to the queue of documents for the current job, and/or entered into the incremental ingestion manager. * The document specification allows this class to filter what is done based on the job. * The connector will be connected before this method can be called. *@param documentIdentifiers is the set of document identifiers to process. *@param statuses are the currently-stored document versions for each document in the set of document identifiers * passed in above./*from ww w. j av a 2s. c o m*/ *@param activities is the interface this method should use to queue up new document references * and ingest documents. *@param jobMode is an integer describing how the job is being run, whether continuous or once-only. *@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one. */ @Override public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec, IProcessActivity activities, int jobMode, boolean usesDefaultAuthority) throws ManifoldCFException, ServiceInterruption { for (String documentIdentifier : documentIdentifiers) { String versionString; GridFS gfs; GridFSDBFile document; getSession(); String _id = documentIdentifier; gfs = new GridFS(session, bucket); document = gfs.findOne(new ObjectId(_id)); if (document == null) { activities.deleteDocument(documentIdentifier); continue; } else { DBObject metadata = document.getMetaData(); versionString = document.getMD5() + "+" + metadata != null ? 
Integer.toString(metadata.hashCode()) : StringUtils.EMPTY; } if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier, versionString)) { long startTime = System.currentTimeMillis(); String errorCode = null; String errorDesc = null; String version = versionString; try { if (Logging.connectors.isDebugEnabled()) { Logging.connectors.debug("GridFS: Processing document _id = " + _id); } DBObject metadata = document.getMetaData(); if (metadata == null) { errorCode = "NULLMETADATA"; errorDesc = "Excluded because document had a null Metadata"; Logging.connectors.warn("GridFS: Document " + _id + " has a null metadata - skipping."); activities.noDocument(_id, version); continue; } String urlValue = document.getMetaData().get(this.url) == null ? StringUtils.EMPTY : document.getMetaData().get(this.url).toString(); if (!StringUtils.isEmpty(urlValue)) { boolean validURL; try { new java.net.URI(urlValue); validURL = true; } catch (java.net.URISyntaxException e) { validURL = false; } if (validURL) { long fileLenght = document.getLength(); Date createdDate = document.getUploadDate(); String fileName = document.getFilename(); String mimeType = document.getContentType(); if (!activities.checkURLIndexable(urlValue)) { Logging.connectors.warn( "GridFS: Document " + _id + " has a URL excluded by the output connector ('" + urlValue + "') - skipping."); errorCode = activities.EXCLUDED_URL; errorDesc = "Excluded because of URL (" + urlValue + ")"; activities.noDocument(_id, version); continue; } if (!activities.checkLengthIndexable(fileLenght)) { Logging.connectors.warn("GridFS: Document " + _id + " has a length excluded by the output connector (" + fileLenght + ") - skipping."); errorCode = activities.EXCLUDED_LENGTH; errorDesc = "Excluded because of length (" + fileLenght + ")"; activities.noDocument(_id, version); continue; } if (!activities.checkMimeTypeIndexable(mimeType)) { Logging.connectors.warn("GridFS: Document " + _id + " has a mime type 
excluded by the output connector ('" + mimeType + "') - skipping."); errorCode = activities.EXCLUDED_MIMETYPE; errorDesc = "Excluded because of mime type (" + mimeType + ")"; activities.noDocument(_id, version); continue; } if (!activities.checkDateIndexable(createdDate)) { Logging.connectors.warn( "GridFS: Document " + _id + " has a date excluded by the output connector (" + createdDate + ") - skipping."); errorCode = activities.EXCLUDED_DATE; errorDesc = "Excluded because of date (" + createdDate + ")"; activities.noDocument(_id, version); continue; } RepositoryDocument rd = new RepositoryDocument(); rd.setCreatedDate(createdDate); rd.setModifiedDate(createdDate); rd.setFileName(fileName); rd.setMimeType(mimeType); String[] aclsArray = null; String[] denyAclsArray = null; if (acl != null) { try { Object aclObject = document.getMetaData().get(acl); if (aclObject != null) { List<String> acls = (List<String>) aclObject; aclsArray = (String[]) acls.toArray(); } } catch (ClassCastException e) { // This is bad because security will fail Logging.connectors.warn("GridFS: Document " + _id + " metadata ACL field doesn't contain List<String> type."); errorCode = "ACLTYPE"; errorDesc = "Allow ACL field doesn't contain List<String> type."; throw new ManifoldCFException("Security decoding error: " + e.getMessage(), e); } } if (denyAcl != null) { try { Object denyAclObject = document.getMetaData().get(denyAcl); if (denyAclObject != null) { List<String> denyAcls = (List<String>) denyAclObject; denyAcls.add(GLOBAL_DENY_TOKEN); denyAclsArray = (String[]) denyAcls.toArray(); } } catch (ClassCastException e) { // This is bad because security will fail Logging.connectors.warn("GridFS: Document " + _id + " metadata DenyACL field doesn't contain List<String> type."); errorCode = "ACLTYPE"; errorDesc = "Deny ACL field doesn't contain List<String> type."; throw new ManifoldCFException("Security decoding error: " + e.getMessage(), e); } } 
rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT, aclsArray, denyAclsArray); InputStream is = document.getInputStream(); try { rd.setBinary(is, fileLenght); try { activities.ingestDocumentWithException(_id, version, urlValue, rd); } catch (IOException e) { handleIOException(e); } } finally { try { is.close(); } catch (IOException e) { handleIOException(e); } } gfs.getDB().getMongo().getConnector().close(); session = null; errorCode = "OK"; } else { Logging.connectors.warn( "GridFS: Document " + _id + " has a invalid URL: " + urlValue + " - skipping."); errorCode = activities.BAD_URL; errorDesc = "Excluded because document had illegal URL ('" + urlValue + "')"; activities.noDocument(_id, version); } } else { Logging.connectors.warn("GridFS: Document " + _id + " has a null URL - skipping."); errorCode = activities.NULL_URL; errorDesc = "Excluded because document had a null URL."; activities.noDocument(_id, version); } } finally { if (errorCode != null) { activities.recordActivity(startTime, ACTIVITY_FETCH, document.getLength(), _id, errorCode, errorDesc, null); } } } } }
From source file:org.aw20.mongoworkbench.command.GridFSCreateBucketCommand.java
License:Open Source License
@Override public void execute() throws Exception { MongoClient mdb = MongoFactory.getInst().getMongo(sName); if (mdb == null) throw new Exception("no server selected"); if (sDb == null) throw new Exception("no database selected"); MongoFactory.getInst().setActiveDB(sDb); DB db = mdb.getDB(sDb);//from ww w. j a v a 2 s.c o m GridFS gfs = new GridFS(db, sColl); gfs.getBucketName(); setMessage("bucketCreated=" + sColl); }
From source file:org.aw20.mongoworkbench.command.GridFSGetFileCommand.java
License:Open Source License
@Override public void execute() throws Exception { MongoClient mdb = MongoFactory.getInst().getMongo(sName); if (mdb == null) throw new Exception("no server selected"); if (sDb == null) throw new Exception("no database selected"); MongoFactory.getInst().setActiveDB(sDb); DB db = mdb.getDB(sDb);// w w w .ja v a 2 s. c o m GridFS gfs = new GridFS(db, sColl.substring(0, sColl.lastIndexOf("."))); GridFSDBFile gridFSDBFile = gfs.find(id); gridFSDBFile.writeTo(saveFile); setMessage("fileSaved=" + saveFile + "; size=" + saveFile.length()); }
From source file:org.aw20.mongoworkbench.command.GridFSPutFileCommand.java
License:Open Source License
/**
 * Uploads {@code getFile} into the GridFS bucket derived from {@code sColl},
 * tagging it with a content type guessed from the file name.
 *
 * @throws Exception if no server or database is currently selected
 */
@Override
public void execute() throws Exception {
    MongoClient client = MongoFactory.getInst().getMongo(sName);
    if (client == null) {
        throw new Exception("no server selected");
    }
    if (sDb == null) {
        throw new Exception("no database selected");
    }
    MongoFactory.getInst().setActiveDB(sDb);
    DB database = client.getDB(sDb);
    // Bucket name is the collection name with its ".files"/".chunks" suffix stripped.
    String bucketName = sColl.substring(0, sColl.lastIndexOf("."));
    GridFS fs = new GridFS(database, bucketName);

    GridFSInputFile upload = fs.createFile(getFile);
    upload.setContentType(MimetypesFileTypeMap.getDefaultFileTypeMap().getContentType(getFile));
    upload.save();

    setMessage("fileLoaded=" + getFile + "; size=" + getFile.length());
}
From source file:org.aw20.mongoworkbench.command.GridFSRemoveFileCommand.java
License:Open Source License
@Override public void execute() throws Exception { MongoClient mdb = MongoFactory.getInst().getMongo(sName); if (mdb == null) throw new Exception("no server selected"); if (sDb == null) throw new Exception("no database selected"); MongoFactory.getInst().setActiveDB(sDb); DB db = mdb.getDB(sDb);//from w w w . j a v a 2 s.c o m GridFS gfs = new GridFS(db, sColl.substring(0, sColl.lastIndexOf("."))); gfs.remove(new ObjectId(id)); setMessage("fileRemoved=" + id); }
From source file:org.bananaforscale.cormac.dao.gridfs.GridFsDataServiceImpl.java
License:Apache License
/** * Adds a bucket to the database./* w w w . j a v a 2s . co m*/ * * @param databaseName the database * @param bucketName the bucket * @return the result of the operation * @throws DatasourceException * @throws ExistsException * @throws NotFoundException */ @Override public boolean addBucket(String databaseName, String bucketName) throws DatasourceException, ExistsException, NotFoundException { try { if (!databaseExists(databaseName)) { throw new NotFoundException("The database doesn't exist in the datasource"); } if (bucketExists(databaseName, bucketName)) { throw new ExistsException("The bucket already exists in the database"); } DB mongoDatabase = mongoClient.getDB(databaseName); new GridFS(mongoDatabase, bucketName); return true; } catch (MongoException ex) { logger.error("An error occured while adding a bucket", ex); throw new DatasourceException("An error occured while adding a bucket"); } }
From source file:org.bananaforscale.cormac.dao.gridfs.GridFsDataServiceImpl.java
License:Apache License
/** * Returns all the files in a bucket./* www.ja va 2 s . c o m*/ * * @param databaseName the database * @param bucketName the bucket * @return the files in the bucket * @throws DatasourceException * @throws NotFoundException */ @Override public List<String> getAll(String databaseName, String bucketName) throws DatasourceException, NotFoundException { try { if (!databaseExists(databaseName)) { throw new NotFoundException("The database doesn't exist in the datasource"); } if (!bucketExists(databaseName, bucketName)) { throw new NotFoundException("The bucket doesn't exist in the database"); } DB mongoDatabase = mongoClient.getDB(databaseName); GridFS gfsBucket = new GridFS(mongoDatabase, bucketName); DBCursor cursor = gfsBucket.getFileList(); Iterator<DBObject> curIter = cursor.iterator(); List<String> fileList = new ArrayList<>(); while (curIter.hasNext()) { DBObject current = curIter.next(); fileList.add(JSON.serialize(current)); } return fileList; } catch (MongoException ex) { logger.error("An error occured while retrieving file list", ex); throw new DatasourceException("An error occured while retrieving file list"); } }
From source file:org.bananaforscale.cormac.dao.gridfs.GridFsDataServiceImpl.java
License:Apache License
/** * Removes all files in a bucket.//from ww w.j av a 2 s . c om * * @param databaseName the database * @param bucketName the bucket * @return a status message with the outcome of the operation * @throws DatasourceException * @throws NotFoundException */ @Override public boolean removeAll(String databaseName, String bucketName) throws DatasourceException, NotFoundException { try { if (!databaseExists(databaseName)) { throw new NotFoundException("The database doesn't exist in the datasource"); } DB mongoDatabase = mongoClient.getDB(databaseName); GridFS gfsBucket = new GridFS(mongoDatabase, bucketName); // TODO: determine behavior if bucket doesnt exist DBCursor cursor = gfsBucket.getFileList(); Iterator<DBObject> curIter = cursor.iterator(); while (curIter.hasNext()) { DBObject current = curIter.next(); gfsBucket.remove(current); } return true; } catch (MongoException ex) { logger.error("An error occured while removing files", ex); throw new DatasourceException("An error occured while removing files"); } }