Example usage for com.mongodb.gridfs GridFSInputFile setContentType

Introduction

On this page you can find example usage for com.mongodb.gridfs GridFSInputFile.setContentType.

Prototype

public void setContentType(final String contentType) 

Document

Sets the content type (MIME type) on the GridFS entry.
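
For orientation, here is a minimal, self-contained sketch of the call in context, using the legacy com.mongodb.gridfs API. The connection details, the database name "test", and the file "report.pdf" are illustrative assumptions, not taken from the examples below.

import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSInputFile;

import java.io.File;
import java.io.IOException;

public class SetContentTypeExample {
    public static void main(String[] args) throws IOException {
        MongoClient mongo = new MongoClient("localhost", 27017);   // assumed local server
        try {
            DB db = mongo.getDB("test");                           // assumed database name
            GridFS gridFs = new GridFS(db, "fs");                  // default bucket name "fs"

            GridFSInputFile inputFile = gridFs.createFile(new File("report.pdf")); // assumed file
            inputFile.setFilename("report.pdf");
            inputFile.setContentType("application/pdf");           // stored as "contentType" on the files document
            inputFile.save();
        } finally {
            mongo.close();
        }
    }
}

The value passed to setContentType is stored on the file's metadata document and can be read back later via GridFSDBFile.getContentType(), as several of the examples below rely on.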

Usage

From source file:mx.org.cedn.avisosconagua.engine.processors.Pronostico.java

License:Open Source License

/**
 * Processes an uploaded file and stores it in MongoDB.
 * @param item file item from the parsed servlet request
 * @param currentId ID for the current MongoDB object for the advice
 * @return file name
 * @throws IOException 
 */
private String processUploadedFile(FileItem item, String currentId) throws IOException {
    GridFS gridfs = MongoInterface.getInstance().getImagesFS();
    GridFSInputFile gfsFile = gridfs.createFile(item.getInputStream());
    String filename = currentId + ":" + item.getFieldName() + "_" + item.getName();
    gfsFile.setFilename(filename);
    gfsFile.setContentType(item.getContentType());
    gfsFile.save();
    return filename;
}

From source file:mx.org.cedn.avisosconagua.mongo.CAPFileGenerator.java

License:Open Source License

/**
 * Generates and stores the CAP file.
 */
public void generate() {
    try {
        GridFS fs = MongoInterface.getInstance().getGeneratedFS();
        fs.remove(name);
        GridFSInputFile infile = fs.createFile(generator.generate().getBytes("UTF-8"));
        infile.setContentType("text/xml");
        infile.setFilename(name);
        infile.save();
        isOK = true;
    } catch (UnsupportedEncodingException uex) {
        uex.printStackTrace();
    }
}

From source file:mx.org.cedn.avisosconagua.mongo.HtmlZipGenerator.java

License:Open Source License

/**
 * Generates the ZIP file of the HTMl advice.
 */
public void generate() {
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
        String localFolder = "./" + adviceID + "/";
        ZipOutputStream zout = new ZipOutputStream(baos);
        zout.setLevel(9);
        zout.putNextEntry(new ZipEntry(name));
        zout.write(html.generate(true).getBytes("ISO8859-1"));
        //zout.putNextEntry(new ZipEntry(localFolder));
        if (html.getPrincipalFile() != null) {
            GridFS gridfs = MongoInterface.getInstance().getImagesFS();
            GridFSDBFile imageForOutput = gridfs.findOne(html.getPrincipalFile());
            zout.putNextEntry(new ZipEntry(prefix + "_1"
                    + html.getPrincipalFile().substring(html.getPrincipalFile().lastIndexOf(".")))); //localFolder + 
            imageForOutput.writeTo(zout);
        }
        if (html.getPronosticoFile() != null) {
            GridFS gridfs = MongoInterface.getInstance().getImagesFS();
            GridFSDBFile imageForOutput = gridfs.findOne(html.getPronosticoFile());
            zout.putNextEntry(new ZipEntry(prefix + "_2"
                    + html.getPronosticoFile().substring(html.getPronosticoFile().lastIndexOf(".")))); //localFolder +
            imageForOutput.writeTo(zout);
        }
        zout.putNextEntry(new ZipEntry(prefix + "_f.gif"));
        InputStream fin = HtmlZipGenerator.class.getResourceAsStream("/fondo.gif");
        byte[] buff = new byte[8192];
        int length;
        while ((length = fin.read(buff)) > -1) {
            zout.write(buff, 0, length);
        }
        fin.close();
        //            ArrayList<String> lista = MongoInterface.getInstance().listFilesFromAdvice(adviceID);
        //            for (String filename : lista) {
        //                GridFS gridfs = MongoInterface.getInstance().getImagesFS();
        //                GridFSDBFile imageForOutput = gridfs.findOne(filename);
        //                String fnpart[] = filename.split(":");
        //                zout.putNextEntry(new ZipEntry(localFolder + fnpart[1]));
        //                imageForOutput.writeTo(zout);
        //            }
        zout.close();
        GridFS fs = MongoInterface.getInstance().getGeneratedFS();
        fs.remove(nameZip);
        GridFSInputFile infile = fs.createFile(baos.toByteArray());
        infile.setContentType("application/zip");
        infile.setFilename(nameZip);
        infile.save();
        isOK = true;
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }
}

From source file:net.kamradtfamily.mongorest.GridfsServlet.java

License:GNU General Public License

@Override
protected void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {

    log.fine("doPost()");

    InputStream tmp = req.getInputStream();
    InputStream is = new BufferedInputStream(tmp);
    String db_name = req.getParameter("dbname");
    String bucket_name = req.getParameter("bucketname");
    if (db_name == null || bucket_name == null) {
        String names[] = req2mongonames(req);
        if (names != null) {
            db_name = names[0];
            bucket_name = names[1];
        }
        if (db_name == null) {
            error(res, SC_BAD_REQUEST, Status.get("param name missing"));
            return;
        }
    }

    if (bucket_name == null)
        bucket_name = "fs";

    String file_name = req.getParameter("filename");

    if (file_name == null) {
        error(res, SC_BAD_REQUEST, Status.get("param name missing"));
        return;
    }

    DB db = mongo.getDB(db_name);

    String fs_cache_key = db_name + bucket_name;
    GridFS fs = fs_cache.get(fs_cache_key);
    if (fs == null) {
        fs = new GridFS(db, bucket_name);
        fs_cache.put(fs_cache_key, fs);
    }

    GridFSDBFile db_file_old = fs.findOne(file_name);
    if (db_file_old == null) {
        error(res, SC_NOT_FOUND, Status.get("file does not exist, use PUT"));
        return;
    }

    String ct = req.getContentType();
    GridFSInputFile db_file = fs.createFile(file_name);
    if (ct != null)
        db_file.setContentType(ct);
    OutputStream os = db_file.getOutputStream();

    final int len = 4096;
    byte data[] = new byte[len];
    int n;
    while ((n = is.read(data, 0, len)) > 0) {
        os.write(data, 0, n);
    }
    os.close();

    is.close();

    out_json(req, Status.OK);

}

From source file:net.kamradtfamily.mongorest.GridfsServlet.java

License:GNU General Public License

@Override
protected void doPut(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {

    log.fine("doPut()");

    InputStream tmp = req.getInputStream();
    InputStream is = new BufferedInputStream(tmp);
    String db_name = req.getParameter("dbname");
    String bucket_name = req.getParameter("bucketname");
    if (db_name == null || bucket_name == null) {
        String names[] = req2mongonames(req);
        if (names != null) {
            db_name = names[0];
            bucket_name = names[1];
        }
        if (db_name == null) {
            error(res, SC_BAD_REQUEST, Status.get("param name missing"));
            return;
        }
    }

    if (bucket_name == null)
        bucket_name = "fs";

    String file_name = req.getParameter("filename");

    if (file_name == null) {
        error(res, SC_BAD_REQUEST, Status.get("param name missing"));
        return;
    }

    DB db = mongo.getDB(db_name);

    String fs_cache_key = db_name + bucket_name;
    GridFS fs = fs_cache.get(fs_cache_key);
    if (fs == null) {
        fs = new GridFS(db, bucket_name);
        fs_cache.put(fs_cache_key, fs);
    }

    GridFSDBFile db_file_old = fs.findOne(file_name);
    if (db_file_old != null) {
        error(res, SC_BAD_REQUEST, Status.get("file already exists, use POST"));
        return;
    }

    String ct = req.getContentType();
    GridFSInputFile db_file = fs.createFile(file_name);
    if (ct != null)
        db_file.setContentType(ct);
    OutputStream os = db_file.getOutputStream();

    final int len = 4096;
    byte data[] = new byte[len];
    int n;
    while ((n = is.read(data, 0, len)) > 0) {
        os.write(data, 0, n);
    }
    os.flush();
    os.close();

    is.close();

    out_json(req, Status.OK);

}

From source file:net.ymate.platform.persistence.mongodb.support.GridFSFileBuilder.java

License:Apache License

public GridFSInputFile build(IGridFSSession gridFS) throws Exception {
    GridFSInputFile _inFile = null;
    switch (__type) {
    case 1: // is File
        _inFile = gridFS.getGridFS().createFile((File) __targetObject);
        break;
    case 2: // is InputStream
        _inFile = gridFS.getGridFS().createFile((InputStream) __targetObject);
        break;
    case 3: // is Array
        _inFile = gridFS.getGridFS().createFile((byte[]) __targetObject);
    }
    if (_inFile != null) {
        _inFile.setFilename(__filename);
        _inFile.setContentType(__contentType);
        if (__chunkSize > 0) {
            _inFile.setChunkSize(__chunkSize);
        }
        if (!__attributes.isEmpty()) {
            for (Map.Entry<String, Object> _entry : __attributes.entrySet()) {
                _inFile.put(_entry.getKey(), _entry.getValue());
            }
        }
    }
    return _inFile;
}

From source file:org.apache.camel.component.gridfs.GridFsProducer.java

License:Apache License

public void process(Exchange exchange) throws Exception {
    String operation = endpoint.getOperation();
    if (operation == null) {
        operation = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_OPERATION, String.class);
    }
    if (operation == null || "create".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        Long chunkSize = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_CHUNKSIZE, Long.class);

        InputStream ins = exchange.getIn().getMandatoryBody(InputStream.class);
        GridFSInputFile gfsFile = endpoint.getGridFs().createFile(ins, filename, true);
        if (chunkSize != null && chunkSize > 0) {
            gfsFile.setChunkSize(chunkSize);
        }
        final String ct = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
        if (ct != null) {
            gfsFile.setContentType(ct);
        }
        String metaData = exchange.getIn().getHeader(GridFsEndpoint.GRIDFS_METADATA, String.class);
        DBObject dbObject = (DBObject) JSON.parse(metaData);
        gfsFile.setMetaData(dbObject);
        gfsFile.save();
        exchange.getIn().setHeader(Exchange.FILE_NAME_PRODUCED, gfsFile.getFilename());
    } else if ("remove".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        endpoint.getGridFs().remove(filename);
    } else if ("findOne".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        GridFSDBFile file = endpoint.getGridFs().findOne(filename);
        if (file != null) {
            exchange.getIn().setHeader(GridFsEndpoint.GRIDFS_METADATA, JSON.serialize(file.getMetaData()));
            exchange.getIn().setHeader(Exchange.FILE_CONTENT_TYPE, file.getContentType());
            exchange.getIn().setHeader(Exchange.FILE_LENGTH, file.getLength());
            exchange.getIn().setHeader(Exchange.FILE_LAST_MODIFIED, file.getUploadDate());
            exchange.getIn().setBody(file.getInputStream(), InputStream.class);
        } else {
            throw new FileNotFoundException("No GridFS file for " + filename);
        }
    } else if ("listAll".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        DBCursor cursor;
        if (filename == null) {
            cursor = endpoint.getGridFs().getFileList();
        } else {
            cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
        }
        exchange.getIn().setBody(new DBCursorFilenameReader(cursor), Reader.class);
    } else if ("count".equals(operation)) {
        final String filename = exchange.getIn().getHeader(Exchange.FILE_NAME, String.class);
        DBCursor cursor;
        if (filename == null) {
            cursor = endpoint.getGridFs().getFileList();
        } else {
            cursor = endpoint.getGridFs().getFileList(new BasicDBObject("filename", filename));
        }
        exchange.getIn().setBody(cursor.count(), Integer.class);
    }

}

From source file:org.aw20.mongoworkbench.command.GridFSPutFileCommand.java

License:Open Source License

@Override
public void execute() throws Exception {
    MongoClient mdb = MongoFactory.getInst().getMongo(sName);

    if (mdb == null)
        throw new Exception("no server selected");

    if (sDb == null)
        throw new Exception("no database selected");

    MongoFactory.getInst().setActiveDB(sDb);
    DB db = mdb.getDB(sDb);

    GridFS gfs = new GridFS(db, sColl.substring(0, sColl.lastIndexOf(".")));

    GridFSInputFile gridFSInputFile = gfs.createFile(getFile);
    gridFSInputFile.setContentType(MimetypesFileTypeMap.getDefaultFileTypeMap().getContentType(getFile));
    gridFSInputFile.save();

    setMessage("fileLoaded=" + getFile + "; size=" + getFile.length());
}

From source file:org.bananaforscale.cormac.dao.gridfs.GridFsDataServiceImpl.java

License:Apache License

/**
 * Saves a file to the database by file name. This is used during a form upload. We use tika to
 * determine the content type.
 *
 * TODO: Refactor this mess
 *
 * @param databaseName the name of the database
 * @param bucketName the name of the bucket
 * @param fileName the name of the file
 * @param overwrite whether to overwrite an existing file with the same name
 * @param stream the file byte stream
 * @return the Mongo ID of the file
 * @throws DatasourceException
 * @throws ExistsException
 * @throws NotFoundException
 */
@Override
public String addByForm(String databaseName, String bucketName, String fileName, boolean overwrite,
        InputStream stream) throws DatasourceException, ExistsException, NotFoundException {
    String fileId = null;
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        DB mongoDatabase = mongoClient.getDB(databaseName);
        GridFS gfsBucket = new GridFS(mongoDatabase, bucketName);
        GridFSDBFile gfsFile = gfsBucket.findOne(fileName);
        if (gfsFile == null) {
            // the file does not exist -- create
            GridFSInputFile dbFile = gfsBucket.createFile(stream, fileName);
            dbFile.setContentType(tika.detect(fileName));
            dbFile.save();
            fileId = dbFile.getId().toString();
        } else {
            // the file exists
            if (overwrite) {
                // overwrite the existing file
                gfsBucket.remove(gfsFile);
                GridFSInputFile inputFile = gfsBucket.createFile(stream, fileName);
                inputFile.setContentType(tika.detect(fileName));
                inputFile.save();
                fileId = inputFile.getId().toString();
            } else {
                throw new ExistsException("The file already exists in the bucket");
            }
        }
    } catch (MongoException ex) {
        logger.error("Could not persist entity to bucket", ex);
        throw new DatasourceException("Could not persist file to bucket");
    }
    if (fileId == null || fileId.isEmpty()) {
        throw new DatasourceException("Could not persist file to bucket");
    }
    return fileId;
}

From source file:org.bananaforscale.cormac.dao.gridfs.GridFsDataServiceImpl.java

License:Apache License

/**
 * Saves a document to the database by file name. If the document already exists this request
 * will be dropped and the existing file will not be overwritten.
 *
 * @param databaseName the database
 * @param bucketName the bucket
 * @param fileName the file name
 * @param inputStream the binary payload
 * @return the identifier of the file
 * @throws DatasourceException
 * @throws ExistsException
 * @throws NotFoundException
 */
@Override
public String addByFileName(String databaseName, String bucketName, String fileName, InputStream inputStream)
        throws DatasourceException, ExistsException, NotFoundException {
    try {
        if (!databaseExists(databaseName)) {
            throw new NotFoundException("The database doesn't exist in the datasource");
        }
        DB mongoDatabase = mongoClient.getDB(databaseName);
        GridFS gfsBucket = new GridFS(mongoDatabase, bucketName);
        if (gfsBucket.findOne(fileName) != null) {
            throw new ExistsException("The file already exists");
        }
        GridFSInputFile inputFile = gfsBucket.createFile(inputStream, fileName);
        inputFile.setContentType(tika.detect(fileName));
        inputFile.save();
        return inputFile.getId().toString();
    } catch (MongoException ex) {
        logger.error("An error occurred while adding the file", ex);
        throw new DatasourceException("An error occurred while adding the file");
    }
}