Example usage for com.mongodb.gridfs GridFSDBFile getInputStream

List of usage examples for com.mongodb.gridfs GridFSDBFile getInputStream

Introduction

On this page you can find example usage for com.mongodb.gridfs GridFSDBFile getInputStream.

Prototype

public InputStream getInputStream() 

Document

Returns an InputStream from which data can be read.
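As a quick orientation before the examples below, here is a minimal sketch of reading a stored file through getInputStream() with the legacy mongo-java-driver GridFS API; the database name "exampleDb" and the filename "report.pdf" are illustrative assumptions, not values from the examples.

import com.mongodb.DB;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class GridFSReadSketch {
    public static void main(String[] args) throws Exception {
        // Connect to a local MongoDB instance; "exampleDb" is an assumed database name.
        MongoClient mongo = new MongoClient("localhost", 27017);
        try {
            GridFS gridFs = new GridFS(mongo.getDB("exampleDb"));

            // Look up a stored file by filename ("report.pdf" is hypothetical).
            GridFSDBFile file = gridFs.findOne("report.pdf");
            if (file != null) {
                // getInputStream() streams the file's chunks out of GridFS.
                try (InputStream in = file.getInputStream()) {
                    Files.copy(in, Paths.get("report-copy.pdf"), StandardCopyOption.REPLACE_EXISTING);
                }
            }
        } finally {
            mongo.close();
        }
    }
}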

Usage

From source file:org.craftercms.studio.impl.repository.mongodb.services.impl.GridFSServiceImpl.java

License:Open Source License

@Override
public InputStream getFile(final String fileId) throws MongoRepositoryException {
    if (StringUtils.isBlank(fileId)) {
        log.error("Given Id for getting a fileInputStream is null, empty or blank, check if node is a Folder");
        throw new IllegalArgumentException("File id is either null,empty or blank");
    }
    log.debug("Getting fileInputStream with Id {}", fileId);
    try {
        GridFSDBFile foundFile = gridFs.findOne(new ObjectId(fileId));
        if (foundFile == null) {
            log.debug("Unable to find a fileInputStream with id {}", fileId);
            log.debug("If this Id was obtain using a node, this node may be broken");
            return null;
        } else {
            log.debug("Found fileInputStream with id {}, named {} and md5 is {}", fileId,
                    foundFile.getFilename(), foundFile.getMD5());
            return foundFile.getInputStream();
        }
    } catch (MongoException ex) {
        log.error("Unable to get File with id {} due a MongoException {} ", fileId, ex.getMessage());
        log.error("DataAccessException is ", ex);
        throw new MongoRepositoryException(ex);
    }
}

From source file:org.elasticsearch.river.mongodb.util.GridFSHelper.java

License:Apache License

public static XContentBuilder serialize(GridFSDBFile file) throws IOException {

    XContentBuilder builder = XContentFactory.jsonBuilder();

    ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    int nRead;
    byte[] data = new byte[1024];

    InputStream stream = file.getInputStream();
    while ((nRead = stream.read(data, 0, data.length)) != -1) {
        buffer.write(data, 0, nRead);
    }

    buffer.flush();
    stream.close();

    String encodedContent = Base64.encodeBytes(buffer.toByteArray());

    // Probably not necessary...
    buffer.close();

    builder.startObject();
    builder.startObject("content");
    builder.field("content_type", file.getContentType());
    builder.field("title", file.getFilename());
    builder.field("content", encodedContent);
    builder.endObject();
    builder.field("filename", file.getFilename());
    builder.field("contentType", file.getContentType());
    builder.field("md5", file.getMD5());
    builder.field("length", file.getLength());
    builder.field("chunkSize", file.getChunkSize());
    builder.field("uploadDate", file.getUploadDate());
    builder.startObject("metadata");
    DBObject metadata = file.getMetaData();
    if (metadata != null) {
        for (String key : metadata.keySet()) {
            builder.field(key, metadata.get(key));
        }
    }
    builder.endObject();
    builder.endObject();

    return builder;
}

From source file:org.elasticsearch.river.mongodb.util.MongoDBHelper.java

License:Apache License

public static XContentBuilder serialize(GridFSDBFile file) throws IOException {

    XContentBuilder builder = XContentFactory.jsonBuilder();

    ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    int nRead;
    byte[] data = new byte[1024];

    try (InputStream stream = file.getInputStream()) {
        while ((nRead = stream.read(data, 0, data.length)) != -1) {
            buffer.write(data, 0, nRead);
        }

        buffer.flush();
    }

    String encodedContent = Base64.encodeBytes(buffer.toByteArray());

    // Probably not necessary...
    buffer.close();

    builder.startObject();
    builder.startObject("content");
    builder.field("_content_type", file.getContentType());
    builder.field("_title", file.getFilename());
    builder.field("_content", encodedContent);
    builder.endObject();
    builder.field("filename", file.getFilename());
    builder.field("contentType", file.getContentType());
    builder.field("md5", file.getMD5());
    builder.field("length", file.getLength());
    builder.field("chunkSize", file.getChunkSize());
    builder.field("uploadDate", file.getUploadDate());
    builder.startObject("metadata");
    DBObject metadata = file.getMetaData();
    if (metadata != null) {
        for (String key : metadata.keySet()) {
            builder.field(key, metadata.get(key));
        }
    }
    builder.endObject();
    builder.endObject();

    return builder;
}

From source file:org.exist.mongodb.xquery.gridfs.Get.java

License:Open Source License

/**
 *  Get document from GridFS
 */
Sequence get(GridFSDBFile gfsFile, boolean forceBinary) throws IOException, XPathException {

    // Obtain meta-data
    DBObject metadata = gfsFile.getMetaData();

    // Decompress when needed
    String compression = (metadata == null) ? null : (String) metadata.get(EXIST_COMPRESSION);
    boolean isGzipped = StringUtils.equals(compression, Constants.GZIP);
    InputStream is = isGzipped ? new GZIPInputStream(gfsFile.getInputStream()) : gfsFile.getInputStream();

    // Find what kind of data is stored
    int datatype = (metadata == null) ? Type.UNTYPED : (int) metadata.get(EXIST_DATATYPE);
    boolean hasXMLContentType = StringUtils.contains(gfsFile.getContentType(), "xml");
    boolean isXMLtype = (Type.DOCUMENT == datatype || Type.ELEMENT == datatype || hasXMLContentType);

    // Convert input stream to eXist-db object
    Sequence retVal;
    if (forceBinary || !isXMLtype) {
        retVal = Base64BinaryDocument.getInstance(context, is);

    } else {
        retVal = processXML(context, is);
    }
    return retVal;
}

From source file:org.exist.mongodb.xquery.gridfs.Stream.java

License:Open Source License

/**
 * Stream document to HTTP agent
 */
void stream(GridFSDBFile gfsFile, String documentId, Boolean setDisposition)
        throws IOException, XPathException {
    if (gfsFile == null) {
        throw new XPathException(this, GridfsModule.GRFS0004,
                String.format("Document '%s' could not be found.", documentId));
    }

    DBObject metadata = gfsFile.getMetaData();

    // Determine actual size
    String compression = (metadata == null) ? null : (String) metadata.get(EXIST_COMPRESSION);
    Long originalSize = (metadata == null) ? null : (Long) metadata.get(EXIST_ORIGINAL_SIZE);

    long length = gfsFile.getLength();
    if (originalSize != null) {
        length = originalSize;
    }

    // Stream response stream
    ResponseWrapper rw = getResponseWrapper(context);

    // Set HTTP Headers
    rw.addHeader(Constants.CONTENT_LENGTH, String.format("%s", length));

    // Set filename when required
    String filename = determineFilename(documentId, gfsFile);
    if (setDisposition && StringUtils.isNotBlank(filename)) {
        rw.addHeader(Constants.CONTENT_DISPOSITION, String.format("attachment;filename=%s", filename));
    }

    String contentType = getMimeType(gfsFile.getContentType(), filename);
    if (contentType != null) {
        rw.setContentType(contentType);
    }

    boolean isGzipSupported = isGzipEncodingSupported(context);

    // Stream data
    if ((StringUtils.isBlank(compression))) {
        // Write data as-is, no marker available that data is stored compressed
        try (OutputStream os = rw.getOutputStream()) {
            gfsFile.writeTo(os);
            os.flush();
        }

    } else {

        if (isGzipSupported && StringUtils.contains(compression, GZIP)) {
            // Write compressed data as-is, since data is stored as gzipped data and
            // the agent supports it.
            rw.addHeader(Constants.CONTENT_ENCODING, GZIP);
            try (OutputStream os = rw.getOutputStream()) {
                gfsFile.writeTo(os);
                os.flush();
            }

        } else {
            // Write data uncompressed
            try (OutputStream os = rw.getOutputStream()) {
                InputStream is = gfsFile.getInputStream();
                try (GZIPInputStream gzis = new GZIPInputStream(is)) {
                    IOUtils.copyLarge(gzis, os);
                    os.flush();
                }
            }
        }
    }
}

From source file:org.obiba.mica.file.impl.GridFsService.java

License:Open Source License

@Override
public InputStream getFile(String id) throws FileRuntimeException {
    GridFSDBFile f = gridFsOperations.findOne(new Query().addCriteria(Criteria.where("filename").is(id)));

    if (f == null)
        throw new FileRuntimeException(id);

    return f.getInputStream();
}

From source file:org.openmhealth.dsu.controller.DataPointController.java

License:Apache License

/**
 * Reads media data of a data point.
 *
 * @param id the identifier of the data point
 * @param mId the desired media id
 * @param authentication  user authentication
 * @return a matching data point, if found
 */
// only allow clients with read scope to read a data point
@PreAuthorize("#oauth2.clientHasRole('" + CLIENT_ROLE + "') and #oauth2.hasScope('" + DATA_POINT_READ_SCOPE
        + "')")
@RequestMapping(value = "/dataPoints/{id}/media/{mId}", method = { HEAD, GET }, produces = {
        MediaType.APPLICATION_OCTET_STREAM_VALUE, MediaType.ALL_VALUE })
public ResponseEntity<InputStreamResource> readDataPointMedia(@PathVariable String id, @PathVariable String mId,
        Authentication authentication) {
    Query query = new Query();

    query.addCriteria(where("metadata.data_point_id").is(id));
    query.addCriteria(where("metadata.user_id").is(getEndUserId(authentication)));
    query.addCriteria(where("metadata.media_id").is(mId));

    GridFSDBFile gridFsFile = gridFsOperations.findOne(query);

    if (gridFsFile != null) {
        HttpHeaders respHeaders = new HttpHeaders();
        respHeaders.setContentType(MediaType.parseMediaType(gridFsFile.getContentType()));
        respHeaders.setContentLength(gridFsFile.getLength());
        respHeaders.setContentDispositionFormData("attachment", mId);
        InputStreamResource inputStreamResource = new InputStreamResource(gridFsFile.getInputStream());
        return new ResponseEntity<>(inputStreamResource, respHeaders, OK);
    }
    return new ResponseEntity<>(NOT_FOUND);

}

From source file:org.openspotlight.storage.mongodb.MongoStorageSessionImpl.java

License:Open Source License

public byte[] readAsGridFS(final Partition partition, final Property property) throws Exception {
    final String key = getFileName(partition, property);
    final GridFS fs = getCachedGridFSForPartition(partition);
    final GridFSDBFile file = fs.findOne(key);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    IOUtils.copy(file.getInputStream(), baos);
    return baos.toByteArray();
}

From source file:org.opentestsystem.authoring.testitembank.service.impl.ApipZipOutputFileBuilderService.java

License:Open Source License

private static final void unzipToLocalDirectory(final String parentDir, final GridFSDBFile zipFile)
        throws IOException {
    ZipInputStream zipInputStream = null;
    try {
        zipInputStream = new ZipInputStream(zipFile.getInputStream());

        ZipEntry nextEntry = null;
        while ((nextEntry = zipInputStream.getNextEntry()) != null) {
            if (!nextEntry.isDirectory()) {
                // create local file
                final File f = new File(parentDir + "/" + nextEntry.getName());
                f.getParentFile().mkdirs();
                f.createNewFile();

                int len = 0;
                final byte[] buff = new byte[1024];

                // write entry to file
                FileOutputStream fos = null;
                try {
                    fos = new FileOutputStream(f);
                    while ((len = zipInputStream.read(buff)) > 0) {
                        fos.write(buff, 0, len);
                    }
                } catch (final FileNotFoundException e) {
                    LOGGER.error("unexcepted FileNotFoundException: ", e);
                } finally {
                    closeAndFlushQuietly(fos);
                    closeEntryQuiety(zipInputStream);
                }
            }
        }
    } finally {
        IOUtils.closeQuietly(zipInputStream);
    }
}

From source file:org.opentestsystem.delivery.testreg.rest.FileUploadDataController.java

License:Open Source License

/**
 * Validates a file given its format and gridFsId.
 *
 * @param gridFsId
 *        A {@link ModelAttribute} whose value is bound from the request mapping variables.
 * @param result
 *        An interface for binding results of all forms of validation
 * @param response
 *        HttpServletResponse for sending HTTP-specific responses
 * @return Returns {@link FileValidationResult}
 * @throws Exception
 */
@RequestMapping(value = "/validateFile/{gridFsId}", method = RequestMethod.GET, produces = {
        MediaType.APPLICATION_JSON_VALUE })
@Secured({ "ROLE_Accommodations Upload", "ROLE_Student Upload", "ROLE_Entity Upload",
        "ROLE_StudentGroup Upload", "ROLE_User Upload", "ROLE_ExplicitEligibility Upload" })
@ResponseBody
public List<FileValidationResult> validate(@ModelAttribute("gridFsId") final String gridFsId,
        final BindingResult result, final HttpServletResponse response) throws Exception {

    final long start = System.currentTimeMillis();
    final GridFSDBFile file = getGridFSDBFile(gridFsId);
    this.metricClient.sendPerformanceMetricToMna(buildMetricMessage(gridFsId, "validateFile->getGridFSDBFile"),
            System.currentTimeMillis() - start);

    long startMarker = System.currentTimeMillis();

    final List<FileValidationResult> returnList = Lists.newArrayList();

    // fileType cannot be null
    final FileType fileType = FileType.findByFilename(file.getFilename());
    // throws IllegalArgumentException when no enum is found from file-extension
    final UploadFileParser<Map<String, List<DataRecord>>> fileParser = this.fileParserMap.get(fileType);
    ParserResult<Map<String, List<DataRecord>>> parsedResult = null;
    try {
        parsedResult = fileParser.parse(file.getInputStream(), retrieveFormatTypeFromFileMetadata(file));
    } catch (final LocalizedException loc) {
        final FileValidationResult validationResult = new FileValidationResult();
        validationResult.addError(new ValidationMessage("Invalid File type: " + loc.getLocalizedMessage(),
                ValidationMessageType.FATAL_ERROR));
        returnList.add(validationResult);
        return returnList;
    }
    this.metricClient.sendPerformanceMetricToMna(buildMetricMessage(gridFsId, "validateFile->file parse"),
            System.currentTimeMillis() - startMarker);

    if (parsedResult.isEmpty()) {
        final FileValidationResult validationResult = new FileValidationResult();
        validationResult
                .addError(new ValidationMessage("Invalid File type", ValidationMessageType.FATAL_ERROR));
        returnList.add(validationResult);
        return returnList;
    }

    startMarker = System.currentTimeMillis();
    ValidationUtils.invokeValidator(this.fileUploadValidator, parsedResult.getParsedObject(), result);
    this.metricClient.sendPerformanceMetricToMna(buildMetricMessage(gridFsId, "validateFile->validation"),
            System.currentTimeMillis() - startMarker);

    if (result.hasErrors()) {
        Integer errorCountThresh = null;
        try {
            errorCountThresh = Integer.parseInt(this.errorCountThreshold);
        } catch (final NumberFormatException e) {
            errorCountThresh = DEFAULT_ERROR_THRESHOLD;
        }
        returnList.addAll(ValidationHelper.transform(result, errorCountThresh));
    }
    this.metricClient.sendPerformanceMetricToMna(buildMetricMessage(gridFsId, "validateFile->total time"),
            System.currentTimeMillis() - start);
    return returnList;
}