List of usage examples for `javax.imageio.ImageIO.getUseCache()`
public static boolean getUseCache()
From source file:fr.gael.dhus.datastore.processing.impl.ProcessProductImages.java
@Override public void run(final Product product) { if (ImageIO.getUseCache()) ImageIO.setUseCache(false); DrbNode node = null;//from w ww.j av a2 s . c o m URL url = product.getPath(); // Prepare the DRb node to be processed try { // First : force loading the model before accessing items. @SuppressWarnings("unused") DrbCortexModel model = DrbCortexModel.getDefaultModel(); node = ProcessingUtils.getNodeFromPath(url.getPath()); if (node == null) { throw new IOException("Cannot Instantiate Drb with URI \"" + url.toExternalForm() + "\"."); } } catch (Exception e) { logger.error("Exception raised while processing Quicklook", e); return; } if (!ImageFactory.isImage(node)) { logger.debug("No Image."); return; } RenderedImageList input_list = null; RenderedImage input_image = null; try { input_list = ImageFactory.createImage(node); input_image = RenderingFactory.createDefaultRendering(input_list); } catch (Exception e) { logger.debug("Cannot retrieve default rendering"); if (logger.isDebugEnabled()) { logger.debug("Error occurs during rendered image reader", e); } if (input_list == null) return; input_image = input_list; } int quicklook_width = cfgManager.getProductConfiguration().getQuicklookConfiguration().getWidth(); int quicklook_height = cfgManager.getProductConfiguration().getQuicklookConfiguration().getHeight(); boolean quicklook_cutting = cfgManager.getProductConfiguration().getQuicklookConfiguration().isCutting(); logger.info("Generating Quicklook " + quicklook_width + "x" + quicklook_height + " from " + input_image.getWidth() + "x" + input_image.getHeight()); RenderedImage image = ProcessingUtils.ResizeImage(input_image, quicklook_width, quicklook_height, 10f, quicklook_cutting); String product_id = product.getIdentifier(); if (product_id == null) product_id = "unknown"; // Manages the quicklook output File image_directory = incomingManager.getNewIncomingPath(); LockFactory lf = new NativeFSLockFactory(image_directory); Lock lock = lf.makeLock(".lock-writing"); 
try { lock.obtain(900000); } catch (Exception e) { logger.warn("Cannot lock incoming directory - continuing without (" + e.getMessage() + ")"); } File file = new File(image_directory, product_id + "-ql.jpg"); try { ImageIO.write(image, "jpg", file); product.setQuicklookPath(file.getPath()); product.setQuicklookSize(file.length()); } catch (IOException e) { logger.error("Cannot save quicklook.", e); } // Thumbnail int thumbnail_width = cfgManager.getProductConfiguration().getThumbnailConfiguration().getWidth(); int thumbnail_height = cfgManager.getProductConfiguration().getThumbnailConfiguration().getHeight(); boolean thumbnail_cutting = cfgManager.getProductConfiguration().getThumbnailConfiguration().isCutting(); logger.info("Generating Thumbnail " + thumbnail_width + "x" + thumbnail_height + " from " + input_image.getWidth() + "x" + input_image.getHeight() + " image."); image = ProcessingUtils.ResizeImage(input_image, thumbnail_width, thumbnail_height, 10f, thumbnail_cutting); // Manages the quicklook output file = new File(image_directory, product_id + "-th.jpg"); try { ImageIO.write(image, "jpg", file); product.setThumbnailPath(file.getPath()); product.setThumbnailSize(file.length()); } catch (IOException e) { logger.error("Cannot save thumbnail.", e); } SdiImageFactory.close(input_list); try { lock.close(); } catch (IOException e) { } }
From source file:fr.gael.dhus.datastore.processing.ProcessingManager.java
/** * Loads product images from Drb node and stores information inside the * product before returning it//from w w w .ja va 2 s . co m */ private Product extractImages(DrbNode productNode, Product product) { if (ImageIO.getUseCache()) ImageIO.setUseCache(false); if (!ImageFactory.isImage(productNode)) { LOGGER.debug("No Image."); return product; } RenderedImageList input_list = null; RenderedImage input_image = null; try { input_list = ImageFactory.createImage(productNode); input_image = RenderingFactory.createDefaultRendering(input_list); } catch (Exception e) { LOGGER.debug("Cannot retrieve default rendering"); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Error occurs during rendered image reader", e); } if (input_list == null) { return product; } input_image = input_list; } if (input_image == null) { return product; } // Generate Quicklook int quicklook_width = cfgManager.getProductConfiguration().getQuicklookConfiguration().getWidth(); int quicklook_height = cfgManager.getProductConfiguration().getQuicklookConfiguration().getHeight(); boolean quicklook_cutting = cfgManager.getProductConfiguration().getQuicklookConfiguration().isCutting(); LOGGER.info("Generating Quicklook " + quicklook_width + "x" + quicklook_height + " from " + input_image.getWidth() + "x" + input_image.getHeight()); RenderedImage image = ProcessingUtils.resizeImage(input_image, quicklook_width, quicklook_height, 10f, quicklook_cutting); // Manages the quicklook output File image_directory = incomingManager.getNewIncomingPath(); AsyncFileLock afl = null; try { Path path = Paths.get(image_directory.getAbsolutePath(), ".lock-writing"); afl = new AsyncFileLock(path); afl.obtain(900000); } catch (IOException | InterruptedException | TimeoutException e) { LOGGER.warn("Cannot lock incoming directory - continuing without (" + e.getMessage() + ")"); } String identifier = product.getIdentifier(); File file = new File(image_directory, identifier + "-ql.jpg"); try { if (ImageIO.write(image, "jpg", file)) 
{ product.setQuicklookPath(file.getPath()); product.setQuicklookSize(file.length()); } } catch (IOException e) { LOGGER.error("Cannot save quicklook.", e); } // Generate Thumbnail int thumbnail_width = cfgManager.getProductConfiguration().getThumbnailConfiguration().getWidth(); int thumbnail_height = cfgManager.getProductConfiguration().getThumbnailConfiguration().getHeight(); boolean thumbnail_cutting = cfgManager.getProductConfiguration().getThumbnailConfiguration().isCutting(); LOGGER.info("Generating Thumbnail " + thumbnail_width + "x" + thumbnail_height + " from " + input_image.getWidth() + "x" + input_image.getHeight() + " image."); image = ProcessingUtils.resizeImage(input_image, thumbnail_width, thumbnail_height, 10f, thumbnail_cutting); // Manages the thumbnail output file = new File(image_directory, identifier + "-th.jpg"); try { if (ImageIO.write(image, "jpg", file)) { product.setThumbnailPath(file.getPath()); product.setThumbnailSize(file.length()); } } catch (IOException e) { LOGGER.error("Cannot save thumbnail.", e); } SdiImageFactory.close(input_list); if (afl != null) { afl.close(); } return product; }
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
/**
 * Initializes this granule descriptor: caches the stream/reader SPIs for the
 * granule URL, derives the base grid-to-world transform from the raster's
 * level-0 dimensions, and (optionally) sets up the overviews controller.
 *
 * NOTE(review): checked I/O and illegal-state failures are rethrown as
 * IllegalArgumentException — callers see a single unchecked failure type.
 */
private void init(final BoundingBox granuleBBOX, final URL granuleUrl, final ImageReaderSpi suggestedSPI,
        final MultiLevelROI roiProvider, final boolean heterogeneousGranules,
        final boolean handleArtifactsFiltering, final Hints hints) {
    this.granuleBBOX = ReferencedEnvelope.reference(granuleBBOX);
    this.granuleUrl = granuleUrl;
    this.roiProvider = roiProvider;
    this.handleArtifactsFiltering = handleArtifactsFiltering;
    // Artifact filtering only makes sense when a ROI provider exists.
    filterMe = handleArtifactsFiltering && roiProvider != null;
    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        // get info about the raster we have to read
        //
        // Get a stream: resolve and cache the input-stream SPI on first use.
        if (cachedStreamSPI == null) {
            cachedStreamSPI = ImageIOExt.getImageInputStreamSPI(granuleUrl, true);
            if (cachedStreamSPI == null) {
                final File file = DataUtilities.urlToFile(granuleUrl);
                if (file != null) {
                    if (LOGGER.isLoggable(Level.WARNING)) {
                        LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                    }
                }
                throw new IllegalArgumentException(
                        "Unable to get an input stream for the provided granule " + granuleUrl.toString());
            }
        }
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null) {
            final File file = DataUtilities.urlToFile(granuleUrl);
            if (file != null) {
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                }
            }
            throw new IllegalArgumentException(
                    "Unable to get an input stream for the provided file " + granuleUrl.toString());
        }
        // Get a reader and try to cache the suggested SPI first; mark/reset is
        // used so the probing (canDecodeInput / getImageioReader) does not
        // consume the stream before reader.setInput() below.
        if (cachedReaderSPI == null) {
            inStream.mark();
            if (suggestedSPI != null && suggestedSPI.canDecodeInput(inStream)) {
                cachedReaderSPI = suggestedSPI;
                inStream.reset();
            } else {
                inStream.mark();
                reader = ImageIOExt.getImageioReader(inStream);
                if (reader != null)
                    cachedReaderSPI = reader.getOriginatingProvider();
                inStream.reset();
            }
        }
        // If the probing above did not produce a reader, create one from the SPI.
        if (reader == null) {
            if (cachedReaderSPI == null) {
                throw new IllegalArgumentException(
                        "Unable to get a ReaderSPI for the provided input: " + granuleUrl.toString());
            }
            reader = cachedReaderSPI.createReaderInstance();
        }
        if (reader == null)
            throw new IllegalArgumentException(
                    "Unable to get an ImageReader for the provided file " + granuleUrl.toString());
        boolean ignoreMetadata = customizeReaderInitialization(reader, hints);
        reader.setInput(inStream, false, ignoreMetadata);
        // get selected level and base level dimensions
        final Rectangle originalDimension = Utils.getDimension(0, reader);
        // build the g2W for this tile, in principle we should get it
        // somehow from the tile itself or from the index, but at the moment
        // we do not have such info, hence we assume that it is a simple
        // scale and translate
        this.geMapper = new GridToEnvelopeMapper(new GridEnvelope2D(originalDimension), granuleBBOX);
        geMapper.setPixelAnchor(PixelInCell.CELL_CENTER); //this is the default behavior but it is nice to write it down anyway
        this.baseGridToWorld = geMapper.createAffineTransform();
        // add the base level
        this.granuleLevels.put(Integer.valueOf(0),
                new GranuleOverviewLevelDescriptor(1, 1, originalDimension.width, originalDimension.height));
        ////////////////////// Setting overviewController ///////////////////////
        if (heterogeneousGranules) {
            // Right now we are setting up overviewsController by assuming that
            // overviews are internal images as happens in TIFF images.
            // We can improve this by leveraging on coverageReaders.
            //
            // Getting the first level descriptor
            final GranuleOverviewLevelDescriptor baseOverviewLevelDescriptor = granuleLevels.get(0);
            // Variables initialization
            final int numberOfOvervies = reader.getNumImages(true) - 1;
            final AffineTransform2D baseG2W = baseOverviewLevelDescriptor.getGridToWorldTransform();
            final int width = baseOverviewLevelDescriptor.getWidth();
            final int height = baseOverviewLevelDescriptor.getHeight();
            final double resX = AffineTransform2D.getScaleX0(baseG2W);
            final double resY = AffineTransform2D.getScaleY0(baseG2W);
            final double[] highestRes = new double[] { resX, resY };
            final double[][] overviewsResolution = new double[numberOfOvervies][2];
            // Populating overviews and initializing overviewsController;
            // overview i+1 is scaled relative to the base level dimensions.
            for (int i = 0; i < numberOfOvervies; i++) {
                overviewsResolution[i][0] = (highestRes[0] * width) / reader.getWidth(i + 1);
                overviewsResolution[i][1] = (highestRes[1] * height) / reader.getHeight(i + 1);
            }
            overviewsController = new OverviewsController(highestRes, numberOfOvervies, overviewsResolution);
        }
        //////////////////////////////////////////////////////////////////////////
        if (hints != null && hints.containsKey(Utils.CHECK_AUXILIARY_METADATA)) {
            boolean checkAuxiliaryMetadata = (Boolean) hints.get(Utils.CHECK_AUXILIARY_METADATA);
            if (checkAuxiliaryMetadata) {
                checkPamDataset();
            }
        }
    } catch (IllegalStateException e) {
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    } finally {
        // close/dispose stream and readers; dispose runs even if close throws.
        try {
            if (inStream != null) {
                inStream.close();
            }
        } catch (Throwable e) {
            throw new IllegalArgumentException(e);
        } finally {
            if (reader != null) {
                reader.dispose();
            }
        }
    }
}
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
/**
 * Load a specified raster as a portion of the granule described by this {@link GranuleDescriptor}.
 *
 * @param imageReadParameters the {@link ImageReadParam} to use for reading.
 * @param index the index to use for the {@link ImageReader}.
 * @param cropBBox the bbox to use for cropping.
 * @param mosaicWorldToGrid the cropping grid to world transform.
 * @param request the incoming request to satisfy.
 * @param hints {@link Hints} to be used for creating this raster.
 * @return the loaded portion of the granule, or {@code null} whenever the
 *         request does not intersect this granule or loading fails.
 * @throws IOException in case an error occurs.
 */
public GranuleLoadingResult loadRaster(final ImageReadParam imageReadParameters, final int index,
        final ReferencedEnvelope cropBBox, final MathTransform2D mosaicWorldToGrid,
        final RasterLayerRequest request, final Hints hints) throws IOException {
    if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
        final String name = Thread.currentThread().getName();
        LOGGER.finer("Thread:" + name + " Loading raster data for granuleDescriptor " + this.toString());
    }
    ImageReadParam readParameters = null;
    int imageIndex;
    // Footprint handling only applies when a ROI provider exists and the
    // request did not disable footprint behavior.
    final boolean useFootprint = roiProvider != null
            && request.getFootprintBehavior() != FootprintBehavior.None;
    Geometry inclusionGeometry = useFootprint ? roiProvider.getFootprint() : null;
    // Clip the granule bbox by the footprint envelope when footprints are used.
    final ReferencedEnvelope bbox = useFootprint
            ? new ReferencedEnvelope(granuleBBOX.intersection(inclusionGeometry.getEnvelopeInternal()),
                    granuleBBOX.getCoordinateReferenceSystem())
            : granuleBBOX;
    boolean doFiltering = false;
    if (filterMe && useFootprint) {
        doFiltering = Utils.areaIsDifferent(inclusionGeometry, baseGridToWorld, granuleBBOX);
    }
    // intersection of this tile bound with the current crop bbox
    final ReferencedEnvelope intersection = new ReferencedEnvelope(bbox.intersection(cropBBox),
            cropBBox.getCoordinateReferenceSystem());
    if (intersection.isEmpty()) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString())
                    .append(" with request ").append(request.toString())
                    .append(" Resulting in no granule loaded: Empty result").toString());
        }
        return null;
    }
    // check if the requested bbox intersects or overlaps the requested area
    if (useFootprint && inclusionGeometry != null
            && !JTS.toGeometry(cropBBox).intersects(inclusionGeometry)) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString())
                    .append(" with request ").append(request.toString())
                    .append(" Resulting in no granule loaded: Empty result").toString());
        }
        return null;
    }
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        // get info about the raster we have to read
        //
        // get a stream
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null)
            return null;
        // get a reader and try to cache the relevant SPI
        if (cachedReaderSPI == null) {
            reader = ImageIOExt.getImageioReader(inStream);
            if (reader != null)
                cachedReaderSPI = reader.getOriginatingProvider();
        } else
            reader = cachedReaderSPI.createReaderInstance();
        if (reader == null) {
            if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
                LOGGER.warning(new StringBuilder("Unable to get s reader for granuleDescriptor ")
                        .append(this.toString()).append(" with request ").append(request.toString())
                        .append(" Resulting in no granule loaded: Empty result").toString());
            }
            return null;
        }
        // set input
        customizeReaderInitialization(reader, hints);
        reader.setInput(inStream);
        // Checking for heterogeneous granules: in that case read parameters and
        // the image index come from the overviews controller instead of the caller.
        if (request.isHeterogeneousGranules()) {
            // create read parameters
            readParameters = new ImageReadParam();
            //override the overviews controller for the base layer
            imageIndex = ReadParamsController.setReadParams(
                    request.spatialRequestHelper.getRequestedResolution(), request.getOverviewPolicy(),
                    request.getDecimationPolicy(), readParameters, request.rasterManager,
                    overviewsController);
        } else {
            imageIndex = index;
            readParameters = imageReadParameters;
        }
        //get selected level and base level dimensions
        final GranuleOverviewLevelDescriptor selectedlevel = getLevel(imageIndex, reader);
        // now create the crop grid to world which can be used to decide
        // which source area we need to crop in the selected level taking
        // into account the scale factors imposed by the selection of this
        // level together with the base level grid to world transformation
        AffineTransform2D cropWorldToGrid = new AffineTransform2D(selectedlevel.gridToWorldTransformCorner);
        cropWorldToGrid = (AffineTransform2D) cropWorldToGrid.inverse();
        // computing the crop source area which lives into the
        // selected level raster space, NOTICE that at the end we need to
        // take into account the fact that we might also decimate therefore
        // we cannot just use the crop grid to world but we need to correct
        // it.
        final Rectangle sourceArea = CRS.transform(cropWorldToGrid, intersection).toRectangle2D().getBounds();
        // gutter: pad by 2px at the native level to absorb resampling edges
        if (selectedlevel.baseToLevelTransform.isIdentity()) {
            sourceArea.grow(2, 2);
        }
        XRectangle2D.intersect(sourceArea, selectedlevel.rasterDimensions, sourceArea); //make sure roundings don't bother us
        // is it empty??
        if (sourceArea.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.fine("Got empty area for granuleDescriptor " + this.toString() + " with request "
                        + request.toString() + " Resulting in no granule loaded: Empty result");
            }
            return null;
        } else if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
            LOGGER.finer("Loading level " + imageIndex + " with source region: " + sourceArea
                    + " subsampling: " + readParameters.getSourceXSubsampling() + ","
                    + readParameters.getSourceYSubsampling() + " for granule:" + granuleUrl);
        }
        // Setting subsampling: special-cased for the Kakadu JP2 plugin, which
        // takes a single combined factor (clamped by maxDecimationFactor).
        int newSubSamplingFactor = 0;
        final String pluginName = cachedReaderSPI.getPluginClassName();
        if (pluginName != null && pluginName.equals(ImageUtilities.DIRECT_KAKADU_PLUGIN)) {
            final int ssx = readParameters.getSourceXSubsampling();
            final int ssy = readParameters.getSourceYSubsampling();
            newSubSamplingFactor = ImageIOUtilities.getSubSamplingFactor2(ssx, ssy);
            if (newSubSamplingFactor != 0) {
                if (newSubSamplingFactor > maxDecimationFactor && maxDecimationFactor != -1) {
                    newSubSamplingFactor = maxDecimationFactor;
                }
                readParameters.setSourceSubsampling(newSubSamplingFactor, newSubSamplingFactor, 0, 0);
            }
        }
        // set the source region
        readParameters.setSourceRegion(sourceArea);
        RenderedImage raster;
        try {
            // read
            raster = request.getReadType().read(readParameters, imageIndex, granuleUrl,
                    selectedlevel.rasterDimensions, reader, hints, false);
        } catch (Throwable e) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.log(java.util.logging.Level.FINE,
                        "Unable to load raster for granuleDescriptor " + this.toString() + " with request "
                                + request.toString() + " Resulting in no granule loaded: Empty result",
                        e);
            }
            return null;
        }
        // use fixed source area
        sourceArea.setRect(readParameters.getSourceRegion());
        //
        // setting new coefficients to define a new affineTransformation
        // to be applied to the grid to world transformation
        // -----------------------------------------------------------------------------------
        //
        // With respect to the original envelope, the obtained planarImage
        // needs to be rescaled. The scaling factors are computed as the
        // ratio between the cropped source region sizes and the read
        // image sizes.
        //
        // place it in the mosaic using the coords created above;
        double decimationScaleX = ((1.0 * sourceArea.width) / raster.getWidth());
        double decimationScaleY = ((1.0 * sourceArea.height) / raster.getHeight());
        final AffineTransform decimationScaleTranform = XAffineTransform.getScaleInstance(decimationScaleX,
                decimationScaleY);
        // keep into account translation to work into the selected level raster space
        final AffineTransform afterDecimationTranslateTranform = XAffineTransform
                .getTranslateInstance(sourceArea.x, sourceArea.y);
        // now we need to go back to the base level raster space
        final AffineTransform backToBaseLevelScaleTransform = selectedlevel.baseToLevelTransform;
        // now create the overall transform
        final AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
        finalRaster2Model.concatenate(CoverageUtilities.CENTER_TO_CORNER);
        // Skip near-identity factors to keep the chain numerically clean.
        if (!XAffineTransform.isIdentity(backToBaseLevelScaleTransform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(backToBaseLevelScaleTransform);
        if (!XAffineTransform.isIdentity(afterDecimationTranslateTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(afterDecimationTranslateTranform);
        if (!XAffineTransform.isIdentity(decimationScaleTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(decimationScaleTranform);
        // adjust roi
        if (useFootprint) {
            ROIGeometry transformed;
            try {
                transformed = roiProvider.getTransformedROI(finalRaster2Model.createInverse());
                if (transformed.getAsGeometry().isEmpty()) {
                    // inset might have killed the geometry fully
                    return null;
                }
                PlanarImage pi = PlanarImage.wrapRenderedImage(raster);
                if (!transformed.intersects(pi.getBounds())) {
                    return null;
                }
                pi.setProperty("ROI", transformed);
                raster = pi;
            } catch (NoninvertibleTransformException e) {
                if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                    LOGGER.info("Unable to create a granuleDescriptor " + this.toString()
                            + " due to a problem when managing the ROI");
                return null;
            }
        }
        // keep into account translation factors to place this tile
        finalRaster2Model.preConcatenate((AffineTransform) mosaicWorldToGrid);
        final Interpolation interpolation = request.getInterpolation();
        //paranoiac check to avoid that JAI freaks out when computing its internal layouT on images that are too small
        Rectangle2D finalLayout = ImageUtilities.layoutHelper(raster, (float) finalRaster2Model.getScaleX(),
                (float) finalRaster2Model.getScaleY(), (float) finalRaster2Model.getTranslateX(),
                (float) finalRaster2Model.getTranslateY(), interpolation);
        if (finalLayout.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                LOGGER.info("Unable to create a granuleDescriptor " + this.toString()
                        + " due to jai scale bug creating a null source area");
            return null;
        }
        // apply the affine transform conserving indexed color model
        final RenderingHints localHints = new RenderingHints(JAI.KEY_REPLACE_INDEX_COLOR_MODEL,
                interpolation instanceof InterpolationNearest ? Boolean.FALSE : Boolean.TRUE);
        if (XAffineTransform.isIdentity(finalRaster2Model, Utils.AFFINE_IDENTITY_EPS)) {
            return new GranuleLoadingResult(raster, null, granuleUrl, doFiltering, pamDataset);
        } else {
            //
            // In case we are asked to use certain tile dimensions we tile
            // also at this stage in case the read type is Direct since
            // buffered images comes up untiled and this can affect the
            // performances of the subsequent affine operation.
            //
            final Dimension tileDimensions = request.getTileDimensions();
            if (tileDimensions != null && request.getReadType().equals(ReadType.DIRECT_READ)) {
                // NOTE(review): width is used as tile height and vice versa here —
                // looks swapped; confirm against upstream before changing.
                final ImageLayout layout = new ImageLayout();
                layout.setTileHeight(tileDimensions.width).setTileWidth(tileDimensions.height);
                localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
            } else {
                if (hints != null && hints.containsKey(JAI.KEY_IMAGE_LAYOUT)) {
                    final Object layout = hints.get(JAI.KEY_IMAGE_LAYOUT);
                    if (layout != null && layout instanceof ImageLayout) {
                        localHints.add(
                                new RenderingHints(JAI.KEY_IMAGE_LAYOUT, ((ImageLayout) layout).clone()));
                    }
                }
            }
            // Propagate caller-provided JAI hints (tile cache/scheduler/border extender).
            if (hints != null && hints.containsKey(JAI.KEY_TILE_CACHE)) {
                final Object cache = hints.get(JAI.KEY_TILE_CACHE);
                if (cache != null && cache instanceof TileCache)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, (TileCache) cache));
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_SCHEDULER)) {
                final Object scheduler = hints.get(JAI.KEY_TILE_SCHEDULER);
                if (scheduler != null && scheduler instanceof TileScheduler)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_SCHEDULER, (TileScheduler) scheduler));
            }
            boolean addBorderExtender = true;
            if (hints != null && hints.containsKey(JAI.KEY_BORDER_EXTENDER)) {
                final Object extender = hints.get(JAI.KEY_BORDER_EXTENDER);
                if (extender != null && extender instanceof BorderExtender) {
                    localHints.add(new RenderingHints(JAI.KEY_BORDER_EXTENDER, (BorderExtender) extender));
                    addBorderExtender = false;
                }
            }
            // BORDER extender
            if (addBorderExtender) {
                localHints.add(ImageUtilities.BORDER_EXTENDER_HINTS);
            }
            ImageWorker iw = new ImageWorker(raster);
            iw.setRenderingHints(localHints);
            iw.affine(finalRaster2Model, interpolation, request.getBackgroundValues());
            return new GranuleLoadingResult(iw.getRenderedImage(), null, granuleUrl, doFiltering, pamDataset);
        }
    } catch (IllegalStateException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(),
                    e);
        }
        return null;
    } catch (org.opengis.referencing.operation.NoninvertibleTransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(),
                    e);
        }
        return null;
    } catch (TransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(),
                    e);
        }
        return null;
    } finally {
        // For JAI_IMAGEREAD the deferred read keeps stream/reader alive, so
        // they must NOT be closed here; otherwise release them now.
        try {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && inStream != null) {
                inStream.close();
            }
        } finally {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && reader != null) {
                reader.dispose();
            }
        }
    }
}
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
/**
 * Returns the overview-level descriptor for the given level index, opening a
 * fresh stream/reader on the granule URL to do so.
 *
 * NOTE(review): on failure the stream/reader are cleaned up here and the cause
 * is rethrown as IllegalArgumentException; on success the delegated
 * getLevel(index, reader) is responsible for closing everything.
 */
GranuleOverviewLevelDescriptor getLevel(final int index) {
    //load level
    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        // get a stream
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null)
            throw new IllegalArgumentException("Unable to create an inputstream for the granuleurl:"
                    + (granuleUrl != null ? granuleUrl : "null"));
        // get a reader and try to cache the relevant SPI
        if (cachedReaderSPI == null) {
            reader = ImageIOExt.getImageioReader(inStream);
            if (reader != null)
                cachedReaderSPI = reader.getOriginatingProvider();
        } else
            reader = cachedReaderSPI.createReaderInstance();
        if (reader == null)
            throw new IllegalArgumentException(
                    "Unable to get an ImageReader for the provided file " + granuleUrl.toString());
        final boolean ignoreMetadata = customizeReaderInitialization(reader, null);
        reader.setInput(inStream, false, ignoreMetadata);
        // call internal method which will close everything
        return getLevel(index, reader);
    } catch (IllegalStateException e) {
        // clean up; close failures are ignored so the original cause is rethrown
        try {
            if (inStream != null)
                inStream.close();
        } catch (Throwable ee) {
            // best-effort close — the IllegalStateException below takes priority
        } finally {
            if (reader != null)
                reader.dispose();
        }
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        // clean up; close failures are ignored so the original cause is rethrown
        try {
            if (inStream != null)
                inStream.close();
        } catch (Throwable ee) {
            // best-effort close — the IOException below takes priority
        } finally {
            if (reader != null)
                reader.dispose();
        }
        throw new IllegalArgumentException(e);
    }
}