List of usage examples for javax.imageio ImageIO getCacheDirectory
public static File getCacheDirectory()
From source file:com.google.gwt.dev.shell.jetty.JettyLauncher.java
/**
 * Triggers, up front, a set of JRE singletons that would otherwise be lazily
 * initialized by web applications and end up pinning the webapp class loader.
 *
 * This is a modified version of JreMemoryLeakPreventionListener.java found
 * in the Apache Tomcat project at
 *
 * http://svn.apache.org/repos/asf/tomcat/trunk/java/org/apache/catalina/core/
 * JreMemoryLeakPreventionListener.java
 *
 * Relevant part of the Tomcat NOTICE, retrieved from
 * http://svn.apache.org/repos/asf/tomcat/trunk/NOTICE Apache Tomcat Copyright
 * 1999-2010 The Apache Software Foundation
 *
 * This product includes software developed by The Apache Software Foundation
 * (http://www.apache.org/).
 *
 * @param logger sink for reporting any failure of an individual prevention step;
 *               failures are logged and the remaining steps still run
 */
private void jreLeakPrevention(TreeLogger logger) {
    // Trigger a call to sun.awt.AppContext.getAppContext(). This will
    // pin the common class loader in memory but that shouldn't be an
    // issue.
    ImageIO.getCacheDirectory();

    /*
     * Several components end up calling: sun.misc.GC.requestLatency(long)
     *
     * Those libraries / components known to trigger memory leaks due to
     * eventual calls to requestLatency(long) are:
     * - javax.management.remote.rmi.RMIConnectorServer.start()
     */
    // Reflection is used because sun.misc.GC is a non-portable internal class
    // that may be absent on some JREs; one hour (3600000 ms) matches Tomcat's
    // choice of latency.
    try {
        Class<?> clazz = Class.forName("sun.misc.GC");
        Method method = clazz.getDeclaredMethod("requestLatency", new Class[] { long.class });
        method.invoke(null, Long.valueOf(3600000));
    } catch (ClassNotFoundException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.gcDaemonFail", e);
    } catch (SecurityException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.gcDaemonFail", e);
    } catch (NoSuchMethodException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.gcDaemonFail", e);
    } catch (IllegalArgumentException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.gcDaemonFail", e);
    } catch (IllegalAccessException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.gcDaemonFail", e);
    } catch (InvocationTargetException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.gcDaemonFail", e);
    }

    /*
     * Calling getPolicy retains a static reference to the context class loader.
     */
    // Invoked reflectively (instead of `Policy.getPolicy()`) because the class
    // is deprecated; only the static initializer side effect is wanted here.
    try {
        // Policy.getPolicy();
        Class<?> policyClass = Class.forName("javax.security.auth.Policy");
        Method method = policyClass.getMethod("getPolicy");
        method.invoke(null);
    } catch (ClassNotFoundException e) {
        // Ignore. The class is deprecated.
    } catch (SecurityException e) {
        // Ignore. Don't need call to getPolicy() to be successful,
        // just need to trigger static initializer.
    } catch (NoSuchMethodException e) {
        logger.log(TreeLogger.WARN, "jreLeakPrevention.authPolicyFail", e);
    } catch (IllegalArgumentException e) {
        logger.log(TreeLogger.WARN, "jreLeakPrevention.authPolicyFail", e);
    } catch (IllegalAccessException e) {
        logger.log(TreeLogger.WARN, "jreLeakPrevention.authPolicyFail", e);
    } catch (InvocationTargetException e) {
        logger.log(TreeLogger.WARN, "jreLeakPrevention.authPolicyFail", e);
    }

    /*
     * Creating a MessageDigest during web application startup initializes the
     * Java Cryptography Architecture. Under certain conditions this starts a
     * Token poller thread with TCCL equal to the web application class loader.
     *
     * Instead we initialize JCA right now.
     */
    java.security.Security.getProviders();

    /*
     * Several components end up opening JarURLConnections without first
     * disabling caching. This effectively locks the file. Whilst more
     * noticeable and harder to ignore on Windows, it affects all operating
     * systems.
     *
     * Those libraries/components known to trigger this issue include:
     * - log4j versions 1.2.15 and earlier
     * - javax.xml.bind.JAXBContext.newInstance()
     */
    // Set the default URL caching policy to not to cache
    try {
        // Doesn't matter that this JAR doesn't exist - just as long as
        // the URL is well-formed
        URL url = new URL("jar:file://dummy.jar!/");
        URLConnection uConn = url.openConnection();
        // setDefaultUseCaches is a JVM-wide setting, despite being an
        // instance method -- hence the dummy connection.
        uConn.setDefaultUseCaches(false);
    } catch (MalformedURLException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.jarUrlConnCacheFail", e);
    } catch (IOException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.jarUrlConnCacheFail", e);
    }

    /*
     * Haven't got to the root of what is going on with this leak but if a web
     * app is the first to make the calls below the web application class loader
     * will be pinned in memory.
     */
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    try {
        factory.newDocumentBuilder();
    } catch (ParserConfigurationException e) {
        logger.log(TreeLogger.ERROR, "jreLeakPrevention.xmlParseFail", e);
    }
}
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
/**
 * Initializes this descriptor's state for one granule: stores the bounding box,
 * URL and filtering configuration, resolves (and caches) the stream/reader SPIs
 * for the granule, derives the base grid-to-world transform from the level-0
 * raster dimensions, and optionally sets up the overviews controller for
 * heterogeneous mosaics.
 *
 * Any IllegalStateException/IOException raised during probing is rethrown as
 * IllegalArgumentException; the probe stream and reader are always released.
 */
private void init(final BoundingBox granuleBBOX, final URL granuleUrl, final ImageReaderSpi suggestedSPI,
        final MultiLevelROI roiProvider, final boolean heterogeneousGranules,
        final boolean handleArtifactsFiltering, final Hints hints) {
    this.granuleBBOX = ReferencedEnvelope.reference(granuleBBOX);
    this.granuleUrl = granuleUrl;
    this.roiProvider = roiProvider;
    this.handleArtifactsFiltering = handleArtifactsFiltering;
    // Artifact filtering only makes sense when a footprint/ROI is available.
    filterMe = handleArtifactsFiltering && roiProvider != null;

    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        // get info about the raster we have to read
        //

        // get a stream; the stream SPI is cached on first use so subsequent
        // loads of this granule skip the lookup
        if (cachedStreamSPI == null) {
            cachedStreamSPI = ImageIOExt.getImageInputStreamSPI(granuleUrl, true);
            if (cachedStreamSPI == null) {
                // Log file details (if the URL maps to a file) to help diagnose
                // why no stream SPI could be found, then fail.
                final File file = DataUtilities.urlToFile(granuleUrl);
                if (file != null) {
                    if (LOGGER.isLoggable(Level.WARNING)) {
                        LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                    }
                }
                throw new IllegalArgumentException(
                        "Unable to get an input stream for the provided granule " + granuleUrl.toString());
            }
        }
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null) {
            final File file = DataUtilities.urlToFile(granuleUrl);
            if (file != null) {
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                }
            }
            throw new IllegalArgumentException(
                    "Unable to get an input stream for the provided file " + granuleUrl.toString());
        }

        // get a reader and try to cache the suggested SPI first; only fall back
        // to a full ImageIO lookup when the suggestion cannot decode the stream.
        // NOTE(review): the else branch calls mark() a second time but reset()s
        // only once; ImageInputStream keeps a stack of marks, so the outer mark
        // stays registered -- confirm this is intentional.
        if (cachedReaderSPI == null) {
            inStream.mark();
            if (suggestedSPI != null && suggestedSPI.canDecodeInput(inStream)) {
                cachedReaderSPI = suggestedSPI;
                inStream.reset();
            } else {
                inStream.mark();
                reader = ImageIOExt.getImageioReader(inStream);
                if (reader != null)
                    cachedReaderSPI = reader.getOriginatingProvider();
                inStream.reset();
            }
        }
        // The lookup above may have produced a reader already; otherwise build
        // one from the cached SPI, failing if no SPI could be determined.
        if (reader == null) {
            if (cachedReaderSPI == null) {
                throw new IllegalArgumentException(
                        "Unable to get a ReaderSPI for the provided input: " + granuleUrl.toString());
            }
            reader = cachedReaderSPI.createReaderInstance();
        }
        if (reader == null)
            throw new IllegalArgumentException(
                    "Unable to get an ImageReader for the provided file " + granuleUrl.toString());
        boolean ignoreMetadata = customizeReaderInitialization(reader, hints);
        reader.setInput(inStream, false, ignoreMetadata);

        // get selected level and base level dimensions
        final Rectangle originalDimension = Utils.getDimension(0, reader);

        // build the g2W for this tile, in principle we should get it
        // somehow from the tile itself or from the index, but at the moment
        // we do not have such info, hence we assume that it is a simple
        // scale and translate
        this.geMapper = new GridToEnvelopeMapper(new GridEnvelope2D(originalDimension), granuleBBOX);
        // this is the default behavior but it is nice to write it down anyway
        geMapper.setPixelAnchor(PixelInCell.CELL_CENTER);
        this.baseGridToWorld = geMapper.createAffineTransform();

        // add the base level
        this.granuleLevels.put(Integer.valueOf(0), new GranuleOverviewLevelDescriptor(1, 1,
                originalDimension.width, originalDimension.height));

        ////////////////////// Setting overviewController ///////////////////////
        if (heterogeneousGranules) {
            //
            // Right now we are setting up overviewsController by assuming that
            // overviews are internal images as happens in TIFF images.
            // We can improve this by leveraging on coverageReaders.
            //

            // Getting the first level descriptor
            final GranuleOverviewLevelDescriptor baseOverviewLevelDescriptor = granuleLevels.get(0);

            // Variables initialization
            final int numberOfOvervies = reader.getNumImages(true) - 1;
            final AffineTransform2D baseG2W = baseOverviewLevelDescriptor.getGridToWorldTransform();
            final int width = baseOverviewLevelDescriptor.getWidth();
            final int height = baseOverviewLevelDescriptor.getHeight();
            final double resX = AffineTransform2D.getScaleX0(baseG2W);
            final double resY = AffineTransform2D.getScaleY0(baseG2W);
            final double[] highestRes = new double[] { resX, resY };
            final double[][] overviewsResolution = new double[numberOfOvervies][2];

            // Populating overviews and initializing overviewsController;
            // overview i+1's resolution is derived from the size ratio against
            // the base level.
            for (int i = 0; i < numberOfOvervies; i++) {
                overviewsResolution[i][0] = (highestRes[0] * width) / reader.getWidth(i + 1);
                overviewsResolution[i][1] = (highestRes[1] * height) / reader.getHeight(i + 1);
            }
            overviewsController = new OverviewsController(highestRes, numberOfOvervies, overviewsResolution);
        }
        //////////////////////////////////////////////////////////////////////////

        if (hints != null && hints.containsKey(Utils.CHECK_AUXILIARY_METADATA)) {
            boolean checkAuxiliaryMetadata = (Boolean) hints.get(Utils.CHECK_AUXILIARY_METADATA);
            if (checkAuxiliaryMetadata) {
                checkPamDataset();
            }
        }
    } catch (IllegalStateException e) {
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    } finally {
        // close/dispose stream and readers
        try {
            if (inStream != null) {
                inStream.close();
            }
        } catch (Throwable e) {
            throw new IllegalArgumentException(e);
        } finally {
            if (reader != null) {
                reader.dispose();
            }
        }
    }
}
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
/** * Load a specified a raster as a portion of the granule describe by this {@link GranuleDescriptor}. * * @param imageReadParameters the {@link ImageReadParam} to use for reading. * @param index the index to use for the {@link ImageReader}. * @param cropBBox the bbox to use for cropping. * @param mosaicWorldToGrid the cropping grid to world transform. * @param request the incoming request to satisfy. * @param hints {@link Hints} to be used for creating this raster. * @return a specified a raster as a portion of the granule describe by this {@link GranuleDescriptor}. * @throws IOException in case an error occurs. *///from w w w . j a v a 2 s.co m public GranuleLoadingResult loadRaster(final ImageReadParam imageReadParameters, final int index, final ReferencedEnvelope cropBBox, final MathTransform2D mosaicWorldToGrid, final RasterLayerRequest request, final Hints hints) throws IOException { if (LOGGER.isLoggable(java.util.logging.Level.FINER)) { final String name = Thread.currentThread().getName(); LOGGER.finer("Thread:" + name + " Loading raster data for granuleDescriptor " + this.toString()); } ImageReadParam readParameters = null; int imageIndex; final boolean useFootprint = roiProvider != null && request.getFootprintBehavior() != FootprintBehavior.None; Geometry inclusionGeometry = useFootprint ? roiProvider.getFootprint() : null; final ReferencedEnvelope bbox = useFootprint ? 
new ReferencedEnvelope(granuleBBOX.intersection(inclusionGeometry.getEnvelopeInternal()), granuleBBOX.getCoordinateReferenceSystem()) : granuleBBOX; boolean doFiltering = false; if (filterMe && useFootprint) { doFiltering = Utils.areaIsDifferent(inclusionGeometry, baseGridToWorld, granuleBBOX); } // intersection of this tile bound with the current crop bbox final ReferencedEnvelope intersection = new ReferencedEnvelope(bbox.intersection(cropBBox), cropBBox.getCoordinateReferenceSystem()); if (intersection.isEmpty()) { if (LOGGER.isLoggable(java.util.logging.Level.FINE)) { LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString()) .append(" with request ").append(request.toString()) .append(" Resulting in no granule loaded: Empty result").toString()); } return null; } // check if the requested bbox intersects or overlaps the requested area if (useFootprint && inclusionGeometry != null && !JTS.toGeometry(cropBBox).intersects(inclusionGeometry)) { if (LOGGER.isLoggable(java.util.logging.Level.FINE)) { LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString()) .append(" with request ").append(request.toString()) .append(" Resulting in no granule loaded: Empty result").toString()); } return null; } ImageInputStream inStream = null; ImageReader reader = null; try { // //get info about the raster we have to read // // get a stream assert cachedStreamSPI != null : "no cachedStreamSPI available!"; inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory()); if (inStream == null) return null; // get a reader and try to cache the relevant SPI if (cachedReaderSPI == null) { reader = ImageIOExt.getImageioReader(inStream); if (reader != null) cachedReaderSPI = reader.getOriginatingProvider(); } else reader = cachedReaderSPI.createReaderInstance(); if (reader == null) { if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) { LOGGER.warning(new 
StringBuilder("Unable to get s reader for granuleDescriptor ") .append(this.toString()).append(" with request ").append(request.toString()) .append(" Resulting in no granule loaded: Empty result").toString()); } return null; } // set input customizeReaderInitialization(reader, hints); reader.setInput(inStream); // Checking for heterogeneous granules if (request.isHeterogeneousGranules()) { // create read parameters readParameters = new ImageReadParam(); //override the overviews controller for the base layer imageIndex = ReadParamsController.setReadParams( request.spatialRequestHelper.getRequestedResolution(), request.getOverviewPolicy(), request.getDecimationPolicy(), readParameters, request.rasterManager, overviewsController); } else { imageIndex = index; readParameters = imageReadParameters; } //get selected level and base level dimensions final GranuleOverviewLevelDescriptor selectedlevel = getLevel(imageIndex, reader); // now create the crop grid to world which can be used to decide // which source area we need to crop in the selected level taking // into account the scale factors imposed by the selection of this // level together with the base level grid to world transformation AffineTransform2D cropWorldToGrid = new AffineTransform2D(selectedlevel.gridToWorldTransformCorner); cropWorldToGrid = (AffineTransform2D) cropWorldToGrid.inverse(); // computing the crop source area which lives into the // selected level raster space, NOTICE that at the end we need to // take into account the fact that we might also decimate therefore // we cannot just use the crop grid to world but we need to correct // it. final Rectangle sourceArea = CRS.transform(cropWorldToGrid, intersection).toRectangle2D().getBounds(); //gutter if (selectedlevel.baseToLevelTransform.isIdentity()) { sourceArea.grow(2, 2); } XRectangle2D.intersect(sourceArea, selectedlevel.rasterDimensions, sourceArea);//make sure roundings don't bother us // is it empty?? 
if (sourceArea.isEmpty()) { if (LOGGER.isLoggable(java.util.logging.Level.FINE)) { LOGGER.fine("Got empty area for granuleDescriptor " + this.toString() + " with request " + request.toString() + " Resulting in no granule loaded: Empty result"); } return null; } else if (LOGGER.isLoggable(java.util.logging.Level.FINER)) { LOGGER.finer("Loading level " + imageIndex + " with source region: " + sourceArea + " subsampling: " + readParameters.getSourceXSubsampling() + "," + readParameters.getSourceYSubsampling() + " for granule:" + granuleUrl); } // Setting subsampling int newSubSamplingFactor = 0; final String pluginName = cachedReaderSPI.getPluginClassName(); if (pluginName != null && pluginName.equals(ImageUtilities.DIRECT_KAKADU_PLUGIN)) { final int ssx = readParameters.getSourceXSubsampling(); final int ssy = readParameters.getSourceYSubsampling(); newSubSamplingFactor = ImageIOUtilities.getSubSamplingFactor2(ssx, ssy); if (newSubSamplingFactor != 0) { if (newSubSamplingFactor > maxDecimationFactor && maxDecimationFactor != -1) { newSubSamplingFactor = maxDecimationFactor; } readParameters.setSourceSubsampling(newSubSamplingFactor, newSubSamplingFactor, 0, 0); } } // set the source region readParameters.setSourceRegion(sourceArea); RenderedImage raster; try { // read raster = request.getReadType().read(readParameters, imageIndex, granuleUrl, selectedlevel.rasterDimensions, reader, hints, false); } catch (Throwable e) { if (LOGGER.isLoggable(java.util.logging.Level.FINE)) { LOGGER.log(java.util.logging.Level.FINE, "Unable to load raster for granuleDescriptor " + this.toString() + " with request " + request.toString() + " Resulting in no granule loaded: Empty result", e); } return null; } // use fixed source area sourceArea.setRect(readParameters.getSourceRegion()); // // setting new coefficients to define a new affineTransformation // to be applied to the grid to world transformation // 
----------------------------------------------------------------------------------- // // With respect to the original envelope, the obtained planarImage // needs to be rescaled. The scaling factors are computed as the // ratio between the cropped source region sizes and the read // image sizes. // // place it in the mosaic using the coords created above; double decimationScaleX = ((1.0 * sourceArea.width) / raster.getWidth()); double decimationScaleY = ((1.0 * sourceArea.height) / raster.getHeight()); final AffineTransform decimationScaleTranform = XAffineTransform.getScaleInstance(decimationScaleX, decimationScaleY); // keep into account translation to work into the selected level raster space final AffineTransform afterDecimationTranslateTranform = XAffineTransform .getTranslateInstance(sourceArea.x, sourceArea.y); // now we need to go back to the base level raster space final AffineTransform backToBaseLevelScaleTransform = selectedlevel.baseToLevelTransform; // now create the overall transform final AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld); finalRaster2Model.concatenate(CoverageUtilities.CENTER_TO_CORNER); if (!XAffineTransform.isIdentity(backToBaseLevelScaleTransform, Utils.AFFINE_IDENTITY_EPS)) finalRaster2Model.concatenate(backToBaseLevelScaleTransform); if (!XAffineTransform.isIdentity(afterDecimationTranslateTranform, Utils.AFFINE_IDENTITY_EPS)) finalRaster2Model.concatenate(afterDecimationTranslateTranform); if (!XAffineTransform.isIdentity(decimationScaleTranform, Utils.AFFINE_IDENTITY_EPS)) finalRaster2Model.concatenate(decimationScaleTranform); // adjust roi if (useFootprint) { ROIGeometry transformed; try { transformed = roiProvider.getTransformedROI(finalRaster2Model.createInverse()); if (transformed.getAsGeometry().isEmpty()) { // inset might have killed the geometry fully return null; } PlanarImage pi = PlanarImage.wrapRenderedImage(raster); if (!transformed.intersects(pi.getBounds())) { return null; } 
pi.setProperty("ROI", transformed); raster = pi; } catch (NoninvertibleTransformException e) { if (LOGGER.isLoggable(java.util.logging.Level.INFO)) LOGGER.info("Unable to create a granuleDescriptor " + this.toString() + " due to a problem when managing the ROI"); return null; } } // keep into account translation factors to place this tile finalRaster2Model.preConcatenate((AffineTransform) mosaicWorldToGrid); final Interpolation interpolation = request.getInterpolation(); //paranoiac check to avoid that JAI freaks out when computing its internal layouT on images that are too small Rectangle2D finalLayout = ImageUtilities.layoutHelper(raster, (float) finalRaster2Model.getScaleX(), (float) finalRaster2Model.getScaleY(), (float) finalRaster2Model.getTranslateX(), (float) finalRaster2Model.getTranslateY(), interpolation); if (finalLayout.isEmpty()) { if (LOGGER.isLoggable(java.util.logging.Level.INFO)) LOGGER.info("Unable to create a granuleDescriptor " + this.toString() + " due to jai scale bug creating a null source area"); return null; } // apply the affine transform conserving indexed color model final RenderingHints localHints = new RenderingHints(JAI.KEY_REPLACE_INDEX_COLOR_MODEL, interpolation instanceof InterpolationNearest ? Boolean.FALSE : Boolean.TRUE); if (XAffineTransform.isIdentity(finalRaster2Model, Utils.AFFINE_IDENTITY_EPS)) { return new GranuleLoadingResult(raster, null, granuleUrl, doFiltering, pamDataset); } else { // // In case we are asked to use certain tile dimensions we tile // also at this stage in case the read type is Direct since // buffered images comes up untiled and this can affect the // performances of the subsequent affine operation. 
// final Dimension tileDimensions = request.getTileDimensions(); if (tileDimensions != null && request.getReadType().equals(ReadType.DIRECT_READ)) { final ImageLayout layout = new ImageLayout(); layout.setTileHeight(tileDimensions.width).setTileWidth(tileDimensions.height); localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout)); } else { if (hints != null && hints.containsKey(JAI.KEY_IMAGE_LAYOUT)) { final Object layout = hints.get(JAI.KEY_IMAGE_LAYOUT); if (layout != null && layout instanceof ImageLayout) { localHints .add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, ((ImageLayout) layout).clone())); } } } if (hints != null && hints.containsKey(JAI.KEY_TILE_CACHE)) { final Object cache = hints.get(JAI.KEY_TILE_CACHE); if (cache != null && cache instanceof TileCache) localHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, (TileCache) cache)); } if (hints != null && hints.containsKey(JAI.KEY_TILE_SCHEDULER)) { final Object scheduler = hints.get(JAI.KEY_TILE_SCHEDULER); if (scheduler != null && scheduler instanceof TileScheduler) localHints.add(new RenderingHints(JAI.KEY_TILE_SCHEDULER, (TileScheduler) scheduler)); } boolean addBorderExtender = true; if (hints != null && hints.containsKey(JAI.KEY_BORDER_EXTENDER)) { final Object extender = hints.get(JAI.KEY_BORDER_EXTENDER); if (extender != null && extender instanceof BorderExtender) { localHints.add(new RenderingHints(JAI.KEY_BORDER_EXTENDER, (BorderExtender) extender)); addBorderExtender = false; } } // BORDER extender if (addBorderExtender) { localHints.add(ImageUtilities.BORDER_EXTENDER_HINTS); } ImageWorker iw = new ImageWorker(raster); iw.setRenderingHints(localHints); iw.affine(finalRaster2Model, interpolation, request.getBackgroundValues()); return new GranuleLoadingResult(iw.getRenderedImage(), null, granuleUrl, doFiltering, pamDataset); } } catch (IllegalStateException e) { if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) { LOGGER.log(java.util.logging.Level.WARNING, new StringBuilder("Unable to 
load raster for granuleDescriptor ").append(this.toString()) .append(" with request ").append(request.toString()) .append(" Resulting in no granule loaded: Empty result").toString(), e); } return null; } catch (org.opengis.referencing.operation.NoninvertibleTransformException e) { if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) { LOGGER.log(java.util.logging.Level.WARNING, new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString()) .append(" with request ").append(request.toString()) .append(" Resulting in no granule loaded: Empty result").toString(), e); } return null; } catch (TransformException e) { if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) { LOGGER.log(java.util.logging.Level.WARNING, new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString()) .append(" with request ").append(request.toString()) .append(" Resulting in no granule loaded: Empty result").toString(), e); } return null; } finally { try { if (request.getReadType() != ReadType.JAI_IMAGEREAD && inStream != null) { inStream.close(); } } finally { if (request.getReadType() != ReadType.JAI_IMAGEREAD && reader != null) { reader.dispose(); } } } }
From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java
GranuleOverviewLevelDescriptor getLevel(final int index) { //load level//from w ww . j a v a 2 s .c o m // create the base grid to world transformation ImageInputStream inStream = null; ImageReader reader = null; try { // get a stream assert cachedStreamSPI != null : "no cachedStreamSPI available!"; inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory()); if (inStream == null) throw new IllegalArgumentException("Unable to create an inputstream for the granuleurl:" + (granuleUrl != null ? granuleUrl : "null")); // get a reader and try to cache the relevant SPI if (cachedReaderSPI == null) { reader = ImageIOExt.getImageioReader(inStream); if (reader != null) cachedReaderSPI = reader.getOriginatingProvider(); } else reader = cachedReaderSPI.createReaderInstance(); if (reader == null) throw new IllegalArgumentException( "Unable to get an ImageReader for the provided file " + granuleUrl.toString()); final boolean ignoreMetadata = customizeReaderInitialization(reader, null); reader.setInput(inStream, false, ignoreMetadata); // call internal method which will close everything return getLevel(index, reader); } catch (IllegalStateException e) { // clean up try { if (inStream != null) inStream.close(); } catch (Throwable ee) { } finally { if (reader != null) reader.dispose(); } throw new IllegalArgumentException(e); } catch (IOException e) { // clean up try { if (inStream != null) inStream.close(); } catch (Throwable ee) { } finally { if (reader != null) reader.dispose(); } throw new IllegalArgumentException(e); } }
From source file:org.polymap.service.geoserver.GeoServerClassLoader.java
public GeoServerClassLoader(ClassLoader parent) { super(new URL[] {}, parent); this.parent = parent; Bundle bundle = GeoServerPlugin.getDefault().getBundle(); for (Enumeration en = bundle.findEntries("lib/", "*.jar", false); en.hasMoreElements();) { URL entry = (URL) en.nextElement(); log.debug("JAR found: " + entry); addURL(entry);// w w w . j a v a 2s . com } // allow GeoServer/Spring to access my classes (have to be included in the bundle) addURL(bundle.getResource("build/eclipse/")); // Trigger a call to sun.awt.AppContext.getAppContext(). This will pin // the common class loader in memory but that shouldn't be an issue. ImageIO.getCacheDirectory(); }
From source file:org.springframework.data.hadoop.mapreduce.ExecutionUtils.java
/** * Utility method used before invoking custom code for preventing custom classloader, set as the Thread * context class-loader, to leak (typically through JDK classes). *//* w ww .ja va 2 s . c o m*/ static void preventJreTcclLeaks() { if (log.isDebugEnabled()) { log.debug("Preventing JRE TCCL leaks"); } // get the root CL to be used instead ClassLoader sysLoader = ClassLoader.getSystemClassLoader(); ClassLoader cl = Thread.currentThread().getContextClassLoader(); try { // set the sysCL as the TCCL Thread.currentThread().setContextClassLoader(sysLoader); // // Handle security // // Policy holds the TCCL as static ClassUtils.resolveClassName("javax.security.auth.Policy", sysLoader); // since the class init may be lazy, call the method directly Policy.getPolicy(); // Configuration holds the TCCL as static // call method with minimal side-effects (since just doing class loading doesn't seem to trigger the static init) try { javax.security.auth.login.Configuration.getInstance(null, null, (String) null); } catch (Exception ex) { // ignore } // seems to cause side-effects/exceptions // javax.security.auth.login.Configuration.getConfiguration(); java.security.Security.getProviders(); // load the JDBC drivers (used by Hive and co) DriverManager.getDrivers(); // Initialize // sun.awt.AppContext.getAppContext() ImageIO.getCacheDirectory(); } finally { Thread.currentThread().setContextClassLoader(cl); } }