Example usage for java.awt Transparency OPAQUE

Introduction

This page lists usage examples for the java.awt Transparency.OPAQUE field.

Prototype

int OPAQUE

Document

Represents image data that is guaranteed to be completely opaque, meaning that all pixels have an alpha value of 1.0.
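
Transparency.OPAQUE is typically passed to a ColorModel constructor (most often ComponentColorModel) to declare that the model has no alpha channel, or compared against the value returned by ColorModel.getTransparency() and BufferedImage.getTransparency(). The following minimal sketch illustrates this; the sRGB color space, 8-bit band sizes, raster dimensions, and class name are illustrative choices rather than code taken from the examples below.

import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.WritableRaster;

public class OpaqueColorModelExample {
    public static void main(String[] args) {
        // An 8-bit-per-band sRGB color model with no alpha channel, declared
        // fully opaque via Transparency.OPAQUE.
        ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
        ColorModel cm = new ComponentColorModel(cs, new int[] { 8, 8, 8 },
                false,                 // hasAlpha
                false,                 // isAlphaPremultiplied
                Transparency.OPAQUE,   // every pixel is fully opaque
                DataBuffer.TYPE_BYTE);

        // The color model reports its transparency back as OPAQUE.
        System.out.println(cm.getTransparency() == Transparency.OPAQUE); // true

        // A compatible raster and the color model together back a BufferedImage,
        // which inherits the OPAQUE transparency.
        WritableRaster raster = cm.createCompatibleWritableRaster(16, 16);
        BufferedImage img = new BufferedImage(cm, raster, false, null);
        System.out.println(img.getTransparency() == Transparency.OPAQUE); // true
    }
}

Because the model is declared OPAQUE, consumers of the image can assume every pixel has an alpha value of 1.0 and skip alpha blending when compositing it.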

Usage

From source file:org.photovault.dcraw.RawImage.java

/**
 * Load the raw image using dcraw. No processing is done for the image yet;
 * however, the histogram and white point are calculated.
 */
private void loadRawImage() {
    long startTime = System.currentTimeMillis();
    log.debug("begin:loadRawImage");
    if (lrd == null) {
        openRaw();
        log.debug("openRaw() " + (System.currentTimeMillis() - startTime));
        if (lrd == null) {
            throw new IllegalStateException("Called loadRawImage before opening file");
        }
        lr.libraw_unpack(lrd);
        log.debug("unpacked " + (System.currentTimeMillis() - startTime));
    }
    /*
     * Copy the unprocessed data to a temporary array so that we can restore
     * lrd to the state it had after unpack()
     */

    int oldFlags = lrd.progress_flags;
    int oldFilters = lrd.idata.filters;
    int rawImageSize = lrd.sizes.iwidth * lrd.sizes.iheight * 4;
    short rawWidth = lrd.sizes.width;
    short rawHeight = lrd.sizes.height;
    this.width = lrd.sizes.width;
    this.height = lrd.sizes.height;
    short[] rawData = lrd.image.getShortArray(0, rawImageSize);
    lr.libraw_dcraw_process(lrd);
    log.debug("processed " + (System.currentTimeMillis() - startTime));
    int procWidth = lrd.sizes.width;
    int procHeight = lrd.sizes.height;

    int postSubsample = (lrd.output_params.half_size > 0) ? subsample / 2 : subsample;
    /*
     * Copy the raw image to Java raster, using box filter to subsample
     */
    int scaledW = procWidth / postSubsample;
    int scaledH = procHeight / postSubsample;
    short[] buf = new short[scaledW * scaledH * 3];
    int pos = 0;
    for (int row = 0; row < scaledH; row++) {
        for (int col = 0; col < scaledW; col++) {
            int rsum = 0;
            int gsum = 0;
            int bsum = 0;
            for (int or = row * postSubsample; or < (row + 1) * postSubsample; or++) {
                for (int oc = col * postSubsample; oc < (col + 1) * postSubsample; oc++) {
                    int r = lrd.image.getShort(8 * (oc + procWidth * or));
                    rsum += (r & 0xffff);
                    int g = lrd.image.getShort(8 * (oc + procWidth * or) + 2);
                    gsum += (g & 0xffff);
                    int b = lrd.image.getShort(8 * (oc + procWidth * or) + 4);
                    bsum += (b & 0xffff);
                }
            }
            buf[pos++] = (short) (rsum / (postSubsample * postSubsample));
            buf[pos++] = (short) (gsum / (postSubsample * postSubsample));
            buf[pos++] = (short) (bsum / (postSubsample * postSubsample));
        }
    }
    log.debug("subsampled " + (System.currentTimeMillis() - startTime));

    // Restore LibRaw state to what it was before dcraw_process
    lrd.image.write(0, rawData, 0, rawImageSize);
    lrd.progress_flags = oldFlags;
    lrd.sizes.width = rawWidth;
    lrd.sizes.height = rawHeight;
    lrd.idata.filters = oldFilters;
    rawData = null;

    // closeRaw();

    DataBuffer db = new DataBufferUShort(buf, buf.length);
    SampleModel sampleModel = RasterFactory.createPixelInterleavedSampleModel(DataBuffer.TYPE_USHORT, scaledW,
            scaledH, 3, 3 * scaledW, new int[] { 0, 1, 2 });
    WritableRaster r = Raster.createWritableRaster(sampleModel, db, new Point(0, 0));
    log.debug("raster created " + (System.currentTimeMillis() - startTime));

    if (this.chanMultipliers == null) {
        chanMultipliers = cameraMultipliers.clone();
        calcCTemp();
    }

    ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB);
    ColorModel targetCM = new ComponentColorModel(cs, new int[] { 16, 16, 16 }, false, false,
            Transparency.OPAQUE, DataBuffer.TYPE_USHORT);
    rawImage = new TiledImage(new BufferedImage(targetCM, r, true, null), 256, 256);
    rawImage.setProperty("org.photovault.opname", "dcraw_data");

    if (preRotation.getJaiTransposeType() != null) {
        rawImage = TransposeDescriptor.create(rawImage, preRotation.getJaiTransposeType(), null);
        rawImage.setProperty("org.photovault.opname", "orientation_adjustment");
    }

    final float[] DEFAULT_KERNEL_1D = { 0.25f, 0.5f, 0.25f };
    ParameterBlock pb = new ParameterBlock();
    KernelJAI kernel = new KernelJAI(DEFAULT_KERNEL_1D.length, DEFAULT_KERNEL_1D.length,
            DEFAULT_KERNEL_1D.length / 2, DEFAULT_KERNEL_1D.length / 2, DEFAULT_KERNEL_1D, DEFAULT_KERNEL_1D);
    pb.add(kernel);
    BorderExtender extender = BorderExtender.createInstance(BorderExtender.BORDER_COPY);
    RenderingHints hints = JAI.getDefaultInstance().getRenderingHints();
    if (hints == null) {
        hints = new RenderingHints(JAI.KEY_BORDER_EXTENDER, extender);
    } else {
        hints.put(JAI.KEY_BORDER_EXTENDER, extender);
    }

    RenderedOp filter = new RenderedOp("convolve", pb, hints);
    // javax.media.jai.operator.BoxFilterDescriptor.create( null, new Integer(2), new Integer(2), new Integer(0), new Integer(0), null );

    // Add the subsampling operation.
    pb = new ParameterBlock();
    pb.addSource(filter);
    pb.add(new Float(0.5F)).add(new Float(0.5F));
    pb.add(new Float(0.0F)).add(new Float(0.0F));
    pb.add(Interpolation.getInstance(Interpolation.INTERP_NEAREST));
    RenderedOp downSampler = new RenderedOp("scale", pb, null);
    // downSampler = javax.media.jai.operator.BoxFilterDescriptor.create( null, new Integer(2), new Integer(2), new Integer(0), new Integer(0), null );

    RenderableOp rawImageRenderable = RenderableDescriptor.createRenderable(rawImage, downSampler, null, null,
            null, null, null);
    double colorCorrMat[][] = new double[][] { { colorCorr[0], 0.0, 0.0, 0.0 }, { 0.0, colorCorr[1], 0.0, 0.0 },
            { 0.0, 0.0, colorCorr[2], 0.0 } };

    RenderingHints nonCachedHints = new RenderingHints(JAI.KEY_TILE_CACHE, null);
    wbAdjustedRawImage = BandCombineDescriptor.createRenderable(rawImageRenderable, colorCorrMat,
            nonCachedHints);
    wbAdjustedRawImage.setProperty("org.photovault.opname", "wb_adjusted_image");

    //            reader.getImageMetadata( 0 );
    //            rawIsHalfSized = dcraw.ishalfSize();
    //
    //            createHistogram();
    //        } catch (FileNotFoundException ex) {
    //            ex.printStackTrace();
    //        } catch (IOException ex) {
    //            ex.printStackTrace();
    //        } catch (PhotovaultException ex) {
    //            ex.printStackTrace();
    //        }
    log.debug("image ready " + (System.currentTimeMillis() - startTime));

    if (autoExposeRequested) {
        doAutoExpose();
    }
    log.debug("exit: loadRawImage " + (System.currentTimeMillis() - startTime));

}

From source file:org.photovault.image.PhotovaultImage.java

/**
 Add saturation mapping to the front of the image processing pipeline.
 @param src The image to which saturation correction is applied
 @return Saturation change operator.
 */
protected RenderableOp getSaturated(RenderableOp src) {
    IHSColorSpace ihs = IHSColorSpace.getInstance();
    ColorModel srcCm = getCorrectedImageColorModel();
    int[] componentSizes = srcCm.getComponentSize();
    if (componentSizes.length != 3) {
        // This is not an RGB image
        // TODO: also handle images with an alpha channel
        return null;
    }
    ColorModel ihsColorModel = new ComponentColorModel(ihs, componentSizes, false, false, Transparency.OPAQUE,
            srcCm.getTransferType());

    // Create a ParameterBlock for the conversion.
    ParameterBlock pb = new ParameterBlock();
    pb.addSource(src);
    pb.add(ihsColorModel);
    // Do the conversion.
    RenderableOp ihsImage = JAI.createRenderable("colorconvert", pb);
    ihsImage.setProperty("org.photovault.opname", "color_corrected_ihs_image");

    //        saturatedIhsImage =
    //                MultiplyConstDescriptor.createRenderable( ihsImage, new double[] {1.0, 1.0, saturation}, null );
    LookupTableJAI jailut = createSaturationMappingLUT();
    saturatedIhsImage = LookupDescriptor.createRenderable(ihsImage, jailut, null);
    saturatedIhsImage.setProperty("org.photovault.opname", "saturated_ihs_image");
    pb = new ParameterBlock();
    pb.addSource(saturatedIhsImage);
    ColorSpace sRGB = ColorSpace.getInstance(ColorSpace.CS_sRGB);
    ColorModel srgbColorModel = new ComponentColorModel(sRGB, componentSizes, false, false, Transparency.OPAQUE,
            srcCm.getTransferType());
    pb.add(srgbColorModel); // RGB color model!        
    RenderableOp saturatedImage = JAI.createRenderable("colorconvert", pb);
    saturatedImage.setProperty("org.photovault.opname", "saturated_image");

    return saturatedImage;
}

From source file:org.shaman.terrain.polygonal.GraphToHeightmap.java

private void saveMatrix(float[][] matrix, String filename, float min, float max) {
    byte[] buffer = new byte[size * size];
    int i = 0;
    for (int x = 0; x < size; ++x) {
        for (int y = 0; y < size; ++y) {
            buffer[i] = (byte) ((matrix[x][y] - min) * 255 / (max - min));
            i++;
        }
    }
    ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_GRAY);
    int[] nBits = { 8 };
    ColorModel cm = new ComponentColorModel(cs, nBits, false, true, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
    SampleModel sm = cm.createCompatibleSampleModel(size, size);
    DataBufferByte db = new DataBufferByte(buffer, size * size);
    WritableRaster raster = Raster.createWritableRaster(sm, db, null);
    BufferedImage result = new BufferedImage(cm, raster, false, null);
    try {
        ImageIO.write(result, "png", new File(filename));
    } catch (IOException ex) {
        Logger.getLogger(SketchTerrain.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:org.shaman.terrain.polygonal.GraphToHeightmap.java

private void saveColorMatrix(float[][][] matrix, String filename, float min, float max) {
    byte[] buffer = new byte[size * size * 3];
    int i = 0;
    for (int x = 0; x < size; ++x) {
        for (int y = 0; y < size; ++y) {
            buffer[i] = (byte) ((matrix[x][y][0] - min) * 255 / (max - min));
            i++;
            buffer[i] = (byte) ((matrix[x][y][1] - min) * 255 / (max - min));
            i++;
            buffer[i] = (byte) ((matrix[x][y][2] - min) * 255 / (max - min));
            i++;
        }
    }
    ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB);
    int[] nBits = { 8, 8, 8 };
    ColorModel cm = new ComponentColorModel(cs, nBits, false, true, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
    SampleModel sm = cm.createCompatibleSampleModel(size, size);
    DataBufferByte db = new DataBufferByte(buffer, size * size * 3);
    WritableRaster raster = Raster.createWritableRaster(sm, db, null);
    BufferedImage result = new BufferedImage(cm, raster, false, null);
    try {
        ImageIO.write(result, "png", new File(filename));
    } catch (IOException ex) {
        Logger.getLogger(SketchTerrain.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:org.yccheok.jstock.gui.Utils.java

public static BufferedImage toBufferedImage(Image image) {
    if (image instanceof BufferedImage) {
        return (BufferedImage) image;
    }

    // This code ensures that all the pixels in the image are loaded
    image = new ImageIcon(image).getImage();

    // Determine if the image has transparent pixels; for this method's
    // implementation, see e661 Determining If an Image Has Transparent Pixels
    boolean hasAlpha = hasAlpha(image);

    // Create a buffered image with a format that's compatible with the screen
    BufferedImage bimage = null;
    GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
    try {
        // Determine the type of transparency of the new buffered image
        int transparency = Transparency.OPAQUE;
        if (hasAlpha) {
            transparency = Transparency.BITMASK;
        }

        // Create the buffered image
        GraphicsDevice gs = ge.getDefaultScreenDevice();
        GraphicsConfiguration gc = gs.getDefaultConfiguration();
        bimage = gc.createCompatibleImage(image.getWidth(null), image.getHeight(null), transparency);
    } catch (HeadlessException e) {
        // The system does not have a screen
    }

    if (bimage == null) {
        // Create a buffered image using the default color model
        int type = BufferedImage.TYPE_INT_RGB;
        if (hasAlpha) {
            type = BufferedImage.TYPE_INT_ARGB;
        }
        bimage = new BufferedImage(image.getWidth(null), image.getHeight(null), type);
    }

    // Copy image to buffered image
    Graphics g = bimage.createGraphics();

    // Paint the image onto the buffered image
    g.drawImage(image, 0, 0, null);
    g.dispose();

    return bimage;
}
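
The hasAlpha(Image) helper called above is not part of this snippet (the comment points to the Java Almanac example "e661 Determining If an Image Has Transparent Pixels"). A common implementation along those lines, sketched here as an illustration rather than the actual Utils code, grabs a single pixel so the image's ColorModel becomes available and then asks the model whether it carries an alpha channel; it assumes imports of java.awt.image.PixelGrabber and java.awt.image.ColorModel.

public static boolean hasAlpha(Image image) {
    // BufferedImages expose their ColorModel directly.
    if (image instanceof BufferedImage) {
        return ((BufferedImage) image).getColorModel().hasAlpha();
    }

    // For other Image implementations, grab one pixel; PixelGrabber records
    // the ColorModel of the pixels it fetches.
    PixelGrabber pg = new PixelGrabber(image, 0, 0, 1, 1, false);
    try {
        pg.grabPixels();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }

    ColorModel cm = pg.getColorModel();
    return cm != null && cm.hasAlpha();
}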

From source file:plugin.exporttokens.PortraitToken.java

/**
 * Convenience method that returns a scaled instance of the
 * provided {@code BufferedImage}.
 *
 * @param img the original image to be scaled
 * @param targetWidth the desired width of the scaled instance,
 *    in pixels
 * @param targetHeight the desired height of the scaled instance,
 *    in pixels
 * @param hint one of the rendering hints that corresponds to
 *    {@code RenderingHints.KEY_INTERPOLATION} (e.g.
 *    {@code RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR},
 *    {@code RenderingHints.VALUE_INTERPOLATION_BILINEAR},
 *    {@code RenderingHints.VALUE_INTERPOLATION_BICUBIC})
 * @param higherQuality if true, this method will use a multi-step
 *    scaling technique that provides higher quality than the usual
 *    one-step technique (only useful in down scaling cases, where
 *    {@code targetWidth} or {@code targetHeight} is
 *    smaller than the original dimensions, and generally only when
 *    the {@code BILINEAR} hint is specified)
 * @return a scaled version of the original {@code BufferedImage}
 */
public BufferedImage getScaledInstance(BufferedImage img, int targetWidth, int targetHeight, Object hint,
        boolean higherQuality) {
    int type = (img.getTransparency() == Transparency.OPAQUE) ? BufferedImage.TYPE_INT_RGB
            : BufferedImage.TYPE_INT_ARGB;
    BufferedImage ret = img;
    int w, h;
    if (higherQuality) {
        // Use multi-step technique: start with original size, then
        // scale down in multiple passes with drawImage()
        // until the target size is reached
        w = img.getWidth();
        h = img.getHeight();
    } else {
        // Use one-step technique: scale directly from original
        // size to target size with a single drawImage() call
        w = targetWidth;
        h = targetHeight;
    }

    // If we are scaling up, just do the one pass.
    if (w < targetWidth || h < targetHeight) {
        // Use one-step technique: scale directly from original
        // size to target size with a single drawImage() call
        w = targetWidth;
        h = targetHeight;
    }

    do {
        if (higherQuality && w > targetWidth) {
            w /= 2;
            if (w < targetWidth) {
                w = targetWidth;
            }
        }

        if (higherQuality && h > targetHeight) {
            h /= 2;
            if (h < targetHeight) {
                h = targetHeight;
            }
        }

        BufferedImage tmp = new BufferedImage(w, h, type);
        Graphics2D g2 = tmp.createGraphics();
        g2.setRenderingHint(RenderingHints.KEY_INTERPOLATION, hint);
        g2.drawImage(ret, 0, 0, w, h, null);
        g2.dispose();

        ret = tmp;
    } while (w != targetWidth || h != targetHeight);

    return ret;
}

From source file:tufts.vue.LWComponent.java

/**
 * Create a new buffered image, of max dimension maxSize, and render the LWComponent
 * (and all its children), to it using the given alpha.
 * @param alpha 0.0 (invisible) to 1.0 (no alpha)
 * @param maxSize max dimensions for image. May be null.  Image may be smaller than maxSize.
 * @param fillColor -- if non-null, will be rendered as background for image.  If null, presume alpha 0 fill.  If alpha is
 * also set, background fill will have transparency of alpha^3 to enhance contrast.
 * @param zoomRequest -- desired zoom; ignored if maxSize is non-null
 */

// Note: as of Mac OS X 10.4.10 (Intel), when a java drag source declares it can
// generate an image (as we do when we Apple-drag something), if you drop it on the
// desktop, it will create a special mac "picture clipping", which is some kind of
// raw format, probably TIFF, tho you CANNOT open these in Preview.  Apparently
// there's some kind of bug in the special .pictClipping, where sometimes when
// opening it up it shows entirely as a blank space (I think if the image starts to
// get "very large"), tho the data is actually there -- if you drag the picture
// clipping into an Apple Mail message, it shows up again (and if you dragged from
// VUE to Apple Mail in the first place, it also works fine).  Note that AFTER
// dragging into Apple Mail, you can THEN double-click the attachment, and it will
// open it up in Preview as a .tiff file (Apple Mail appears to be converting the
// .pictClipping to tiff).  Note that uncompressed TIFF isn't exactly a friendly
// mail attachment format as it's huge.  But once you open the image in Preview, you
// have the option of saving it / exporting it as a jpeg, and you can even adjust
// the quality to your liking.

public BufferedImage createImage(double alpha, Dimension maxSize, Color fillColor, double zoomRequest) {
    final Rectangle2D.Float bounds = getImageBounds();

    if (DEBUG.IMAGE) {
        System.out.println();
        System.out.println(TERM_CYAN + "createImage: " + this + "\n\t zoomRequest: " + zoomRequest
                + "\n\t    maxSize: " + maxSize + "\n\t  mapBounds: " + fmt(bounds) + "\n\t  fillColor: "
                + fillColor + "\n\t      alpha: " + alpha + TERM_CLEAR);
    }

    final Size imageSize = new Size(bounds);
    final double usedZoom = computeZoomAndSize(bounds, maxSize, zoomRequest, imageSize);

    // Image type ARGB is needed if at any point in the generated image, there is a
    // not 100% opaque pixel all the way through the background.  So TYPE_INT_RGB
    // will handle transparency with a map fine -- but we need TYPE_INT_ARGB if,
    // say, we're generating drag image that we want to be a borderless node (fully
    // transparent image border), or if the whole drag image itself is
    // semi-transparent.

    final int imageType;
    final int transparency;

    if (fillColor == null || alpha != OPAQUE || fillColor.getAlpha() != 255) {
        imageType = BufferedImage.TYPE_INT_ARGB;
        transparency = Transparency.TRANSLUCENT;
    } else {
        imageType = BufferedImage.TYPE_INT_RGB;
        transparency = Transparency.OPAQUE;
    }

    //        final boolean fillHasAlpha = (fillColor != null && fillColor.getAlpha() != 255);
    //         //if (alpha == OPAQUE && fillColor != null && fillColor.getAlpha() == 255) {
    //         if (alpha == OPAQUE && (fillColor == null || fillColor.getAlpha() == 255)) {
    //             imageType = BufferedImage.TYPE_INT_RGB;
    //             transparency = Transparency.OPAQUE;
    //         } else {
    //             imageType = BufferedImage.TYPE_INT_ARGB;
    //             transparency = Transparency.TRANSLUCENT;
    //         }

    final int width = imageSize.pixelWidth();
    final int height = imageSize.pixelHeight();

    if (width >= 512 || height >= 512)
        Log.info("creating large image: " + imageSize + " = approx " + Util.abbrevBytes(width * height * 4));

    try {
        Log.info(this + "; createImage:" + "\n\t requestSize: " + imageSize + "\n\trequestAlpha: " + alpha
                + "\n\t requestFill: " + fillColor + "\n\t   pixelSize: " + width + "x" + height
                + "\n\t renderScale: " + usedZoom + "\n\t        type: "
                + (imageType == BufferedImage.TYPE_INT_RGB ? "RGB (opaque)" : "ARGB (translucent)"));
    } catch (Throwable t) {
        Log.error("logging", t);
    }

    //         if (DEBUG.IMAGE) out(TERM_CYAN
    //                              + "createImage:"
    //                              //+ "\n\tfinal size: " + width + "x" + height
    //                              + "\n\t neededSize: " + imageSize
    //                              + "\n\t   usedZoom: " + usedZoom
    //                              + "\n\t       type: " + (imageType == BufferedImage.TYPE_INT_RGB ? "OPAQUE" : "TRANSPARENT")
    //                              + TERM_CLEAR);

    if (mImageBuffer != null && mImageBuffer.getWidth() == width && mImageBuffer.getHeight() == height
            && mImageBuffer.getType() == imageType) {
        // todo: could also re-use if cached image is > our needed size as long as it's
        // an ARGB and we fill it with full alpha first, tho we really shouldn't
        // have each component caching its own image: some kind of small
        // recently used image buffers cache would make more sense.
        if (DEBUG.DND || DEBUG.IMAGE)
            out(TERM_CYAN + "\ngot cached image: " + mImageBuffer + TERM_CLEAR);
    } else {
        try {

            // TODO: manage this in a separate cache -- not one per node

            mImageBuffer = tufts.vue.gui.GUI.getDeviceConfigForWindow(null).createCompatibleImage(width, height,
                    transparency);

        } catch (Throwable t) {
            Log.error("creating image", t);
            Log.error("creating image: failing node: " + Util.tags(this));
            return null;
        }

        if (DEBUG.DND || DEBUG.IMAGE)
            out(TERM_RED + "created image: " + mImageBuffer + TERM_CLEAR);
        else
            Log.info("created image " + mImageBuffer);

    }

    drawImage((Graphics2D) mImageBuffer.getGraphics(), alpha, maxSize, fillColor, zoomRequest);

    return mImageBuffer;
}

From source file:util.ui.UiUtilities.java

/**
 * Convenience method that returns a scaled instance of the
 * provided {@code BufferedImage}.
 *
 * @param img the original image to be scaled
 * @param targetWidth the desired width of the scaled instance,
 *    in pixels
 * @param targetHeight the desired height of the scaled instance,
 *    in pixels
 * @return a scaled version of the original {@code BufferedImage}
 */
public static BufferedImage scaleDown(final BufferedImage img, final int targetWidth, final int targetHeight) {
    if (targetWidth > img.getWidth() || targetHeight > img.getHeight()) {
        return scaleIconToBufferedImage(img, targetWidth, targetHeight);
    }

    int type = (img.getTransparency() == Transparency.OPAQUE) ? BufferedImage.TYPE_INT_RGB
            : BufferedImage.TYPE_INT_ARGB;
    BufferedImage result = img;
    int w = img.getWidth();
    int h = img.getHeight();

    do {
        w /= 2;
        if (w < targetWidth) {
            w = targetWidth;
        }
        h /= 2;
        if (h < targetHeight) {
            h = targetHeight;
        }

        BufferedImage tmp = new BufferedImage(w, h, type);
        Graphics2D g2 = tmp.createGraphics();
        g2.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g2.drawImage(result, 0, 0, w, h, null);
        g2.dispose();

        result = tmp;
    } while (w != targetWidth || h != targetHeight);

    return result;
}

From source file:zz.pseas.ghost.utils.BrowserUtil.java

public static BufferedImage toBufferedImage(Image image) {
    if (image instanceof BufferedImage) {
        return (BufferedImage) image;
    }
    // This code ensures that all the pixels in the image are loaded
    image = new ImageIcon(image).getImage();
    BufferedImage bimage = null;
    GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
    try {
        int transparency = Transparency.OPAQUE;
        GraphicsDevice gs = ge.getDefaultScreenDevice();
        GraphicsConfiguration gc = gs.getDefaultConfiguration();
        bimage = gc.createCompatibleImage(image.getWidth(null), image.getHeight(null), transparency);
    } catch (HeadlessException e) {
        // The system does not have a screen
    }
    if (bimage == null) {
        // Create a buffered image using the default color model
        int type = BufferedImage.TYPE_INT_RGB;
        bimage = new BufferedImage(image.getWidth(null), image.getHeight(null), type);
    }
    // Copy image to buffered image
    Graphics g = bimage.createGraphics();
    // Paint the image onto the buffered image
    g.drawImage(image, 0, 0, null);
    g.dispose();
    return bimage;
}