Example usage for java.awt.image DataBuffer TYPE_BYTE

List of usage examples for java.awt.image DataBuffer TYPE_BYTE

Introduction

In this page you can find the example usage for java.awt.image DataBuffer TYPE_BYTE.

Prototype

int TYPE_BYTE

To view the source code for java.awt.image DataBuffer TYPE_BYTE, click the Source Link below.

Click Source Link

Document

Tag for unsigned byte data.

Usage

From source file:TextureByReference.java

/**
 * Converts the given image into a custom 4-component (8 bits per component,
 * byte-interleaved) RGBA image backed by a {@link ComponentColorModel}.
 *
 * @param bImage the source image; converted to TYPE_INT_ARGB first if needed
 *               (ImageOps.convertImage — project helper; presumably converts
 *               in place, TODO confirm)
 * @return a new BufferedImage with interleaved byte RGBA data
 */
public static BufferedImage convertToCustomRGBA(BufferedImage bImage) {
    if (bImage.getType() != BufferedImage.TYPE_INT_ARGB) {
        ImageOps.convertImage(bImage, BufferedImage.TYPE_INT_ARGB);
    }

    final int w = bImage.getWidth();
    final int h = bImage.getHeight();

    // 8-bit sRGB color model with alpha, one byte per component
    ColorSpace colorSpace = ColorSpace.getInstance(ColorSpace.CS_sRGB);
    ColorModel colorModel = new ComponentColorModel(colorSpace, new int[] { 8, 8, 8, 8 }, true, false,
            Transparency.OPAQUE, 0);

    // Destination raster: 4 interleaved byte samples per pixel, row stride w * 4
    WritableRaster raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, w, h, w * 4, 4,
            new int[] { 0, 1, 2, 3 }, null);
    byte[] dest = ((DataBufferByte) raster.getDataBuffer()).getData();

    // Copy every pixel's four band samples into the interleaved byte buffer
    Raster source = bImage.getData();
    int[] samples = new int[4];
    int out = 0;
    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            samples = source.getPixel(x, y, samples);
            dest[out++] = (byte) samples[0];
            dest[out++] = (byte) samples[1];
            dest[out++] = (byte) samples[2];
            dest[out++] = (byte) samples[3];
        }
    }

    return new BufferedImage(colorModel, raster, false, null);
}

From source file:TextureByReference.java

/**
 * Converts the given image into a custom 3-component (8 bits per component,
 * byte-interleaved) RGB image backed by a {@link ComponentColorModel}.
 *
 * @param bImage the source image; converted to TYPE_INT_ARGB first if needed
 *               (ImageOps.convertImage — project helper; presumably converts
 *               in place, TODO confirm)
 * @return a new BufferedImage with interleaved byte RGB data (alpha dropped)
 */
public static BufferedImage convertToCustomRGB(BufferedImage bImage) {
    if (bImage.getType() != BufferedImage.TYPE_INT_ARGB) {
        ImageOps.convertImage(bImage, BufferedImage.TYPE_INT_ARGB);
    }

    final int w = bImage.getWidth();
    final int h = bImage.getHeight();

    // 8-bit sRGB color model without alpha, one byte per component
    ColorSpace colorSpace = ColorSpace.getInstance(ColorSpace.CS_sRGB);
    ColorModel colorModel = new ComponentColorModel(colorSpace, new int[] { 8, 8, 8 }, false, false,
            Transparency.OPAQUE, 0);

    // Destination raster: 3 interleaved byte samples per pixel, row stride w * 3
    WritableRaster raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, w, h, w * 3, 3,
            new int[] { 0, 1, 2 }, null);
    byte[] dest = ((DataBufferByte) raster.getDataBuffer()).getData();

    // Copy only the first three band samples of each pixel; the source may
    // carry a fourth band, hence the 4-element scratch array.
    Raster source = bImage.getData();
    int[] samples = new int[4];
    int out = 0;
    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            samples = source.getPixel(x, y, samples);
            dest[out++] = (byte) samples[0];
            dest[out++] = (byte) samples[1];
            dest[out++] = (byte) samples[2];
        }
    }

    return new BufferedImage(colorModel, raster, false, null);
}

From source file:edu.stanford.epad.epadws.handlers.dicom.DSOUtil.java

/**
 * Extracts the stored pixel values of one frame of a source image and returns
 * them serialized via {@code JSON.toString} as a double array.
 *
 * <p>For integer-typed buffers, signed data is manually sign-extended using the
 * mask/bit pair computed from the sample data type; float/double buffers are
 * copied (or used directly) without sign handling.
 *
 * @param sImg     the source image wrapper (project type; provides frames and signedness)
 * @param frameNum the zero-based frame index to read
 * @return the frame's pixel values serialized as JSON
 */
public static String getPixelValues(SourceImage sImg, int frameNum) {
    // Mask/bit pair used to manually sign-extend integer samples below
    int signMask = 0;
    int signBit = 0;

    BufferedImage src = sImg.getBufferedImage(frameNum);
    if (sImg.isSigned()) {
        // the source image will already have been sign extended to the data type size
        // so we don't need to worry about other than exactly 8 and 16 bits
        if (src.getSampleModel().getDataType() == DataBuffer.TYPE_BYTE) {
            signBit = 0x0080; // MSB of an 8-bit sample
            signMask = 0xffffff80;
        } else { // assume short or ushort
            signBit = 0x8000; // MSB of a 16-bit sample
            signMask = 0xffff8000;
        }
    }
    double[] storedPixelValueArray;
    if (src.getRaster().getDataBuffer() instanceof DataBufferFloat) {
        // Floating-point buffer: read the whole frame as floats, then widen to double
        float[] storedPixelValues = src.getSampleModel().getPixels(0, 0, src.getWidth(), src.getHeight(),
                (float[]) null, src.getRaster().getDataBuffer());
        //copy to double array
        storedPixelValueArray = new double[storedPixelValues.length];
        for (int i = 0; i < storedPixelValues.length; i++) {
            storedPixelValueArray[i] = storedPixelValues[i];
        }
    } else if (src.getRaster().getDataBuffer() instanceof DataBufferDouble) {
        // Double buffer: can be used directly without copying
        double[] storedPixelValues = src.getSampleModel().getPixels(0, 0, src.getWidth(), src.getHeight(),
                (double[]) null, src.getRaster().getDataBuffer());
        storedPixelValueArray = storedPixelValues;
    } else {
        // Integer-typed buffer (byte/short/ushort/int)
        int[] storedPixelValues = src.getSampleModel().getPixels(0, 0, src.getWidth(), src.getHeight(),
                (int[]) null, src.getRaster().getDataBuffer());
        int storedPixelValueInt = 0;
        //copy to double array
        storedPixelValueArray = new double[storedPixelValues.length];
        for (int i = 0; i < storedPixelValues.length; i++) {
            storedPixelValueInt = storedPixelValues[i];

            if (sImg.isSigned() && (storedPixelValueInt & signBit) != 0) {
                storedPixelValueInt |= signMask; // sign extend
            }
            storedPixelValueArray[i] = storedPixelValueInt;
        }

    }
    return JSON.toString(storedPixelValueArray);

}

From source file:it.geosolutions.jaiext.range.RangeTest.java

/**
 * Verifies range conversion against each supported DataBuffer sample data
 * type, delegating to the two-argument overload.
 *
 * @param range the range under test
 */
private void checkRangeConversion(Range range) {
    final int[] dataTypes = { DataBuffer.TYPE_BYTE, DataBuffer.TYPE_DOUBLE, DataBuffer.TYPE_FLOAT,
            DataBuffer.TYPE_INT, DataBuffer.TYPE_SHORT, DataBuffer.TYPE_USHORT };
    for (int dataType : dataTypes) {
        checkRangeConversion(range, dataType);
    }
}

From source file:it.geosolutions.geobatch.destination.vulnerability.VulnerabilityComputation.java

/**
 * Method used for merging the input Rasters into a 2 images, one for human targets and the other for not human targets
 * /* ww w .  j a v a2 s.co  m*/
 * @param humanTargets
 * @param notHumanTargets
 * @param bandPerTargetH
 * @param bandPerTargetNH
 * @throws IOException
 * @throws java.awt.geom.NoninvertibleTransformException
 * @throws TransformException
 * @throws MismatchedDimensionException
 */
public RenderedImage[] rasterCalculation(Map<Integer, TargetInfo> bandPerTargetH,
        Map<Integer, TargetInfo> bandPerTargetNH) throws IOException,
        java.awt.geom.NoninvertibleTransformException, MismatchedDimensionException, TransformException {
    // Initialization of the images
    RenderedImage humanTargets = null;
    RenderedImage notHumanTargets = null;
    String basePath = System.getProperty(RASTER_PATH_PROP, "");
    if (!basePath.equals("")) {
        basePath = basePath + File.separator + codicePartner;
    }
    // Read of the resources
    Map vulnerabilityConf = (Map) readResourceFromXML("/vulnerability.xml");
    // Vulnerability engine used for extracting the Targets
    VulnerabilityStatsEngine vsengine = new VulnerabilityStatsEngine(basePath, vulnerabilityConf, dataStore,
            DISTANCE_TYPE_NAME, pixelArea);
    // Target Map
    Map<String, TargetInfo> targetInfo = vsengine.getTargetInfo();

    /*
     * Creation of 2 images: one for the HUMAN TARGETS and the other for NOT HUMAN TARGETS
     */
    // List of Human Targets
    List<RenderedImage> humanList = new ArrayList<RenderedImage>();

    // List of Not Human Targets
    List<RenderedImage> notHumanList = new ArrayList<RenderedImage>();

    // Counters indicating which band is associated to the TargetInfo and
    // Image
    int humanBandCounter = 0;
    int notHumanBandCounter = 0;

    // Iterator on all the targets
    Iterator<String> rasterIter = targetInfo.keySet().iterator();

    // Initializations of the parameters for merging the input rasters
    Envelope2D globalBBOXHuman = null;
    Envelope2D globalBBOXNotHuman = null;
    List<AffineTransform> tfHuman = new ArrayList<AffineTransform>();
    List<AffineTransform> tfNotHuman = new ArrayList<AffineTransform>();
    AffineTransform g2WHuman = null;
    AffineTransform g2WNotHuman = null;
    // Cycle on all the rasters
    while (rasterIter.hasNext()) {
        // save the ID of this target
        String targetID = rasterIter.next();

        // Load the target manager, init its status and check if the actual
        // distance is a valid distance for it
        TargetInfo info = targetInfo.get(targetID);

        // Getting of the transformation parameters
        GridGeometry2D gg2D = info.getGG2D();
        Envelope2D envelope = gg2D.getEnvelope2D();
        AffineTransform w2g = (AffineTransform) gg2D.getCRSToGrid2D(PixelOrientation.UPPER_LEFT);
        // getting information about current Target
        TargetManager manager = info.getManager();

        // Image associated to the current target
        RenderedImage newImage = info.getRaster();
        // Image data type
        int imgDataType = newImage.getSampleModel().getDataType();
        // Check if the image really exists
        if (newImage != null) {
            // If the target is human
            if (manager.isHumanTarget()) {
                // Other check for ensuring the target is correct
                if (imgDataType != DataBuffer.TYPE_FLOAT) {
                    System.out.println("Wrong data type");
                }

                // perform union
                if (globalBBOXHuman == null) {
                    globalBBOXHuman = new Envelope2D(envelope);
                } else {
                    globalBBOXHuman.include(envelope);
                }
                // Selection of the first g2w transform as the global one
                if (g2WHuman == null) {
                    g2WHuman = (AffineTransform) gg2D.getGridToCRS2D(PixelOrientation.UPPER_LEFT);
                }

                // Creation of the transformation from destination Raster space to source Raster space
                AffineTransform temp = new AffineTransform(w2g);
                temp.concatenate(g2WHuman);
                tfHuman.add(temp);

                // Addition of the TargetInfo of this target
                bandPerTargetH.put(humanBandCounter, info);
                // Update of the bandCounter
                humanBandCounter++;
                // Addition of the image to the associated list
                humanList.add(newImage);

            } else {
                // Other check for ensuring the target is correct
                if (imgDataType != DataBuffer.TYPE_BYTE) {
                    System.out.println("Wrong data type");
                }

                // perform union
                if (globalBBOXNotHuman == null) {
                    globalBBOXNotHuman = envelope;
                } else {
                    globalBBOXNotHuman.include(envelope);
                }
                // Selection of the first g2w transform as the global one
                if (g2WNotHuman == null) {
                    g2WNotHuman = (AffineTransform) gg2D.getGridToCRS2D(PixelOrientation.UPPER_LEFT);
                }
                // Creation of the transformation from destination Raster space to source Raster space
                AffineTransform temp = new AffineTransform(w2g);
                temp.concatenate(g2WNotHuman);
                tfNotHuman.add(temp);

                // Addition of the TargetInfo of this target
                bandPerTargetNH.put(notHumanBandCounter, info);
                // Update of the bandCounter
                notHumanBandCounter++;
                // Addition of the image to the associated list
                notHumanList.add(newImage);
            }
        }
    }

    // computing final raster space for the two targets
    GridGeometry2D humanGG2D = new GridGeometry2D(PixelInCell.CELL_CORNER, new AffineTransform2D(g2WHuman),
            globalBBOXHuman, null);
    globalBBOXHuman = humanGG2D.getEnvelope2D(); // take into account integer pixel roundings

    GridGeometry2D noHumanGG2D = new GridGeometry2D(PixelInCell.CELL_CORNER, new AffineTransform2D(g2WNotHuman),
            globalBBOXNotHuman, null);
    globalBBOXNotHuman = noHumanGG2D.getEnvelope2D(); // take into account integer pixel roundings

    // BandMerge of the images
    RenderedImage[] imagesHuman = new RenderedImage[humanList.size()];
    RenderedImage[] imagesNotHuman = new RenderedImage[notHumanList.size()];
    // Setting of the final layout
    ImageLayout layoutH = new ImageLayout2();
    GridEnvelope2D gridRange2D = humanGG2D.getGridRange2D();
    layoutH.setMinX(gridRange2D.x);
    layoutH.setMinY(gridRange2D.y);
    layoutH.setWidth(gridRange2D.width);
    layoutH.setHeight(gridRange2D.height);
    // Definition of the TileCache
    RenderingHints hintsH = new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache());
    // Setting of the layout as hint
    hintsH.put(JAI.KEY_IMAGE_LAYOUT, layoutH);
    // Merging of the input human targets
    humanTargets = BandMergeDescriptor.create(null, 0, hintsH, tfHuman, humanList.toArray(imagesHuman));
    // Setting of the final layout
    ImageLayout layoutNH = new ImageLayout2();
    gridRange2D = noHumanGG2D.getGridRange2D();
    layoutNH.setMinX(gridRange2D.x);
    layoutNH.setMinY(gridRange2D.y);
    layoutNH.setWidth(gridRange2D.width);
    layoutNH.setHeight(gridRange2D.height);
    // Definition of the TileCache
    RenderingHints hintsNH = new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache());
    hintsNH.put(JAI.KEY_IMAGE_LAYOUT, layoutNH);
    // Merging of the input not human targets
    notHumanTargets = BandMergeDescriptor.create(null, 0, hintsNH, tfNotHuman,
            notHumanList.toArray(imagesNotHuman));

    // cache the final images
    humanTargets = NullDescriptor.create(humanTargets,
            new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache()));

    notHumanTargets = NullDescriptor.create(notHumanTargets,
            new RenderingHints(JAI.KEY_TILE_CACHE, JAI.getDefaultInstance().getTileCache()));

    // Clearing of the initial lists
    notHumanList.clear();
    humanList.clear();
    // create a new array of the new images
    return new RenderedImage[] { humanTargets, notHumanTargets };
}

From source file:org.apache.xmlgraphics.image.codec.png.PNGImageDecoder.java

/**
 * Reads in an image of a given size and returns it as a WritableRaster.
 *
 * <p>Decodes one (interlace) pass of the PNG image: each row is read from the
 * data stream, de-filtered per the PNG row-filter byte, copied into a one-row
 * scratch raster and then written to the destination via
 * {@code processPixels}.
 *
 * @param imRas      destination raster for the whole image
 * @param xOffset    x position of the first pixel of this pass
 * @param yOffset    y position of the first row of this pass
 * @param xStep      horizontal distance between pixels of this pass
 * @param yStep      vertical distance between rows of this pass
 * @param passWidth  number of pixels per row in this pass
 * @param passHeight number of rows in this pass
 */
private void decodePass(final WritableRaster imRas, final int xOffset, final int yOffset, final int xStep,
        final int yStep, final int passWidth, final int passHeight) {
    // Empty passes are possible for small interlaced images
    if (passWidth == 0 || passHeight == 0) {
        return;
    }

    final int bytesPerRow = (this.inputBands * passWidth * this.bitDepth + 7) / 8;
    // For 16-bit samples the row holds shorts, i.e. half as many elements as bytes
    final int eltsPerRow = this.bitDepth == 16 ? bytesPerRow / 2 : bytesPerRow;
    byte[] curr = new byte[bytesPerRow];
    byte[] prior = new byte[bytesPerRow];

    // Create a 1-row tall Raster to hold the data
    final WritableRaster passRow = createRaster(passWidth, 1, this.inputBands, eltsPerRow, this.bitDepth);
    final DataBuffer dataBuffer = passRow.getDataBuffer();
    final int type = dataBuffer.getDataType();
    byte[] byteData = null;
    short[] shortData = null;
    if (type == DataBuffer.TYPE_BYTE) {
        byteData = ((DataBufferByte) dataBuffer).getData();
    } else {
        shortData = ((DataBufferUShort) dataBuffer).getData();
    }

    // Decode the (sub)image row-by-row
    int srcY, dstY;
    for (srcY = 0, dstY = yOffset; srcY < passHeight; srcY++, dstY += yStep) {
        // Read the filter type byte and a row of data
        int filter = 0;
        try {
            filter = this.dataStream.read();
            this.dataStream.readFully(curr, 0, bytesPerRow);
        } catch (final Exception e) {
            log.error("Exception", e);
        }

        // Reverse the per-row filter, using the prior row where required
        switch (filter) {
        case PNG_FILTER_NONE:
            break;
        case PNG_FILTER_SUB:
            decodeSubFilter(curr, bytesPerRow, this.bytesPerPixel);
            break;
        case PNG_FILTER_UP:
            decodeUpFilter(curr, prior, bytesPerRow);
            break;
        case PNG_FILTER_AVERAGE:
            decodeAverageFilter(curr, prior, bytesPerRow, this.bytesPerPixel);
            break;
        case PNG_FILTER_PAETH:
            decodePaethFilter(curr, prior, bytesPerRow, this.bytesPerPixel);
            break;
        default:
            // Error -- unknown filter type
            final String msg = PropertyUtil.getString("PNGImageDecoder16");
            throw new RuntimeException(msg);
        }

        // Copy data into passRow byte by byte
        if (this.bitDepth < 16) {
            System.arraycopy(curr, 0, byteData, 0, bytesPerRow);
        } else {
            // 16-bit samples: assemble big-endian shorts from byte pairs
            int idx = 0;
            for (int j = 0; j < eltsPerRow; j++) {
                shortData[j] = (short) (curr[idx] << 8 | curr[idx + 1] & 0xff);
                idx += 2;
            }
        }

        processPixels(this.postProcess, passRow, imRas, xOffset, xStep, dstY, passWidth);

        // Swap curr and prior (prior must hold the previous row's de-filtered bytes)
        final byte[] tmp = prior;
        prior = curr;
        curr = tmp;
    }
}

From source file:org.apache.xmlgraphics.image.codec.png.PNGRed.java

/**
 * Reads in an image of a given size and returns it as a WritableRaster.
 *
 * <p>Decodes one (interlace) pass of the PNG image: each row is read from the
 * data stream, de-filtered per the PNG row-filter byte, copied into a one-row
 * scratch raster and then written to the destination via
 * {@code processPixels}.
 *
 * @param imRas      destination raster for the whole image
 * @param xOffset    x position of the first pixel of this pass
 * @param yOffset    y position of the first row of this pass
 * @param xStep      horizontal distance between pixels of this pass
 * @param yStep      vertical distance between rows of this pass
 * @param passWidth  number of pixels per row in this pass
 * @param passHeight number of rows in this pass
 */
private void decodePass(final WritableRaster imRas, final int xOffset, final int yOffset, final int xStep,
        final int yStep, final int passWidth, final int passHeight) {
    // Empty passes are possible for small interlaced images
    if (passWidth == 0 || passHeight == 0) {
        return;
    }

    final int bytesPerRow = (this.inputBands * passWidth * this.bitDepth + 7) / 8;
    // For 16-bit samples the row holds shorts, i.e. half as many elements as bytes
    final int eltsPerRow = this.bitDepth == 16 ? bytesPerRow / 2 : bytesPerRow;
    byte[] curr = new byte[bytesPerRow];
    byte[] prior = new byte[bytesPerRow];

    // Create a 1-row tall Raster to hold the data
    final WritableRaster passRow = createRaster(passWidth, 1, this.inputBands, eltsPerRow, this.bitDepth);
    final DataBuffer dataBuffer = passRow.getDataBuffer();
    final int type = dataBuffer.getDataType();
    byte[] byteData = null;
    short[] shortData = null;
    if (type == DataBuffer.TYPE_BYTE) {
        byteData = ((DataBufferByte) dataBuffer).getData();
    } else {
        shortData = ((DataBufferUShort) dataBuffer).getData();
    }

    // Decode the (sub)image row-by-row
    int srcY, dstY;
    for (srcY = 0, dstY = yOffset; srcY < passHeight; srcY++, dstY += yStep) {
        // Read the filter type byte and a row of data
        int filter = 0;
        try {
            filter = this.dataStream.read();
            this.dataStream.readFully(curr, 0, bytesPerRow);
        } catch (final Exception e) {
            log.error("Exception", e);
        }

        // Reverse the per-row filter, using the prior row where required
        switch (filter) {
        case PNG_FILTER_NONE:
            break;
        case PNG_FILTER_SUB:
            decodeSubFilter(curr, bytesPerRow, this.bytesPerPixel);
            break;
        case PNG_FILTER_UP:
            decodeUpFilter(curr, prior, bytesPerRow);
            break;
        case PNG_FILTER_AVERAGE:
            decodeAverageFilter(curr, prior, bytesPerRow, this.bytesPerPixel);
            break;
        case PNG_FILTER_PAETH:
            decodePaethFilter(curr, prior, bytesPerRow, this.bytesPerPixel);
            break;
        default:
            // Error -- unknown filter type
            final String msg = PropertyUtil.getString("PNGImageDecoder16");
            throw new RuntimeException(msg);
        }

        // Copy data into passRow byte by byte
        if (this.bitDepth < 16) {
            System.arraycopy(curr, 0, byteData, 0, bytesPerRow);
        } else {
            // 16-bit samples: assemble big-endian shorts from byte pairs
            int idx = 0;
            for (int j = 0; j < eltsPerRow; j++) {
                shortData[j] = (short) (curr[idx] << 8 | curr[idx + 1] & 0xff);
                idx += 2;
            }
        }

        processPixels(this.postProcess, passRow, imRas, xOffset, xStep, dstY, passWidth);

        // Swap curr and prior (prior must hold the previous row's de-filtered bytes)
        final byte[] tmp = prior;
        prior = curr;
        curr = tmp;
    }
}