Example usage for java.awt.image BufferedImage TYPE_3BYTE_BGR

List of usage examples for java.awt.image BufferedImage TYPE_3BYTE_BGR

Introduction

In this page you can find the example usage for java.awt.image BufferedImage TYPE_3BYTE_BGR.

Prototype

int TYPE_3BYTE_BGR

To view the source code for java.awt.image BufferedImage TYPE_3BYTE_BGR, click the Source Link below.

Click Source Link

Document

Represents an image with 8-bit RGB color components, corresponding to a Windows-style BGR color model, with the colors Blue, Green, and Red stored in 3 bytes.

Usage

From source file:Main.java

// Builds an empty 200x200 BGR BufferedImage and turns it into an AWT Image
// by handing the image's producer to the default toolkit.
public static void main(String[] argv) throws Exception {
    BufferedImage source = new BufferedImage(200, 200, BufferedImage.TYPE_3BYTE_BGR);
    Image img = Toolkit.getDefaultToolkit().createImage(source.getSource());
}

From source file:Main.java

// Creates a 100x100 BGR image, crops the 50x50 region starting at (10, 10),
// and writes the crop out as Test.png.
public static void main(String[] args) throws Exception {
    BufferedImage source = new BufferedImage(100, 100, BufferedImage.TYPE_3BYTE_BGR);
    BufferedImage cropped = source.getSubimage(10, 10, 50, 50);
    ImageIO.write(cropped, "png", new File("Test.png"));
}

From source file:Main.java

/**
 * Re-draws {@code src} into a fresh TYPE_3BYTE_BGR image, discarding any
 * alpha channel so the result packs exactly three bytes per pixel.
 *
 * @param src the source image, or {@code null}
 * @return a BGR copy of {@code src}, or {@code null} when {@code src} is {@code null}
 */
public static BufferedImage imgUtilMinimizeNoAlpha(BufferedImage src) {
    if (src == null) {
        return null;
    }
    BufferedImage compact = new BufferedImage(src.getWidth(), src.getHeight(), BufferedImage.TYPE_3BYTE_BGR);
    Graphics2D graphics = (Graphics2D) compact.getGraphics();
    graphics.drawImage(src, 0, 0, null);
    graphics.dispose();
    return compact;
}

From source file:arlocros.Imshow.java

/**
 * Displays an OpenCV {@code Mat} in the shared {@code Imshow} window,
 * creating the window lazily on first call. Any {@link RuntimeException}
 * raised while converting or rendering is logged, not propagated.
 *
 * @param opencvImage the OpenCV matrix to display; may be resized in place
 *                    when the window uses a custom size
 */
public static void show(Mat opencvImage) {

    // NOTE(review): Dimension(width, height) is constructed with (rows, cols),
    // which looks swapped; the Imshow ctor below passes (height, width), swapping
    // them back. Confirm this double swap is intentional.
    Dimension frameSize = new Dimension(opencvImage.rows(), opencvImage.cols());
    if (frame == null) {
        // Lazily create the singleton display window on first use.
        frame = new Imshow("", frameSize.height, frameSize.width);
        frame.Window.setVisible(true);

        frame.Window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        if (frame.SizeCustom) {
            // Resize the caller's Mat in place to match the window's custom size.
            Imgproc.resize(opencvImage, opencvImage, new Size(frame.Height, frame.Width));
        }
    }
    BufferedImage bufImage = null;
    try {

        // Single-channel Mats render as grayscale; anything else as 3-byte BGR.
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (opencvImage.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        // Copy the raw Mat bytes straight into the BufferedImage's backing
        // buffer — both use the same interleaved byte layout for these types.
        int bufferSize = opencvImage.channels() * opencvImage.cols() * opencvImage.rows();
        byte[] b = new byte[bufferSize];
        opencvImage.get(0, 0, b);
        BufferedImage bufferedImage = new BufferedImage(opencvImage.cols(), opencvImage.rows(), type);
        final byte[] targetPixels = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
        System.arraycopy(b, 0, targetPixels, 0, b.length);
        bufImage = bufferedImage;
        frame.image.setImage(bufImage);
        frame.Window.pack();
        frame.label.updateUI();
        //frame.Window.setVisible(true);
    } catch (RuntimeException e) {
        // Visualization is best-effort; never crash the caller over a render failure.
        logger.info("Exception while visualizing.", e);
    }
}

From source file:org.springframework.cloud.stream.app.object.detection.processor.ObjectDetectionTensorflowInputConverter.java

/**
 * Decodes {@code imageBytes} into a UInt8 tensor of shape
 * [BATCH_SIZE, height, width, CHANNELS] containing RGB pixel data.
 *
 * @param imageBytes encoded image data readable by {@link ImageIO}
 * @return a tensor of the decoded image's RGB bytes
 * @throws IOException if reading the image data fails
 * @throws IllegalArgumentException if the bytes cannot be decoded, or the
 *         decoded image is not {@link BufferedImage#TYPE_3BYTE_BGR}
 */
private static Tensor<UInt8> makeImageTensor(byte[] imageBytes) throws IOException {
    ByteArrayInputStream is = new ByteArrayInputStream(imageBytes);
    BufferedImage img = ImageIO.read(is);

    // ImageIO.read returns null when no registered reader understands the
    // bytes; fail with a clear message instead of an NPE on img.getType().
    if (img == null) {
        throw new IllegalArgumentException("Unable to decode the input bytes as an image");
    }
    if (img.getType() != BufferedImage.TYPE_3BYTE_BGR) {
        throw new IllegalArgumentException(
                String.format("Expected 3-byte BGR encoding in BufferedImage, found %d", img.getType()));
    }
    // getData() returns a copy of the raster, so the in-place channel swap
    // below does not mutate the decoded BufferedImage.
    byte[] data = ((DataBufferByte) img.getData().getDataBuffer()).getData();
    // ImageIO.read produces BGR-encoded images, while the model expects RGB.
    bgrToRgb(data);

    // Expand dimensions since the model expects images to have shape: [1, None, None, 3]
    long[] shape = new long[] { BATCH_SIZE, img.getHeight(), img.getWidth(), CHANNELS };

    return Tensor.create(UInt8.class, shape, ByteBuffer.wrap(data));
}

From source file:net.vectorgaming.mediarealm.util.FileUtils.java

/**
 * Opens the media file at {@code directory} and pumps every packet through
 * an {@link ImageSnapListener}, which captures frames as TYPE_3BYTE_BGR images.
 *
 * @param directory path to the media file to read
 * @throws IOException if the media cannot be read
 */
public static void createThumbnail(String directory) throws IOException {
    IMediaReader mediaReader = ToolFactory.makeReader(directory);
    // Decoded frames must be handed to listeners as 3-byte BGR images.
    mediaReader.setBufferedImageTypeToGenerate(BufferedImage.TYPE_3BYTE_BGR);
    mediaReader.addListener(new ImageSnapListener());
    // Keep pumping packets while readPacket() reports no error (null);
    // the listener does all per-frame work as a side effect.
    while (mediaReader.readPacket() == null) {
        // intentionally empty
    }
}

From source file:org.apache.xmlgraphics.ps.ImageEncodingHelperTestCase.java

/**
 * Tests a BGR versus RBG image. Debugging shows the BGR follows the
 * optimizeWriteTo() (which is intended). The bytes are compared with the
 * RBG image, which happens to follow the writeRGBTo().
 *
 * @throws IOException/*from w w  w. j  a  va2  s  . com*/
 */
public void testRGBAndBGRImages() throws IOException {
    BufferedImage imageBGR = new BufferedImage(100, 75, BufferedImage.TYPE_3BYTE_BGR);
    imageBGR = prepareImage(imageBGR);
    BufferedImage imageRGB = new BufferedImage(100, 75, BufferedImage.TYPE_INT_BGR);
    imageRGB = prepareImage(imageRGB);

    final ImageEncodingHelper imageEncodingHelperBGR = new ImageEncodingHelper(imageBGR);
    final ImageEncodingHelper imageEncodingHelperRGB = new ImageEncodingHelper(imageRGB);

    final ByteArrayOutputStream baosBGR = new ByteArrayOutputStream();
    imageEncodingHelperBGR.encode(baosBGR);

    final ByteArrayOutputStream baosRGB = new ByteArrayOutputStream();
    imageEncodingHelperRGB.encode(baosRGB);

    assertTrue(Arrays.equals(baosBGR.toByteArray(), baosRGB.toByteArray()));
}

From source file:org.jcodec.codecs.mjpeg.MJPEGParser.java

/**
 * Converts a decoded MJPEG frame into a 3-byte BGR {@link BufferedImage}.
 *
 * @param decoded the decoded frame supplying dimensions and packed RGB pixels
 * @return a TYPE_3BYTE_BGR image populated from the decoded pixels
 */
BufferedImage buffered(DecodedImage decoded) {
    final int width = decoded.getWidth();
    final int height = decoded.getHeight();
    BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    // setRGB accepts default-RGB ints and converts them to the image's BGR byte layout.
    image.setRGB(0, 0, width, height, decoded.getPixels(), 0, width);
    return image;
}

From source file:org.springframework.cloud.stream.app.pose.estimation.processor.PoseEstimationTensorflowInputConverter.java

/**
 * Decodes {@code imageBytes} into a float tensor of shape
 * [BATCH_SIZE, height, width, CHANNELS] containing RGB pixel values.
 *
 * @param imageBytes encoded image data readable by {@link ImageIO}
 * @return a float tensor of the decoded image's RGB data
 * @throws IOException if reading the image data fails
 * @throws IllegalArgumentException if the bytes cannot be decoded, or the
 *         decoded image is not {@link BufferedImage#TYPE_3BYTE_BGR}
 */
private Tensor<Float> makeImageTensor(byte[] imageBytes) throws IOException {
    ByteArrayInputStream is = new ByteArrayInputStream(imageBytes);
    BufferedImage img = ImageIO.read(is);

    // ImageIO.read returns null when no registered reader understands the
    // bytes; fail with a clear message instead of an NPE on img.getType().
    if (img == null) {
        throw new IllegalArgumentException("Unable to decode the input bytes as an image");
    }
    if (img.getType() != BufferedImage.TYPE_3BYTE_BGR) {
        throw new IllegalArgumentException(
                String.format("Expected 3-byte BGR encoding in BufferedImage, found %d", img.getType()));
    }

    // ImageIO.read produces BGR-encoded images, while the model expects RGB.
    int[] data = toIntArray(img);

    // Expand dimensions since the model expects images to have shape: [1, None, None, 3]
    long[] shape = new long[] { BATCH_SIZE, img.getHeight(), img.getWidth(), CHANNELS };

    return Tensor.create(shape, FloatBuffer.wrap(toRgbFloat(data)));
}

From source file:org.jboss.arquillian.extension.screenRecorder.ScreenRecorder.java

/**
 * Starts recording the desktop to a temporary video file on a background
 * thread. Recording continues until {@link #stopRecording(java.io.File) }
 * clears {@code running}; the worker then closes the writer and, if a
 * destination is set, moves the temporary file there.
 */
public void startRecording() {
    running = true;
    // NOTE(review): 'running' is written here and polled by the worker thread;
    // confirm the field is declared volatile, or the loop may miss the stop signal.
    thread = new Thread(new Runnable() {
        public void run() {
            File output;
            try {
                // Temporary capture target; cleaned up once moved to 'destination'.
                output = File.createTempFile("arquillain-screen-recorder", "." + videoType);
                output.deleteOnExit();
                output.createNewFile();
            } catch (IOException e) {
                throw new IllegalStateException("Can't create a temporary file for recording.", e);
            }
            IMediaWriter writer = ToolFactory.makeWriter(output.getAbsolutePath());
            // Encode at half the screen resolution to keep file size down.
            writer.addVideoStream(0, 0, ICodec.ID.CODEC_ID_MPEG4, screenBounds.width / 2,
                    screenBounds.height / 2);
            long startTime = System.nanoTime();
            while (running) {
                BufferedImage screen = getDesktopScreenshot();

                // The encoder requires TYPE_3BYTE_BGR frames.
                BufferedImage bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);

                // Timestamp each frame relative to the start of the recording.
                writer.encodeVideo(0, bgrScreen, System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
                try {
                    // Pace the capture loop to roughly FRAME_RATE frames per second.
                    Thread.sleep((long) (1000 / FRAME_RATE));
                } catch (InterruptedException ex) {
                    logger.error("Exception occured during video recording", ex);
                }
                if (!running) {
                    // Stop was requested: finalize the video and publish the file.
                    writer.close();
                    try {
                        if (destination != null) {
                            if (destination.exists()) {
                                // moveFile fails if the target exists, so clear it first.
                                destination.delete();
                            }
                            FileUtils.moveFile(output, destination);
                        }
                    } catch (IOException e) {
                        throw new IllegalStateException(
                                "Can't move the temporary recorded content to the destination file.", e);
                    } finally {
                        // Best-effort cleanup of the temp file (no-op after a successful move).
                        output.delete();
                    }
                }
            }
        }
    });
    thread.start();
}