Example usage for org.opencv.android Utils matToBitmap

List of usage examples for org.opencv.android Utils matToBitmap

Introduction

In this page you can find the example usage for org.opencv.android Utils matToBitmap.

Prototype

public static void matToBitmap(Mat mat, Bitmap bmp) 

Source Link

Document

Short form of matToBitmap(mat, bmp, premultiplyAlpha=false).

Usage

From source file:org.ar.rubik.MonoChromatic.java

License:Open Source License

/**
 * Compute Mono Chromatic Filter using GPU and OpenCL.
 *
 * Converts the input image to YUV (despite the "hls" naming), passes it to
 * native OpenCL code via bitmaps, and returns channel 0 of the result.
 *
 * @param image input frame (observed as 720x1280 CV_8UC3)
 * @return single-channel Mat holding channel 0 of the OpenCL output; the
 *         caller owns it and must release it
 */
private static Mat monochromaticMedianImageFilterOpenCL(Mat image) {

    Log.i(Constants.TAG, "Mono Arg Image: " + image); // 720*1280 CV_8UC3

    Size size = image.size();

    // Bitmaps used to hand pixel data to/from the native OpenCL step.
    Bitmap outputBitmap = Bitmap.createBitmap((int) size.width, (int) size.height, Bitmap.Config.ARGB_8888);
    Bitmap inputBitmap = Bitmap.createBitmap((int) size.width, (int) size.height, Bitmap.Config.ARGB_8888);

    // Convert to YUV color space.
    Mat hls_image = new Mat();
    Imgproc.cvtColor(image, hls_image, Imgproc.COLOR_BGR2YUV);

    // Convert image Mat to Bit Map, then release the intermediate Mat:
    // Mat native memory is not reclaimed by the garbage collector.
    Utils.matToBitmap(hls_image, inputBitmap);
    hls_image.release();

    // Call C++ code.
    nativeStepOpenCL(7, 5, 0, 0, true, inputBitmap, outputBitmap);

    Mat result = new Mat();
    Utils.bitmapToMat(outputBitmap, result);

    // Bitmaps are no longer needed once the result Mat has been extracted.
    inputBitmap.recycle();
    outputBitmap.recycle();

    List<Mat> channels = new LinkedList<Mat>();
    Core.split(result, channels);
    Mat channel0 = channels.get(0);

    // Only channel 0 is returned; release the combined result and the
    // remaining split channels to avoid native memory leaks.
    result.release();
    for (int i = 1; i < channels.size(); i++) {
        channels.get(i).release();
    }

    return channel0;
}

From source file:org.firstinspires.ftc.rmrobotics.histoy.opmodes.feRMilab.experimental.AutoExperimental.java

/**
 * Rotates the given frame 180 degrees and shows it in the preview ImageView.
 *
 * NOTE(review): the original "Scale down x2" comment did not match the code —
 * no scaling occurs; Core.flip with code -1 flips around both axes
 * (a 180-degree rotation). The commented-out scaling may have been removed.
 *
 * @param img frame to display; mutated in place by the flip
 */
private void DisplayImage(Mat img) {
    // Flip around both axes (equivalent to rotating 180 degrees).
    Core.flip(img, img, -1);
    mImageMap = Bitmap.createBitmap(img.cols(), img.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(img, mImageMap);

    // ImageView updates must run on the UI thread.
    ((FtcRobotControllerActivity) hardwareMap.appContext).runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mImageView.setImageBitmap(mImageMap);
        }
    });
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkHiJackVideo.java

/**
 * Init-phase loop: while the Relic Recovery VuMark is visible, projects the
 * predefined box points through Vuforia's raw pose, grabs an RGB888 camera
 * frame, draws the projected geometry plus the two ball sampling squares,
 * records the sampled colors, and posts the flipped frame to the preview view.
 */
@Override
public void init_loop() {
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
    if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

        /* Found an instance of the template. In the actual game, you will probably
         * loop until this condition occurs, then move on to act accordingly depending
         * on which VuMark was visible. */
        telemetry.addData("VuMark", "%s visible", vuMark);

        // Vuforia does not expose its real pose matrix directly, so a shim
        // listener is used to recover it.
        VuforiaDefaultListenerShim writeGoodCode = (VuforiaDefaultListenerShim) relicTemplate.getListener();

        OpenGLMatrix pose = writeGoodCode.getRawPose();
        telemetry.addData("Pose", format(pose));

        if (pose != null) {
            // Get Vuforia's real position matrix for point projection.
            Matrix34F goodCodeWritten = writeGoodCode.getRealPose();

            // Project each tracked 3D box point into 2D image coordinates.
            final float[][] vufPoints = new float[point.length][2];
            for (int i = 0; i < point.length; i++) {
                vufPoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
            }

            // Get a camera frame from Vuforia.
            try {
                VuforiaLocalizer.CloseableFrame frame = ray.take();

                // Locate the RGB888 image among the frame's images.
                int img = 0;
                for (; img < frame.getNumImages(); img++) {
                    if (frame.getImage(img).getFormat() == PIXEL_FORMAT.RGB888)
                        break;
                }

                // FIX: the original indexed past the end when no RGB888 image was
                // present (IndexOutOfBounds with a confusing message, leaked frame).
                // Fail explicitly instead, matching the sibling VumarkOpenCV class.
                if (img >= frame.getNumImages()) {
                    frame.close();
                    throw new IllegalArgumentException("No RGB888 image in frame!");
                }

                Image mImage = frame.getImage(img);

                // Copy the pixel buffer into a Mat for drawing/display.
                // FIX: local renamed from "ray" to "pixels" — it shadowed the
                // frame-queue field "ray" used above.
                Mat out = new Mat(mImage.getHeight(), mImage.getWidth(), CV_8UC3);
                java.nio.ByteBuffer color = mImage.getPixels();
                byte[] pixels = new byte[color.limit()];
                color.rewind();
                color.get(pixels);
                out.put(0, 0, pixels);

                frame.close();

                // Convert projected points to OpenCV Points.
                for (int i = 0; i < vufPoints.length; i++)
                    imagePoints[i] = new Point((int) vufPoints[i][0], (int) vufPoints[i][1]);

                // Draw the two projected rectangles (points 0-3 and 4-7)...
                Scalar green = new Scalar(0, 255, 0);
                for (int i = 0; i < 2; i++)
                    for (int o = 0; o < 4; o++)
                        Imgproc.line(out, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                                imagePoints[i * 4 + o], green);

                // ...connect the rectangles...
                for (int i = 0; i < 4; i++)
                    Imgproc.line(out, imagePoints[i], imagePoints[i + 4], green);

                // ...and mark the remaining reference points.
                for (int i = 8; i < imagePoints.length; i++)
                    Imgproc.drawMarker(out, imagePoints[i], green);

                // Left ball sample square: centered between points 8 and 9,
                // sized by half their vertical distance.
                int leftXPoint = (int) ((imagePoints[8].x + imagePoints[9].x) / 2.0);
                int leftYPoint = (int) ((imagePoints[8].y + imagePoints[9].y) / 2.0);
                leftDist = (int) (Math.abs(imagePoints[8].y - imagePoints[9].y) / 2.0);
                leftBall = new int[] { leftXPoint - (leftDist / 2), leftYPoint - (leftDist / 2) };

                // Right ball sample square: same construction from points 10 and 11.
                int rightXPoint = (int) ((imagePoints[10].x + imagePoints[11].x) / 2.0);
                int rightYPoint = (int) ((imagePoints[10].y + imagePoints[11].y) / 2.0);
                rightDist = (int) (Math.abs(imagePoints[10].y - imagePoints[11].y) / 2.0);
                rightBall = new int[] { rightXPoint - (rightDist / 2), rightYPoint - (rightDist / 2) };

                // Sample the average color inside each square — a region we are
                // confident contains a ball — and draw the squares.
                leftColor = drawSquare(out, leftBall, leftDist);
                rightColor = drawSquare(out, rightBall, rightDist);

                // Flip for display orientation.
                Core.flip(out, out, -1);

                matQueue.add(out);

                // Display on the UI thread.
                mView.getHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Mat frame = matQueue.take();
                            // Convert to bitmap and trigger a redraw.
                            Utils.matToBitmap(frame, bm);
                            frame.release();
                            mView.invalidate();
                        } catch (InterruptedException e) {
                            // FIX: restore the interrupt flag instead of swallowing it.
                            Thread.currentThread().interrupt();
                        }
                    }
                });
            } catch (Exception e) {
                // FIX: getLocalizedMessage() may be null (NPE inside Log.e);
                // log a fixed message with the full stack trace instead.
                Log.e("OPENCV", "init_loop frame handling failed", e);
            }
        }
    } else {
        telemetry.addData("VuMark", "not visible");
    }
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkOpenCV.java

/**
 * Init-phase loop: while the Relic Recovery VuMark is visible, projects the
 * predefined box points through Vuforia's raw pose, grabs the camera frame
 * whose width matches the configured size, draws the projected geometry, and
 * posts the flipped frame to the preview view.
 */
@Override
public void init_loop() {
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
    if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

        /* Found an instance of the template. In the actual game, you will probably
         * loop until this condition occurs, then move on to act accordingly depending
         * on which VuMark was visible. */
        telemetry.addData("VuMark", "%s visible", vuMark);

        // Vuforia does not expose its real pose matrix directly, so a shim
        // listener is used to recover it.
        VuforiaDefaultListenerShim writeGoodCode = (VuforiaDefaultListenerShim) relicTemplate.getListener();

        OpenGLMatrix pose = writeGoodCode.getRawPose();
        telemetry.addData("Pose", format(pose));

        if (pose != null) {
            // Get Vuforia's real position matrix for point projection.
            Matrix34F goodCodeWritten = writeGoodCode.getRealPose();

            // Project each tracked 3D box point into 2D image coordinates.
            final float[][] vufPoints = new float[point.length][2];
            for (int i = 0; i < point.length; i++) {
                vufPoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
            }

            // Get a camera frame from Vuforia.
            try {
                VuforiaLocalizer.CloseableFrame frame = ray.take();

                // Locate the image whose width matches the configured size.
                int img = 0;
                for (; img < frame.getNumImages(); img++) {
                    if (frame.getImage(img).getWidth() == size[0])
                        break;
                }

                // FIX: close the frame before throwing — the original threw
                // before frame.close() and leaked the CloseableFrame.
                if (img >= frame.getNumImages()) {
                    frame.close();
                    throw new IllegalArgumentException("No frame with matching width!");
                }

                final Mat out = getMatFromImage(frame.getImage(img));

                frame.close();

                // Convert points, halving distances because Vuforia projects at
                // double resolution internally.
                for (int i = 0; i < vufPoints.length; i++)
                    imagePoints[i] = new Point((int) vufPoints[i][0] / 2, (int) vufPoints[i][1] / 2);

                // Draw the two projected rectangles (points 0-3 and 4-7)...
                Scalar color = new Scalar(0, 255, 0);
                for (int i = 0; i < 2; i++)
                    for (int o = 0; o < 4; o++)
                        Imgproc.line(out, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                                imagePoints[i * 4 + o], color);

                // ...connect the rectangles...
                for (int i = 0; i < 4; i++)
                    Imgproc.line(out, imagePoints[i], imagePoints[i + 4], color);

                // ...and mark the remaining reference points.
                for (int i = 8; i < imagePoints.length; i++)
                    Imgproc.drawMarker(out, imagePoints[i], color);

                // Flip for display orientation.
                Core.flip(out, out, -1);

                matQueue.add(out);

                // Display on the UI thread.
                mView.getHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Mat frame = matQueue.take();
                            // Convert to bitmap and trigger a redraw.
                            Utils.matToBitmap(frame, bm);
                            frame.release();
                            mView.invalidate();
                        } catch (InterruptedException e) {
                            // FIX: restore the interrupt flag instead of swallowing it.
                            Thread.currentThread().interrupt();
                        }
                    }
                });
            } catch (Exception e) {
                // FIX: getLocalizedMessage() may be null (NPE inside Log.e);
                // log a fixed message with the full stack trace instead.
                Log.e("OPENCV", "init_loop frame handling failed", e);
            }
        }
    } else {
        telemetry.addData("VuMark", "not visible");
    }
}

From source file:org.gearvrf.ipbsample.SampleViewManager.java

License:Apache License

/**
 * Camera preview callback: converts the YUV420sp preview frame to RGBA, runs
 * the selected processing / zoom / data-overlay steps, and pushes the result
 * into the bitmap texture for rendering.
 *
 * @param data   raw YUV420sp (NV21) preview bytes from the camera
 * @param camera the camera that produced the frame (unused)
 */
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    setFps();
    getTouchPadInput();

    // Wrap the YUV420sp bytes: height * 3/2 rows (Y plane + interleaved UV).
    Mat yuvImage = new Mat(SampleProcessing.height * 3 / 2, SampleProcessing.width, CvType.CV_8UC1);
    yuvImage.put(0, 0, data);

    Mat rgbaImage = new Mat(SampleProcessing.height, SampleProcessing.width, CvType.CV_8UC4);
    Imgproc.cvtColor(yuvImage, rgbaImage, Imgproc.COLOR_YUV420sp2RGBA, 4);
    // FIX: release the YUV Mat — native memory is not garbage collected, and
    // this callback runs once per frame.
    yuvImage.release();

    // FIX: the original pre-allocated a Mat here that was immediately
    // overwritten by processImage's return value, leaking it every frame.
    Mat outputImage = SampleProcessing.processImage(rgbaImage, viewNum);

    // NOTE(review): rgbaImage and the intermediate outputImage values below
    // may also leak if zoomImage/dataImage return fresh Mats — confirm whether
    // SampleProcessing reuses or allocates before adding releases here.
    if (zoomNum > 0) {
        outputImage = SampleProcessing.zoomImage(outputImage, zoomNum);
    }

    if (showData) {
        outputImage = SampleProcessing.dataImage(outputImage, viewNum, zoomNum, imageFps, latitude, longitude,
                altitude);
    }

    Utils.matToBitmap(outputImage, bitmap);
    bitmapTexture.update(bitmap);
}

From source file:org.mixare.MixView.java

License:Open Source License

/**
 * OpenCV camera frame callback. Returns the RGBA frame unchanged for display;
 * when a selection is pending (createBitmap flag set by the touch handler),
 * first crops the selected rectangle into the thumbnail bitmap.
 *
 * @param inputFrame current camera frame wrapper
 * @return the RGBA frame, unmodified
 */
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();

    if (createBitmap) { // FIX: plain boolean test instead of "== true"
        try {
            Point p1 = new Point(selectionStart.x, selectionStart.y);
            Point p2 = new Point(selectionEnd.x, selectionEnd.y);

            // Crop the selected region as a submat view into the full frame.
            org.opencv.core.Rect rect = new org.opencv.core.Rect(p1, p2);
            Mat thumbnailMat = new Mat(rgba, rect);

            thumbnail = Bitmap.createBitmap(thumbnailMat.width(), thumbnailMat.height(),
                    Bitmap.Config.ARGB_8888);

            Utils.matToBitmap(thumbnailMat, thumbnail);

            // FIX: release the submat header (frame data itself is ref-counted
            // and stays valid for the returned rgba).
            thumbnailMat.release();

            createBitmap = false;
        } catch (Exception ex) {
            // Selection rectangle may be invalid (e.g. outside frame bounds);
            // log and let the user try again.
            Log.e("Mixare", "Error while selecting object", ex);
        }
    }

    return rgba;
}

From source file:uk.ac.horizon.artcodes.detect.handler.MarkerActionDetectionHandler.java

License:Open Source License

/**
 * Called with the markers detected in the current frame. Scores each of the
 * experience's actions against the detected marker counts, then:
 * fires the handler when the best-scoring action changes, clears the current
 * action after it has gone unseen for REMAIN ms, and otherwise just refreshes
 * the last-seen timestamp while the same action remains visible.
 *
 * Scoring: Match.any uses the highest single-code count; Match.all requires
 * every code's count to exceed REQUIRED and scores double the summed counts.
 *
 * NOTE(review): currentAction is passed twice to onMarkerActionDetected below
 * — verify against the handler interface whether the second argument was
 * meant to be something else.
 */
@Override
public void onMarkersDetected(Collection<Marker> markers, ArrayList<MatOfPoint> contours, Mat hierarchy,
        Size sourceImageSize) {
    countMarkers(markers);
    long now = System.currentTimeMillis();
    int best = 0;
    Action selected = null;
    // Score every action; keep the highest-scoring one in `selected`.
    for (Action action : this.experience.getActions()) {
        if (action.getMatch() == Action.Match.any) {
            // Match.any: score is the best count among the action's codes.
            for (String code : action.getCodes()) {
                int count = markerCounts.count(code);
                if (count > best) {
                    selected = action;
                    best = count;
                }
            }
        } else if (action.getMatch() == Action.Match.all) {
            // Match.all: min gates on the least-seen code; total (doubled)
            // is the score compared against other actions.
            int min = MAX;
            int total = 0;
            for (String code : action.getCodes()) {
                int count = markerCounts.count(code);
                min = Math.min(min, count);
                total += (count * 2);
            }

            if (min > REQUIRED && total > best) {
                best = total;
                selected = action;
            }
        }
    }

    if (best < REQUIRED) {
        // Nothing scored well enough; drop the current action once it has
        // been unseen for longer than REMAIN ms.
        if (currentAction != null) {
            if (now - lastSeen > REMAIN) {
                currentAction = null;
                this.markerActionHandler.onMarkerActionDetected(null, null, null);
            }
        }
    } else if (selected != currentAction) {
        // Selection changed: build a thumbnail of the marker matching the
        // action's first code (if a drawer is configured) and notify.
        currentAction = selected;
        lastSeen = now;
        ArrayList<MarkerImage> markerImages = null;
        if (this.markerDrawer != null) {
            Marker markerObject = null;
            for (Marker possibleMarkerObject : markers) {
                if (possibleMarkerObject.toString().equals(currentAction.getCodes().get(0))) {
                    markerObject = possibleMarkerObject;
                }
            }
            if (markerObject != null) {
                final Rect boundingRect = Imgproc.boundingRect(contours.get(markerObject.markerIndex));
                // NOTE(review): thumbnailMat is never released; if drawMarker
                // returns a fresh Mat this leaks native memory per detection.
                Mat thumbnailMat = this.markerDrawer.drawMarker(markerObject, contours, hierarchy, boundingRect,
                        null);
                Bitmap thumbnail = Bitmap.createBitmap(thumbnailMat.width(), thumbnailMat.height(),
                        Bitmap.Config.ARGB_8888);
                Utils.matToBitmap(thumbnailMat, thumbnail);
                // Position and size are normalized to source-image dimensions.
                MarkerImage markerImage = new MarkerImage(markerObject.toString(), thumbnail,
                        (float) (boundingRect.tl().x / sourceImageSize.width),
                        (float) (boundingRect.tl().y / sourceImageSize.height),
                        (float) (boundingRect.width / sourceImageSize.width),
                        (float) (boundingRect.height / sourceImageSize.height));
                markerImages = new ArrayList<>(1);
                markerImages.add(markerImage);

                Log.i("SOURCEIMG", "w" + sourceImageSize.width + " h" + sourceImageSize.height);
            }
        }
        this.markerActionHandler.onMarkerActionDetected(currentAction, currentAction, markerImages);
    } else {
        // Same action still selected: refresh lastSeen if any of its codes
        // is among the currently visible markers.
        for (Marker possibleMarkerObject : markers) {
            String marker = possibleMarkerObject.toString();
            for (String code : currentAction.getCodes()) {
                if (code.equals(marker)) {
                    lastSeen = now;
                    return;
                }
            }
        }
    }
}

From source file:uk.ac.horizon.artcodes.detect.handler.MultipleMarkerActionDetectionHandler.java

License:Open Source License

/**
 * Builds a MarkerImage for the given detected marker: renders the marker via
 * the configured drawer, converts the rendering to a bitmap, and packages it
 * with its bounding box normalized to the source image dimensions.
 *
 * @param marker          detected marker, or null
 * @param contours        contours from the detection pass
 * @param hierarchy       contour hierarchy from the detection pass
 * @param sourceImageSize size of the frame the marker was detected in
 * @return the thumbnail image descriptor, or null when marker is null
 */
private MarkerImage createImageForMarker(Marker marker, ArrayList<MatOfPoint> contours, Mat hierarchy,
        Size sourceImageSize) {
    if (marker == null) {
        return null;
    }

    // Render the marker and convert the rendering into an Android bitmap.
    final Rect bounds = Imgproc.boundingRect(contours.get(marker.markerIndex));
    final Mat rendered = this.markerDrawer.drawMarker(marker, contours, hierarchy, bounds, null);
    final Bitmap thumbnail = Bitmap.createBitmap(rendered.width(), rendered.height(),
            Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rendered, thumbnail);

    // Normalize position and size to the source image dimensions.
    final float x = (float) (bounds.tl().x / sourceImageSize.width);
    final float y = (float) (bounds.tl().y / sourceImageSize.height);
    final float w = (float) (bounds.width / sourceImageSize.width);
    final float h = (float) (bounds.height / sourceImageSize.height);
    return new MarkerImage(marker.toString(), thumbnail, x, y, w, h);
}

From source file:uk.ac.horizon.artcodes.detect.ImageBuffers.java

License:Open Source License

/**
 * Renders the overlay Mat into a (lazily created, cached) bitmap.
 *
 * @return the overlay bitmap when an overlay is ready, otherwise null; the
 *         cached bitmap is discarded once the overlay is no longer ready
 */
public Bitmap createOverlayBitmap() {
    if (!overlayReady) {
        // No overlay to draw: drop any stale cached bitmap and report none.
        overlayBitmap = null;
        return null;
    }

    // Create the cache on first use; reuse it on subsequent calls.
    if (overlayBitmap == null) {
        overlayBitmap = Bitmap.createBitmap(overlay.cols(), overlay.rows(), Bitmap.Config.ARGB_8888);
    }
    Utils.matToBitmap(overlay, overlayBitmap);
    return overlayBitmap;
}

From source file:ve.ucv.ciens.ccg.nxtar.MainActivity.java

License:Apache License

/**
 * Detects AR markers in a JPEG-encoded camera frame.
 *
 * Decodes the frame, runs the native marker detector, re-encodes the
 * annotated output frame as JPEG, and packages the marker codes together with
 * per-marker rotation matrices and translation vectors.
 *
 * @param frame JPEG-encoded camera frame
 * @return detection results, or null when OpenCV is unavailable or the camera
 *         has not been calibrated
 */
@Override
public MarkerData findMarkersInFrame(byte[] frame) {
    if (ocvOn) {
        if (cameraCalibrated) {
            int[] codes = new int[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
            float[] translations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 3];
            float[] rotations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 9];
            MarkerData data;
            Bitmap tFrame, mFrame;
            Mat inImg = new Mat();
            Mat outImg = new Mat();

            // Fill the codes array with -1 to mark markers that were not found.
            // FIX: the original `for (int i : codes) codes[i] = -1;` iterated
            // over the VALUES (all zero), so it only ever set codes[0].
            for (int i = 0; i < codes.length; i++)
                codes[i] = -1;

            // Decode the input image and convert it to an OpenCV matrix.
            tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
            Utils.bitmapToMat(tFrame, inImg);

            // Find the markers in the input image (native call fills codes,
            // translations, rotations and draws into outImg).
            getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes,
                    cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), translations,
                    rotations);

            // Encode the output image as a JPEG image.
            mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
            Utils.matToBitmap(outImg, mFrame);
            mFrame.compress(CompressFormat.JPEG, 100, outputStream);

            // Create and fill the output data structure.
            data = new MarkerData();
            data.outFrame = outputStream.toByteArray();
            data.markerCodes = codes;
            data.rotationMatrices = new Matrix3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
            data.translationVectors = new Vector3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];

            // Each marker contributes 3 consecutive translation components.
            for (int i = 0, p = 0; i < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; i++, p += 3) {
                data.translationVectors[i] = new Vector3(translations[p], translations[p + 1],
                        translations[p + 2]);
            }

            // Each marker contributes a row-major 3x3 rotation matrix.
            for (int k = 0; k < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; k++) {
                data.rotationMatrices[k] = new Matrix3();
                for (int row = 0; row < 3; row++) {
                    for (int col = 0; col < 3; col++) {
                        data.rotationMatrices[k].val[col + (row * 3)] = rotations[col + (row * 3) + (9 * k)];
                    }
                }
            }

            // Clean up memory.
            tFrame.recycle();
            mFrame.recycle();
            outputStream.reset();
            // FIX: Mats hold native memory the garbage collector never reclaims.
            inImg.release();
            outImg.release();

            return data;
        } else {
            Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): The camera has not been calibrated.");
            return null;
        }
    } else {
        Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): OpenCV is not ready or failed to load.");
        return null;
    }
}