Example usage for org.opencv.android Utils bitmapToMat

List of usage examples for org.opencv.android Utils bitmapToMat

Introduction

On this page you can find usage examples for org.opencv.android Utils.bitmapToMat.

Prototype

public static void bitmapToMat(Bitmap bmp, Mat mat) 

Document

Short form of bitmapToMat(bmp, mat, unPremultiplyAlpha=false).
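
A minimal, self-contained sketch of the typical round trip is shown below. It is an illustration rather than code from any of the projects on this page; the class and method names are invented for the example, while Utils.bitmapToMat, Utils.matToBitmap, and Imgproc.cvtColor are actual OpenCV Android SDK calls.

import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public class BitmapToMatExample {
    /** Converts a Bitmap to a grayscale Mat and back to a new Bitmap. */
    public static Bitmap toGrayscale(Bitmap input) {
        // bitmapToMat reallocates the Mat as needed and always produces RGBA (CV_8UC4) data;
        // the input Bitmap must use the ARGB_8888 or RGB_565 config.
        Mat rgba = new Mat();
        Utils.bitmapToMat(input, rgba);
        // Equivalent long form, keeping premultiplied alpha:
        // Utils.bitmapToMat(input, rgba, false);

        Mat gray = new Mat();
        Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);

        // matToBitmap accepts CV_8UC1, CV_8UC3 or CV_8UC4 Mats and needs a Bitmap of matching size.
        Bitmap output = Bitmap.createBitmap(input.getWidth(), input.getHeight(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(gray, output);
        return output;
    }
}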

Usage

From source file:org.akvo.caddisfly.helper.ImageHelper.java

License:Open Source License

/**
 * Gets the center of the backdrop in the test chamber
 *
 * @param bitmap the photo to analyse
 * @return the center point of the found circle
 */
public static Point getCenter(@NonNull Bitmap bitmap) {

    // convert bitmap to mat; Mat takes (rows, cols), i.e. (height, width),
    // and bitmapToMat reallocates 'mat' as CV_8UC4 regardless of the initial type
    Mat mat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
    Mat grayMat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC1);

    Utils.bitmapToMat(bitmap, mat);

    // convert to grayscale; bitmapToMat produces RGBA data, so the 4-channel
    // branch is the one taken here (the fallback is effectively unused)
    int colorConversion = (mat.channels() == 3) ? Imgproc.COLOR_RGB2GRAY
            : ((mat.channels() == 4) ? Imgproc.COLOR_RGBA2GRAY : Imgproc.COLOR_BGR2GRAY);

    Imgproc.cvtColor(mat, grayMat, colorConversion);

    // reduce the noise so we avoid false circle detection
    //Imgproc.GaussianBlur(grayMat, grayMat, new Size(9, 9), 2, 2);

    // param1 = gradient value used for edge detection
    // param2 = accumulator threshold for the Imgproc.CV_HOUGH_GRADIENT
    // method: the smaller the threshold, the more circles are detected
    // (including false circles); the larger it is, the fewer circles
    // are returned.
    double param1 = 10, param2 = 100;

    // create a Mat object to store the circles detected; HoughCircles
    // reallocates this as its output, so the initial size is not important
    Mat circles = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC1);

    // find the circle in the image
    Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, RESOLUTION_INVERSE_RATIO,
            (double) MIN_CIRCLE_CENTER_DISTANCE, param1, param2, MIN_RADIUS, MAX_RADIUS);

    int numberOfCircles = (circles.rows() == 0) ? 0 : circles.cols();

    // draw the circles found on the image
    if (numberOfCircles > 0) {

        double[] circleCoordinates = circles.get(0, 0);

        int x = (int) circleCoordinates[0], y = (int) circleCoordinates[1];

        org.opencv.core.Point center = new org.opencv.core.Point(x, y);
        int foundRadius = (int) circleCoordinates[2];

        // circle outline
        Imgproc.circle(mat, center, foundRadius, COLOR_GREEN, 4);

        Utils.matToBitmap(mat, bitmap);

        return new Point((int) center.x, (int) center.y);
    }

    return null;
}
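
The constants referenced above (RESOLUTION_INVERSE_RATIO, MIN_CIRCLE_CENTER_DISTANCE, MIN_RADIUS, MAX_RADIUS, COLOR_GREEN) are defined elsewhere in ImageHelper and are not shown on this page. To try the snippet in isolation, stand-in definitions such as the following would compile; the project's actual values may differ.

private static final double RESOLUTION_INVERSE_RATIO = 1.2; // accumulator resolution ratio (stand-in value)
private static final int MIN_CIRCLE_CENTER_DISTANCE = 100;  // minimum distance between circle centers (stand-in)
private static final int MIN_RADIUS = 40;                   // smallest circle radius to detect (stand-in)
private static final int MAX_RADIUS = 150;                  // largest circle radius to detect (stand-in)
private static final Scalar COLOR_GREEN = new Scalar(0, 255, 0); // green in the RGBA image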

From source file:org.ar.rubik.MonoChromatic.java

License:Open Source License

/**
  * Compute monochromatic filter using GPU and OpenCL.
  * @param image the input image
  * @return the filtered single-channel image
  */
private static Mat monochromaticMedianImageFilterOpenCL(Mat image) {

    Log.i(Constants.TAG, "Mono Arg Image: " + image); // 720*1280 CV_8UC3

    //       Imgproc.resize(image, image, new Size(image.size().height/2, image.size().width/2));

    Size size = image.size();

    // Create OpenCL Output Bit Map
    Bitmap outputBitmap = Bitmap.createBitmap((int) size.width, (int) size.height, Bitmap.Config.ARGB_8888);

    // Create OpenCL Input Bit Map
    Bitmap inputBitmap = Bitmap.createBitmap((int) size.width, (int) size.height, Bitmap.Config.ARGB_8888);

    // Convert to YUV color space (the hls_image name is historical; COLOR_BGR2YUV is what is used)
    //        long startHLS = System.currentTimeMillis();
    Mat hls_image = new Mat();
    Imgproc.cvtColor(image, hls_image, Imgproc.COLOR_BGR2YUV);
    //        Log.i(Constants.TAG, "Mono HLS Image: " + hls_image);  // 720*1280 CV_8UC3
    //        long endHLS = System.currentTimeMillis();
    //        Log.i(Constants.TAG, "HLS Conversion Time: " + (endHLS - startHLS) + "mS"); 

    // Convert image Mat to Bit Map.
    Utils.matToBitmap(hls_image, inputBitmap);

    // Call native OpenCL code.
    nativeStepOpenCL(7, 5, 0, 0, true, inputBitmap, outputBitmap);

    Mat result = new Mat();
    Utils.bitmapToMat(outputBitmap, result);

    //        long startChannel = System.currentTimeMillis();
    List<Mat> channels = new LinkedList<Mat>();
    Core.split(result, channels);
    Mat channel0 = channels.get(0);
    //        long endChannel = System.currentTimeMillis();
    //        Log.i(Constants.TAG, "Channel Conversion Time: " + (endChannel - startChannel) + "mS"); 
    return channel0;

    //       return result;
}

From source file:org.designosaurs.opmode.DesignosaursAuto.java

private Mat getRegionAboveBeacon() {
    // Timing is wrong
    if (vuforia.rgb == null || start == null || end == null)
        return null;

    Mat output = new Mat();

    Bitmap bm = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(), Bitmap.Config.RGB_565);
    bm.copyPixelsFromBuffer(vuforia.rgb.getPixels());

    if (end.x - start.x <= 0 || end.y - start.y <= 0) {
        Log.i(TAG, "Failing beacon recognition call because of improper viewport!");
        Log.i(TAG, "start: " + start.toString());
        Log.i(TAG, "end: " + end.toString());
        Log.i(TAG, "dX: " + (end.x - start.x));
        Log.i(TAG, "dY: " + (end.y - start.y));

        return null;
    }

    try {
        // Pass the region above the beacon to OpenCV:
        Bitmap croppedImage = Bitmap.createBitmap(bm, start.x, start.y, end.x - start.x, end.y - start.y);
        croppedImage = DesignosaursUtils.rotate(croppedImage, 90);

        Utils.bitmapToMat(croppedImage, output);
    } catch (Exception e) {
        e.printStackTrace();

        return null;
    }

    return output;
}

From source file:org.firstinspires.ftc.team4042.autos.VuMarkIdentifier.java

License:Open Source License

public Mat getFrame() {
    /*relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark");
    relicTemplate = relicTrackables.get(0);
    relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary
            
    relicTrackables.activate();*/ //ElapsedTime timeout = new ElapsedTime();
    Bitmap bm = null;
    while (bm == null) {
        if (this.vuforia.rgb != null) {
            bm = Bitmap.createBitmap(this.vuforia.rgb.getWidth(), this.vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(vuforia.rgb.getPixels());
        }
    }

    //telemetry.log().add(bm.getWidth() + " x " + bm.getHeight());
    // Mat takes (rows, cols), i.e. (height, width); bitmapToMat resizes it as needed anyway
    Mat tmp = new Mat(bm.getHeight(), bm.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(bm, tmp);

    /*
    FileOutputStream out = null;
    try {
    out = new FileOutputStream(new File("./storage/emulated/0/DCIM/nicepic.png"));
    bm.compress(Bitmap.CompressFormat.PNG, 100, out); // bmp is your Bitmap instance
    // PNG is a lossless format, the compression factor (100) is ignored
    } catch (Exception e) {
    e.printStackTrace();
    } finally {
    try {
        if (out != null) {
            out.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    }
    */

    return tmp;
}

From source file:org.firstinspires.ftc.teamcode.AutonomousVuforia.java

public int getBeaconConfig(Image img, VuforiaTrackable beacon, CameraCalibration camCal) {

    OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();
    telemetry.addData("Stuff", pose != null);
    telemetry.addData("Stuff", img != null);
    try {
        telemetry.addData("Stuff", img.getPixels() != null);
    } catch (Exception e) {
        telemetry.addData("Stuff", e);
    }
    telemetry.update();

    if (pose != null && img != null && img.getPixels() != null) {
        Matrix34F rawPose = new Matrix34F();
        float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
        rawPose.setData(poseData);

        float[][] corners = new float[4][2];

        corners[0] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 276, 0)).getData();
        corners[1] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 276, 0)).getData();
        corners[2] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 92, 0)).getData();
        corners[3] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 92, 0)).getData();

        Bitmap bm = Bitmap.createBitmap(img.getWidth(), img.getHeight(), Bitmap.Config.RGB_565);
        bm.copyPixelsFromBuffer(img.getPixels());

        Mat crop = new Mat(bm.getHeight(), bm.getWidth(), CvType.CV_8UC3);
        Utils.bitmapToMat(bm, crop);

        float x = Math.min(Math.min(corners[1][0], corners[3][0]), Math.min(corners[0][0], corners[2][0]));
        float y = Math.min(Math.min(corners[1][1], corners[3][1]), Math.min(corners[0][1], corners[2][1]));
        float width = Math.max(Math.abs(corners[0][0] - corners[2][0]),
                Math.abs(corners[1][0] - corners[3][0]));
        float height = Math.max(Math.abs(corners[0][1] - corners[2][1]),
                Math.abs(corners[1][1] - corners[3][1]));

        x = Math.max(x, 0);
        y = Math.max(y, 0);
        width = (x + width > crop.cols()) ? crop.cols() - x : width;
        height = (y + height > crop.rows()) ? crop.rows() - y : height;

        Mat cropped = new Mat(crop, new Rect((int) x, (int) y, (int) width, (int) height));

        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_RGB2HSV_FULL);

        Mat mask = new Mat();
        Core.inRange(cropped, blueLow, blueHigh, mask);
        Moments mmnts = Imgproc.moments(mask, true);

        // Note: these two branches cover every case except m00 == mask.total() * 0.8 exactly,
        // so the red/blue comparison below is almost never reached.
        if (mmnts.get_m00() > mask.total() * 0.8) {
            return BEACON_ALL_BLUE;
        } else if (mmnts.get_m00() < mask.total() * 0.8) {
            return BEACON_NO_BLUE;
        }

        if ((mmnts.get_m01() / mmnts.get_m00()) < cropped.rows() / 2) {

            return BEACON_RED_BLUE;
        } else {

            return BEACON_BLUERED;
        } // else

    }

    return BEACON_NOT_VISIBLE;
}

From source file:org.firstinspires.ftc.teamcode.VuforiaColor.java

public void runOpMode() throws InterruptedException {
    //        frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    //        backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    //        frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    //        backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    //        rollerMotor = hardwareMap.dcMotor.get("rollerMotor");
    //
    //        backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    //        backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.FRONT;
    this.vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    piController = new PIController(.0016, 0.00013, 0.00023, 0.000012);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 1);
    VuforiaTrackables visionTargets = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheelsTarget = visionTargets.get(0);
    wheelsTarget.setName("Wheels"); // Wheels

    VuforiaTrackable toolsTarget = visionTargets.get(1);
    toolsTarget.setName("Tools"); // Tools

    VuforiaTrackable legosTarget = visionTargets.get(2);
    legosTarget.setName("Legos"); // Legos

    VuforiaTrackable gearsTarget = visionTargets.get(3);
    gearsTarget.setName("Gears"); // Gears

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(visionTargets);

    /**
     * We use units of mm here because that's the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotLength = 16 * mmPerInch;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;
    float mmPhoneZOffset = 5.5f * mmPerInch;

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gearsTarget.setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    toolsTarget.setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    /*
     * To place the Wheels and Legos Targets on the Blue Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Finally, we translate it along the Y axis towards the blue audience wall.
     */
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheelsTarget.setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legosTarget.setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of camera direction above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, mmPhoneZOffset)
            .multiplied(Orientation.getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES,
                    0, 180, 0));
    RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) visionTargets.get(0).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(1).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(2).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(3).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    visionTargets.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    telemetry.addData("Loop", "Out");
    telemetry.update();
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take();
        telemetry.update();
        if (frame != null) {
            Image rgb = null;
            long numImages = frame.getNumImages();

            for (int i = 0; i < numImages; i++) {
                if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                    rgb = frame.getImage(i);
                    break;
                } //if
            } //for

            if (rgb != null) {
                Bitmap bmp = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
                bmp.copyPixelsFromBuffer(rgb.getPixels());

                img = new Mat();
                Utils.bitmapToMat(bmp, img);
                telemetry.addData("Img", "Converted");
                telemetry.update();
            }
        }

        for (VuforiaTrackable beacon : allTrackables) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (upperLeft.getData()[0] - lowerLeft.getData()[0]);
                    int width = (int) (upperRight.getData()[1] - upperLeft.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 1
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 1 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);
                    telemetry.addData(img.rows() + "", img.cols());
                    telemetry.update();

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting the button
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", piController.processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}

From source file:org.firstinspires.ftc.teamcode.VuforiaMovement.java

public void runOpMode() throws InterruptedException {
    frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    rollerMotor = hardwareMap.dcMotor.get("rollerMotor");

    backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    this.vuforia = new VuforiaLocalizerImplSubclass(parameters);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("Wheels");
    beacons.get(1).setName("Tools");
    beacons.get(2).setName("Legos");
    beacons.get(3).setName("Gears");

    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;

    // Initialize the location of the targets and phone on the field
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(0).setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(1).setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(2).setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(3).setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    RobotLog.ii(TAG, "Phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) beacons.get(0).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(1).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(2).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(3).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    beacons.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    movingToFirstBeacon = false;
    liningUpWithFirstBeacon = false;
    movingToSecondBeacon = false;
    liningUpWithSecondBeacon = false;
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        if (movingToFirstBeacon) {
            // TODO Estimate distance to the beacon from a point TBD
            // TODO Estimate distance to move forward and turn to face the beacon until second movement set
            // Move this outside the loop?

            // Move forward until you see the beacon
            while (movingToFirstBeacon) {
                // Move in increments to minimize the times you check the trackables
                for (int i = 0; i < 50; i++) {
                    frontRightMotor.setPower(1);
                    backRightMotor.setPower(1);
                    frontLeftMotor.setPower(1);
                    backLeftMotor.setPower(1);
                }

                for (VuforiaTrackable beacon : beacons) {
                    // Add beacon to telemetry if visible
                    if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                        visibleTarget = beacon.getName();
                        telemetry.addData(visibleTarget, "Visible");
                    }

                    OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon
                            .getListener()).getUpdatedRobotLocation();
                    if (robotLocationTransform != null) {
                        lastLocation = robotLocationTransform;
                    }
                }

                // Move to the beacon until the beacon is in sight
                if (lastLocation != null) {
                    movingToFirstBeacon = false; // Only execute this once
                }
            }
        }

        while (liningUpWithFirstBeacon) {
            for (VuforiaTrackable beacon : beacons) {
                // Add beacon to telemetry if visible
                if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                    visibleTarget = beacon.getName();
                    telemetry.addData(visibleTarget, "Visible");
                }

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                        .getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }

            RobotMovement movement = processLocation(lastLocation, visibleTarget);
            if (movement.isNoMovement()) {
                liningUpWithFirstBeacon = false;
            }

            processMovement(movement);
        }

        if (movingToSecondBeacon) {
            // TODO Estimate the movements/distance from the first beacon to the second
            movingToSecondBeacon = false; // Only execute this once
        }

        if (vuforia.rgb != null && !isButtonHit) {
            Bitmap bmp = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bmp.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            img = new Mat();
            Utils.bitmapToMat(bmp, img);
        }

        for (VuforiaTrackable beacon : beacons) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */
                // DOES NOT WORK???
                degreesToTurn = Math.toDegrees(Math.atan2(translation.get(1), translation.get(2)));
                telemetry.addData("Degrees-", degreesToTurn);
                // TODO Check degree turning threshold
                if (Math.abs(degreesToTurn) > 15) {
                    // Turn after doing calculating transformations
                    needToTurn = true;
                }

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);
                    telemetry.addData(beacon.getName() + "-Degrees", degreesToTurn);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (lowerLeft.getData()[0] - upperLeft.getData()[0]);
                    int width = (int) (upperLeft.getData()[1] - upperRight.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 0
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 0 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit && !needToTurn) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting the button
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}

From source file:samples.FtcTestOpenCv.java

License:Open Source License

/**
 * This method is called when the camera view is started. It will allocate and initialize
 * some global resources.
 *
 * @param width specifies the width of the camera view.
 * @param height specifies the height of the camera view.
 */
@Override
public void onCameraViewStarted(int width, int height) {
    faceRects = new MatOfRect();
    totalProcessingTime = 0;
    framesProcessed = 0;

    overlayImage = new Mat();
    Bitmap overlayBitmap = BitmapFactory.decodeResource(activity.getResources(), R.drawable.mustache);
    Utils.bitmapToMat(overlayBitmap, overlayImage);
    //
    // Don't allow the overlay unless the overlay image has all four RGBA channels.
    //
    if (overlayImage.channels() < 4)
        doOverlayImage = false;
}

From source file:uk.ac.horizon.artcodes.detect.ImageBuffers.java

License:Open Source License

/**
 * Set the image data using a Bitmap.
 * Make sure the Bitmap is the right size for this ImageBuffers; if creating an ImageBuffers
 * just for this, call createBuffer(bitmap.getWidth(), bitmap.getHeight(), 8) and then setROI(null).
 * @param bitmap A Bitmap with format ARGB_8888 or RGB_565 (requirement from OpenCV).
 */
public void setImage(Bitmap bitmap) {
    overlayReady = false;
    // Utils.bitmapToMat creates a RGBA CV_8UC4 image.
    Mat rgbaImage = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(bitmap, rgbaImage);
    // The 'bgrBuffer' is supposed to be BGR CV_8UC3, so convert.
    Mat bgrImage = this.getBgrBuffer();
    Imgproc.cvtColor(rgbaImage, bgrImage, Imgproc.COLOR_RGBA2BGR);
    this.setImage(bgrImage);
}
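
A sketch of the call sequence the javadoc above describes, assuming createBuffer and setROI exist with the signatures the comment implies (they are not shown on this page):

// Hypothetical usage based on the javadoc above; createBuffer and setROI are
// taken from the comment and not verified against the artcodes library.
ImageBuffers buffers = new ImageBuffers();
buffers.createBuffer(bitmap.getWidth(), bitmap.getHeight(), 8);
buffers.setROI(null);
buffers.setImage(bitmap);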

From source file:ve.ucv.ciens.ccg.nxtar.MainActivity.java

License:Apache License

@Override
public MarkerData findMarkersInFrame(byte[] frame) {
    if (ocvOn) {
        if (cameraCalibrated) {
            int[] codes = new int[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
            float[] translations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 3];
            float[] rotations = new float[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS * 9];
            MarkerData data;
            Bitmap tFrame, mFrame;
            Mat inImg = new Mat();
            Mat outImg = new Mat();

            // Fill the codes array with -1 to indicate markers that were not found.
            for (int i = 0; i < codes.length; i++)
                codes[i] = -1;

            // Decode the input image and convert it to an OpenCV matrix.
            tFrame = BitmapFactory.decodeByteArray(frame, 0, frame.length);
            Utils.bitmapToMat(tFrame, inImg);

            // Find the markers in the input image.
            getMarkerCodesAndLocations(inImg.getNativeObjAddr(), outImg.getNativeObjAddr(), codes,
                    cameraMatrix.getNativeObjAddr(), distortionCoeffs.getNativeObjAddr(), translations,
                    rotations);

            // Encode the output image as a JPEG image.
            mFrame = Bitmap.createBitmap(outImg.cols(), outImg.rows(), Bitmap.Config.RGB_565);
            Utils.matToBitmap(outImg, mFrame);
            mFrame.compress(CompressFormat.JPEG, 100, outputStream);

            // Create and fill the output data structure.
            data = new MarkerData();
            data.outFrame = outputStream.toByteArray();
            data.markerCodes = codes;
            data.rotationMatrices = new Matrix3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];
            data.translationVectors = new Vector3[ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS];

            for (int i = 0, p = 0; i < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; i++, p += 3) {
                data.translationVectors[i] = new Vector3(translations[p], translations[p + 1],
                        translations[p + 2]);
            }

            for (int k = 0; k < ProjectConstants.MAXIMUM_NUMBER_OF_MARKERS; k++) {
                data.rotationMatrices[k] = new Matrix3();
                for (int row = 0; row < 3; row++) {
                    for (int col = 0; col < 3; col++) {
                        data.rotationMatrices[k].val[col + (row * 3)] = rotations[col + (row * 3) + (9 * k)];
                    }
                }
            }

            // Clean up memory.
            tFrame.recycle();
            mFrame.recycle();
            outputStream.reset();

            return data;
        } else {
            Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): The camera has not been calibrated.");
            return null;
        }
    } else {
        Gdx.app.debug(TAG, CLASS_NAME + ".findMarkersInFrame(): OpenCV is not ready or failed to load.");
        return null;
    }
}