Example usage for org.opencv.calib3d Calib3d solvePnP


Introduction

On this page you can find example usage for org.opencv.calib3d Calib3d.solvePnP.

Prototype

public static boolean solvePnP(MatOfPoint3f objectPoints, MatOfPoint2f imagePoints, Mat cameraMatrix,
            MatOfDouble distCoeffs, Mat rvec, Mat tvec) 
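
Before the full examples below, here is a minimal, self-contained sketch of calling this overload. It is not taken from either source file: the four 3D/2D correspondences and the camera intrinsics are illustrative placeholders, and an empty MatOfDouble is passed to indicate that lens distortion is ignored.

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Point3;

public class SolvePnPSketch {

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Four corners of a 2x2 square in the Z=0 plane (object/world coordinates).
        MatOfPoint3f objectPoints = new MatOfPoint3f(
                new Point3(-1, -1, 0), new Point3(1, -1, 0),
                new Point3(1, 1, 0), new Point3(-1, 1, 0));

        // Where those corners were observed in the image (illustrative pixel values).
        MatOfPoint2f imagePoints = new MatOfPoint2f(
                new Point(100, 100), new Point(300, 110),
                new Point(310, 320), new Point(90, 300));

        // Pinhole intrinsics [fx 0 cx; 0 fy cy; 0 0 1]; placeholder focal lengths and center.
        Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64FC1);
        cameraMatrix.put(0, 0, 800.0); // fx
        cameraMatrix.put(1, 1, 800.0); // fy
        cameraMatrix.put(0, 2, 320.0); // cx
        cameraMatrix.put(1, 2, 240.0); // cy

        MatOfDouble distCoeffs = new MatOfDouble(); // empty: no lens distortion modeled
        Mat rvec = new Mat();                       // output rotation vector (Rodrigues form)
        Mat tvec = new Mat();                       // output translation vector

        boolean ok = Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
        System.out.println("solvePnP succeeded: " + ok);
        System.out.println("rvec: " + rvec.dump());
        System.out.println("tvec: " + tvec.dump());
    }
}

Leaving distCoeffs empty treats the camera as distortion-free; passing measured distortion coefficients, as the first example below does, generally yields a more accurate pose.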

Source Link

Usage

From source file: org.ar.rubik.CubePoseEstimator.java

License: Open Source License

/**
 * Pose Estimation
 * 
 * Deduce real world cube coordinates and rotation
 * 
 * @param rubikFace
 * @param image 
 * @param stateModel 
 * @return 
 */
public static CubePose poseEstimation(RubikFace rubikFace, Mat image, StateModel stateModel) {

    if (rubikFace == null)
        return null;

    if (rubikFace.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        return null;

    LeastMeansSquare lmsResult = rubikFace.lmsResult;

    if (lmsResult == null)
        return null;

    // OpenCV solvePnP requires at least four point correspondences; require more than four rhombi here.
    if (rubikFace.rhombusList.size() <= 4)
        return null;

    if (cameraMatrix == null) {
        cameraMatrix = stateModel.cameraCalibration.getOpenCVCameraMatrix((int) (image.size().width),
                (int) (image.size().height));
        distCoeffs = new MatOfDouble(stateModel.cameraCalibration.getDistortionCoefficients());
    }

    /*
     * For the purpose of extrinsic camera calibration (i.e., locating the cube
     * in camera coordinates), we define the geometry of the face of a cube as
     * nine 3D locations, each representing the center of a tile.
     * Correspondences between these points and nine 2D points from the actual
     * camera image, along with the camera calibration data, are used to
     * calculate the Pose of the Cube (i.e., the "Cube Pose").
     * 
     * The geometry of the cube here is defined as having its center at {0,0,0}
     * and an edge length of 2 units (i.e., +/- 1.0).
     */

    // Corresponding lists of real-world points and screen points.
    List<Point3> objectPointsList = new ArrayList<Point3>(9);
    List<Point> imagePointsList = new ArrayList<Point>(9);

    // Create list of image (in 2D) and object (in 3D) points.
    // Loop over Rubik Face Tiles
    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {

            Rhombus rhombus = rubikFace.faceRhombusArray[n][m];

            // Only use if Rhombus was non null.
            if (rhombus != null) {

                // Obtain center of Rhombus in screen image coordinates
                // Convention:
                //  o X is zero on the left, and increases to the right.
                //  o Y is zero on the top and increases downward.
                Point imagePoint = new Point(rhombus.center.x, rhombus.center.y);
                imagePointsList.add(imagePoint);

                // N and M here are array indices, not the conceptual indices of the
                // design doc; the remapping below brings them into agreement,
                // that is:
                //  o the nn vector points to the right and upwards.
                //  o the mm vector points to the left and upwards.
                int mm = 2 - n;
                int nn = 2 - m;

                // Calculate center of Tile in OpenCV World Space Coordinates
                // Convention:
                //  o X is zero in the center, and increases to the left.
                //  o Y is zero in the center and increases downward.
                //  o Z is zero (at the world coordinate origin) and increases away from the camera.
                float x = (1 - mm) * 0.66666f;
                float y = -1.0f;
                float z = -1.0f * (1 - nn) * 0.666666f;
                Point3 objectPoint = new Point3(x, y, z);
                objectPointsList.add(objectPoint);
            }
        }
    }

    // Cast image point list into OpenCV Matrix.
    MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(imagePointsList);

    // Cast object point list into OpenCV Matrix.
    MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(objectPointsList);

    Mat rvec = new Mat();
    Mat tvec = new Mat();

    //      Log.e(Constants.TAG, "Image Points: " + imagePoints.dump());
    //      Log.e(Constants.TAG, "Object Points: " + objectPoints.dump());

    //      =+= sometimes a "count >= 4" exception 
    Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);

    Log.v(Constants.TAG, String.format("Open CV Rotation Vector x=%4.2f y=%4.2f z=%4.2f", rvec.get(0, 0)[0],
            rvec.get(1, 0)[0], rvec.get(2, 0)[0]));

    // Convert from OpenCV to OpenGL World Coordinates
    float x = +1.0f * (float) tvec.get(0, 0)[0];
    float y = -1.0f * (float) tvec.get(1, 0)[0];
    float z = -1.0f * (float) tvec.get(2, 0)[0];

    //        // =+= Add manual offset correction to translation  
    //        x += MenuAndParams.xTranslationOffsetParam.value;
    //        y += MenuAndParams.yTranslationOffsetParam.value;
    //        z += MenuAndParams.zTranslationOffsetParam.value;      

    // Convert the Rotation Vector from OpenCV axis polarity conventions to the OpenGL convention.
    // Note: the polarity of the x-axis is already correct; no need to invert it.
    rvec.put(1, 0, -1.0f * rvec.get(1, 0)[0]); // y-axis
    rvec.put(2, 0, -1.0f * rvec.get(2, 0)[0]); // z-axis

    //        // =+= Add manual offset correction to Rotation
    //        rvec.put(0, 0, rvec.get(0, 0)[0] + MenuAndParams.xRotationOffsetParam.value * Math.PI / 180.0);  // X rotation
    //        rvec.put(1, 0, rvec.get(1, 0)[0] + MenuAndParams.yRotationOffsetParam.value * Math.PI / 180.0);  // Y rotation
    //        rvec.put(2, 0, rvec.get(2, 0)[0] + MenuAndParams.zRotationOffsetParam.value * Math.PI / 180.0);  // Z rotation

    // Package up as CubePose object
    CubePose cubePose = new CubePose();
    cubePose.x = x;
    cubePose.y = y;
    cubePose.z = z;
    cubePose.xRotation = rvec.get(0, 0)[0];
    cubePose.yRotation = rvec.get(1, 0)[0];
    cubePose.zRotation = rvec.get(2, 0)[0];

    //      Log.e(Constants.TAG, "Result: " + result);
    //      Log.e(Constants.TAG, "Camera: " + cameraMatrix.dump());
    //      Log.e(Constants.TAG, "Rotation: " + rvec.dump());
    //      Log.e(Constants.TAG, "Translation: " + tvec.dump());

    //      // Reporting in OpenGL World Coordinates
    //      Core.rectangle(image, new Point(0, 50), new Point(1270, 150), Constants.ColorBlack, -1);
    //      Core.putText(image, String.format("Translation  x=%4.2f y=%4.2f z=%4.2f", x, y, z), new Point(50, 100), Constants.FontFace, 3, Constants.ColorWhite, 3);
    //      Core.putText(image, String.format("Rotation     x=%4.0f y=%4.0f z=%4.0f", cubeXrotation, cubeYrotation, cubeZrotation), new Point(50, 150), Constants.FontFace, 3, Constants.ColorWhite, 3);

    Log.v(Constants.TAG, "Cube Pose: " + cubePose);

    return cubePose;
}
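
A quick way to sanity-check the pose returned by solvePnP is to reproject the 3D object points with the recovered rvec and tvec and compare them against the observed image points. The following sketch is not part of the original source; it reuses the local names from the method above (objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec), so in practice it would sit just before the method returns, and the logging mirrors the style already used there.

    // Reproject the object points with the estimated pose and measure the pixel error.
    MatOfPoint2f reprojectedPoints = new MatOfPoint2f();
    Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrix, distCoeffs, reprojectedPoints);

    List<Point> observed = imagePoints.toList();
    List<Point> reprojected = reprojectedPoints.toList();
    double sumSquaredError = 0.0;
    for (int i = 0; i < observed.size(); i++) {
        double dx = observed.get(i).x - reprojected.get(i).x;
        double dy = observed.get(i).y - reprojected.get(i).y;
        sumSquaredError += dx * dx + dy * dy;
    }
    double rmsError = Math.sqrt(sumSquaredError / observed.size());
    Log.v(Constants.TAG, String.format("solvePnP RMS reprojection error = %.2f pixels", rmsError));

A large reprojection error usually indicates a bad 2D/3D correspondence rather than a solver failure, so it is a useful guard before the pose is trusted for rendering.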

From source file: org.ar.rubik.CubeReconstructor.java

License: Open Source License

/**
 * Pose Estimation
 * 
 * Deduce real world cube coordinates and rotation
 * 
 * @param rubikFace
 * @param image 
 * @param stateModel 
 */
public void poseEstimation(RubikFace rubikFace, Mat image, StateModel stateModel) {

    if (rubikFace == null)
        return;

    if (rubikFace.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        return;

    LeastMeansSquare lmsResult = rubikFace.lmsResult;

    if (lmsResult == null)
        return;

    // OpenCV solvePnP requires at least four point correspondences; require more than four rhombi here.
    if (rubikFace.rhombusList.size() <= 4)
        return;

    // Corresponding lists of real-world points and screen points.
    List<Point3> objectPointsList = new ArrayList<Point3>(9);
    List<Point> imagePointsList = new ArrayList<Point>(9);

    // Create list of image (in 2D) and object (in 3D) points.
    // Loop over Rubik Face Tiles/Rhombi
    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {

            Rhombus rhombus = rubikFace.faceRhombusArray[n][m];

            // Only use if Rhombus was non null.
            if (rhombus != null) {

                // Obtain center of Rhombus in screen image coordinates
                // Convention:
                //  o X is zero on the left, and increases to the right.
                //  o Y is zero on the top and increases downward.
                Point imagePoint = new Point(rhombus.center.x, rhombus.center.y);
                imagePointsList.add(imagePoint);

                // N and M here are array indices, not the conceptual indices of the
                // design doc; the remapping below brings them into agreement,
                // that is:
                //  o the nn vector points to the right and upwards.
                //  o the mm vector points to the left and upwards.
                int mm = 2 - n;
                int nn = 2 - m;

                // Calculate center of Tile in OpenCV World Space Coordinates
                // Convention:
                //  o X is zero in the center, and increases to the left.
                //  o Y is zero in the center and increases downward.
                //  o Z is zero (at the world coordinate origin) and increases away from the camera.
                float x = (1 - mm) * 0.66666f;
                float y = -1.0f;
                float z = -1.0f * (1 - nn) * 0.666666f;
                Point3 objectPoint = new Point3(x, y, z);
                objectPointsList.add(objectPoint);
            }
        }
    }

    // Cast image point list into OpenCV Matrix.
    MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(imagePointsList);

    // Cast object point list into OpenCV Matrix.
    MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(objectPointsList);

    Mat cameraMatrix = stateModel.cameraParameters.getOpenCVCameraMatrix();
    MatOfDouble distCoeffs = new MatOfDouble(); // Empty: lens distortion is ignored here.
    Mat rvec = new Mat();
    Mat tvec = new Mat();

    //      Log.e(Constants.TAG, "Image Points: " + imagePoints.dump());
    //      Log.e(Constants.TAG, "Object Points: " + objectPoints.dump());

    //      boolean result = 
    Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);

    Log.v(Constants.TAG, String.format("Open CV Rotation Vector x=%4.2f y=%4.2f z=%4.2f", rvec.get(0, 0)[0],
            rvec.get(1, 0)[0], rvec.get(2, 0)[0]));

    // Convert from OpenCV to OpenGL World Coordinates
    x = +1.0f * (float) tvec.get(0, 0)[0];
    y = -1.0f * (float) tvec.get(1, 0)[0];
    z = -1.0f * (float) tvec.get(2, 0)[0];

    // =+= Add manual offset correction to translation  
    x += MenuAndParams.xTranslationOffsetParam.value;
    y += MenuAndParams.yTranslationOffsetParam.value;
    z += MenuAndParams.zTranslationOffsetParam.value;

    // Convert the Rotation Vector from OpenCV axis polarity conventions to the OpenGL convention.
    rvec.put(1, 0, -1.0f * rvec.get(1, 0)[0]);
    rvec.put(2, 0, -1.0f * rvec.get(2, 0)[0]);

    // =+= Add manual offset correction to Rotation
    rvec.put(0, 0, rvec.get(0, 0)[0] + MenuAndParams.xRotationOffsetParam.value * Math.PI / 180.0); // X rotation
    rvec.put(1, 0, rvec.get(1, 0)[0] + MenuAndParams.yRotationOffsetParam.value * Math.PI / 180.0); // Y rotation
    rvec.put(2, 0, rvec.get(2, 0)[0] + MenuAndParams.zRotationOffsetParam.value * Math.PI / 180.0); // Z rotation

    // Create an OpenCV Rotation Matrix from the Rotation Vector.
    // Rodrigues() allocates the output as a 3x3 matrix, so no explicit sizing is needed here.
    Mat rMatrix = new Mat();
    Calib3d.Rodrigues(rvec, rMatrix);
    Log.v(Constants.TAG, "Rodrigues Matrix: " + rMatrix.dump());

    /*
     * Create an OpenGL Rotation Matrix
     * Notes:
     *   o  OpenGL matrices are stored in column-major order.
     *   o  The OpenCV Rodrigues Rotation Matrix is 3x3, whereas the OpenGL Rotation Matrix is 4x4.
     */

    // Initialize all Rotational Matrix elements to zero.
    for (int i = 0; i < 16; i++)
        rotationMatrix[i] = 0.0f; // Initialize to zero

    // Initialize element [3,3] to 1.0: i.e., the "w" component in homogeneous coordinates.
    rotationMatrix[3 * 4 + 3] = 1.0f;

    // Copy OpenCV matrix to OpenGL matrix element by element.
    for (int r = 0; r < 3; r++)
        for (int c = 0; c < 3; c++)
            rotationMatrix[r + c * 4] = (float) (rMatrix.get(r, c)[0]);

    // Diagnostics
    for (int r = 0; r < 4; r++)
        Log.v(Constants.TAG, String.format("Rotation Matrix  r=%d  [%5.2f  %5.2f  %5.2f  %5.2f]", r,
                rotationMatrix[r + 0], rotationMatrix[r + 4], rotationMatrix[r + 8], rotationMatrix[r + 12]));

    //      Log.e(Constants.TAG, "Result: " + result);
    //      Log.e(Constants.TAG, "Camera: " + cameraMatrix.dump());
    //      Log.e(Constants.TAG, "Rotation: " + rvec.dump());
    //      Log.e(Constants.TAG, "Translation: " + tvec.dump());

    //      // Reporting in OpenGL World Coordinates
    //      Core.rectangle(image, new Point(0, 50), new Point(1270, 150), Constants.ColorBlack, -1);
    //      Core.putText(image, String.format("Translation  x=%4.2f y=%4.2f z=%4.2f", x, y, z), new Point(50, 100), Constants.FontFace, 3, Constants.ColorWhite, 3);
    //      Core.putText(image, String.format("Rotation     x=%4.0f y=%4.0f z=%4.0f", cubeXrotation, cubeYrotation, cubeZrotation), new Point(50, 150), Constants.FontFace, 3, Constants.ColorWhite, 3);
}
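
The loop above fills only the rotational 3x3 block of the column-major OpenGL matrix. As a possible follow-up (not in the original class, and assuming rotationMatrix is the float[16] field used above and x, y, z are the translation values computed earlier in this method), the translation can be folded into the fourth column to build a complete model matrix:

    // Column-major 4x4 layout: the fourth column (indices 12..14) holds the translation.
    float[] modelMatrix = rotationMatrix.clone();
    modelMatrix[12] = x; // OpenGL x translation
    modelMatrix[13] = y; // OpenGL y translation
    modelMatrix[14] = z; // OpenGL z translation
    // modelMatrix can then be combined with the view/projection matrices,
    // e.g. via android.opengl.Matrix.multiplyMM, before being handed to the shader.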