Example usage for org.opencv.calib3d Calib3d solvePnP

List of usage examples for org.opencv.calib3d Calib3d solvePnP

Introduction

On this page you can find example usage of org.opencv.calib3d Calib3d.solvePnP.

Prototype

public static boolean solvePnP(MatOfPoint3f objectPoints, MatOfPoint2f imagePoints, Mat cameraMatrix,
            MatOfDouble distCoeffs, Mat rvec, Mat tvec, boolean useExtrinsicGuess, int flags) 
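
The method estimates the pose (rotation vector rvec and translation vector tvec) that maps the 3D objectPoints onto the observed 2D imagePoints, given the camera intrinsics and distortion coefficients, and returns true when a solution is found. The following is a minimal sketch of a call; the point coordinates and intrinsics are placeholder values chosen only for illustration, and the flag constant is the one used by the examples on this page (newer OpenCV releases name it Calib3d.SOLVEPNP_ITERATIVE).

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Point3;

public class SolvePnPExample {
    public static void main(String[] args) {
        // Load the OpenCV native library before using any Mat-based API.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Four known 3D points on the object, in object coordinates (placeholder values).
        MatOfPoint3f objectPoints = new MatOfPoint3f(
                new Point3(0, 0, 0), new Point3(1, 0, 0),
                new Point3(1, 1, 0), new Point3(0, 1, 0));

        // The matching pixel positions observed in the image (placeholder values).
        MatOfPoint2f imagePoints = new MatOfPoint2f(
                new Point(300, 220), new Point(400, 225),
                new Point(398, 325), new Point(302, 320));

        // Pinhole camera matrix: focal lengths on the diagonal, principal point in the last column.
        Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
        cameraMatrix.put(0, 0, 500.0); // fx
        cameraMatrix.put(1, 1, 500.0); // fy
        cameraMatrix.put(0, 2, 320.0); // cx
        cameraMatrix.put(1, 2, 240.0); // cy

        // Empty distortion coefficients: the image is assumed to be undistorted.
        MatOfDouble distCoeffs = new MatOfDouble();

        Mat rvec = new Mat();
        Mat tvec = new Mat();

        // Estimate the pose; returns false when no solution is found.
        boolean found = Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix,
                distCoeffs, rvec, tvec, false, Calib3d.CV_ITERATIVE);

        if (found) {
            System.out.println("rvec = " + rvec.dump());
            System.out.println("tvec = " + tvec.dump());
        }
    }
}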

Usage

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

/**
 * Analyze video frames using a computer vision approach and generate an ArrayList<AttitudeRec>
 *
 * @param recs  output ArrayList of AttitudeRec
 * @return total number of frames in the video
 */
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;
    boolean use_timestamp = true;

    // roughly determine if decimation is necessary
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(new File(mPath, "video.mp4"), decimation);

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    Size patternSize = new Size(4, 11);

    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();
    long[] ts = new long[1];

    while ((frame = videoDecoder.getFrame(ts)) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        if (use_timestamp && ts[0] == -1) {
            use_timestamp = false;
        }

        // has to be in front, as there are cases where execution
        // will skip the later part of this while
        i++;

        // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern = Calib3d.findCirclesGrid(gray, patternSize, centers,
                Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters from the ground-truth 3D points and the pixel
        // positions of the blobs found by findCirclesGrid; an estimated camera matrix and
        // zero distortion are assumed.
        boolean foundSolution = Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject the points to evaluate the accuracy of the solvePnP result
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // if the error is reasonable, add the record to the results. Use a ratio of the frame
        // height to avoid penalizing higher-definition videos
        if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
            double[] rv = new double[3];
            double timestamp;

            rvec.get(0, 0, rv);
            if (use_timestamp) {
                timestamp = (double) ts[0] / 1e6;
            } else {
                timestamp = (double) i / meta.fps;
            }
            if (LOCAL_LOGV)
                Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
            recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath() + "/RVCVRecData/DebugCV/img"
                    + i + ".png", frame);
        }
    }

    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}
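
The loop above hands the recovered Rodrigues vector to a rodr2rpy helper that is defined elsewhere in RVCVXCheckAnalyzer and is not shown on this page. Purely as an illustration of what such a conversion involves, the sketch below expands the 3x1 Rodrigues vector into a rotation matrix with Calib3d.Rodrigues and reads out roll/pitch/yaw under an assumed ZYX convention; the actual helper may use a different convention. It relies on the org.opencv.core and org.opencv.calib3d imports already present in the class.

// Hypothetical stand-in for the rodr2rpy helper used above (assumes a ZYX Euler convention).
private static double[] rodriguesToRollPitchYaw(double[] rv) {
    Mat rvec = new MatOfDouble(rv[0], rv[1], rv[2]);
    Mat r = new Mat();
    // Expand the 3x1 Rodrigues rotation vector into a 3x3 rotation matrix.
    Calib3d.Rodrigues(rvec, r);

    double r00 = r.get(0, 0)[0];
    double r10 = r.get(1, 0)[0];
    double r20 = r.get(2, 0)[0];
    double r21 = r.get(2, 1)[0];
    double r22 = r.get(2, 2)[0];

    double roll = Math.atan2(r21, r22);
    double pitch = Math.asin(-r20);
    double yaw = Math.atan2(r10, r00);
    return new double[] { roll, pitch, yaw };
}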

From source file:org.usfirst.frc.team2084.CMonster2016.vision.Target.java

License:Open Source License

/**
 * Creates a new possible target based on the specified blob and calculates
 * its score.
 *
 * @param contour the contour of the possible target
 * @param grayImage the grayscale image used to refine the corner locations
 */
public Target(MatOfPoint contour, Mat grayImage) {
    // Simplify contour to make the corner finding algorithm work better
    MatOfPoint2f fContour = new MatOfPoint2f();
    contour.convertTo(fContour, CvType.CV_32F);
    Imgproc.approxPolyDP(fContour, fContour, VisionParameters.getGoalApproxPolyEpsilon(), true);
    fContour.convertTo(contour, CvType.CV_32S);

    this.contour = contour;

    // Check area, and don't do any calculations if it is not valid
    if (validArea = validateArea()) {

        // Find a bounding rectangle
        RotatedRect rect = Imgproc.minAreaRect(fContour);

        Point[] rectPoints = new Point[4];
        rect.points(rectPoints);

        for (int j = 0; j < rectPoints.length; j++) {
            Point rectPoint = rectPoints[j];

            double minDistance = Double.MAX_VALUE;
            Point point = null;

            for (int i = 0; i < contour.rows(); i++) {
                Point contourPoint = new Point(contour.get(i, 0));
                double dist = distance(rectPoint, contourPoint);
                if (dist < minDistance) {
                    minDistance = dist;
                    point = contourPoint;
                }
            }

            rectPoints[j] = point;
        }
        MatOfPoint2f rectMat = new MatOfPoint2f(rectPoints);
        // Refine the corners to improve accuracy
        Imgproc.cornerSubPix(grayImage, rectMat, new Size(4, 10), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        rectPoints = rectMat.toArray();

        // Identify each corner
        SortedMap<Double, List<Point>> x = new TreeMap<>();
        Arrays.stream(rectPoints).forEach((p) -> {
            List<Point> points;
            if ((points = x.get(p.x)) == null) {
                x.put(p.x, points = new LinkedList<>());
            }
            points.add(p);
        });

        int i = 0;
        for (Iterator<List<Point>> it = x.values().iterator(); it.hasNext();) {
            List<Point> s = it.next();

            for (Point p : s) {
                switch (i) {
                case 0:
                    topLeft = p;
                    break;
                case 1:
                    bottomLeft = p;
                    break;
                case 2:
                    topRight = p;
                    break;
                case 3:
                    bottomRight = p;
                }
                i++;
            }
        }

        // Organize corners
        if (topLeft.y > bottomLeft.y) {
            Point p = bottomLeft;
            bottomLeft = topLeft;
            topLeft = p;
        }

        if (topRight.y > bottomRight.y) {
            Point p = bottomRight;
            bottomRight = topRight;
            topRight = p;
        }

        // Create corners for centroid calculation
        corners = new MatOfPoint2f(rectPoints);

        // Calculate center
        Moments moments = Imgproc.moments(corners);
        center = new Point(moments.m10 / moments.m00, moments.m01 / moments.m00);

        // Put the points in the correct order for solvePnP
        rectPoints[0] = topLeft;
        rectPoints[1] = topRight;
        rectPoints[2] = bottomLeft;
        rectPoints[3] = bottomRight;
        // Recreate corners in the new order
        corners = new MatOfPoint2f(rectPoints);

        widthTop = distance(topLeft, topRight);
        widthBottom = distance(bottomLeft, bottomRight);
        width = (widthTop + widthBottom) / 2.0;
        heightLeft = distance(topLeft, bottomLeft);
        heightRight = distance(topRight, bottomRight);
        height = (heightLeft + heightRight) / 2.0;

        Mat tvec = new Mat();

        // Calculate target's location
        Calib3d.solvePnP(OBJECT_POINTS, corners, CAMERA_MAT, DISTORTION_MAT, rotation, tvec, false,
                Calib3d.CV_P3P);

        // =======================================
        // Position and Orientation Transformation
        // =======================================

        double armAngle = VisionResults.getArmAngle();

        // Flip y axis to point upward
        Core.multiply(tvec, SIGN_NORMALIZATION_MATRIX, tvec);

        // Shift origin to arm pivot point, on the robot's centerline
        CoordinateMath.translate(tvec, CAMERA_X_OFFSET, CAMERA_Y_OFFSET, ARM_LENGTH);

        // Align axes with ground
        CoordinateMath.rotateX(tvec, -armAngle);
        Core.add(rotation, new MatOfDouble(armAngle, 0, 0), rotation);

        // Shift origin to robot center of rotation
        CoordinateMath.translate(tvec, 0, ARM_PIVOT_Y_OFFSET, -ARM_PIVOT_Z_OFFSET);

        double xPosFeet = tvec.get(0, 0)[0];
        double yPosFeet = tvec.get(1, 0)[0];
        double zPosFeet = tvec.get(2, 0)[0];

        // Old, less effective aiming heading and distance calculation
        // double pixelsToFeet = TARGET_WIDTH / width;

        // distance = (TARGET_WIDTH * HighGoalProcessor.IMAGE_SIZE.width
        // / (2 * width ** Math.tan(VisionParameters.getFOVAngle() / 2)));
        // double xPosFeet = (center.x - (HighGoalProcessor.IMAGE_SIZE.width
        // / 2)) * pixelsToFeet;
        // double yPosFeet = -(center.y -
        // (HighGoalProcessor.IMAGE_SIZE.height / 2)) * pixelsToFeet;

        distance = Math.sqrt(xPosFeet * xPosFeet + zPosFeet * zPosFeet);

        position = new Point3(xPosFeet, yPosFeet, zPosFeet);

        xGoalAngle = Math.atan(xPosFeet / zPosFeet);
        yGoalAngle = Math.atan(yPosFeet / zPosFeet);

        validate();
        score = calculateScore();
    } else {
        valid = false;
    }
}
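
OBJECT_POINTS, CAMERA_MAT, DISTORTION_MAT and SIGN_NORMALIZATION_MATRIX above are constants defined elsewhere in the team's Target class and are not shown on this page. Purely to illustrate the shapes that the solvePnP call with Calib3d.CV_P3P expects (exactly four 3D/2D correspondences, a 3x3 intrinsic matrix, and a distortion vector), a sketch of placeholder definitions is given below; the values are not the team's actual target dimensions or camera calibration, and the snippet assumes the OpenCV native library is already loaded.

// Placeholder constants illustrating the inputs to the solvePnP(..., Calib3d.CV_P3P) call above.
// The values are examples only; the real ones come from the target geometry and camera calibration.
private static final MatOfPoint3f OBJECT_POINTS = new MatOfPoint3f(
        new Point3(-0.83, 0.5, 0),  // top left corner of the target (feet, assumed)
        new Point3(0.83, 0.5, 0),   // top right
        new Point3(-0.83, -0.5, 0), // bottom left
        new Point3(0.83, -0.5, 0)); // bottom right

private static final Mat CAMERA_MAT = Mat.eye(3, 3, CvType.CV_64F);
static {
    CAMERA_MAT.put(0, 0, 600.0); // fx (placeholder)
    CAMERA_MAT.put(1, 1, 600.0); // fy (placeholder)
    CAMERA_MAT.put(0, 2, 320.0); // cx (placeholder)
    CAMERA_MAT.put(1, 2, 240.0); // cy (placeholder)
}

// Assume no lens distortion for this sketch.
private static final MatOfDouble DISTORTION_MAT = new MatOfDouble();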