Example usage for org.opencv.core MatOfPoint2f MatOfPoint2f

Introduction

On this page you can find example usage of the org.opencv.core MatOfPoint2f no-argument constructor, MatOfPoint2f().

Prototype

public MatOfPoint2f() 
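
MatOfPoint2f is a Mat specialized to CV_32FC2, i.e., one (x, y) float pair per element. The no-argument constructor creates an empty matrix that is typically passed to OpenCV functions as an output argument, or filled manually. A minimal sketch (class name and point values are illustrative):

import org.opencv.core.Core;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class MatOfPoint2fDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library before using any Mat-based class.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Empty constructor: commonly handed to OpenCV calls as an output parameter.
        MatOfPoint2f points = new MatOfPoint2f();

        // Or populate it manually; fromArray reallocates the matrix as Nx1 CV_32FC2.
        points.fromArray(new Point(10, 20), new Point(30, 40));

        // Read the points back as a Java array.
        for (Point p : points.toArray()) {
            System.out.println(p);
        }
    }
}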

Source Link

Usage

From source file:com.trandi.opentld.tld.LKTracker.java

License:Apache License

/**
 * @return Pair of the filtered last and current point arrays, or null if nothing could be tracked.
 */
Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints) {
    final int size = lastPoints.length;
    final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
    final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
    final MatOfByte statusMat = new MatOfByte();
    final MatOfFloat errSimilarityMat = new MatOfFloat();
    final MatOfByte statusFBMat = new MatOfByte();
    final MatOfFloat errSimilarityFBMat = new MatOfFloat();

    //Forward-Backward tracking
    Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat, statusMat,
            errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
    Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat, statusFBMat,
            errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);

    final byte[] status = statusMat.toArray();
    //final byte[] statusFB = statusFBMat.toArray();
    final float[] errSimilarityFB = errSimilarityFBMat.toArray();

    // compute the real forward-backward error (relative to the LAST points, not the current ones)
    final Point[] pointsFB = pointsFBMat.toArray();
    for (int i = 0; i < size; i++) {
        errSimilarityFB[i] = Util.norm(pointsFB[i], lastPoints[i]);
    }

    final Point[] currPoints = currentPointsMat.toArray();
    // compute real similarity error
    final float[] errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);

    // TODO: errSimilarityFB differs from the C++ implementation
    // filter out points with fwd-back error > the median AND points with similarity error > median
    return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}

From source file:de.vion.eyetracking.cameracalib.calibration.opencv.CameraCalibrator.java

private double computeReprojectionErrors(List<Mat> objectPoints, List<Mat> rvecs, List<Mat> tvecs,
        Mat perViewErrors) {
    MatOfPoint2f cornersProjected = new MatOfPoint2f();
    double totalError = 0;
    double error;
    float[] viewErrors = new float[objectPoints.size()];

    MatOfDouble distortionCoefficients = new MatOfDouble(this.mDistortionCoefficients);
    int totalPoints = 0;
    for (int i = 0; i < objectPoints.size(); i++) {
        MatOfPoint3f points = new MatOfPoint3f(objectPoints.get(i));
        Calib3d.projectPoints(points, rvecs.get(i), tvecs.get(i), this.mCameraMatrix, distortionCoefficients,
                cornersProjected);
        error = Core.norm(this.mCornersBuffer.get(i), cornersProjected, Core.NORM_L2);

        int n = objectPoints.get(i).rows();
        viewErrors[i] = (float) Math.sqrt(error * error / n);
        totalError += error * error;
        totalPoints += n;
    }
    perViewErrors.create(objectPoints.size(), 1, CvType.CV_32FC1);
    perViewErrors.put(0, 0, viewErrors);

    return Math.sqrt(totalError / totalPoints);
}
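
Put differently, the loop accumulates the squared L2 distance between the observed and the projected corners of every view, and the return value is the overall root-mean-square reprojection error:

$$\mathrm{RMS} = \sqrt{\frac{\sum_i e_i^2}{\sum_i n_i}}, \qquad e_i = \lVert \mathrm{observed}_i - \mathrm{projected}_i \rVert_2,$$

where $n_i$ is the number of points in view $i$; the per-view entries written to perViewErrors are $\sqrt{e_i^2 / n_i}$.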

From source file:dfmDrone.examples.fitEllipseExample.java

private static Mat findAndDrawEllipse(Mat sourceImg) {
    Mat hsvImg = new Mat();
    Imgproc.cvtColor(sourceImg, hsvImg, Imgproc.COLOR_BGR2HSV);
    Mat lower_hue_range = new Mat();
    Mat upper_hue_range = new Mat();
    Core.inRange(hsvImg, new Scalar(0, 100, 45), new Scalar(15, 255, 255), lower_hue_range);
    Core.inRange(hsvImg, new Scalar(160, 100, 45), new Scalar(180, 255, 255), upper_hue_range);
    Mat red_hue_image = new Mat();
    Core.addWeighted(lower_hue_range, 1.0, upper_hue_range, 1.0, 0, red_hue_image);
    Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(24, 24));
    Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(10, 10));

    Imgproc.blur(red_hue_image, red_hue_image, new Size(11, 11));
    // init
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    // find contours
    Imgproc.findContours(red_hue_image, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    System.out.println("After findcontours");
    // if any contours exist...
    if (hierarchy.size().height > 0 && hierarchy.size().width > 0) {
        // for each contour, display it in blue
        for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) {
            System.out.println(idx);
            //   Imgproc.drawContours(frame, contours, idx, new Scalar(250, 0, 0), 3);

        }
    }
    MatOfPoint2f approxCurve = new MatOfPoint2f();

    // Find the contour with the most points (size().area() is the point count of the Nx1 matrix)
    MatOfPoint2f contour2f = null;
    RotatedRect rotatedrect = null;
    for (MatOfPoint contour : contours) {
        // Convert the contour from MatOfPoint to MatOfPoint2f
        if (contour2f == null || contour.size().area() > contour2f.size().area()) {
            contour2f = new MatOfPoint2f(contour.toArray());
        }
    }
    try {
        rotatedrect = Imgproc.fitEllipse(contour2f);

        double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
        Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);

        //Convert back to MatOfPoint
        MatOfPoint points = new MatOfPoint(approxCurve.toArray());

        // Get bounding rect of contour
        Rect rect = Imgproc.boundingRect(points);

        // draw enclosing rectangle (all same color, but you could use variable i to make them unique)
        Imgproc.rectangle(sourceImg, rect.tl(), rect.br(), new Scalar(255, 0, 0), 1, 8, 0);
        Imgproc.ellipse(sourceImg, rotatedrect, new Scalar(255, 192, 203), 4, 8);
    } catch (CvException e) {
        e.printStackTrace();
        System.out.println("Ingen ellipse fundet");
    }
    return sourceImg;
}

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Finds the bounding box for the book on the stand using
 * the depth average image.
 * @param src The depth average image
 * @return Rectangle delineating the book
 */
public Rect lowResDist(Mat src) {
    Mat dst = src.clone();

    Imgproc.blur(src, dst, new Size(5, 5), new Point(-1, -1), Core.BORDER_REPLICATE);
    //      Imgproc.threshold(dst, dst, 0,255,Imgproc.THRESH_BINARY_INV+Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);
    //      Canny(src, dst, 20, 60, 3);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    /// Find contours
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 1);
    }
    new IViewer("LowRes Contours ", BReaderTools.bufferedImageFromMat(color));

    for (int k = 0; k < contours.size(); k++) {
        MatOfPoint2f tMat = new MatOfPoint2f();
        Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(k).toArray()), tMat, 5, true);
        contours.set(k, new MatOfPoint(tMat.toArray()));
    }

    List<Point> points = new LinkedList<Point>();
    for (int i = 0; i < contours.size(); i++) {
        points.addAll(contours.get(i).toList());
    }

    MatOfInt tHull = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(points.toArray(new Point[points.size()])), tHull);

    //get bounding box
    Point[] tHullPoints = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        tHullPoints[i] = points.get(pIndex);
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(tHullPoints));
    return out;
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

public ArrayList<PVector> findChessboardCorners(int patternWidth, int patternHeight) {
    MatOfPoint2f corners = new MatOfPoint2f();
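    // Note: the boolean "pattern found" result is discarded; corners may be empty if no chessboard was detected.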
    Calib3d.findChessboardCorners(getCurrentMat(), new Size(patternWidth, patternHeight), corners);
    return matToPVectors(corners);
}
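
Detected chessboard corners are usually refined before being used for calibration. A hedged sketch of that follow-up step, assuming a grayscale Mat named gray and a 9x6 pattern (both illustrative):

MatOfPoint2f corners = new MatOfPoint2f();
boolean found = Calib3d.findChessboardCorners(gray, new Size(9, 6), corners);
if (found) {
    // Refine the corner locations to sub-pixel accuracy in place.
    TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 30, 0.001);
    Imgproc.cornerSubPix(gray, corners, new Size(11, 11), new Size(-1, -1), criteria);
}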

From source file:gov.nasa.jpl.memex.pooledtimeseries.PoT.java

License:Apache License

static ArrayList<double[][][]> getOpticalHistograms(Path filename, int w_d, int h_d, int o_d)
        throws PoTException {
    ArrayList<double[][][]> histograms = new ArrayList<double[][][]>();

    VideoCapture capture = new VideoCapture(filename.toString());

    if (!capture.isOpened()) {
        LOG.warning("video file " + filename.getFileName() + " could not be opened.");

        double[][][] hist = new double[w_d][h_d][o_d];
        histograms.add(hist);
    } else {
        // variables for processing images
        Mat original_frame = new Mat();

        Mat frame = new Mat();
        Mat frame_gray = new Mat();
        Mat prev_frame_gray = new Mat();
        MatOfPoint2f flow = new MatOfPoint2f();

        // computing a list of histograms of optical flows (i.e., a list of 5*5*8 arrays)
        for (int frame_index = 0;; frame_index++) {
            // capturing the video images
            capture.read(original_frame);

            if (original_frame.empty()) {
                if (frame_index == 0) {
                    throw new PoTException("Could not read the video file");
                }
                break;
            } else {
                // resizing the captured frame and converting it to the gray scale
                // image.
                Imgproc.resize(original_frame, frame, new Size(frame_width, frame_height));
                Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);

                double[][][] hist = new double[w_d][h_d][o_d];
                histograms.add(hist);

                // from frame #2
                if (frame_index > 0) {
                    // calculate optical flows
                    Video.calcOpticalFlowFarneback(prev_frame_gray, frame_gray, flow, 0.5, 1, 10, 2, 7, 1.5, 0); // 0.5, 1, 15, 2, 7, 1.5, 0

                    // update histogram of optical flows
                    updateOpticalHistogram(histograms.get(frame_index), flow);
                }

                Mat temp_frame = prev_frame_gray;
                prev_frame_gray = frame_gray;
                frame_gray = temp_frame;
            }
        }

        capture.release();
    }

    return histograms;
}
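
updateOpticalHistogram is not shown in this snippet. Since calcOpticalFlowFarneback fills flow as a dense CV_32FC2 matrix (one (dx, dy) pair per pixel), which happens to be MatOfPoint2f's element type, a minimal sketch of how such a routine might walk the field, with the actual binning left abstract:

// Sketch only: iterate the dense Farneback flow field (CV_32FC2).
for (int y = 0; y < flow.rows(); y++) {
    for (int x = 0; x < flow.cols(); x++) {
        double[] v = flow.get(y, x); // v[0] = horizontal flow, v[1] = vertical flow
        double magnitude = Math.hypot(v[0], v[1]);
        double orientation = Math.atan2(v[1], v[0]);
        // ... bin (x, y, orientation, magnitude) into the w_d x h_d x o_d histogram ...
    }
}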

From source file:karthiknr.TextID.ProcessAsyncActivity.java

License:Apache License

public Mat findWarpedMat(Mat imgSource) {

    //convert the image to black and white (8 bit)
    Imgproc.Canny(imgSource, imgSource, 50, 50);

    //apply Gaussian blur to smooth the lines of dots
    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(5, 5), 5);

    //find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    int maxAreaIdx = -1;
    Log.d("size", Integer.toString(contours.size()));
    MatOfPoint temp_contour = contours.get(0); // starting candidate (contours are not sorted by area)
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    MatOfPoint largest_contour = contours.get(0);
    //largest_contour.ge
    List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>();
    //Imgproc.drawContours(imgSource,contours, -1, new Scalar(0, 255, 0), 1);

    for (int idx = 0; idx < contours.size(); idx++) {
        temp_contour = contours.get(idx);
        double contourarea = Imgproc.contourArea(temp_contour);
        //compare this contour to the previous largest contour found
        if (contourarea > maxArea) {
            //check if this contour is a square
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int) temp_contour.total();
            MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
            Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
            if (approxCurve_temp.total() == 4) {
                maxArea = contourarea;
                maxAreaIdx = idx;
                approxCurve = approxCurve_temp;
                largest_contour = temp_contour;
            }
        }
    }

    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BayerBG2RGB);
    //Mat sourceImage =Imgcodecs.imread(Environment.getExternalStorageDirectory().getAbsolutePath()+"/TextID/"+"/oocr.png");
    double[] temp_double;
    temp_double = approxCurve.get(0, 0);
    Point p1 = new Point(temp_double[0], temp_double[1]);
    //Core.circle(imgSource,p1,55,new Scalar(0,0,255));
    //Imgproc.warpAffine(sourceImage, dummy, rotImage,sourceImage.size());
    temp_double = approxCurve.get(1, 0);
    Point p2 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p2,150,new Scalar(255,255,255));
    temp_double = approxCurve.get(2, 0);
    Point p3 = new Point(temp_double[0], temp_double[1]);
    //Core.circle(imgSource,p3,200,new Scalar(255,0,0));
    temp_double = approxCurve.get(3, 0);
    Point p4 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p4,100,new Scalar(0,0,255));
    List<Point> source = new ArrayList<Point>();
    source.add(p1);
    source.add(p2);
    source.add(p3);
    source.add(p4);
    Mat startM = Converters.vector_Point2f_to_Mat(source);
    return startM;
}
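
findWarpedMat only returns the four detected source corners as a Mat. A hedged sketch of the perspective warp that typically follows, where endM, result, inputMat, and the output size are illustrative names and the destination points must be listed in the same order as the detected corners:

// Sketch: map the detected quad onto an upright rectangle (sizes are illustrative).
int resultWidth = 480;
int resultHeight = 640;
List<Point> dest = new ArrayList<Point>();
dest.add(new Point(0, 0));
dest.add(new Point(0, resultHeight));
dest.add(new Point(resultWidth, resultHeight));
dest.add(new Point(resultWidth, 0));
Mat endM = Converters.vector_Point2f_to_Mat(dest);

Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);
Mat result = new Mat(resultHeight, resultWidth, CvType.CV_8UC4);
Imgproc.warpPerspective(inputMat, result, perspectiveTransform, result.size());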

From source file:mineshcvit.opendocscanner.CropImage.java

License:Apache License

private void makeDefault() {

    // minesh: finding the largest rect in the given image

    //Mat grayImage= Imgcodecs.imread(IMAGE_PATH, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);


    Mat imgSource = new Mat();

    Utils.bitmapToMat(mBitmap, imgSource);
    //  Utils.bitmapToMat(bmp32, imgMAT);

    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    //Mat imgSource = Imgcodecs.imread(mImagePath,Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Log.w("myApp", "image path from isnde makedefault() is " + mImagePath);

    int matwidth = imgSource.width();
    int matheight = imgSource.height();

    Log.w("myApp", "mat image width, from makedefault() is " + matwidth);
    Log.w("myApp", "mat image height from, makedefault() is " + matheight);

    Mat imageBin = new Mat();

    double threshold = Imgproc.threshold(imgSource, imageBin, 0, 255, Imgproc.THRESH_OTSU);
    Log.w("myApp", "otsu threshold is " + threshold);

    // for Canny, the higher threshold is Otsu's threshold and the lower threshold is half of it
    Imgproc.Canny(imgSource.clone(), imgSource, threshold * 0.5, threshold);

    // Imgcodecs.imwrite(mImagePath, imgSource);

    // int canny_height=imgSource.height();
    //   int canny_width=imgSource.width();

    // Log.w("myApp", "canny image height is "+canny_height);

    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(3, 3), 3);
    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    //MatVector contours = new MatVector();

    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    MatOfPoint temp_contour = contours.get(0); // starting candidate (contours are not sorted by area)
    MatOfPoint2f approxCurve = new MatOfPoint2f();

    for (int idx = 0; idx < contours.size(); idx++) {
        temp_contour = contours.get(idx);
        double contourarea = Imgproc.contourArea(temp_contour);
        // compare this contour to the previous largest contour found
        if (contourarea > maxArea) {
            // check if this contour is a square
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int) temp_contour.total();
            MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
            Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
            if (approxCurve_temp.total() == 4) {
                maxArea = contourarea;
                approxCurve = approxCurve_temp;
            }
        }
    }
    double[] temp_double;
    temp_double = approxCurve.get(0, 0);
    Point p1 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p1,55,new Scalar(0,0,255));
    // Imgproc.warpAffine(sourceImage, dummy, rotImage,sourceImage.size());
    temp_double = approxCurve.get(1, 0);
    Point p2 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p2,150,new Scalar(255,255,255));
    temp_double = approxCurve.get(2, 0);
    Point p3 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p3,200,new Scalar(255,0,0));
    temp_double = approxCurve.get(3, 0);
    Point p4 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p4,100,new Scalar(0,0,255));
    ArrayList<Point> source = new ArrayList<Point>();
    ArrayList<Point> topPoints = new ArrayList<Point>();
    ArrayList<Point> bottomPoints = new ArrayList<Point>();
    ArrayList<Point> sortedPoints = new ArrayList<Point>();

    source.add(p1);
    source.add(p2);
    source.add(p3);
    source.add(p4);

    Collections.sort(source, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.y, o2.y);
        }
    });

    topPoints.add(source.get(0));
    topPoints.add(source.get(1));

    Collections.sort(topPoints, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.x, o2.x);
        }
    });

    bottomPoints.add(source.get(2));
    bottomPoints.add(source.get(3));

    Collections.sort(bottomPoints, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.x, o2.x);
        }
    });

    sortedPoints.add(topPoints.get(0));//top left
    sortedPoints.add(bottomPoints.get(0));//bottom left
    sortedPoints.add(bottomPoints.get(1));//bottom right
    sortedPoints.add(topPoints.get(1));//top right

    /*
    C++ code to sort the points:

    void sortCorners(std::vector<cv::Point2f>& corners, cv::Point2f center)
    {
        std::vector<cv::Point2f> top, bot;

        for (int i = 0; i < corners.size(); i++)
        {
            if (corners[i].y < center.y)
                top.push_back(corners[i]);
            else
                bot.push_back(corners[i]);
        }

        cv::Point2f tl = top[0].x > top[1].x ? top[1] : top[0];
        cv::Point2f tr = top[0].x > top[1].x ? top[0] : top[1];
        cv::Point2f bl = bot[0].x > bot[1].x ? bot[1] : bot[0];
        cv::Point2f br = bot[0].x > bot[1].x ? bot[0] : bot[1];

        corners.clear();
        corners.push_back(tl);
        corners.push_back(tr);
        corners.push_back(br);
        corners.push_back(bl);
    }

    ...

    // Get mass center
    cv::Point2f center(0, 0);
    for (int i = 0; i < corners.size(); i++)
        center += corners[i];

    center *= (1. / corners.size());
    sortCorners(corners, center);
    */

    // p1 to p4 are in anticlockwise order starting from the top left

    // double s=source.get(0).x;

    /////////////////
    /////////////////
    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Log.w("myApp", "bitmap width is " + width);
    Log.w("myApp", "bitmap height is " + height);

    Rect imageRect = new Rect(0, 0, width, height);

    // make the default size about 4/5 of the width or height

    /*
            
            int cropWidth = Math.min(width, height) * 4 / 5;
            int cropHeight = cropWidth;
            
            
            int x = (width - cropWidth) / 2;
            int y = (height - cropHeight) / 2;
            
            RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
            
    */
    /// To test the points order

    /*
    Point p1 = new Point(1.0*x,1.0*y );
    Point p2 = new Point(1.0*x+150.0,1.0*y+1.0*cropHeight);
            
    Point p3 = new Point(1.0*x+1.0*cropWidth,1.0*y+1.0*cropHeight);
            
    Point p4 = new Point(1.0*x+1.0*cropWidth,1.0*y);
            
    ArrayList<Point> source = new ArrayList<Point>();
    source.add(p1);
    source.add(p2);
    source.add(p3);
    source.add(p4);
            
    */
    ////////////////////////////

    Log.w("myApp",
            "from inside makedeafult inside cropimage calss, default crop rect values are set and now highlight view will be initiated ");

    HighlightView hv = new HighlightView(mImageView, imageRect, sortedPoints);

    Log.w("myApp", "higlight view initiated; done");

    mImageView.add(hv);
    Log.w("myApp", "add hv is done; done");

    mImageView.invalidate();
    mCrop = hv;

    Log.w("myApp", "mcrop=hv donee");
    mCrop.setFocus(true);
}

From source file:opencv.CaptchaDetection.java

/**
 * Finds the digit regions in the captcha image and returns each as an ROI.
 * @param src source image
 * @return list of per-digit ROI sub-images
 */
private static List<Mat> find_number(Mat src) {
    Mat src_tmp = src.clone();

    // dilate to thicken the strokes
    Imgproc.dilate(src_tmp, src_tmp, new Mat());

    // Canny edge detection
    Mat canny_edge = new Mat();
    Imgproc.blur(src_tmp, src_tmp, new Size(3, 3));
    Imgproc.Canny(src_tmp, canny_edge, 50, 150, 3, false);

    // find contours
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(canny_edge, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    List<Rect> boundRect = new ArrayList<>();

    // approximate each contour and keep bounding boxes that are large enough
    for (int i = 0; i < contours.size(); i++) {
        MatOfPoint2f tmp_mp2f_1 = new MatOfPoint2f();
        MatOfPoint2f tmp_mp2f_2 = new MatOfPoint2f();

        contours.get(i).convertTo(tmp_mp2f_1, CvType.CV_32FC2);

        Imgproc.approxPolyDP(tmp_mp2f_1, tmp_mp2f_2, 3, true);

        tmp_mp2f_2.convertTo(contours.get(i), CvType.CV_32S);

        Rect rect = Imgproc.boundingRect(contours.get(i));

        //if (rect.area() > 300)
        //out.println("h : " + rect.height + ", w : " + rect.width + ", aera :  " + rect.area());

        if (rect.height >= 21 && rect.width >= 21 && rect.area() >= 700)
            boundRect.add(rect);
    }

    // draw the kept bounding boxes (debug)
    for (Rect rect : boundRect) {
        Scalar color = new Scalar(128);
        Imgproc.rectangle(src_tmp, rect.tl(), rect.br(), color, 2, 8, 0);
    }

    // sort the bounding boxes left to right
    Collections.sort(boundRect, rectSort);

    List<Mat> numRoi = new ArrayList<>();
    for (Rect rect : boundRect)
        numRoi.add(src.submat(rect));

    //for (Mat roi : numRoi) 
    //showResult(roi, "roi");

    return numRoi;
}
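
The rectSort comparator used above is not part of this snippet. A minimal sketch consistent with the left-to-right sort described in the comment (the field name is the snippet's; the body is an assumption):

// Hypothetical comparator matching the left-to-right sort above.
private static final Comparator<Rect> rectSort = new Comparator<Rect>() {
    @Override
    public int compare(Rect a, Rect b) {
        return Integer.compare(a.x, b.x);
    }
};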

From source file:org.ar.rubik.CubePoseEstimator.java

License:Open Source License

/**
 * Pose Estimation
 *
 * Deduce real-world cube coordinates and rotation.
 *
 * @param rubikFace the recognized Rubik face to estimate the pose from
 * @param image the camera frame, used for its dimensions
 * @param stateModel application state holding the camera calibration
 * @return the estimated CubePose, or null if the face cannot be used
 */
public static CubePose poseEstimation(RubikFace rubikFace, Mat image, StateModel stateModel) {

    if (rubikFace == null)
        return null;

    if (rubikFace.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        return null;

    LeastMeansSquare lmsResult = rubikFace.lmsResult;

    if (lmsResult == null)
        return null;

    // OpenCV pose estimation requires at least four points (this code demands more than four).
    if (rubikFace.rhombusList.size() <= 4)
        return null;

    if (cameraMatrix == null) {
        cameraMatrix = stateModel.cameraCalibration.getOpenCVCameraMatrix((int) (image.size().width),
                (int) (image.size().height));
        distCoeffs = new MatOfDouble(stateModel.cameraCalibration.getDistortionCoefficients());
    }

    /*
     * For the purposes of external camera calibration, i.e., where the cube is
     * located in camera coordinates, we define the geometry of the face of a
     * cube composed of nine 3D locations, each representing the center of a tile.
     * Correspondence between these points and nine 2D points from the actual
     * camera image, along with camera calibration data, is used to calculate
     * the pose of the cube (i.e., "Cube Pose").
     *
     * The geometry of the cube here is defined as having its center at {0,0,0}
     * and an edge size of 2 units (i.e., +/- 1.0).
     */

    // List of real world point and screen points that correspond.
    List<Point3> objectPointsList = new ArrayList<Point3>(9);
    List<Point> imagePointsList = new ArrayList<Point>(9);

    // Create list of image (in 2D) and object (in 3D) points.
    // Loop over Rubik Face Tiles
    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {

            Rhombus rhombus = rubikFace.faceRhombusArray[n][m];

            // Only use if Rhombus was non null.
            if (rhombus != null) {

                // Obtain center of Rhombus in screen image coordinates
                // Convention:
                //  o X is zero on the left, and increases to the right.
                //  o Y is zero on the top and increases downward.
                Point imagePoint = new Point(rhombus.center.x, rhombus.center.y);
                imagePointsList.add(imagePoint);

                // N and M are actual not conceptual (as in design doc).
                int mm = 2 - n;
                int nn = 2 - m;
                // above now matches design doc.
                // that is:
                //  o the nn vector is to the right and upwards.
                //  o the mm vector is to the left and upwards.

                // Calculate center of Tile in OpenCV World Space Coordinates
                // Convention:
                //  o X is zero in the center, and increases to the left.
                //  o Y is zero in the center and increases downward.
                //  o Z is zero (at the world coordinate origin) and increases away from the camera.
                float x = (1 - mm) * 0.66666f;
                float y = -1.0f;
                float z = -1.0f * (1 - nn) * 0.666666f;
                Point3 objectPoint = new Point3(x, y, z);
                objectPointsList.add(objectPoint);
            }
        }
    }

    // Cast image point list into OpenCV Matrix.
    MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(imagePointsList);

    // Cast object point list into OpenCV Matrix.
    MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(objectPointsList);

    Mat rvec = new Mat();
    Mat tvec = new Mat();

    //      Log.e(Constants.TAG, "Image Points: " + imagePoints.dump());
    //      Log.e(Constants.TAG, "Object Points: " + objectPoints.dump());

    //      =+= sometimes a "count >= 4" exception 
    Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);

    Log.v(Constants.TAG, String.format("Open CV Rotation Vector x=%4.2f y=%4.2f z=%4.2f", rvec.get(0, 0)[0],
            rvec.get(1, 0)[0], rvec.get(2, 0)[0]));

    // Convert from OpenCV to OpenGL World Coordinates
    float x = +1.0f * (float) tvec.get(0, 0)[0];
    float y = -1.0f * (float) tvec.get(1, 0)[0];
    float z = -1.0f * (float) tvec.get(2, 0)[0];

    //        // =+= Add manual offset correction to translation  
    //        x += MenuAndParams.xTranslationOffsetParam.value;
    //        y += MenuAndParams.yTranslationOffsetParam.value;
    //        z += MenuAndParams.zTranslationOffsetParam.value;      

    // Convert the rotation vector from the OpenCV axes polarity convention to the OpenGL one.
    // Note, polarity of x-axis is OK, no need to invert.
    rvec.put(1, 0, -1.0f * rvec.get(1, 0)[0]); // y-axis
    rvec.put(2, 0, -1.0f * rvec.get(2, 0)[0]); // z-axis

    //        // =+= Add manual offset correction to Rotation
    //        rvec.put(0, 0, rvec.get(0, 0)[0] + MenuAndParams.xRotationOffsetParam.value * Math.PI / 180.0);  // X rotation
    //        rvec.put(1, 0, rvec.get(1, 0)[0] + MenuAndParams.yRotationOffsetParam.value * Math.PI / 180.0);  // Y rotation
    //        rvec.put(2, 0, rvec.get(2, 0)[0] + MenuAndParams.zRotationOffsetParam.value * Math.PI / 180.0);  // Z rotation

    // Package up as CubePose object
    CubePose cubePose = new CubePose();
    cubePose.x = x;
    cubePose.y = y;
    cubePose.z = z;
    cubePose.xRotation = rvec.get(0, 0)[0];
    cubePose.yRotation = rvec.get(1, 0)[0];
    cubePose.zRotation = rvec.get(2, 0)[0];

    //      Log.e(Constants.TAG, "Result: " + result);
    //      Log.e(Constants.TAG, "Camera: " + cameraMatrix.dump());
    //      Log.e(Constants.TAG, "Rotation: " + rvec.dump());
    //      Log.e(Constants.TAG, "Translation: " + tvec.dump());

    //      // Reporting in OpenGL World Coordinates
    //      Core.rectangle(image, new Point(0, 50), new Point(1270, 150), Constants.ColorBlack, -1);
    //      Core.putText(image, String.format("Translation  x=%4.2f y=%4.2f z=%4.2f", x, y, z), new Point(50, 100), Constants.FontFace, 3, Constants.ColorWhite, 3);
    //      Core.putText(image, String.format("Rotation     x=%4.0f y=%4.0f z=%4.0f", cubeXrotation, cubeYrotation, cubeZrotation), new Point(50, 150), Constants.FontFace, 3, Constants.ColorWhite, 3);

    Log.v(Constants.TAG, "Cube Pose: " + cubePose);

    return cubePose;
}