Example usage for org.opencv.core MatOfPoint2f MatOfPoint2f

List of usage examples for org.opencv.core MatOfPoint2f MatOfPoint2f

Introduction

On this page you can find example usage for the org.opencv.core.MatOfPoint2f constructor MatOfPoint2f().

Prototype

public MatOfPoint2f() 
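
A minimal sketch of this constructor in use, assuming the OpenCV Java bindings are on the classpath and the native library can be loaded (the class name MatOfPoint2fSketch is only illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class MatOfPoint2fSketch {
    public static void main(String[] args) {
        // Load the OpenCV native library; how it is installed is environment-specific.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // The no-argument constructor creates an empty 2-channel float matrix (CV_32FC2).
        MatOfPoint2f pts = new MatOfPoint2f();

        // fromArray/fromList (re)allocate the matrix from plain Point objects,
        // which is how most of the examples below populate it.
        pts.fromArray(new Point(0, 0), new Point(10, 0), new Point(10, 10), new Point(0, 10));

        System.out.println(pts.rows() + " points, type " + CvType.typeToString(pts.type()));
    }
}

As the examples below show, an empty MatOfPoint2f is typically filled with fromList/fromArray, or by converting a MatOfPoint via convertTo(..., CvType.CV_32FC2), before being passed to Imgproc functions such as approxPolyDP, minAreaRect, fitEllipse, minEnclosingCircle, or getPerspectiveTransform.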

Source Link

Usage

From source file:ImageReade.java

public static void detectLetter(Mat img) {
    ArrayList<Rect> boundRect = new ArrayList<>();
    Mat img_gray, img_sobel, img_threshold, element;
    img_gray = new Mat();
    img_sobel = new Mat();
    img_threshold = new Mat();
    element = new Mat();
    Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY);
    imshow("Rec img_gray", img_gray);
    Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8U, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT);
    imshow("Rec img_sobel", img_sobel);
    Imgproc.threshold(img_sobel, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
    imshow("Rec img_threshold", img_threshold);

    element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(16, 6));

    Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element);
    imshow("Rec img_threshold second", img_threshold);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE);
    Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1);

    for (int i = 0; i < contours.size(); i++) {
        System.out.println(Imgproc.contourArea(contours.get(i)));
        //            if (Imgproc.contourArea(contours.get(i)) > 100) {
        //                //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true );
        //                Rect rect = Imgproc.boundingRect(contours.get(i));
        //                System.out.println(rect.height);
        //                if (rect.width > rect.height) {
        //                    //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
        //                    Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255));
        //                }
        //                    
        //                    
        //            }
        if (Imgproc.contourArea(contours.get(i)) > 100) {
            MatOfPoint2f mMOP2f1 = new MatOfPoint2f();
            MatOfPoint2f mMOP2f2 = new MatOfPoint2f();
            contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2);
            Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 3, true);
            mMOP2f2.convertTo(contours.get(i), CvType.CV_32S);
            Rect rect = Imgproc.boundingRect(contours.get(i));
            if (rect.width > rect.height) {
                Core.rectangle(img, new Point(rect.x, rect.y),
                        new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
            }
        }
    }
    imshow("Rec Detected", img);
}

From source file:ThirdTry.java

public static void detectLetter(Mat img, Mat m2) {
    ArrayList<Rect> boundRect = new ArrayList<>();
    Mat img_gray, img_sobel, img_threshold, element;
    img_gray = new Mat();
    img_sobel = new Mat();
    img_threshold = new Mat();
    element = new Mat();
    Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY);
    //imshow("Rec img_gray", img_gray);
    Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8UC1, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT);
    //imshow("Rec img_sobel", img_sobel);
    Imgproc.threshold(m2, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
    //imshow("Rec img_threshold", img_threshold);

    element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 2));

    Imgproc.morphologyEx(m2, img_threshold, CV_MOP_CLOSE, element);
    imshow("Rec img_threshold second", img_threshold);

    element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(12, 12));
    Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element);
    //imshow("Rec img_threshold second", img_threshold);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE);
    Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1);

    for (int i = 0; i < contours.size(); i++) {
        System.out.println(Imgproc.contourArea(contours.get(i)));
        //            if (Imgproc.contourArea(contours.get(i)) > 100) {
        //                //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true );
        //                Rect rect = Imgproc.boundingRect(contours.get(i));
        //                System.out.println(rect.height);
        //                if (rect.width > rect.height) {
        //                    //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
        //                    Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255));
        //                }
        //                    
        //                    
        //            }
        if (Imgproc.contourArea(contours.get(i)) > 100) {
            MatOfPoint2f mMOP2f1 = new MatOfPoint2f();
            MatOfPoint2f mMOP2f2 = new MatOfPoint2f();
            contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2);
            Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 3, true);
            mMOP2f2.convertTo(contours.get(i), CvType.CV_32S);
            Rect rect = Imgproc.boundingRect(contours.get(i));
            if (rect.width > rect.height) {
                Core.rectangle(img, new Point(rect.x, rect.y),
                        new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
            }
        }
    }
    //imshow("Rec Detected", img);
}

From source file:OctoEye.java

License:Open Source License

private void detectPupil() {
    // min and max pupil radius
    int r_min = 2;
    int r_max = 45;

    // min and max pupil diameter
    int d_min = 2 * r_min;
    int d_max = 2 * r_max;

    // min and max pupil area
    double area;
    double a_min = Math.PI * r_min * r_min;
    double a_max = Math.PI * r_max * r_max;

    // histogram stuff
    List<Mat> images;
    MatOfInt channels;
    Mat mask;
    Mat hist;
    MatOfInt mHistSize;
    MatOfFloat mRanges;

    // contour and circle stuff
    Rect rect = null;
    Rect rectMin;
    Rect rectMax;
    List<MatOfPoint> contours;
    MatOfPoint3 circles;

    // pupil center
    Point p;

    // ellipse test points
    Point v;
    Point r;
    Point s;

    // rect points
    Point tl;
    Point br;

    // pupil edge detection
    Vector<Point> pointsTest;
    Vector<Point> pointsEllipse;
    Vector<Point> pointsRemoved;

    // temporary variables
    double distance;
    double rad;
    double length;
    int x;
    int y;
    int tmp;
    byte buff[];

    // -------------------------------------------------------------------------------------------------------------
    // step 1
    // blur the image to reduce noise

    Imgproc.medianBlur(src, tmp1, 25);

    // -------------------------------------------------------------------------------------------------------------
    // step 2
    // locate the pupil with feature detection and compute a histogram for each feature;
    // the best feature will be used as the rough pupil location (rectMin)

    int score = 0;
    int winner = 0;

    // feature detection
    MatOfKeyPoint matOfKeyPoints = new MatOfKeyPoint();
    FeatureDetector blobDetector = FeatureDetector.create(FeatureDetector.MSER); // Maximal Stable Extremal Regions
    blobDetector.detect(tmp1, matOfKeyPoints);
    List<KeyPoint> keyPoints = matOfKeyPoints.toList();

    // histogram calculation
    for (int i = 0; i < keyPoints.size(); i++) {
        x = (int) keyPoints.get(i).pt.x;
        y = (int) keyPoints.get(i).pt.y;
        tl = new Point(x - 5 >= 0 ? x - 5 : 0, y - 5 >= 0 ? y - 5 : 0);
        br = new Point(x + 5 < WIDTH ? x + 5 : WIDTH - 1, y + 5 < HEIGHT ? y + 5 : HEIGHT - 1);

        images = new ArrayList<Mat>();
        images.add(tmp1.submat(new Rect(tl, br)));
        channels = new MatOfInt(0);
        mask = new Mat();
        hist = new Mat();
        mHistSize = new MatOfInt(256);
        mRanges = new MatOfFloat(0f, 256f);
        Imgproc.calcHist(images, channels, mask, hist, mHistSize, mRanges);

        tmp = 0;
        for (int j = 0; j < 256 / 3; j++) {
            tmp += (256 / 3 - j) * (int) hist.get(j, 0)[0];
        }
        if (tmp >= score) {
            score = tmp;
            winner = i;
            rect = new Rect(tl, br);
        }

        if (debug) {
            // show features (orange)
            Core.circle(dbg, new Point(x, y), 3, ORANGE);
        }
    }
    if (rect == null) {
        return;
    }
    rectMin = rect.clone();

    if (debug) {
        // show rectMin (red)
        Core.rectangle(dbg, rectMin.tl(), rect.br(), RED, 1);
    }

    // -------------------------------------------------------------------------------------------------------------
    // step 3
    // compute a rectMax (blue) which is larger than the pupil

    int margin = 32;

    rect.x = rect.x - margin;
    rect.y = rect.y - margin;
    rect.width = rect.width + 2 * margin;
    rect.height = rect.height + 2 * margin;

    rectMax = rect.clone();

    if (debug) {
        // show rectMax (blue)
        Core.rectangle(dbg, rectMax.tl(), rectMax.br(), BLUE);
    }

    // -------------------------------------------------------------------------------------------------------------
    // step 4
    // blur the image again

    Imgproc.medianBlur(src, tmp1, 7);
    Imgproc.medianBlur(tmp1, tmp1, 3);
    Imgproc.medianBlur(tmp1, tmp1, 3);
    Imgproc.medianBlur(tmp1, tmp1, 3);

    // -------------------------------------------------------------------------------------------------------------
    // step 5
    // detect edges

    Imgproc.Canny(tmp1, tmp2, 40, 50);

    // -------------------------------------------------------------------------------------------------------------
    // step 6
    // from the pupil center to the rectMax borders, find all edge points and compute a first ellipse

    p = new Point(rectMin.x + rectMin.width / 2, rectMin.y + rectMin.height / 2);
    pointsTest = new Vector<Point>();
    pointsEllipse = new Vector<Point>();
    pointsRemoved = new Vector<Point>();
    buff = new byte[tmp2.rows() * tmp2.cols()];
    tmp2.get(0, 0, buff);

    length = Math.min(p.x - rectMax.x - 3, p.y - rectMax.y - 3);
    length = Math.sqrt(2 * Math.pow(length, 2));
    Point z = new Point(p.x, p.y - length);
    for (int i = 0; i < 360; i += 15) {
        rad = Math.toRadians(i);
        x = (int) (p.x + Math.cos(rad) * (z.x - p.x) - Math.sin(rad) * (z.y - p.y));
        y = (int) (p.y + Math.sin(rad) * (z.x - p.x) - Math.cos(rad) * (z.y - p.y));
        pointsTest.add(new Point(x, y));
    }

    if (debug) {
        for (int i = 0; i < pointsTest.size(); i++) {
            Core.line(dbg, p, pointsTest.get(i), GRAY, 1);
            Core.rectangle(dbg, rectMin.tl(), rectMin.br(), GREEN, 1);
            Core.rectangle(dbg, rectMax.tl(), rectMax.br(), BLUE, 1);
        }
        Core.rectangle(dbg, rectMin.tl(), rectMin.br(), BLACK, -1);
        Core.rectangle(dbg, rectMin.tl(), rectMin.br(), RED, 1);
        Core.rectangle(dbg, rectMax.tl(), rectMax.br(), BLUE);
    }

    // p: origin ("center" of the ellipse)
    // v: target point (test point)
    // r: direction vector PV
    for (int i = 0; i < pointsTest.size(); i++) {
        v = new Point(pointsTest.get(i).x, pointsTest.get(i).y);
        r = new Point(v.x - p.x, v.y - p.y);
        length = Math.sqrt(Math.pow(p.x - v.x, 2) + Math.pow(p.y - v.y, 2));
        boolean found = false;
        for (int j = 0; j < Math.round(length); j++) {
            s = new Point(Math.rint(p.x + (double) j / length * r.x),
                    Math.rint(p.y + (double) j / length * r.y));
            s.x = Math.max(1, Math.min(s.x, WIDTH - 2));
            s.y = Math.max(1, Math.min(s.y, HEIGHT - 2));
            tl = new Point(s.x - 1, s.y - 1);
            br = new Point(s.x + 1, s.y + 1);
            buff = new byte[3 * 3];
            rect = new Rect(tl, br);
            try {
                (tmp2.submat(rect)).get(0, 0, buff);
                for (int k = 0; k < 3 * 3; k++) {
                    if (Math.abs(buff[k]) == 1) {
                        pointsEllipse.add(s);
                        found = true;
                        break;
                    }
                }
            } catch (Exception e) {
                break;
            }
            if (found) {
                break;
            }
        }
    }

    double e_min = Double.POSITIVE_INFINITY;
    double e_max = 0;
    double e_med = 0;
    for (int i = 0; i < pointsEllipse.size(); i++) {
        v = pointsEllipse.get(i);
        length = Math.sqrt(Math.pow(p.x - v.x, 2) + Math.pow(p.y - v.y, 2));
        e_min = (length < e_min) ? length : e_min;
        e_max = (length > e_max) ? length : e_max;
        e_med = e_med + length;
    }
    e_med = e_med / pointsEllipse.size();
    if (pointsEllipse.size() >= 5) {
        Point[] points1 = new Point[pointsEllipse.size()];
        for (int i = 0; i < pointsEllipse.size(); i++) {
            points1[i] = pointsEllipse.get(i);
        }
        MatOfPoint2f points2 = new MatOfPoint2f();
        points2.fromArray(points1);
        pupil = Imgproc.fitEllipse(points2);
    }
    if (pupil.center.x == 0 && pupil.center.y == 0) {
        // something went wrong, give up
        reset();
        return;
    }

    if (debug) {
        Core.ellipse(dbg, pupil, PURPLE, 2);
    }

    // -------------------------------------------------------------------------------------------------------------
    // step 7
    // remove some outlier points and compute the ellipse again

    try {
        for (int i = 1; i <= 4; i++) {
            distance = 0;
            int remove = 0;
            for (int j = pointsEllipse.size() - 1; j >= 0; j--) {
                v = pointsEllipse.get(j);
                length = Math.sqrt(Math.pow(v.x - pupil.center.x, 2) + Math.pow(v.y - pupil.center.y, 2));
                if (length > distance) {
                    distance = length;
                    remove = j;
                }
            }
            v = pointsEllipse.get(remove);
            pointsEllipse.removeElementAt(remove);
            pointsRemoved.add(v);
        }
    } catch (Exception e) {
        // something went wrong, give up
        reset();
        return;
    }
    if (pointsEllipse.size() >= 5) {
        Point[] points1 = new Point[pointsEllipse.size()];
        for (int i = 0; i < pointsEllipse.size(); i++) {
            points1[i] = pointsEllipse.get(i);
        }
        MatOfPoint2f points2 = new MatOfPoint2f();
        points2.fromArray(points1);
        pupil = Imgproc.fitEllipse(points2);

        Point[] vertices = new Point[4];
        pupil.points(vertices);
        double d1 = Math
                .sqrt(Math.pow(vertices[1].x - vertices[0].x, 2) + Math.pow(vertices[1].y - vertices[0].y, 2));
        double d2 = Math
                .sqrt(Math.pow(vertices[2].x - vertices[1].x, 2) + Math.pow(vertices[2].y - vertices[1].y, 2));

        if (d1 >= d2) {
            pupilMajorAxis = (int) (d1 / 2);
            pupilMinorAxis = (int) (d2 / 2);
            axisA = new Point(vertices[1].x + (vertices[2].x - vertices[1].x) / 2,
                    vertices[1].y + (vertices[2].y - vertices[1].y) / 2);
            axisB = new Point(vertices[0].x + (vertices[1].x - vertices[0].x) / 2,
                    vertices[0].y + (vertices[1].y - vertices[0].y) / 2);
        } else {
            pupilMajorAxis = (int) (d2 / 2);
            pupilMinorAxis = (int) (d1 / 2);
            axisB = new Point(vertices[1].x + (vertices[2].x - vertices[1].x) / 2,
                    vertices[1].y + (vertices[2].y - vertices[1].y) / 2);
            axisA = new Point(vertices[0].x + (vertices[1].x - vertices[0].x) / 2,
                    vertices[0].y + (vertices[1].y - vertices[0].y) / 2);
        }
    }

    double ratio = (double) pupilMinorAxis / (double) pupilMajorAxis;
    if (ratio < 0.75 || 2 * pupilMinorAxis <= d_min || 2 * pupilMajorAxis >= d_max) {
        // something went wrong, give up
        reset();
        return;
    }

    // pupil found
    if (debug) {
        Core.ellipse(dbg, pupil, GREEN, 2);
        Core.line(dbg, pupil.center, axisA, RED, 2);
        Core.line(dbg, pupil.center, axisB, BLUE, 2);
        Core.circle(dbg, pupil.center, 1, GREEN, 0);

        x = 5;
        y = 5;
        Core.rectangle(dbg, new Point(x, y), new Point(x + 80 + 4, y + 10), BLACK, -1);
        Core.rectangle(dbg, new Point(x + 2, y + 2), new Point(x + 2 + pupilMajorAxis, y + 4), RED, -1);
        Core.rectangle(dbg, new Point(x + 2, y + 6), new Point(x + 2 + pupilMinorAxis, y + 8), BLUE, -1);

        for (int i = pointsEllipse.size() - 1; i >= 0; i--) {
            Core.circle(dbg, pointsEllipse.get(i), 2, ORANGE, -1);
        }
        for (int i = pointsRemoved.size() - 1; i >= 0; i--) {
            Core.circle(dbg, pointsRemoved.get(i), 2, PURPLE, -1);
        }
    }
    Core.ellipse(dst, pupil, GREEN, 2);
    Core.circle(dst, pupil.center, 1, GREEN, 0);
}

From source file:OCV_MinAreaRect.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    byte[] byteArray = (byte[]) ip.getPixels();
    int w = ip.getWidth();
    int h = ip.getHeight();
    int num_slice = ip.getSliceNumber();

    ArrayList<Point> lstPt = new ArrayList<Point>();
    MatOfPoint2f pts = new MatOfPoint2f();

    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            if (byteArray[x + w * y] != 0) {
                lstPt.add(new Point((double) x, (double) y));
            }
        }
    }

    if (lstPt.isEmpty()) {
        return;
    }

    pts.fromList(lstPt);
    RotatedRect rect = Imgproc.minAreaRect(pts);
    showData(rect, num_slice);
}

From source file:OCV_MinEnclosingCircle.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    byte[] byteArray = (byte[]) ip.getPixels();
    int w = ip.getWidth();
    int h = ip.getHeight();
    int num_slice = ip.getSliceNumber();

    ArrayList<Point> lstPt = new ArrayList<Point>();
    MatOfPoint2f pts = new MatOfPoint2f();

    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            if (byteArray[x + w * y] != 0) {
                lstPt.add(new Point((double) x, (double) y));
            }
        }
    }

    if (lstPt.isEmpty()) {
        return;
    }

    pts.fromList(lstPt);
    float[] radius = new float[1];
    Point center = new Point();
    Imgproc.minEnclosingCircle(pts, center, radius);
    showData(center.x, center.y, (double) radius[0], num_slice);
}

From source file:OCV_GetPerspectiveTransform.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    MatOfPoint2f matPt_src = new MatOfPoint2f();
    MatOfPoint2f matPt_dst = new MatOfPoint2f();
    matPt_src.fromList(lstPt_src);
    matPt_dst.fromList(lstPt_dst);

    Mat mat = Imgproc.getPerspectiveTransform(matPt_src, matPt_dst);

    if (mat == null || mat.rows() <= 0 || mat.cols() <= 0) {
        IJ.showMessage("Output is null or error");
        return;
    }

    ResultsTable rt = OCV__LoadLibrary.GetResultsTable(true);

    for (int i = 0; i < 3; i++) {
        rt.incrementCounter();
        rt.addValue("Column01", String.valueOf(mat.get(i, 0)[0]));
        rt.addValue("Column02", String.valueOf(mat.get(i, 1)[0]));
        rt.addValue("Column03", String.valueOf(mat.get(i, 2)[0]));
    }

    rt.show("Results");
}

From source file:OCV_FitEllipse.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    byte[] byteArray = (byte[]) ip.getPixels();
    int w = ip.getWidth();
    int h = ip.getHeight();
    int num_slice = ip.getSliceNumber();

    ArrayList<Point> lstPt = new ArrayList<Point>();
    MatOfPoint2f pts = new MatOfPoint2f();

    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            if (byteArray[x + w * y] != 0) {
                lstPt.add(new Point((double) x, (double) y));
            }
        }
    }

    if (lstPt.isEmpty()) {
        return;
    }

    pts.fromList(lstPt);
    RotatedRect rect = Imgproc.fitEllipse(pts);
    showData(rect, num_slice);
}

From source file:OCV_GetAffineTransform.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    MatOfPoint2f matPt_src = new MatOfPoint2f();
    MatOfPoint2f matPt_dst = new MatOfPoint2f();
    matPt_src.fromList(lstPt_src);
    matPt_dst.fromList(lstPt_dst);

    Mat mat = Imgproc.getAffineTransform(matPt_src, matPt_dst);

    if (mat == null || mat.rows() <= 0 || mat.cols() <= 0) {
        IJ.showMessage("Output is null or error");
        return;
    }

    ResultsTable rt = OCV__LoadLibrary.GetResultsTable(true);

    for (int i = 0; i < 2; i++) {
        rt.incrementCounter();
        rt.addValue("Column01", String.valueOf(mat.get(i, 0)[0]));
        rt.addValue("Column02", String.valueOf(mat.get(i, 1)[0]));
        rt.addValue("Column03", String.valueOf(mat.get(i, 2)[0]));
    }

    rt.show("Results");
}

From source file:arlocros.ComputePose.java

License:Apache License

public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker
        // system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add 2D points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            p = new Point(vertex2d[1].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            //            final MatOfPoint mop = new MatOfPoint();
            //            mop.fromList(points2dlist);
            //            final List<MatOfPoint> pts = new ArrayList<>();
            //            pts.add(mop);

            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));

            if (visualization) {
                // draw red rectangle around detected marker
                Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                        new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
                final String markerFile = patternmap.get(id).replaceAll(".*4x4_", "").replace(".patt", "");
                Core.putText(image2, markerFile,
                        new Point((vertex2d[2].x + vertex2d[0].x) / 2.0, vertex2d[0].y - 5), 4, 1,
                        new Scalar(250, 0, 0));
            }
        }

    }
    // load 2D and 3D points to Mats for solvePNP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP with empty input data (no markers detected)
    if (points2dlist.size() == 0) {
        objectPoints.release();
        imagePoints.release();
        return false;
    }

    // RANSAC-based pose estimation (may produce more shaking than plain solvePnP)
    Mat inliers = new Mat();

    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ArMarkerPoseEstimator.getLog()
            .info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());

    objectPoints.release();
    imagePoints.release();

    // avoid publish zero pose if localization failed
    if (inliers.rows() == 0) {
        inliers.release();
        return false;
    }

    inliers.release();
    return true;
}

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

/**
 * Analyze video frames using a computer vision approach and generate an ArrayList<AttitudeRec>.
 *
 * @param recs  output ArrayList of AttitudeRec
 * @return total number of frames in the video
 */
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;
    boolean use_timestamp = true;

    // roughly determine if decimation is necessary
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(new File(mPath, "video.mp4"), decimation);

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    Size patternSize = new Size(4, 11);

    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();
    long[] ts = new long[1];

    while ((frame = videoDecoder.getFrame(ts)) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        if (use_timestamp && ts[0] == -1) {
            use_timestamp = false;
        }

        // increment up front, as later parts of this loop may be skipped via continue
        i++;

        // convert to gray manually, since findCirclesGrid by default converts with COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern = Calib3d.findCirclesGrid(gray, patternSize, centers,
                Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters using ground-truth 3D points and the pixel
        // positions of the blobs found by findCirclesGrid; an estimated camera matrix and
        // zero distortion are assumed.
        boolean foundSolution = Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject the points to evaluate the accuracy of the solvePnP result
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // error is evaluated with the L2 norm, i.e. roughly the real pixel-distance error / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // if the error is reasonable, add the result; the ratio to frame height is used to avoid
        // discriminating against higher-definition videos
        if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
            double[] rv = new double[3];
            double timestamp;

            rvec.get(0, 0, rv);
            if (use_timestamp) {
                timestamp = (double) ts[0] / 1e6;
            } else {
                timestamp = (double) i / meta.fps;
            }
            if (LOCAL_LOGV)
                Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
            recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath() + "/RVCVRecData/DebugCV/img"
                    + i + ".png", frame);
        }
    }

    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}