Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usage for the org.opencv.core Point constructor, Point(double x, double y).

Prototype

public Point(double x, double y) 
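
A minimal, self-contained sketch of the constructor on its own (assuming the OpenCV Java bindings, 3.x or later, are on the classpath; the class name and the drawing call are illustrative and not taken from the examples below):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class PointExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Construct two points from double x/y coordinates.
        Point topLeft = new Point(10.0, 20.0);
        Point bottomRight = new Point(110.0, 80.0);
        // Points are typically passed to drawing calls such as Imgproc.rectangle.
        Mat canvas = Mat.zeros(200, 200, CvType.CV_8UC3);
        Imgproc.rectangle(canvas, topLeft, bottomRight, new Scalar(0, 255, 0), 2);
        System.out.println("top-left = " + topLeft + ", bottom-right = " + bottomRight);
    }
}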

Usage

From source file:com.mycompany.analyzer.VisualBalanceAnalyzer.java

public Point getImgCenter() {
    // Midpoint of the frame: width and height are reduced by one so the
    // integer division lands on a pixel inside the frame.
    int width = frameWidth - 1;
    int height = frameHeight - 1;
    return new Point(frameX + width / 2, frameY + height / 2);
}

From source file:com.mycompany.analyzer.VisualBalanceAnalyzer.java

public Point getAllRegionsCenter() {
    int x = 0;
    int y = 0;
    int sumX = 0;
    int sumY = 0;
    double imgArea = frameWidth * frameHeight;
    // Faces get an extra weight of 3% of the image area.
    double weight = imgArea * 3 / 100;
    Point actualCenter;
    if (objectList.size() + faceList.size() > 0) {
        // Accumulate the region centers, weighted by region area.
        for (Rect rect : objectList) {
            actualCenter = getRectCenter(rect);
            x = (int) actualCenter.x;
            y = (int) actualCenter.y;
            sumX += (x * rect.area());
            sumY += (y * rect.area());
        }
        for (Rect rect : faceList) {
            actualCenter = getRectCenter(rect);
            x = (int) actualCenter.x;
            y = (int) actualCenter.y;
            sumX += (x * (rect.area() + weight));
            sumY += (y * (rect.area() + weight));
        }
        // Normalize by the total mass to get the weighted center.
        sumX = (int) (sumX / calcSumOfMass());
        sumY = (int) (sumY / calcSumOfMass());
    }
    return new Point(sumX, sumY);
}

From source file:com.mycompany.facedetection.FaceDetector.java

public void findFaces() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(
            "D:\\opencv\\sources\\data\\lbpcascades\\lbpcascade_frontalface.xml");
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(img, faceDetections);
    for (Rect rect : faceDetections.toArray()) {
        faceList.add(rect);
        Imgproc.rectangle(img, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }
}

From source file:com.mycompany.linedetection.LineDetector.java

public void findLines() {
    Imgproc.Canny(img, edgeDetectedImg, 100, 200, 3, true);
    Mat lines = new Mat();

    int width = img.width();
    int height = img.height();
    double diagonal = Math.sqrt(width * width + height * height);
    int minOfWidthHeight = (width < height) ? width : height;

    Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, minOfWidthHeight * 10 / 100,
            diagonal * 25 / 100, diagonal * 4 / 100);

    int firstN = (lines.rows() < 5) ? lines.rows() : 5;

    for (int x = 0; x < lines.rows(); x++) {
        // Each row of the HoughLinesP output holds one segment as {x1, y1, x2, y2}.
        double[] vec = lines.get(x, 0);
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point startPoint = new Point(x1, y1);
        Point endPoint = new Point(x2, y2);

        double angle_inv = horizontalLine.getAngle(new Line(x1, y1, x2, y2));
        double angle = horizontalLine.getAngle(new Line(x2, y2, x1, y1));
        if ((angle >= diagAngle1 - DIAGONAL_TRESHOLD && angle <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle >= diagAngle2 - DIAGONAL_TRESHOLD && angle <= diagAngle2 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle1 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle2 - DIAGONAL_TRESHOLD
                        && angle_inv <= diagAngle2 + DIAGONAL_TRESHOLD)) {
            diagonalLineList.add(new Line(x1, y1, x2, y2));
            Imgproc.line(img, startPoint, endPoint, new Scalar(255, 255, 0), 4);
        } else {
            lineList.add(new Line(x1, y1, x2, y2));
        }

    }

    // Sort the remaining (non-diagonal) lines by length, longest first.
    Collections.sort(lineList, new Comparator<Line>() {
        @Override
        public int compare(Line l1, Line l2) {
            return Double.compare(l2.getLength(), l1.getLength());
        }
    });

    // Keep only the firstN + 1 longest of those lines and redraw them.
    ArrayList<Line> arr = new ArrayList<Line>();

    for (int i = 0; i < firstN + 1; i++) {
        if (lineList.size() >= firstN + 1) {
            double x1 = lineList.get(i).getX1(), y1 = lineList.get(i).getY1(), x2 = lineList.get(i).getX2(),
                    y2 = lineList.get(i).getY2();
            Point startPoint = new Point(x1, y1);
            Point endPoint = new Point(x2, y2);
            arr.add(lineList.get(i));
            Imgproc.line(img, startPoint, endPoint, new Scalar(0, 0, 255), 3);
        }
    }
    lineList = arr;
}

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Computes the four corner points from the detected pairs of lines.
 * @param lineEq line equations (ax + by = 1), indexed as [angle][section]
 * @param points output ArrayList that receives the corner points
 * @param img image to draw debug markers on; may be null to skip drawing
 * @return true on success, false if an intersection could not be computed
 */
private boolean calcSquare(StraightLineEquation lineEq[][], ArrayList<Point> points, Mat img) {
    // Intersect each pair of line equations by solving a 2x2 linear system.
    Mat mat = new Mat(2, 2, CvType.CV_32F);
    mPointCenterX = 0.0f;
    mPointCenterY = 0.0f;
    int counter = 0;
    for (int ang0sec = 0; ang0sec < 2; ang0sec++) {
        mat.put(0, 0, lineEq[0][ang0sec].a);
        mat.put(0, 1, lineEq[0][ang0sec].b);
        for (int ang1sec = 0; ang1sec < 2; ang1sec++) {
            mat.put(1, 0, lineEq[1][ang1sec].a);
            mat.put(1, 1, lineEq[1][ang1sec].b);
            Mat matAns;
            try {
                matAns = mat.inv();
                if (matAns == null)
                    return false;
            } catch (Exception e) { // the 2x2 matrix could not be inverted
                e.printStackTrace();
                return false;
            }
            float x = (float) (matAns.get(0, 0)[0] + matAns.get(0, 1)[0] + mCenterX);
            float y = (float) (matAns.get(1, 0)[0] + matAns.get(1, 1)[0] + mCenterY);
            Point p = new Point(x, y);
            points.add(p);
            mPointCenterX += x;
            mPointCenterY += y;
            counter++;
        }
    }
    mPointCenterX /= (float) counter;
    mPointCenterY /= (float) counter;
    // Sort the corner points into a consistent order.
    Collections.sort(points, new PointComparator());
    if (img != null) {
        Scalar color[] = new Scalar[4];
        color[0] = new Scalar(0xff, 0x00, 0x00);
        color[1] = new Scalar(0x00, 0xff, 0x00);
        color[2] = new Scalar(0x00, 0x00, 0xff);
        color[3] = new Scalar(0xff, 0x00, 0xff);

        for (int i = 0; i < 4; i++) {
            Core.circle(img, points.get(i), 30, color[i], 5);
        }
    }
    if (MyDebug.DEBUG) {
        for (int i = 0; i < 4; i++) {
            Log.d(LOG_TAG, "point(" + i + ") = " + points.get(i).x + ":" + points.get(i).y);
        }
    }

    return true;
}
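
The intersection step above solves a 2x2 linear system: each line is stored as ax + by = 1 relative to the image center, so a corner is the inverse matrix applied to the vector (1, 1). A standalone sketch of that idea (the coefficients below are made-up values, not taken from the detector):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;

public class LineIntersectionSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Two lines in the form a*x + b*y = 1; the coefficients are illustrative.
        Mat m = new Mat(2, 2, CvType.CV_32F);
        m.put(0, 0, 0.01);  // a1
        m.put(0, 1, 0.02);  // b1
        m.put(1, 0, -0.03); // a2
        m.put(1, 1, 0.04);  // b2

        Mat inv = m.inv(); // degenerate when the lines are parallel (singular matrix)
        // The intersection is inv * (1, 1)^T, i.e. the sum of each row of inv.
        double x = inv.get(0, 0)[0] + inv.get(0, 1)[0];
        double y = inv.get(1, 0)[0] + inv.get(1, 1)[0];
        Point intersection = new Point(x, y);
        System.out.println("intersection = " + intersection);
    }
}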

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

public ArrayList<Point> getUnAcceptableLineArea() {
    ArrayList<Point> points = new ArrayList<Point>();
    int qx = mViewWidth >> 2;
    int hx = mViewWidth >> 1;
    int q3x = 3 * qx;
    int qy = mViewHeight >> 2;
    int hy = mViewHeight >> 1;
    int q3y = 3 * qy;
    Point p = new Point(qx, hy);
    points.add(p);
    p = new Point(hx, qy);
    points.add(p);
    p = new Point(q3x, hy);
    points.add(p);
    p = new Point(hx, q3y);
    points.add(p);
    return points;
}

From source file:com.opencv.mouse.MouseMainFrame.java

private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg(); //Utility class to convert Mat to Java's BufferedImage

            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Camera is not open!");
            } else
                System.out.println("Camera opened --> " + webCam.toString());

            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            // Mat takes (rows, cols, type); keep the HSV and threshold buffers the same size as the frame.
            Mat hsv_image = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {

                }

                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Web Cam Kapal !");
                    }

                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {

                        }
                        // Mat inRangeResim = webcam_image.clone();
                        /*
                        Mat inRangeResim = webcam_image.clone();
                        matToBufferedImageConverter.setMatrix(inRangeResim, ".jpg");
                        image =matToBufferedImageConverter.getBufferedImage();
                        Highgui.imwrite("D:\\bitirme.jpg", inRangeResim);
                        */

                        //       MatOfRect faceDetections = new MatOfRect();
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        //black HSV range 0 0 0 - 180 45 100
                        //hsv blue   Core.inRange(webcam_image, new Scalar(75,63,40), new Scalar(118,255,255), webcam_image);
                        //rgb blue        // Core.inRange(webcam_image, new Scalar(50,0,0), new Scalar(255,0,0), webcam_image);
                        //orange hsv      Core.inRange(webcam_image, new Scalar(5,50,50), new Scalar(15,255,255), webcam_image);
                        //Core.inRange(webcam_image, new Scalar(80,50,50), new Scalar(140,255,255), webcam_image);
                        //        Core.inRange(webcam_image, new Scalar(29,0,24), new Scalar(30,155,155), webcam_image);

                        //hsv blue
                        //                       jSliderHmin.setValue(75);
                        //                       jSliderSmin.setValue(63);
                        //                       jSliderVmin.setValue(40);
                        //                       jSliderHmax.setValue(118);
                        //                       jSliderSmax.setValue(255);
                        //                       jSliderVmax.setValue(255);
                        //
                        //                       jSliderHmin.setValue(0);
                        //                       jSliderSmin.setValue(0);
                        //                       jSliderVmin.setValue(0);
                        //                       jSliderHmax.setValue(179);
                        //                       jSliderSmax.setValue(39);
                        //                       jSliderVmax.setValue(120);
                        // Keep only pixels inside the target HSV range, then clean the mask with dilate/erode.
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255),
                                thresholded);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);

                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);

                        for (int i = 0; i < contours.size(); i++) {
                            //  System.out.println(Imgproc.contourArea(contours.get(i)));
                            //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            //System.out.println(rect.height);
                            // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                            //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height),
                                    new Scalar(0, 0, 255));

                            //}
                            //}
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                // Scale the camera coordinates to screen coordinates and move the cursor.
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }

                        }

                        //   Imgproc.cvtColor(webcam_image, webcam_image, Imgproc.COLOR_HSV2BGR);
                        //  hsv_image.convertTo(hsv_image, CvType.CV_32F);

                        //   Imgproc.Canny(thresholded, thresholded, 10, 20);
                        //   Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //this works

                        //    Imgproc.cvtColor(thresholded, thresholded, Imgproc.COLOR_GRAY2BGR);
                        //  Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //    webcam_image.copyTo(hsv_image, thresholded);
                        //                            System.out.println("<------------------------------>");
                        //                            System.out.println("BGR: " +webcam_image.channels()+"  Size : "+webcam_image.size());
                        //                            System.out.println("HSV :"+hsv_image.channels()+"  Size: "+hsv_image.size());
                        //                            System.out.println("Thresold :"+thresholded.channels()+"  Size : "+thresholded.size());
                        //                            System.out.println("<------------------------------>");
                        //
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");

                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);

                    } else {

                        System.out.println("Grnt yok!");
                        break;
                    }
                }
                //           webCam.release();
            }

        }
    };
    threadDurum = true;
    t.start();
}

From source file:com.opencv.video.VideoCaptureMain.java

private void jButtonPlayActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonPlayActionPerformed
    Thread t = new Thread() {
        @Override
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg();
            try {
                final VideoCapture videoCapture = new VideoCapture("D:\\colorTest.mp4");
                //          videoCapture = new VideoCapture(0);
                //  Thread.sleep(3000);
                if (!videoCapture.isOpened()) {
                    System.out.println("Video Alamad");
                    return;
                }

                double fps = videoCapture.get(5); // property id 5 = CAP_PROP_FPS

                System.out.println("FPS :" + fps);
                frame = new Mat();

                Mat hsv_image = new Mat();
                Mat thresholded = new Mat();

                while (true) {
                    boolean basarili = videoCapture.read(frame);

                    if (!basarili) {
                        System.out.println("Okunamyor");
                        break;
                    }

                    Imgproc.cvtColor(frame, hsv_image, Imgproc.COLOR_BGR2HSV);

                    Core.inRange(hsv_image, new Scalar(170, 150, 60), new Scalar(179, 255, 255), thresholded);
                    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                    Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                            Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                    for (int i = 0; i < contours.size(); i++) {
                        //  System.out.println(Imgproc.contourArea(contours.get(i)));
                        //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                        Rect rect = Imgproc.boundingRect(contours.get(i));
                        kesit = frame.submat(rect);
                        //System.out.println(rect.height);
                        // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                        //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                        Core.rectangle(frame, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
                        // Core.circle(webcam_image, new Point(rect.x+rect.height/2, rect.y+rect.width/2), i, new Scalar(0, 0, 255));
                        //}
                        //}
                    }

                    matToBufferedImageConverter.setMatrix(frame, ".jpg");

                    g.drawImage(matToBufferedImageConverter.getBufferedImage(), 0, 0, 640, 480, null);

                }
            } catch (Exception e) {
                System.out.println("Sorun Burda");
            }

        }
    };

    t.start();

}

From source file:com.orange.documentare.core.image.segmentation.SegmentationDrawer.java

License:Open Source License

private void drawGlyphsSegmentation() {
    Scalar colorRect = new Scalar(255, 0, 0, 0);
    for (SegmentationRect rect : imageSegmentation.getDigitalTypes()) {
        Point tl = new Point(rect.x(), rect.y());
        Point br = new Point(tl.x + rect.width(), tl.y + rect.height());
        Core.rectangle(imageMat, tl, br, colorRect, 1);
    }
}

From source file:com.orange.documentare.core.image.segmentation.SegmentationDrawer.java

License:Open Source License

private void drawBlocksSegmentation() {
    Scalar colorLine = new Scalar(0, 0, 255, 0);
    for (Block block : imageSegmentation.getBlocks()) {
        Core.rectangle(imageMat, new Point(block.x(), block.y() - 1),
                new Point(block.x() + block.width(), block.y() + block.height() + 1), colorLine, 1);
    }
}