Example usage for org.opencv.imgproc Imgproc line

List of usage examples for org.opencv.imgproc Imgproc line

Introduction

On this page you can find example usage for org.opencv.imgproc Imgproc line.

Prototype

public static void line(Mat img, Point pt1, Point pt2, Scalar color, int thickness) 

Source Link

Usage

From source file:LicenseDetection.java

/**
 * Locates a license plate in a fixed test image, crops it, runs Tesseract OCR on the
 * crop, and looks the recognised plate text up in a local MySQL database.
 *
 * Pipeline: grayscale -> Gaussian blur -> (adaptive threshold + morphology for the OCR
 * copy) -> Sobel vertical edges -> adaptive threshold -> close -> contours -> rotated
 * rect filtering -> de-rotate and extract the plate region -> OCR -> DB lookup.
 *
 * NOTE(review): every input/output path, the Tesseract data path and the DB credentials
 * are hard-coded, so this only runs on the original author's machine as written.
 */
public void run() {

    // ------------------ set up tesseract for later use ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------  Save image first ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ Convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ Blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ****************** Create image that will be cropped at end of program before OCR ***************************

    // ------------------ Binary threshold for OCR (used later)------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ Erosion operation------------------
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    // NOTE(review): MORPH_DILATE visually erodes here, most likely because the threshold
    // output has dark glyphs on a light background (dilation grows the white background,
    // thinning the dark text) — confirm against the written debug images.
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    //------------------ Dilation operation  ------------------
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);

    // *************************************************************************************************************

    //        // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    //        Mat k = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    //        Mat imgCloseOCR = new Mat();
    //        Imgproc.morphologyEx(imgThresholdOCR,imgCloseOCR,1,k);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgCloseOCR.png", imgCloseOCR);

    // ------------------ Sobel vertical edge detection ------------------
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ Binary threshold ------------------
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    //        // ------------------ Open operation (erosion followed by dilation) ------------------
    //        Mat ker = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 2));
    //        Mat imgOpen = new Mat();
    //        Imgproc.morphologyEx(imgThreshold,imgOpen,0,ker);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgOpen.png", imgOpen);

    // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ Find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();

    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // **************************** DEBUG CODE **************************

    Mat contourImg = new Mat(imgClose.size(), imgClose.type());
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);

    // ******************************************************************

    // --------------  Convert contours --------------------

    //Convert to MatOfPoint2f so that minAreaRect can be called
    List<MatOfPoint2f> newContours = new ArrayList<>();

    for (MatOfPoint mat : contours) {

        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);

    }

    //Get minAreaRects
    List<RotatedRect> minAreaRects = new ArrayList<>();

    for (MatOfPoint2f mat : newContours) {

        RotatedRect rect = Imgproc.minAreaRect(mat);

        /*
         --------------- BUG WORK AROUND ------------

        Possible bug:
        When converting from MatOfPoint2f to RotatedRect the width and height were reversed and the
        angle was -90 degrees from what it would be if the width and height were correct.

        When painting rectangles in the image, the correct boxes were produced, but performing
        calculations on rect.angle, rect.width, or rect.height yielded unwanted results.

        The following work around is buggy but works for this purpose.
         */

        if (rect.size.width < rect.size.height) {
            double temp;

            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;

        }

        //check aspect ratio and area and angle
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000 && rect.size.width * rect.size.height < 50000
                && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }

        //minAreaRects.add(rect);
    }

    // **************************** DEBUG CODE **************************
    /*
    The following code is used to draw the rectangles on top of the original image for debugging purposes
     */
    //Draw Rotated Rects
    Point[] vertices = new Point[4];

    Mat imageWithBoxes = img;

    // Draw color rectangles on top of binary contours
    //        Mat imageWithBoxes = new Mat();
    //        Mat temp = imgDilateOCR;
    //        Imgproc.cvtColor(temp, imageWithBoxes, Imgproc.COLOR_GRAY2RGB);

    for (RotatedRect rect : minAreaRects) {

        rect.points(vertices);

        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }

    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);

    // ******************************************************************

    // **************************** DEBUG CODE **************************
    //        for(RotatedRect rect : minAreaRects) {
    //            System.out.println(rect.toString());
    //        }
    // ******************************************************************

    /*
    In order to rotate image without cropping it:

    1. Create new square image with dimension = diagonal of initial image.
    2. Draw initial image into the center of new image.
     Insert initial image at ROI (Region of Interest) in new image
    3. Rotate new image
     */

    //Find diagonal/hypotenuse
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));

    //New Mat with hypotenuse as height and width
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, 0);

    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; //x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; //y start of ROI

    //designate region of interest
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());

    //Insert image into region of interest
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); //Mat to hold temporarily rotated mat
    Mat rectMat = new Mat();//Mat to hold rect contents (needed for looping through pixels)
    Point[] rectVertices = new Point[4];//Used to build rect to make ROI
    Rect rec = new Rect();

    List<RotatedRect> edgeDensityRects = new ArrayList<>(); //populate new arraylist with rects that satisfy edge density

    int count = 0;

    //Loop through Rotated Rects and find edge density
    for (RotatedRect rect : minAreaRects) {

        count++;

        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);

        //rotate image to match orientation of rotated rect
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);

        //remove rect rotation
        rect.angle = 0;

        //get vertices from rotatedRect
        rect.points(rectVertices);

        // **************************** DEBUG CODE **************************
        //
        //            for (int k = 0; k < 4; k++) {
        //                System.out.println(rectVertices[k]);
        //                Imgproc.line(rotatedTemp, rectVertices[k], rectVertices[(k + 1) % 4], new Scalar(0, 0, 255), 2);
        //            }
        //
        //            Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotated" + count + ".png", rotatedTemp);

        // *****************************************************************

        //build rect to use as ROI
        rec = new Rect(rectVertices[1], rectVertices[3]);

        rectMat = rotatedTemp.submat(rec);

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);

        //find edge density

        //            // ------------------------ edge density check NOT IMPLEMENTED --------------------
        //            /*
        //            Checking for edge density was not necessary for this image so it was not implemented due to lack of time
        //             */
        //            for(int i = 0; i < rectMat.rows(); ++i){
        //                for(int j = 0; j < rectMat.cols(); ++j){
        //
        //                  //add up white pixels
        //                }
        //            }
        //
        //            //check number of white pixels against total pixels
        //            //only add rects to new arraylist that satisfy threshold

        edgeDensityRects.add(rect);
    }

    // **************************** DEBUG CODE **************************

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);
    //Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpaceROTATED.png", rotatedTemp);

    //System.out.println(imgGray.type());

    // *****************************************************************

    // if there is only one rectangle left, its the license plate
    if (edgeDensityRects.size() == 1) {

        String result = ""; //Hold result from OCR
        BufferedImage bimg;
        Mat cropped;

        // Fixed-margin crop to trim the plate border before OCR.
        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);

        bimg = matToBufferedImage(cropped);

        BufferedImage image = bimg;

        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }

        result = result.replace("\n", "");

        System.out.println(result);

        CarProfDBImpl db = new CarProfDBImpl();

        // NOTE(review): credentials hard-coded in source; the password is a placeholder.
        db.connect("localhost:3306/computer_vision", "root", "*******");

        CarProf c = db.getCarProf(result);

        System.out.print(c.toString());

        db.close();

    }

}

From source file:app.AppMain.java

/**
 * Strikes through every found word on the image by drawing a thin black line
 * between the centres of its first and last characters.
 *
 * @param image      picture the words were recognised on; drawn on in place
 * @param foundWords pairs of characters, each pair marking a word's two endpoints
 * @return the same {@code image} instance, for call chaining
 */
public static Mat crossImage(Mat image, ArrayList<OcrChar[]> foundWords) {
    for (OcrChar[] pair : foundWords) {
        System.out.println(pair[0].getCenterX() + " " + pair[0].getCenterY() + " " + pair[1].getCenterX()
                + " " + pair[1].getCenterY());
        Point wordStart = new Point(pair[0].getCenterX(), pair[0].getCenterY());
        Point wordEnd = new Point(pair[1].getCenterX(), pair[1].getCenterY());
        Imgproc.line(image, wordStart, wordEnd, new Scalar(0, 0, 0), 1);
    }
    return image;
}

From source file:app.AppMain.java

/**
 * badly designed main aplication, use with app.GUI.java for fast results.
 *
 * @param image/*from  w w  w.  j  a v a2  s.  c  om*/
 * @param lookUpList
 * @throws IOException
 */
public static void main(String image, String lookUpList) throws IOException {

    //        String imagePath = "resource\\find_small.jpg";
    String imagePath = image;
    File imageFile = new File(imagePath);
    //        File imageFile1 = new File("C:\\Users\\Zygis\\Desktop\\tess_test_data\\Find_1lookup.jpg");

    ArrayList<String> lookupWords = new ArrayList<>();
    ArrayList<OcrChar[]> foundWords = new ArrayList<>();
    ArrayList<ArrayList<OcrChar>> matrix = new ArrayList<>();

    Mat ImageMatrix = Imgcodecs.imread(imagePath);

    Scanner inFile1 = new Scanner(new File(lookUpList));
    while (inFile1.hasNext()) {
        lookupWords.add(inFile1.nextLine().toUpperCase());
    }
    System.out.println(lookupWords);

    Tesseract1 instance = new Tesseract1(); //
    try {

        instance.setHocr(true);
        instance.setTessVariable("tessedit_char_blacklist", "|0123456789"); // blaklistas
        //          instance.setTessVariable("tessedit_create_boxfile", "true");
        //          instance.setTessVariable("save_blob_choices", "T"); // kazka padeda char atpazinimui

        String result = instance.doOCR(imageFile);
        //            System.out.println(result);
        matrix = ParseHOCR.parse(result);
        System.out.println(matrix);
        foundWords = FindWord.findWords(matrix, lookupWords);

        try (
                //            instance.setHocr(false);
                //            result = instance.doOCR(imageFile1);
                //            System.out.println(result);
                //            -----------
                //            writing info to html file
                PrintWriter writer = new PrintWriter("the-file-name.html", "UTF-8")) {
            writer.println(result);
        }
    } catch (TesseractException e) {
        System.err.println(e.getMessage());
    }

    /*----- 
    cross found letters from the image
     */
    for (OcrChar[] points : foundWords) {
        //           System.out.println(points[0].getCenterX()+" "+points[0].getCenterY()+" "+points[1].getCenterX()+" "+points[1].getCenterY());
        Imgproc.line(ImageMatrix, new Point(points[0].getCenterX(), points[0].getCenterY()),
                new Point(points[1].getCenterX(), points[1].getCenterY()), new Scalar(0, 0, 0), 1);
    }

    //        for (OcrChar[] points : foundWords) {
    ////           System.out.println(points[0].getCenterX()+" "+points[0].getCenterY()+" "+points[1].getCenterX()+" "+points[1].getCenterY());
    //            Imgproc.line(ImageMatrix, new Point(points[0].getCenterX(), points[0].getCenterY()),
    //                    new Point(points[1].getCenterX(), points[1].getCenterY()),
    //                    new Scalar(0, 0, 0), 3);
    //        }
    //        ////-------
    //        // DEBUG 
    //        //put regognised chars POSTION on the image ( to check recognised char matches)
    //        for (ArrayList<OcrChar> line_array : matrix) {
    //            for (OcrChar Char : line_array) {
    //                Imgproc.circle(ImageMatrix, new Point(Char.getCenterX(), Char.getCenterY()),
    //                        1, new Scalar(0, 0, 255), 4);
    //            }
    //        }
    //        ////-------
    //        // DEBUG 
    //        // put regognised chars back to the image ( to check recognised char matches)
    //        for (ArrayList<OcrChar> line_array : matrix){
    //            for (OcrChar Char : line_array){
    //                Imgproc.putText(ImageMatrix, Char.toString(), 
    //                        new Point(Char.x2, Char.y2),
    //                                1, 2,  new Scalar(0, 0, 255));
    //            }
    //        }
    Imgcodecs.imwrite("resource\\output.jpeg", ImageMatrix);
}

From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java

License:Open Source License

/**
 * Renders every line segment held in {@code lines} onto {@code image} in green.
 * Each row of {@code lines} is expected to hold {x1, y1, x2, y2}, as produced by
 * OpenCV's probabilistic Hough transform.
 *
 * @param lines Mat whose rows each describe one segment's two endpoints
 * @param image target picture; modified in place
 */
public static void drawLinesOnImage(Mat lines, Mat image) {
    final Scalar green = new Scalar(0, 255, 0);
    int segmentCount = lines.rows();
    for (int row = 0; row < segmentCount; row++) {
        double[] seg = lines.get(row, 0);
        Point from = new Point(seg[0], seg[1]);
        Point to = new Point(seg[2], seg[3]);
        Imgproc.line(image, from, to, green, 2);
    }
}

From source file:com.mycompany.linedetection.LineDetector.java

/**
 * Detects line segments in {@code img} via Canny + probabilistic Hough transform.
 * Segments whose angle (relative to {@code horizontalLine}) is within
 * {@code DIAGONAL_TRESHOLD} of either reference diagonal go into
 * {@code diagonalLineList} and are drawn in cyan; the rest go into {@code lineList},
 * which is then truncated to the longest {@code firstN + 1} segments (drawn in red).
 */
public void findLines() {
    Imgproc.Canny(img, edgeDetectedImg, 100, 200, 3, true);
    Mat lines = new Mat();

    int width = img.width();
    int height = img.height();
    double diagonal = Math.sqrt(width * width + height * height);
    int minOfWidthHeight = Math.min(width, height);

    // Thresholds scale with the image: min votes = 10% of the shorter side,
    // min length = 25% of the diagonal, max gap = 4% of the diagonal.
    Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, minOfWidthHeight * 10 / 100,
            diagonal * 25 / 100, diagonal * 4 / 100);

    int firstN = (lines.rows() < 5) ? lines.rows() : 5;

    for (int x = 0; x < lines.rows(); x++) {
        double[] vec = lines.get(x, 0);
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point startPoint = new Point(x1, y1);
        Point endPoint = new Point(x2, y2);

        // Test the segment's angle in both directions, since a Hough segment's
        // endpoint order is arbitrary.
        double angle_inv = horizontalLine.getAngle(new Line(x1, y1, x2, y2));
        double angle = horizontalLine.getAngle(new Line(x2, y2, x1, y1));
        if ((angle >= diagAngle1 - DIAGONAL_TRESHOLD && angle <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle >= diagAngle2 - DIAGONAL_TRESHOLD && angle <= diagAngle2 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle1 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle2 - DIAGONAL_TRESHOLD
                        && angle_inv <= diagAngle2 + DIAGONAL_TRESHOLD)) {
            diagonalLineList.add(new Line(x1, y1, x2, y2));
            Imgproc.line(img, startPoint, endPoint, new Scalar(255, 255, 0), 4);
        } else {
            lineList.add(new Line(x1, y1, x2, y2));
        }

    }

    // Longest first. Double.compare fixes the old (int) subtraction, which
    // treated lengths differing by less than 1.0 as equal.
    Collections.sort(lineList, new Comparator<Line>() {
        @Override
        public int compare(Line l1, Line l2) {
            return Double.compare(l2.getLength(), l1.getLength());
        }

    });

    ArrayList<Line> arr = new ArrayList<>();

    // Keep only the firstN + 1 longest non-diagonal segments and draw them.
    // The size guard is loop-invariant (arr.add does not touch lineList), so it
    // is hoisted out of the loop.
    if (lineList.size() >= firstN + 1) {
        for (int i = 0; i < firstN + 1; i++) {
            double x1 = lineList.get(i).getX1(), y1 = lineList.get(i).getY1(), x2 = lineList.get(i).getX2(),
                    y2 = lineList.get(i).getY2();
            Point startPoint = new Point(x1, y1);
            Point endPoint = new Point(x2, y2);
            arr.add(lineList.get(i));
            Imgproc.line(img, startPoint, endPoint, new Scalar(0, 0, 255), 3);
        }
    }
    lineList = arr;
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Compute the rectangle where the searched picture is and the rotation angle between both images
 * Throw {@link ImageSearchException} if picture is not found
 * @return/*from w ww . j  a  v a 2 s  . c  o m*/
 * @Deprecated Kept here for information, but open CV 3 does not include SURF anymore for java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {

    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}

From source file:edu.ucue.tfc.Modelo.VideoProcessor.java

/**
 * Processes one cycle: diffs {@code firstFrame} against the next frame read from
 * {@code video}, extracts the moving contours, and updates the car-counting state by
 * testing which control points fall inside the largest contour's bounding box.
 *
 * @param firstFrame the first frame of a cycle; resized and drawn on in place.
 */
private void processFrame(Mat firstFrame) {
    double contourArea = 0;
    int position = 0;
    try {
        // Resize the current frame to the working size.
        Imgproc.resize(firstFrame, firstFrame, frameSize);

        // Convert the frame to grayscale.
        Imgproc.cvtColor(firstFrame, firstGrayImage, Imgproc.COLOR_BGR2GRAY);

        // Read the next frame, resize it and convert it to grayscale too.
        video.read(secondFrame);

        Imgproc.resize(secondFrame, secondFrame, frameSize);

        Imgproc.cvtColor(secondFrame, secondGrayImage, Imgproc.COLOR_BGR2GRAY);

        // Per-pixel absolute difference of the two frames; threshold, blur to fuse
        // nearby blobs, then threshold again to get a clean motion mask.
        Core.absdiff(firstGrayImage, secondGrayImage, differenceOfImages);
        Imgproc.threshold(differenceOfImages, thresholdImage, 25, 255, Imgproc.THRESH_BINARY);
        Imgproc.blur(thresholdImage, thresholdImage, new Size(12, 12));
        Imgproc.threshold(thresholdImage, thresholdImage, 20, 255, Imgproc.THRESH_BINARY);

        // Release the previous cycle's contours before refilling the list.
        for (int i = 0; i < contours.size(); ++i) {
            contours.get(i).release();
        }
        contours.clear();

        // The horizontal reference line.
        Imgproc.line(firstFrame, controlPoints.get(6), controlPoints.get(7), new Scalar(255, 0, 0),
                Imgproc.LINE_4);
        Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_TREE,
                Imgproc.CHAIN_APPROX_SIMPLE);

        for (int i = 0; i < hullPoints.size(); ++i) {
            hullPoints.get(i).release();
        }
        hullPoints.clear();

        for (int i = 0; i < contours.size(); i++) {
            MatOfInt tmp = new MatOfInt();
            Imgproc.convexHull(contours.get(i), tmp, false);
            hullPoints.add(tmp);
        }

        // Find the contour with the largest area and remember its bounding box.
        if (contours.size() > 0) {
            for (int i = 0; i < contours.size(); i++) {
                if (Imgproc.contourArea(contours.get(i)) > contourArea) {
                    contourArea = Imgproc.contourArea(contours.get(i));
                    position = i;
                    boundingRectangle = Imgproc.boundingRect(contours.get(i));
                }

            }
        }
        secondFrame.release();
        hierarchy.release();
        secondGrayImage.release();
        firstGrayImage.release();
        thresholdImage.release();
        differenceOfImages.release();
    } catch (Exception e) {
        // NOTE(review): broad catch keeps the video loop alive on a bad frame;
        // the message alone may be null/uninformative — consider logging the stack.
        System.out.println(e.getMessage());
    }

    // Each checkpoint line turns red while the moving box covers its control point,
    // green otherwise. (Plain `else` replaces the redundant `else if (!cond)`.)
    if (controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 0, 255), 2);
        wasAtLeftPoint = true;
    } else {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 255, 0), 2);
    }

    if (controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 0, 255), 2);
        wasAtCenterPoint = true;
    } else {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 255, 0), 2);
    }

    if (controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 0, 255), 2);
        wasAtRightPoint = true;
    } else {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 255, 0), 2);
    }

    // A vehicle that has touched all three checkpoints counts as one detection.
    if (wasAtCenterPoint && wasAtLeftPoint && wasAtRightPoint) {
        detectedCarsCount++;
        wasDetected = true;
        wasAtCenterPoint = false;
        wasAtLeftPoint = false;
        wasAtRightPoint = false;
    }

    if (contourArea > 3000) {
        Imgproc.drawContours(frame, contours, position, new Scalar(255, 255, 255));
    }
}

From source file:emotion.StaticFunctions.java

/**
 * Draws a small cross (11px tall, 11px wide) centred on {@code pt}, coloured by the
 * feature class being annotated.
 *
 * @param img  image to draw on, modified in place
 * @param pt   centre of the cross
 * @param feat feature class; selects the marker colour
 */
static void drawCross(Mat img, Point pt, Features feat) {
    Scalar col;
    switch (feat) {
    case EYEBROWS_ENDS:
        col = new Scalar(0, 255, 0);
        break;
    case EYELIDS:
        col = new Scalar(100, 210, 255);
        break;
    case EYE_CORNERS:
        col = new Scalar(220, 180, 30);
        break;
    case WHITE_MARK:
        col = new Scalar(255, 255, 255);
        // BUG FIX: break was missing, so this case fell through to default.
        // Same colour today, but a latent bug if default ever changes.
        break;
    default:
        col = new Scalar(255, 255, 255);

    }
    // Vertical stroke, then horizontal stroke.
    Imgproc.line(img, new Point(pt.x, pt.y - 5), new Point(pt.x, pt.y + 5), col, 1);

    Imgproc.line(img, new Point(pt.x - 5, pt.y), new Point(pt.x + 5, pt.y), col, 1);
}

From source file:houghtransform.transform_process.MyTransform.java

@Override
public Mat drawLines(Mat scr) {
    // Each accumulator peak is stored as a Point: x = r (distance), y = theta (angle).
    // Convert it back to a long red segment clipped well outside the image bounds.
    for (Point peak : _lines) {
        double r = peak.x;
        double theta = peak.y;
        double degrees = theta / (Math.PI / 180);

        Point a = new Point();
        Point b = new Point();
        if (degrees >= 45 && degrees <= 135) {
            // Near-horizontal line: sweep x and solve the normal form for y.
            a.x = -1000;
            b.x = 1000;
            a.y = ((r - _accu_h / 2) - (-1000 - _img_w / 2) * Math.cos(theta)) / Math.sin(theta) + _img_h / 2;
            b.y = ((r - _accu_h / 2) - (1000 - _img_w / 2) * Math.cos(theta)) / Math.sin(theta) + _img_h / 2;
        } else {
            // Near-vertical line: sweep y and solve for x instead.
            a.y = -1000;
            b.y = 1000;
            a.x = ((r - _accu_h / 2) - (-1000 - _img_h / 2) * Math.sin(theta)) / Math.cos(theta) + _img_w / 2;
            b.x = ((r - _accu_h / 2) - (1000 - _img_h / 2) * Math.sin(theta)) / Math.cos(theta) + _img_w / 2;
        }
        Imgproc.line(scr, a, b, new Scalar(0, 0, 255), 1);
    }
    return scr;
}

From source file:houghtransform.transform_process.OpenCV.java

@Override
public Mat drawLines(Mat scr) {
    // Each row of `lines` holds a (rho, theta) pair from OpenCV's HoughLines.
    // BUG FIX: the loop previously started at x = 1, silently skipping the first
    // (and typically strongest) detected line stored in row 0.
    for (int x = 0; x < lines.rows(); x++) {
        double[] vec = lines.get(x, 0);
        double rho = vec[0], theta1 = vec[1];
        Point pt1 = new Point();
        Point pt2 = new Point();
        // Point on the line closest to the origin, then extend 1000px both ways
        // along the line's direction so the segment spans the whole image.
        double a = Math.cos(theta1), b = Math.sin(theta1);
        double x0 = a * rho, y0 = b * rho;
        pt1.x = Math.round(x0 + 1000 * (-b));
        pt1.y = Math.round(y0 + 1000 * (a));
        pt2.x = Math.round(x0 - 1000 * (-b));
        pt2.y = Math.round(y0 - 1000 * (a));

        Imgproc.line(scr, pt1, pt2, new Scalar(0, 0, 255), 1);
    }
    return scr;
}