Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usage for org.opencv.core Point Point.

Prototype

public Point(double x, double y) 

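A minimal, self-contained sketch of constructing and reading a Point (the class name PointExample and the coordinate values are purely illustrative; only the OpenCV Java bindings jar is assumed on the classpath):

import org.opencv.core.Point;

public class PointExample {
    public static void main(String[] args) {
        Point p = new Point(12.5, 40.0);   // x = 12.5, y = 40.0
        Point origin = new Point();        // the no-argument constructor defaults to (0, 0)
        // x and y are plain public double fields
        double distance = Math.hypot(p.x - origin.x, p.y - origin.y);
        System.out.println(p + " is " + distance + " px from " + origin);
    }
}
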
Source Link

Usage

From source file:samples.FtcTestOpenCv.java

License:Open Source License

/**
 * This method rotates the image by the specified angle.
 *
 * @param src specifies the image to be rotated.
 * @param dst specifies the destination to put the rotated image.
 * @param angle specifies the rotation angle.
 */
private void rotateImage(Mat src, Mat dst, double angle) {
    angle %= 360.0;
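    // Right-angle rotations use the transpose/flip idiom below:
    // transpose + flip(1) = 90 degrees clockwise, flip(-1) = 180 degrees,
    // transpose + flip(0) = 90 degrees counterclockwise. Any other angle falls
    // through to warpAffine with a rotation matrix about the image center.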
    if (angle == 0.0) {
        src.copyTo(dst);
    } else if (angle == 90.0 || angle == -270.0) {
        Core.transpose(src, dst);
        Core.flip(dst, dst, 1);
    } else if (angle == 180.0 || angle == -180.0) {
        Core.flip(src, dst, -1);
    } else if (angle == 270.0 || angle == -90.0) {
        Core.transpose(src, dst);
        Core.flip(dst, dst, 0);
    } else {
        Mat rotMat = Imgproc.getRotationMatrix2D(new Point(src.cols() / 2.0, src.rows() / 2.0), angle, 1.0);
        Imgproc.warpAffine(src, dst, rotMat, src.size());
    }
}

From source file:samples.LWF.java

private static void save_mesh_images(double[][] puntos, File carpetaalmacen, File image, Mat mat,
        int[][] delaunay_triangles) {
    Mat lienzo = new Mat(300, 300, CV_8UC3, new Scalar(0, 0, 0));
    Mat lienzo2 = new Mat(300, 300, CV_8UC3, new Scalar(0, 0, 0));
    double escala = 128;
    for (int[] tri : faceTemplateTriangles) {
        Imgproc.line(lienzo, new Point(escala * Shape3D[tri[0] - 1][0], escala * Shape3D[tri[0] - 1][1]),
                new Point(escala * Shape3D[tri[1] - 1][0], escala * Shape3D[tri[1] - 1][1]),
                new Scalar(0, 255, 0));
        Imgproc.line(lienzo, new Point(escala * Shape3D[tri[1] - 1][0], escala * Shape3D[tri[1] - 1][1]),
                new Point(escala * Shape3D[tri[2] - 1][0], escala * Shape3D[tri[2] - 1][1]),
                new Scalar(0, 255, 0));
        Imgproc.line(lienzo, new Point(escala * Shape3D[tri[2] - 1][0], escala * Shape3D[tri[2] - 1][1]),
                new Point(escala * Shape3D[tri[0] - 1][0], escala * Shape3D[tri[0] - 1][1]),
                new Scalar(0, 255, 0));

        Imgproc.line(lienzo2, new Point(escala * Shape3D[tri[0] - 1][2], escala * Shape3D[tri[0] - 1][1]),
                new Point(escala * Shape3D[tri[1] - 1][2], escala * Shape3D[tri[1] - 1][1]),
                new Scalar(0, 255, 0));
        Imgproc.line(lienzo2, new Point(escala * Shape3D[tri[1] - 1][2], escala * Shape3D[tri[1] - 1][1]),
                new Point(escala * Shape3D[tri[2] - 1][2], escala * Shape3D[tri[2] - 1][1]),
                new Scalar(0, 255, 0));
        Imgproc.line(lienzo2, new Point(escala * Shape3D[tri[2] - 1][2], escala * Shape3D[tri[2] - 1][1]),
                new Point(escala * Shape3D[tri[0] - 1][2], escala * Shape3D[tri[0] - 1][1]),
                new Scalar(0, 255, 0));

    }
    Imgcodecs.imwrite(carpetaalmacen.getAbsolutePath() + "\\frontal_" + image.getName(), lienzo);
    Imgcodecs.imwrite(carpetaalmacen.getAbsolutePath() + "\\lateral_" + image.getName(), lienzo2);
}

From source file:samples.LWF.java

private static void affine(Mat mat, double[][] from, double[][] to, double[][] coeficients, Mat lienzo,
        double escala, double gap) {
    // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.

    //   http://stackoverflow.com/questions/10100715/opencv-warping-from-one-triangle-to-another
    //  https://www.learnopencv.com/warp-one-triangle-to-another-using-opencv-c-python/
    //   http://docs.opencv.org/2.4/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.html
    MatOfPoint2f src_pf = new MatOfPoint2f(new Point(from[0][0], from[0][1]), new Point(from[1][0], from[1][1]),
            new Point(from[2][0], from[2][1]));
    MatOfPoint2f dst_pf = new MatOfPoint2f(new Point(to[0][0], to[0][1]), new Point(to[1][0], to[1][1]),
            new Point(to[2][0], to[2][1]));

    //  https://www.learnopencv.com/warp-one-triangle-to-another-using-opencv-c-python/#download
    //how do I set up the position numbers in MatOfPoint2f here?
    //  Mat perspective_matrix = Imgproc.getAffineTransform(src_pf, dst_pf);
    Rect r1 = Imgproc.boundingRect(new MatOfPoint(new Point(from[0][0], from[0][1]),
            new Point(from[1][0], from[1][1]), new Point(from[2][0], from[2][1])));
    Rect r2 = Imgproc.boundingRect(new MatOfPoint(new Point(to[0][0], to[0][1]), new Point(to[1][0], to[1][1]),
            new Point(to[2][0], to[2][1])));

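    // Offset the triangle vertices by each bounding rect's top-left corner so the
    // points are expressed in the local coordinates of the cropped patches.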
    MatOfPoint2f tri1Cropped = new MatOfPoint2f(new Point(from[0][0] - r1.x, from[0][1] - r1.y),
            new Point(from[1][0] - r1.x, from[1][1] - r1.y), new Point(from[2][0] - r1.x, from[2][1] - r1.y));

    MatOfPoint tri2CroppedInt = new MatOfPoint(new Point(to[0][0] - r2.x, to[0][1] - r2.y),
            new Point(to[1][0] - r2.x, to[1][1] - r2.y), new Point(to[2][0] - r2.x, to[2][1] - r2.y));

    MatOfPoint2f tri2Cropped = new MatOfPoint2f(new Point((to[0][0] - r2.x), (to[0][1] - r2.y)),
            new Point((to[1][0] - r2.x), (to[1][1] - r2.y)), new Point((to[2][0] - r2.x), (to[2][1] - r2.y)));
    //        for (int i = 0; i < 3; i++) {
    //           // tri1Cropped.push_back(new MatOfPoint(new Point(from[i][0] - r1.x, from[i][1] - r1.y))); //           new Point( from[i][0]  - r1.x, from[i][1]-  r1.y) );
    //            //tri2Cropped.push_back(new MatOfPoint(new Point(to[i][0] - r2.x, to[i][1] - r2.y)));
    //            // fillConvexPoly needs a vector of Point and not Point2f
    //           // tri2CroppedInt.push_back(new MatOfPoint2f(new Point((int) (to[i][0] - r2.x), (int) (to[i][1] - r2.y))));
    //
    //        }

    // Apply warpImage to small rectangular patches
    Mat img1Cropped = mat.submat(r1);
    //img1(r1).copyTo(img1Cropped);

    // Given a pair of triangles, find the affine transform.
    Mat warpMat = Imgproc.getAffineTransform(tri1Cropped, tri2Cropped);

    //       Mat bbb = warpMat.mul(tri1Cropped);
    //        
    //       System.out.println( warpMat.dump() );
    //       System.out.println( tri2Cropped.dump() );
    //       System.out.println( bbb.dump() );
    // Apply the Affine Transform just found to the src image
    Mat img2Cropped = Mat.zeros(r2.height, r2.width, img1Cropped.type());
    Imgproc.warpAffine(img1Cropped, img2Cropped, warpMat, img2Cropped.size(), 0, Imgproc.INTER_LINEAR,
            new Scalar(Core.BORDER_TRANSPARENT)); //, 0, Imgproc.INTER_LINEAR, new Scalar(Core.BORDER_REFLECT_101));

    // Get mask by filling triangle
    Mat mask = Mat.zeros(r2.height, r2.width, CvType.CV_8UC3); ///CV_8U    CV_32FC3
    Imgproc.fillConvexPoly(mask, tri2CroppedInt, new Scalar(1.0, 1.0, 1.0), 16, 0);

    // Copy triangular region of the rectangular patch to the output image
    //         Core.multiply(img2Cropped,mask, img2Cropped);
    //         
    //         Core.multiply(mask, new Scalar(-1), mask);
    //        Core.(mask,new Scalar(gap), mask);
    //Core.multiply(lienzo.submat(r2),  (new Scalar(1.0,1.0,1.0)). - Core.multiply(mask,), lienzo.submat(r2));
    //         img2(r2) = img2(r2) + img2Cropped;
    // Core.subtract(Mat.ones(mask.height(), mask.width(), CvType.CV_8UC3), mask, mask);
    // Mat ff =   ;
    //   this one
    Core.multiply(img2Cropped, mask, img2Cropped);
    //Core.multiply(lienzo.submat(r2), mask  , lienzo.submat(r2));         
    Core.add(lienzo.submat(r2), img2Cropped, lienzo.submat(r2));

    /*
     Mat bb = new Mat(mat, r2);
     bb.setTo(new Scalar(rnd.nextInt(),rnd.nextInt(),rnd.nextInt()));         
     Core.multiply(bb,mask, bb);
     Core.multiply(lienzo.submat(r2), mask  , lienzo.submat(r2));         
     Core.add(lienzo.submat(r2), bb, lienzo.submat(r2));
             
     */
    // lienzo.submat(r2).setTo(new Scalar(rnd.nextInt(),rnd.nextInt(),rnd.nextInt()));
    //         
    //      Imgproc.fillConvexPoly(lienzo, new MatOfPoint(
    //                new Point(to[0][0] , to[0][1]),
    //                new Point(to[1][0] , to[1][1]),
    //                new Point(to[2][0] , to[2][1] )), new Scalar(1,1,1));
    //        img2Cropped.copyTo(lienzo);
    //        return;
    // http://stackoverflow.com/questions/14111716/how-to-set-a-mask-image-for-grabcut-in-opencv  
    //  Imgproc.warpAffine(mat, lienzo, perspective_matrix, lienzo.size());
    // Imgproc.getAffineTransform(null, null);
    /*     
     // Find bounding rectangle for each triangle
     Rect r1 = boundingRect(tri1);
     Rect r2 = boundingRect(tri2);
            
     // Offset points by left top corner of the respective rectangles
     vector<Point2f> tri1Cropped, tri2Cropped;
     vector<Point> tri2CroppedInt;
     for(int i = 0; i < 3; i++)
     {
     tri1Cropped.push_back( Point2f( tri1[i].x - r1.x, tri1[i].y -  r1.y) );
     tri2Cropped.push_back( Point2f( tri2[i].x - r2.x, tri2[i].y - r2.y) );
            
     // fillConvexPoly needs a vector of Point and not Point2f
     tri2CroppedInt.push_back( Point((int)(tri2[i].x - r2.x), (int)(tri2[i].y - r2.y)) );
            
     }
            
     // Apply warpImage to small rectangular patches
     Mat img1Cropped;
     img1(r1).copyTo(img1Cropped);
            
     // Given a pair of triangles, find the affine transform.
     Mat warpMat = getAffineTransform( tri1Cropped, tri2Cropped );
            
     // Apply the Affine Transform just found to the src image
     Mat img2Cropped = Mat::zeros(r2.height, r2.width, img1Cropped.type());
     warpAffine( img1Cropped, img2Cropped, warpMat, img2Cropped.size(), INTER_LINEAR, BORDER_REFLECT_101);
            
     // Get mask by filling triangle
     Mat mask = Mat::zeros(r2.height, r2.width, CV_32FC3);
     fillConvexPoly(mask, tri2CroppedInt, Scalar(1.0, 1.0, 1.0), 16, 0);
            
     // Copy triangular region of the rectangular patch to the output image
     multiply(img2Cropped,mask, img2Cropped);
     multiply(img2(r2), Scalar(1.0,1.0,1.0) - mask, img2(r2));
     img2(r2) = img2(r2) + img2Cropped;*/
}

From source file:servershootingstar.BallDetector.java

public static String getAngleFromRobot(int input) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    System.out.println("before");
    int point;
    try {
        Mat frame = new Mat();
        System.out.println("AAAAAA");
        Mat originalFrame = new Mat();
        System.out.println("BBBBBB");
        VideoCapture videoCapture = new VideoCapture(0);
        System.out.println("CCCCCCCC");
        videoCapture.read(originalFrame);
        //                System.out.println("original" + originalFrame.dump());
        //                initSwing(originalFrame);
        int workaround = 20;
        while (workaround > 0) {
            System.out.println("workaround " + workaround);
            videoCapture.read(originalFrame);
            //                    System.out.println(originalFrame.dump() + originalFrame.dump().length());
            workaround--;
        }
        //                Imgcodecs.imwrite("C:\\Users\\Goran\\Desktop\\Goran.jpg", originalFrame);
        Mat cropped = originalFrame.submat(originalFrame.rows() / 4, originalFrame.rows() / 4 * 3, 0,
                originalFrame.cols());
        initSwing(cropped);
        Imgproc.cvtColor(cropped, frame, Imgproc.COLOR_BGR2HSV);

        // insert lower and upper bounds for colors
        Scalar greenLowerB = new Scalar(20, 55, 55);
        Scalar greenUpperB = new Scalar(40, 255, 255);

        Scalar redLowerB = new Scalar(160, 100, 35);
        Scalar red1LowerB = new Scalar(0, 100, 35);

        Scalar redUpperB = new Scalar(180, 255, 255);
        Scalar red1UpperB = new Scalar(20, 255, 255);

        Scalar blueLowerB = new Scalar(100, 100, 35);
        Scalar blueUpperB = new Scalar(120, 255, 155);

        Mat mask = new Mat();

        if (input == 1) {
            Mat otherMask = new Mat();
            Core.inRange(frame, redLowerB, redUpperB, mask);
            Core.inRange(frame, red1LowerB, red1UpperB, otherMask);
            Core.bitwise_or(mask, otherMask, mask);
        } else if (input == 2) {
            Core.inRange(frame, greenLowerB, greenUpperB, mask);
        } else {
            Core.inRange(frame, blueLowerB, blueUpperB, mask);
        }
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));

        int minX = Integer.MAX_VALUE, maxX = Integer.MIN_VALUE, minY = Integer.MAX_VALUE,
                maxY = Integer.MIN_VALUE;
        for (int i = 0; i < mask.rows(); ++i) {
            for (int j = 0; j < mask.cols(); ++j) {
                double value = mask.get(i, j)[0];
                //System.out.println(value);
                if (value > 1) {
                    minX = Math.min(minX, i);
                    maxX = Math.max(maxX, i);
                    minY = Math.min(minY, j);
                    maxY = Math.max(maxY, j);
                }
            }
        }

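        // Note: i / minX / maxX index rows and j / minY / maxY index columns,
        // so the Point below is built as (column center, row center).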
        Imgproc.circle(mask, new Point((maxY + minY) / 2, (minX + maxX) / 2), 3, new Scalar(0, 0, 0));
        initSwing(mask);

        point = (minY + maxY) / 2;

        point = point - 320;

        cos = point / 320.0;
        System.out.println("OK");
    } catch (Exception ex) {
        point = (new Random()).nextInt(640);
        cos = -1;
        System.out.println("error imase, davam random brojka: " + point);
        ex.printStackTrace();

    }

    //            System.out.println();
    //            System.out.println("point in the range [-320, 320]");
    //            System.out.println(point);
    //            System.out.println("cosine value");
    //            System.out.println(cos);
    //            System.out.println();
    System.out.println("cos = " + cos);
    if (cos == -1) {
        return "-1";
    }
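    // Map the normalized horizontal offset to an angle via acos, then scale by 2/3.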
    int res = (int) (2 * Math.toDegrees(Math.acos(cos)) / 3);
    System.out.println("Res: " + res);
    return String.valueOf(res);
}

From source file:servlets.FillArea.java

/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {

    response.setContentType("text/html;charset=UTF-8");
    PrintWriter out = response.getWriter();
    try {

        String paramX = request.getParameter("x");
        String paramY = request.getParameter("y");
        String imageFileName = request.getParameter("imageId") + ".png";
        String widgetID = request.getParameter("widgetID") + ".png";

        double x = Double.parseDouble(paramX);
        double y = Double.parseDouble(paramY);

        Mat image = ImageUtils.loadImage(imageFileName, request);

        FloodFiller floodFiller = new FloodFiller(image, widgetID, request);
        Point from = new Point(x, y);
        int lo = 35;
        int up = 35;

        floodFiller.fillFrom(from, lo, up);

        String path = floodFiller.getPath();

        Scalar meanColor = floodFiller.getMeanColor();
        Point topLeftPoint = floodFiller.getTopLeftCorner();

        int filledArea = floodFiller.getFilledArea();
        double contourArea = floodFiller.getContourArea();

        FindingResponse findingResponse = new FindingResponse(path, meanColor, topLeftPoint, filledArea,
                contourArea);

        Gson gson = new Gson();
        String jsonResponse = gson.toJson(findingResponse, FindingResponse.class);

        out.println(jsonResponse);

    } finally {
        out.close();
    }
}

From source file:servlets.FillAreaByScribble.java

/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");
    try (PrintWriter out = response.getWriter()) {

        String imageForTextRecognition = request.getParameter("imageForTextRecognition") + ".png";
        String isSingleRegion = request.getParameter("isSingleRegion");
        boolean makeSingleRegion = isSingleRegion.toLowerCase().equals("true");

        Mat original = ImageUtils.loadImage(imageForTextRecognition, request);
        Mat image = original.clone();
        Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1);

        String samplingPoints = request.getParameter("samplingPoints");

        Gson gson = new Gson();
        Point[] tmpPoints = gson.fromJson(samplingPoints, Point[].class);

        ArrayList<Point> userPoints = new ArrayList<Point>(Arrays.asList(tmpPoints));

        Mat userPointsImage = image.clone();

        ArrayList<Mat> maskRegions = new ArrayList<>();

        Random random = new Random();
        int b = random.nextInt(256);
        int g = random.nextInt(256);
        int r = random.nextInt(256);
        Scalar newVal = new Scalar(b, g, r);
        FloodFillFacade floodFillFacade = new FloodFillFacade();

        int k = 0;

        for (int i = 0; i < userPoints.size(); i++) {
            Point point = userPoints.get(i);

            image = floodFillFacade.fill(image, mask, (int) point.x, (int) point.y, newVal);

            Mat seedImage = original.clone();
            Core.circle(seedImage, point, 9, new Scalar(0, 0, 255), -1);
            Core.putText(userPointsImage, "" + k, new Point(point.x + 5, point.y + 5), 3, 0.5,
                    new Scalar(0, 0, 0));
            //                ImageUtils.saveImage(seedImage, "mask_" + k + "_seed" + imageForTextRecognition + ".png", request);

            if (!makeSingleRegion) {
                Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));
                Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 3);
                Imgproc.resize(mask, mask, original.size());
            }

            //                ImageUtils.saveImage(mask, "mask_" + k + "" + imageForTextRecognition + ".png", request);

            Mat dilatedMask = new Mat();

            int elementSide = 21;
            Mat element = new Mat(elementSide, elementSide, CvType.CV_8U, new Scalar(1));
            Imgproc.morphologyEx(mask, dilatedMask, Imgproc.MORPH_DILATE, element, new Point(-1, -1), 1);
            Imgproc.resize(dilatedMask, dilatedMask, original.size());

            //                ImageUtils.saveImage(dilatedMask, "mask_" + k + "_dilated" + imageForTextRecognition + ".png", request);

            maskRegions.add(mask);

            if (!makeSingleRegion) {
                int totalRemovedPoints = filterPoints(userPoints, dilatedMask);
                if (totalRemovedPoints > 0) {
                    i = -1; // so that the algorithm starts again at the first element of the userPoints array
                }
            } else {
                filterPoints(userPoints, mask);
            }

            //                System.out.println("Total points after filtering:");
            //                System.out.println(userPoints.size());

            if (!makeSingleRegion) {
                mask = Mat.zeros(original.rows() + 2, original.cols() + 2, CvType.CV_8UC1);
            }

            k++;
        }

        ArrayList<FindingResponse> findingResponses = new ArrayList<>();

        if (makeSingleRegion) {

            Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));
            Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 3);

            Imgproc.resize(mask, mask, image.size());

            List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
            Imgproc.findContours(mask.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
                    Imgproc.CHAIN_APPROX_NONE);

            MatOfPoint biggestContour = contours.get(0); // getting the biggest contour
            double contourArea = Imgproc.contourArea(biggestContour);

            if (contours.size() > 1) {
                biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there is more than one
            }

            Point[] biggestContourPoints = biggestContour.toArray();
            String path = "M " + (int) biggestContourPoints[0].x + " " + (int) biggestContourPoints[0].y + " ";
            for (int i = 1; i < biggestContourPoints.length; ++i) {
                Point v = biggestContourPoints[i];
                path += "L " + (int) v.x + " " + (int) v.y + " ";
            }
            path += "Z";

            //                System.out.println("path:");
            //                System.out.println(path);

            Rect computedSearchWindow = Imgproc.boundingRect(biggestContour);
            Point massCenter = computedSearchWindow.tl();

            Scalar meanColor = Core.mean(original, mask);

            //                ImageUtils.saveImage(mask, "single_mask_" + imageForTextRecognition + ".png", request);

            FindingResponse findingResponse = new FindingResponse(path, meanColor, massCenter, -1, contourArea);
            findingResponses.add(findingResponse);

        } else {

            float imageArea = image.cols() * image.rows();

            for (int j = 0; j < maskRegions.size(); j++) {
                Mat region = maskRegions.get(j);

                List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                Imgproc.findContours(region.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
                        Imgproc.CHAIN_APPROX_NONE);

                MatOfPoint biggestContour = contours.get(0); // getting the biggest contour

                if (contours.size() > 1) {
                    biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there is more than one
                }

                double contourArea = Imgproc.contourArea(biggestContour);

                if (contourArea / imageArea < 0.8) { // only areas less than 80% of that of the image are accepted

                    Point[] biggestContourPoints = biggestContour.toArray();
                    String path = "M " + (int) biggestContourPoints[0].x + " " + (int) biggestContourPoints[0].y
                            + " ";
                    for (int i = 1; i < biggestContourPoints.length; ++i) {
                        Point v = biggestContourPoints[i];
                        path += "L " + (int) v.x + " " + (int) v.y + " ";
                    }
                    path += "Z";

                    Rect computedSearchWindow = Imgproc.boundingRect(biggestContour);
                    Point massCenter = computedSearchWindow.tl();

                    //                        System.out.println("Contour area: " + contourArea);

                    Mat contoursImage = userPointsImage.clone();
                    Imgproc.drawContours(contoursImage, contours, 0, newVal, 1);

                    Scalar meanColor = Core.mean(original, region);

                    FindingResponse findingResponse = new FindingResponse(path, meanColor, massCenter, -1,
                            contourArea);
                    findingResponses.add(findingResponse);

                    //                        ImageUtils.saveImage(contoursImage, "mask_" + j + "_contourned" + imageForTextRecognition + ".png", request);

                }

            }

        }

        String jsonResponse = gson.toJson(findingResponses, ArrayList.class);

        out.println(jsonResponse);

    }
}

From source file:servlets.processScribble.java

/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");
    try (PrintWriter out = response.getWriter()) {

        String imageForTextRecognition = request.getParameter("imageForTextRecognition") + ".png";

        Mat original = ImageUtils.loadImage(imageForTextRecognition, request);
        Mat image = original.clone();
        Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1);

        String samplingPoints = request.getParameter("samplingPoints");

        Gson gson = new Gson();
        Point[] userPoints = gson.fromJson(samplingPoints, Point[].class);

        MatOfPoint points = new MatOfPoint(new Mat(userPoints.length, 1, CvType.CV_32SC2));
        int cont = 0;

        for (Point point : userPoints) {
            int y = (int) point.y;
            int x = (int) point.x;
            int[] data = { x, y };
            points.put(cont++, 0, data);
        }

        MatOfInt hull = new MatOfInt();
        Imgproc.convexHull(points, hull);
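        // hull holds indices into `points`; the loop below maps them back to coordinates.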

        MatOfPoint mopOut = new MatOfPoint();
        mopOut.create((int) hull.size().height, 1, CvType.CV_32SC2);

        int totalPoints = (int) hull.size().height;

        Point[] convexHullPoints = new Point[totalPoints];
        ArrayList<Point> seeds = new ArrayList<>();

        for (int i = 0; i < totalPoints; i++) {
            int index = (int) hull.get(i, 0)[0];
            double[] point = new double[] { points.get(index, 0)[0], points.get(index, 0)[1] };
            mopOut.put(i, 0, point);

            convexHullPoints[i] = new Point(point[0], point[1]);
            seeds.add(new Point(point[0], point[1]));

        }

        MatOfPoint mop = new MatOfPoint();
        mop.fromArray(convexHullPoints);

        ArrayList<MatOfPoint> arrayList = new ArrayList<MatOfPoint>();
        arrayList.add(mop);

        Random random = new Random();
        int b = random.nextInt(256);
        int g = random.nextInt(256);
        int r = random.nextInt(256);
        Scalar newVal = new Scalar(b, g, r);

        FloodFillFacade floodFillFacade = new FloodFillFacade();

        for (int i = 0; i < seeds.size(); i++) {
            Point seed = seeds.get(i);
            image = floodFillFacade.fill(image, mask, (int) seed.x, (int) seed.y, newVal);
        }

        Imgproc.drawContours(image, arrayList, 0, newVal, -1);

        Imgproc.resize(mask, mask, image.size());

        Scalar meanColor = Core.mean(original, mask);

        //            Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\the_convexHull.png", image);
        ImageUtils.saveImage(image, imageForTextRecognition + "_the_convexHull.png", request);

        newVal = new Scalar(255, 255, 0);

        floodFillFacade.setMasked(false);
        System.out.println("Last one:");
        floodFillFacade.fill(image, mask, 211, 194, newVal);

        Core.circle(image, new Point(211, 194), 5, new Scalar(0, 0, 0), -1);
        ImageUtils.saveImage(image, imageForTextRecognition + "_final.png", request);
        //            Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\final.png", image);

        Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));
        Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 3);

        Imgproc.resize(mask, mask, image.size());

        //            ImageUtils.saveImage(mask, "final_mask_dilated.png", request);
        //            Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\final_mask_dilated.png", mask);

        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Imgproc.findContours(mask.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_NONE);
        double contourArea = 0;
        String path = "";

        MatOfPoint biggestContour = contours.get(0); // getting the biggest contour
        contourArea = Imgproc.contourArea(biggestContour);

        if (contours.size() > 1) {
            biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there is more than one
        }

        Point[] biggestContourPoints = biggestContour.toArray();
        path = "M " + (int) biggestContourPoints[0].x + " " + (int) biggestContourPoints[0].y + " ";
        for (int i = 1; i < biggestContourPoints.length; ++i) {
            Point v = biggestContourPoints[i];
            path += "L " + (int) v.x + " " + (int) v.y + " ";
        }
        path += "Z";

        System.out.println("path:");
        System.out.println(path);

        Rect computedSearchWindow = Imgproc.boundingRect(biggestContour);
        Point massCenter = computedSearchWindow.tl();

        FindingResponse findingResponse = new FindingResponse(path, meanColor, massCenter, -1, contourArea);
        String jsonResponse = gson.toJson(findingResponse, FindingResponse.class);

        out.println(jsonResponse);

        //            String jsonResponse = gson.toJson(path);
        //            out.println(jsonResponse);
    }
}

From source file:simeav.filtros.instanciaciones.DetectorConectoresEstandar.java

@Override
public Mat detectarConectores(Mat original, Mat mascaraModulos, Diagrama diagrama) {
    Mat sinCuadrados = Utils.borrarMascara(original, mascaraModulos);
    // dilate the connectors so they overlap the squares
    sinCuadrados = Utils.dilate(sinCuadrados);
    sinCuadrados = Utils.dilate(sinCuadrados);
    sinCuadrados = Utils.dilate(sinCuadrados);
    // remove points that may be left over from the removal of the squares
    ArrayList<MatOfPoint> contornos = Utils.detectarContornos(sinCuadrados);
    for (int i = 0; i < contornos.size(); i++) {
        double area = Imgproc.contourArea(contornos.get(i));
        if (area <= 50) {
            Imgproc.drawContours(sinCuadrados, contornos, i, new Scalar(0, 0, 0), -1);
        }
    }

    this.extremos = original.clone();
    Mat mascara;
    String tipo_extremo1, tipo_extremo2;
    // Image on which the result will be drawn
    Mat conectores = Mat.zeros(sinCuadrados.size(), CvType.CV_8UC3);
    Mat contorno;
    contornos = Utils.detectarContornos(sinCuadrados);
    Mat intersec = new Mat();

    ArrayList<MatOfPoint> contornos_intersec;
    int r, g, b;
    for (int j = contornos.size() - 1; j >= 0; j--) {
        // Draw the filled contour, so its intersection with the squares can be taken afterwards
        contorno = Mat.zeros(sinCuadrados.size(), CvType.CV_8UC3);
        Imgproc.drawContours(contorno, contornos, j, new Scalar(180, 255, 255), -1);
        Imgproc.cvtColor(contorno, contorno, Imgproc.COLOR_BGR2GRAY);
        // Compute the intersection with the squares (drawn into intersec)
        Core.bitwise_and(contorno, mascaraModulos, intersec);
        // Extract the contours of the intersections to find out where they are
        contornos_intersec = Utils.detectarContornos(intersec);
        if (contornos_intersec.size() > 1) {
            Scalar color = Utils.getColorRandom();
            for (int z = 0; z < contornos_intersec.size(); z++) {
                Imgproc.drawContours(conectores, contornos_intersec, z, color, -1);
            }
            ArrayList<Point> centros_extremos = Utils.getCentros(contornos_intersec);
            for (Point centros_extremo : centros_extremos) {
                Core.circle(conectores, centros_extremo, 4, color, -1);
            }
            analizarExtremos(j, centros_extremos, diagrama);
            Conector c = diagrama.getConector(j);

            Core.rectangle(conectores, c.getModuloDesde().getRectangulo().tl(),
                    c.getModuloDesde().getRectangulo().br(), color, 3);
            Core.rectangle(conectores, c.getModuloHasta().getRectangulo().tl(),
                    c.getModuloHasta().getRectangulo().br(), color, 3);
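            // 40x40 windows centered on each connector endpoint, used below to isolate
            // the line ends before classifying them with clasificarExtremo.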
            Point tl_desde = new Point(c.getDesde().x - 20, c.getDesde().y - 20);
            Point br_desde = new Point(c.getDesde().x + 20, c.getDesde().y + 20);
            Point tl_hasta = new Point(c.getHasta().x - 20, c.getHasta().y - 20);
            Point br_hasta = new Point(c.getHasta().x + 20, c.getHasta().y + 20);
            mascara = new Mat(sinCuadrados.size(), CvType.CV_8U, new Scalar(255, 255, 255));
            Core.rectangle(mascara, tl_desde, br_desde, new Scalar(0, 0, 0), -1);
            tipo_extremo1 = clasificarExtremo(Utils.borrarMascara(original, mascara));

            mascara = new Mat(sinCuadrados.size(), CvType.CV_8U, new Scalar(255, 255, 255));
            Core.rectangle(mascara, tl_hasta, br_hasta, new Scalar(0, 0, 0), -1);
            tipo_extremo2 = clasificarExtremo(Utils.borrarMascara(original, mascara));
            if (!tipo_extremo1.equals(tipo_extremo2)) {
                if (tipo_extremo1.equals("Normal"))
                    c.setTipo(tipo_extremo2);
                else if (tipo_extremo2.equals("Normal")) {
                    Modulo aux = c.getModuloDesde();
                    c.setDesde(c.getModuloHasta());
                    c.setHacia(aux);
                    Point p_aux = c.getDesde();
                    c.setDesde(c.getHasta());
                    c.setHasta(p_aux);
                    c.setTipo(tipo_extremo1);
                } else {
                    c.setTipo("Indeterminado");
                }
            } else {
                c.setTipo("Indeterminado");
            }
        }
    }
    return conectores;
}

From source file:simeav.filtros.instanciaciones.DetectorConectoresEstandar.java

private void analizarExtremos(Integer id_conector, ArrayList<Point> extremos, Diagrama diagrama) {
    ArrayList<Modulo> modulos = diagrama.getModulos();
    ArrayList<Modulo> modulos_conectados = new ArrayList<>();
    for (int i = 0; i < extremos.size(); i++) {
        for (int j = 0; j < modulos.size(); j++) {
            Rect rectangulo = modulos.get(j).getRectangulo();
            if (Utils.conecta(rectangulo, extremos.get(i))) {
                modulos_conectados.add(modulos.get(j));
            }
        }
    }
    int i = modulos_conectados.size();
    if (i > 2) {
        int j = 0;
        int k = 1;
        Modulo m1, m2;
        Point p1, p2;
        while (j < extremos.size()) {
            m1 = modulos_conectados.get(j);
            m2 = modulos_conectados.get(k);
            p1 = extremos.get(j);
            p2 = extremos.get(k);
            if ((m1.equals(m2)) && ((abs(p1.x - p2.x) < 0.1) || (abs(p1.y - p2.y) < 0.1))) {
                Point centro = new Point((p1.x + p2.x) / 2, (p1.y + p2.y) / 2);
                extremos.remove(p1);
                extremos.remove(p2);
                extremos.add(centro);
                modulos_conectados.remove(m1);
                j = extremos.size();
                k = j + 1;
            } else if (k == extremos.size() - 1) {
                j = k;
                k = 0;
            } else {
                j++;
                k++;
            }
        }
    }
    i = modulos_conectados.size();
    if (i >= 2) {
        Modulo m1 = (Modulo) modulos_conectados.get(0);
        Modulo m2 = (Modulo) modulos_conectados.get(1);
        diagrama.addConector(id_conector, m1, m2, extremos.get(0), extremos.get(1));
    }
}

From source file:simeav.filtros.instanciaciones.DetectorModulosEstandar.java

@Override
public Mat detectarModulos(Mat original, Diagrama diagrama) {
    Imgproc.blur(original, original, new Size(15, 15));
    original = Utils.dilate(original);
    Mat jerarquia = new Mat();
    ArrayList<MatOfPoint> contornos = new ArrayList<>();
    Imgproc.findContours(original.clone(), contornos, jerarquia, Imgproc.RETR_CCOMP,
            Imgproc.CHAIN_APPROX_SIMPLE);
    ArrayList<MatOfPoint> cp = new ArrayList<>(contornos.size());
    Map<Integer, Rect> rectangulos = new HashMap<>();
    Integer id_cuadrado = 0;
    Mat resultado = Mat.zeros(original.size(), CvType.CV_8U);
    for (int i = contornos.size() - 1; i >= 0; i--) {
        if (jerarquia.get(0, i)[3] > -1) {
            MatOfPoint2f contorno2f = new MatOfPoint2f();
            contorno2f.fromList(contornos.get(i).toList());
            MatOfPoint2f c = new MatOfPoint2f();
            Imgproc.approxPolyDP(contorno2f, c, 3, true);
            cp.add(new MatOfPoint(c.toArray()));
            int lados = cp.get(cp.size() - 1).height();
            if ((4 <= lados) && lados < 12) {
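                // Accept approximated polygons with 4 to 11 vertices as module candidates.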
                rectangulos.put(id_cuadrado, Imgproc.boundingRect(new MatOfPoint(c.toArray())));
                Point tl = new Point(rectangulos.get(id_cuadrado).tl().x - 20,
                        rectangulos.get(id_cuadrado).tl().y - 20);
                Point br = new Point(rectangulos.get(id_cuadrado).br().x + 20,
                        rectangulos.get(id_cuadrado).br().y + 20);
                Core.rectangle(resultado, tl, br, new Scalar(255, 255, 255), -1);
                diagrama.addModulo(id_cuadrado, new Rect(tl, br));
                Imgproc.drawContours(resultado, contornos, i, new Scalar(0, 0, 0), -1);
                id_cuadrado++;
            }
        }
    }
    return resultado;
}