Example usage for the org.opencv.core.Point constructor Point(double[])

List of usage examples for the org.opencv.core.Point constructor Point(double[])

Introduction

On this page you can find usage examples for the org.opencv.core.Point constructor that takes an array of coordinate values.

Prototype

public Point(double[] vals) 
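
Before the project-level examples below, here is a minimal, self-contained sketch of the constructor in isolation (an illustration added for this page, not taken from any of the source files below; it assumes the OpenCV Java bindings are on the classpath and the native library can be loaded). The array is read as { x, y }, and Mat.get(row, col) returns exactly such a double[] of channel values, which is how most of the examples below obtain it.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;

public class PointFromArrayExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before using any OpenCV classes.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Build a Point directly from a two-element array: { x, y }.
        Point p = new Point(new double[] { 12.5, 34.0 });
        System.out.println(p); // prints {12.5, 34.0}

        // Typical pattern from the examples below: Mat.get(row, col) returns a
        // double[] of channel values, which Point(double[]) consumes directly.
        Mat corners = new Mat(1, 1, CvType.CV_32FC2);
        corners.put(0, 0, new double[] { 3.0, 4.0 });
        Point fromMat = new Point(corners.get(0, 0));
        System.out.println(fromMat); // prints {3.0, 4.0}
    }
}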

Usage

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Computes the rectangle where the searched picture is located and the rotation angle between the two images.
 * Throws {@link ImageSearchException} if the picture is not found.
 * @deprecated Kept here for information, but OpenCV 3 no longer includes SURF in its Java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
        // best effort: writing the debug keypoint image is optional, so the failure is ignored
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);
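    // A hedged alternative (not in the original source): the RANSAC flag mentioned
    // above rejects outlier correspondences instead of a plain least-squares fit, e.g.
    //     Mat hg = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 5);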

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
            // debug display only; a failure to show the picture is ignored
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}

From source file:gov.nasa.jpl.memex.pooledtimeseries.PoT.java

License:Apache License

static void updateOpticalHistogram(double[][][] hist, Mat flow) {
    int d1 = hist.length;
    int d2 = hist[0].length;
    int d3 = hist[0][0].length;

    int step = 4; // 5;

    for (int x = 0; x < frame_width; x += step) {
        int x_type = (int) (x * d1 / frame_width);

        for (int y = 0; y < frame_height; y += step) {
            int y_type = (int) (y * d2 / frame_height);

            Point fxy = new Point(flow.get(y, x));

            double size = (fxy.x + fxy.y) * (fxy.x + fxy.y);

            if (size < 9) {
                continue; // 25
            } else {
                int f_type = opticalFlowType(fxy, d3);

                hist[x_type][y_type][f_type]++;
            }
        }
    }
}

From source file:org.surmon.pattern.editor2d.components.Mapping.java

public static List<MatOfPoint> process(Mat source, List<Particle> particles) {

    Mat partImage = new Mat(source.size(), CvType.CV_8UC1);

    // Draw each particle as a small circle on the blank image
    Point p;
    for (Particle part : particles) {
        p = new Point(part.getPosition().toArray());
        Core.circle(partImage, p, 1, new Scalar(255));
    }

    // Blur with Gaussian kernel
    Mat blured = new Mat();
    Imgproc.GaussianBlur(partImage, blured, new Size(101, 101), -1, -1);

    // Equalize histogram
    List<Mat> eqChannels = new ArrayList<>();
    List<Mat> channels = new ArrayList<>();
    Core.split(blured, channels);

    for (Mat channel : channels) {
        Mat eqImage = new Mat();
        Imgproc.equalizeHist(channel, eqImage);
        eqChannels.add(eqImage);
    }
    Mat eqResult = new Mat();
    Core.merge(eqChannels, eqResult);

    // Binary threshold
    Mat bin = new Mat();
    Imgproc.threshold(eqResult, bin, 0, 255, Imgproc.THRESH_OTSU);
    //        Imgproc.threshold(eqResult, bin, 10, 255, Imgproc.THRESH_BINARY);

    // Find contours
    Mat imMat = bin.clone();
    Mat canny_output = new Mat();
    Mat hierarchy = new Mat();
    int thresh = 100;
    // edge detection before contour extraction
    List<MatOfPoint> borders = new ArrayList<>();
    Imgproc.Canny(imMat, canny_output, thresh, thresh * 2);
    Imgproc.findContours(canny_output, borders, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); // Find contours

    return borders;

    //        Mat result = source.clone();
    //        Imgproc.drawContours(result, borders, -1, new Scalar(255, 0, 255));
    //        
    //        return result;
}

From source file:org.usfirst.frc.team2084.CMonster2016.vision.Target.java

License:Open Source License

/**
 * Creates a new possible target based on the specified blob and calculates
 * its score.
 *
 * @param contour the contour describing the shape of the possible target
 * @param grayImage the grayscale image used to refine the corner locations
 */
public Target(MatOfPoint contour, Mat grayImage) {
    // Simplify contour to make the corner finding algorithm work better
    MatOfPoint2f fContour = new MatOfPoint2f();
    contour.convertTo(fContour, CvType.CV_32F);
    Imgproc.approxPolyDP(fContour, fContour, VisionParameters.getGoalApproxPolyEpsilon(), true);
    fContour.convertTo(contour, CvType.CV_32S);

    this.contour = contour;

    // Check area, and don't do any calculations if it is not valid
    if (validArea = validateArea()) {

        // Find a bounding rectangle
        RotatedRect rect = Imgproc.minAreaRect(fContour);

        Point[] rectPoints = new Point[4];
        rect.points(rectPoints);

        for (int j = 0; j < rectPoints.length; j++) {
            Point rectPoint = rectPoints[j];

            double minDistance = Double.MAX_VALUE;
            Point point = null;

            for (int i = 0; i < contour.rows(); i++) {
                Point contourPoint = new Point(contour.get(i, 0));
                double dist = distance(rectPoint, contourPoint);
                if (dist < minDistance) {
                    minDistance = dist;
                    point = contourPoint;
                }
            }

            rectPoints[j] = point;
        }
        MatOfPoint2f rectMat = new MatOfPoint2f(rectPoints);
        // Refine the corners to improve accuracy
        Imgproc.cornerSubPix(grayImage, rectMat, new Size(4, 10), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        rectPoints = rectMat.toArray();

        // Identify each corner
        SortedMap<Double, List<Point>> x = new TreeMap<>();
        Arrays.stream(rectPoints).forEach((p) -> {
            List<Point> points;
            if ((points = x.get(p.x)) == null) {
                x.put(p.x, points = new LinkedList<>());
            }
            points.add(p);
        });

        int i = 0;
        for (Iterator<List<Point>> it = x.values().iterator(); it.hasNext();) {
            List<Point> s = it.next();

            for (Point p : s) {
                switch (i) {
                case 0:
                    topLeft = p;
                    break;
                case 1:
                    bottomLeft = p;
                    break;
                case 2:
                    topRight = p;
                    break;
                case 3:
                    bottomRight = p;
                }
                i++;
            }
        }

        // Organize corners
        if (topLeft.y > bottomLeft.y) {
            Point p = bottomLeft;
            bottomLeft = topLeft;
            topLeft = p;
        }

        if (topRight.y > bottomRight.y) {
            Point p = bottomRight;
            bottomRight = topRight;
            topRight = p;
        }

        // Create corners for centroid calculation
        corners = new MatOfPoint2f(rectPoints);

        // Calculate center
        Moments moments = Imgproc.moments(corners);
        center = new Point(moments.m10 / moments.m00, moments.m01 / moments.m00);

        // Put the points in the correct order for solvePNP
        rectPoints[0] = topLeft;
        rectPoints[1] = topRight;
        rectPoints[2] = bottomLeft;
        rectPoints[3] = bottomRight;
        // Recreate corners in the new order
        corners = new MatOfPoint2f(rectPoints);

        widthTop = distance(topLeft, topRight);
        widthBottom = distance(bottomLeft, bottomRight);
        width = (widthTop + widthBottom) / 2.0;
        heightLeft = distance(topLeft, bottomLeft);
        heightRight = distance(topRight, bottomRight);
        height = (heightLeft + heightRight) / 2.0;

        Mat tvec = new Mat();

        // Calculate target's location
        Calib3d.solvePnP(OBJECT_POINTS, corners, CAMERA_MAT, DISTORTION_MAT, rotation, tvec, false,
                Calib3d.CV_P3P);

        // =======================================
        // Position and Orientation Transformation
        // =======================================

        double armAngle = VisionResults.getArmAngle();

        // Flip y axis to point upward
        Core.multiply(tvec, SIGN_NORMALIZATION_MATRIX, tvec);

        // Shift origin to arm pivot point, on the robot's centerline
        CoordinateMath.translate(tvec, CAMERA_X_OFFSET, CAMERA_Y_OFFSET, ARM_LENGTH);

        // Align axes with ground
        CoordinateMath.rotateX(tvec, -armAngle);
        Core.add(rotation, new MatOfDouble(armAngle, 0, 0), rotation);

        // Shift origin to robot center of rotation
        CoordinateMath.translate(tvec, 0, ARM_PIVOT_Y_OFFSET, -ARM_PIVOT_Z_OFFSET);

        double xPosFeet = tvec.get(0, 0)[0];
        double yPosFeet = tvec.get(1, 0)[0];
        double zPosFeet = tvec.get(2, 0)[0];

        // Old less effective aiming heading and distance calculation
        // double pixelsToFeet = TARGET_WIDTH / width;

        // distance = (TARGET_WIDTH * HighGoalProcessor.IMAGE_SIZE.width
        // / (2 * width ** Math.tan(VisionParameters.getFOVAngle() / 2)));
        // double xPosFeet = (center.x - (HighGoalProcessor.IMAGE_SIZE.width
        // / 2)) * pixelsToFeet;
        // double yPosFeet = -(center.y -
        // (HighGoalProcessor.IMAGE_SIZE.height / 2)) * pixelsToFeet;

        distance = Math.sqrt(xPosFeet * xPosFeet + zPosFeet * zPosFeet);

        position = new Point3(xPosFeet, yPosFeet, zPosFeet);

        xGoalAngle = Math.atan(xPosFeet / zPosFeet);
        yGoalAngle = Math.atan(yPosFeet / zPosFeet);

        validate();
        score = calculateScore();
    } else {
        valid = false;
    }
}

From source file:overwatchteampicker.OverwatchTeamPicker.java

public static ReturnValues findImage(String template, String source, int flag) {
    File lib = null;
    BufferedImage image = null;
    try {
        image = ImageIO.read(new File(source));
    } catch (Exception e) {
        e.printStackTrace();
    }

    String os = System.getProperty("os.name");
    String bitness = System.getProperty("sun.arch.data.model");

    if (os.toUpperCase().contains("WINDOWS")) {
        if (bitness.endsWith("64")) {
            lib = new File("C:\\Users\\POWERUSER\\Downloads\\opencv\\build\\java\\x64\\"
                    + System.mapLibraryName("opencv_java2413"));
        } else {
            lib = new File("libs//x86//" + System.mapLibraryName("opencv_java2413"));
        }
    }
    System.load(lib.getAbsolutePath());
    String tempObject = "images\\hero_templates\\" + template + ".png";
    String source_pic = source;
    Mat objectImage = Highgui.imread(tempObject, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat sceneImage = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SURF);
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();
    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    featureDetector.detect(sceneImage, sceneKeyPoints);
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 25);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

    float nndrRatio = .78f;

    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];

        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);

        }
    }

    if (goodMatchesList.size() >= 4) {

        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();

        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_COLOR);

        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 255), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(255, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
        if (new Point(scene_corners.get(0, 0)).x < new Point(scene_corners.get(1, 0)).x
                && new Point(scene_corners.get(0, 0)).y < new Point(scene_corners.get(2, 0)).y) {
            System.out.println("found " + template);
            Highgui.imwrite("points.jpg", outputImage);
            Highgui.imwrite("matches.jpg", matchoutput);
            Highgui.imwrite("final.jpg", img);

            if (flag == 0) {
                ReturnValues retVal = null;
                int y = (int) new Point(scene_corners.get(3, 0)).y;
                int yHeight = (int) new Point(scene_corners.get(3, 0)).y
                        - (int) new Point(scene_corners.get(2, 0)).y;
                if (y < image.getHeight() * .6) { //if found hero is in upper half of image then return point 3,0
                    retVal = new ReturnValues(y + (int) (image.getHeight() * .01), yHeight);
                } else { //if found hero is in lower half of image then return point 2,0
                    y = (int) new Point(scene_corners.get(2, 0)).y;
                    retVal = new ReturnValues(y + (int) (image.getHeight() * .3), yHeight);
                }
                return retVal;
            } else if (flag == 1) {
                int[] xPoints = new int[4];
                int[] yPoints = new int[4];

                xPoints[0] = (int) (new Point(scene_corners.get(0, 0)).x);
                xPoints[1] = (int) (new Point(scene_corners.get(1, 0)).x);
                xPoints[2] = (int) (new Point(scene_corners.get(2, 0)).x);
                xPoints[3] = (int) (new Point(scene_corners.get(3, 0)).x);

                yPoints[0] = (int) (new Point(scene_corners.get(0, 0)).y);
                yPoints[1] = (int) (new Point(scene_corners.get(1, 0)).y);
                yPoints[2] = (int) (new Point(scene_corners.get(2, 0)).y);
                yPoints[3] = (int) (new Point(scene_corners.get(3, 0)).y);

                ReturnValues retVal = new ReturnValues(xPoints, yPoints);
                return retVal;

            }
        }
    }
    return null;

}

From source file:samples.LWF.java

private static void save_meshed_images(double[][] puntos, File carpetaalmacen, File image, Mat mat,
        int[][] delaunay_triangles) {
    Mat mat_copy = mat.clone();
    int radii = 1000;
    for (double[] punto : puntos) {
        Imgproc.ellipse(mat_copy, new Point(punto), new Size(radii, radii), 0, 0, 0, new Scalar(0, 255, 0));
        //            Imgproc.line(mat_copy, null, null, null);
    }
    for (int[] tri : faceTemplateTriangles) {
        Imgproc.line(mat_copy, new Point(puntos[tri[0] - 1]), new Point(puntos[tri[1] - 1]),
                new Scalar(0, 255, 0));
        Imgproc.line(mat_copy, new Point(puntos[tri[1] - 1]), new Point(puntos[tri[2] - 1]),
                new Scalar(0, 255, 0));
        Imgproc.line(mat_copy, new Point(puntos[tri[2] - 1]), new Point(puntos[tri[0] - 1]),
                new Scalar(0, 255, 0));
    }
    Imgcodecs.imwrite(carpetaalmacen.getAbsolutePath() + "\\" + image.getName(), mat_copy);
}

From source file:simeav.Utils.java

public static ArrayList<Point> detectarVertices(Mat original) {
    MatOfPoint corners = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(original, corners, 100, 0.01, 0, new Mat(), 2, false, 0.04);
    Mat vertices = Mat.zeros(original.size(), CvType.CV_8UC3);
    for (int i = 0; i < corners.height(); i++) {
        Core.circle(vertices, new Point(corners.get(i, 0)), 8, new Scalar(180, 170, 5), -1);
    }

    Mat imGrises = new Mat();
    Mat bw = new Mat();
    Imgproc.cvtColor(vertices, imGrises, Imgproc.COLOR_BGR2GRAY);
    Imgproc.Canny(imGrises, bw, 100, 150, 5, true);

    Mat jerarquia = new Mat();
    ArrayList<MatOfPoint> contornos = new ArrayList<>();
    Imgproc.findContours(bw.clone(), contornos, jerarquia, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    ArrayList<Point> mc = Utils.getCentros(contornos);
    Mat resultado = Mat.zeros(original.size(), CvType.CV_8UC3);
    for (int i = 0; i < contornos.size(); i++) {
        Scalar color = new Scalar(180, 170, 5);
        //            Imgproc.drawContours(resultado, contornos, i, color, 2, 8, jerarquia, 0, new Point());
        Core.circle(resultado, mc.get(i), 4, color, -1, 8, 0);
    }
    return mc;
}

From source file:View.Signature.java

public static int sift(String routeVal, String route, String n_img1, String n_img2, String extension) {

    String bookObject = routeVal + n_img2 + extension;
    String bookScene = route + n_img1 + extension;

    //System.out.println("Iniciando SIFT");
    //java.lang.System.out.print("Abriendo imagenes | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Encontrar keypoints con SIFT | ");  
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computando descriptores | ");  
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.   
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);

    //java.lang.System.out.print("Dibujando keypoints en imagen base | ");  
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image  
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detectando keypoints en imagen base | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computando descriptores en imagen base | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.print("Encontrando matches entre imagenes | ");  
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Calculando buenos matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

    float nndrRatio = 0.7f;
    java.lang.System.out.println(matches.size());
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];

        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);

        }
    }

    if (goodMatchesList.size() >= 7) {
        //java.lang.System.out.println("Match enontrado!!! Matches: "+goodMatchesList.size());
        //if(goodMatchesList.size()>max){

        //cambio = 1;
        //}    

        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();

        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        //System.out.println("Transforming object corners to scene corners...");  
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Dibujando imagen de coincidencias");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
        String n_outputImage = route + "results\\" + n_img2 + "_outputImage_sift" + extension;
        String n_matchoutput = route + "results\\" + n_img2 + "_matchoutput_sift" + extension;
        String n_img = route + "results\\" + n_img2 + "_sift" + extension;
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        //Highgui.imwrite(n_img, img);  
        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height * 100 / matches.size();

        java.lang.System.out.println((int) result);
        //double result =goodMatches.size().height;
        if (result > 100) {
            return 100;
        } else if (result <= 100 && result > 85) {
            return 85;
        } else if (result <= 85 && result > 50) {
            return 50;
        } else if (result <= 50 && result > 25) {
            return 25;
        } else {
            return 0;
        }
    } else {
        //java.lang.System.out.println("Firma no encontrada");  
    }
    return 0;
    //System.out.println("Terminando SIFT");  
}

From source file:View.SignatureLib.java

public static int sift(String routeRNV, String routeAdherent) {

    String bookObject = routeAdherent;
    String bookScene = routeRNV;

    //System.out.println("Iniciando SIFT");
    //java.lang.System.out.print("Abriendo imagenes | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Encontrar keypoints con SIFT | ");  
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computando descriptores | ");  
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.   
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);

    //java.lang.System.out.print("Dibujando keypoints en imagen base | ");  
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image  
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detectando keypoints en imagen base | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computando descriptores en imagen base | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.println(sceneDescriptors);  

    if (sceneDescriptors.empty()) {
        java.lang.System.out.println("Objeto no encontrado");
        return 0;
    }

    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Calculando buenos matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

    float nndrRatio = 0.7f;

    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];

        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);

        }
    }

    if (goodMatchesList.size() >= 7) {
        max = goodMatchesList.size();

        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();

        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        //System.out.println("Transforming object corners to scene corners...");  
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Dibujando imagen de coincidencias");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        String n_outputImage = "../pre/outputImage_sift.jpg";
        String n_matchoutput = "../pre/matchoutput_sift.jpg";
        String n_img = "../pre/sift.jpg";
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        Highgui.imwrite(n_img, img);
        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height;//*100/matches.size();
        int score = 0;
        if (result > 26) {
            score = 100;
        } else if (result <= 26 && result > 22) {
            score = 85;
        } else if (result <= 22 && result > 17) {
            score = 50;
        } else if (result <= 17 && result > 11) {
            score = 25;
        } else {
            score = 0;
        }
        java.lang.System.out.println("Score: " + score);
        return score;
    } else {
        java.lang.System.out.println("Objeto no encontrado");
        return 0;
    }
    //System.out.println("Terminando SIFT");  
}

From source file:vinylsleevedetection.Analyze.java

public void Check() {
    count = 1;
    //load openCV library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    //for loop to compare source images to user image
    for (int j = 1; j < 4; j++) {
        //source image location (record sleeve)
        String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j
                + ".jpg";
        //user image location
        String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg";
        //load images
        Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        //use BRISK feature detection
        MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
        FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK);
        //perform feature detection on source image
        featureDetector.detect(objectImage, objectKeyPoints);
        KeyPoint[] keypoints = objectKeyPoints.toArray();
        //use descriptor extractor
        MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
        descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

        Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar newKeypointColor = new Scalar(255, 0, 0);

        Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

        MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
        MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
        featureDetector.detect(sceneImage, sceneKeyPoints);
        descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

        Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar matchestColor = new Scalar(0, 255, 0);

        List<MatOfDMatch> matches = new LinkedList<>();
        DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
        descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

        LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

        float nndrRatio = 0.7f;

        for (int i = 0; i < matches.size(); i++) {
            MatOfDMatch matofDMatch = matches.get(i);
            DMatch[] dmatcharray = matofDMatch.toArray();
            DMatch m1 = dmatcharray[0];
            DMatch m2 = dmatcharray[1];

            if (m1.distance <= m2.distance * nndrRatio) {
                goodMatchesList.addLast(m1);

            }
        }
        // if the number of good matches is more than 150, a match is found
        if (goodMatchesList.size() > 150) {
            System.out.println("Object Found");

            List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
            List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

            LinkedList<Point> objectPoints = new LinkedList<>();
            LinkedList<Point> scenePoints = new LinkedList<>();

            for (int i = 0; i < goodMatchesList.size(); i++) {
                objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
                scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
            }

            MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
            objMatOfPoint2f.fromList(objectPoints);
            MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
            scnMatOfPoint2f.fromList(scenePoints);

            Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

            Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
            Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

            obj_corners.put(0, 0, new double[] { 0, 0 });
            obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
            obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
            obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

            Core.perspectiveTransform(obj_corners, scene_corners, homography);

            Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
            //draw a green square around the matched object
            Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                    new Scalar(0, 255, 0), 10);

            MatOfDMatch goodMatches = new MatOfDMatch();
            goodMatches.fromList(goodMatchesList);

            Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                    matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
            //output image with match, image of the match locations and keypoints image
            String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\";
            Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage);
            Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput);
            Imgcodecs.imwrite(folder + "found.jpg", img);
            count = j;
            break;
        } else {
            System.out.println("Object Not Found");
            count = 0;
        }

    }

}