Example usage for the org.opencv.core.Point constructor Point(double x, double y)

Introduction

This page collects usage examples for the org.opencv.core.Point constructor Point(double x, double y).

Prototype

public Point(double x, double y) 
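
The first argument is the x coordinate and the second is the y coordinate, both doubles. Below is a minimal, hypothetical sketch (not taken from any of the projects listed below) that constructs a few Points and passes them to the OpenCV 2.4-era Java drawing helpers also used by the examples on this page (Core.rectangle, Core.circle, Highgui.imwrite; in OpenCV 3 and later these moved to Imgproc and Imgcodecs). The class name and output file name are made up, and it assumes the OpenCV native library is available on java.library.path.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;

public class PointExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // load the OpenCV native library

        Mat canvas = Mat.zeros(240, 320, CvType.CV_8UC3); // black 320x240 BGR image

        Point topLeft = new Point(40, 30);       // x = 40, y = 30
        Point bottomRight = new Point(280, 210);
        Point center = new Point(160, 120);

        Core.rectangle(canvas, topLeft, bottomRight, new Scalar(0, 255, 0), 2); // green box
        Core.circle(canvas, center, 25, new Scalar(0, 0, 255), -1);             // filled red dot

        Highgui.imwrite("point_example.png", canvas); // hypothetical output file name
    }
}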

Usage

From source file: ch.zhaw.facerecognitionlibrary.PreProcessor.StandardPreprocessing.EyeAlignment.java

License: Open Source License

public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    preProcessor.setEyes();
    Eyes[] eyes = preProcessor.getEyes();
    if (eyes == null || eyes[0] == null) {
        return null;
    }
    for (int i = 0; i < images.size(); i++) {
        Mat img = images.get(i);
        Eyes eye = eyes[i];
        double desiredLen = (DESIRED_LEFT_EYE_X - DESIRED_RIGHT_EYE_X) * img.cols();
        double scale = 0.9 * desiredLen / eye.getDist();
        MatOfFloat leftCenter = eye.getLeftCenter();
        MatOfFloat rightCenter = eye.getRightCenter();
        double centerX = ((leftCenter.get(0, 0)[0] + rightCenter.get(0, 0)[0]) / 2);
        double centerY = ((leftCenter.get(1, 0)[0] + rightCenter.get(1, 0)[0]) / 2);
        Mat rotMat = Imgproc.getRotationMatrix2D(new Point(centerX, centerY), eye.getAngle(), scale);
        rotMat.put(2, 0, img.cols() * 0.5 - centerX);
        rotMat.put(2, 1, img.rows() * DESIRED_RIGHT_EYE_Y - centerY);
        Imgproc.warpAffine(img, img, rotMat, new Size(img.cols(), img.rows()));
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}

From source file: Clases.Segmentador.java

public Mat eriosionarImg(Mat umbralizada) {
    Mat dste = umbralizada.clone();
    int erosion_size = 5;
    Size s = new Size(2 * erosion_size + 1, 2 * erosion_size + 1);
    Point p = new Point(erosion_size, erosion_size);
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, s, p);
    Imgproc.erode(umbralizada, dste, element);
    return dste;
}

From source file: Clases.Segmentador.java

public Mat dilatarImg(Mat umbralizada) {
    Mat dstd = umbralizada.clone();
    int dilatacion_size = 5;
    Size sd = new Size(2 * dilatacion_size + 1, 2 * dilatacion_size + 1);
    Point pd = new Point(dilatacion_size, dilatacion_size);
    Mat elementd = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, sd, pd);
    Imgproc.dilate(umbralizada, dstd, elementd);
    return dstd;
}

From source file: Clases.Segmentador.java

public double[] TransfHough(Mat edges, int inv, int distMinCir, int umbMin, int umbMax, int radMin,
        int radMax) {

    Mat circles = new Mat();
    double[] data = null;
    //Vector<Mat> circlesList = new Vector<Mat>();
    Imgproc.HoughCircles(edges, circles, Imgproc.CV_HOUGH_GRADIENT, inv, distMinCir, umbMin, umbMax, radMin,
            radMax);
    //Imgproc.HoughCircles(edges, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 200, 1, 10, 20, 40  );
    //Imgproc.HoughCircles(edges, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 50) ;

    System.out.println("#rows " + circles.rows() + " #cols " + circles.cols());
    double x = 0.0;
    double y = 0.0;
    int r = 0;

    for (int i = 0; i < circles.rows(); i++) {
        data = circles.get(i, 0); // each detected circle is returned as (x, y, radius)
        x = data[0];
        y = data[1];
        r = (int) data[2];
        Point center = new Point(x, y);

        // circle center
        //            Core.circle(color, center, 1, new Scalar(0, 0, 0), -1);
        // circle outline
        //          Core.circle(color, center, r, new Scalar(0, 255, 0), 1);
        //Ventana ventana8 = new Ventana(convertir((color)),0,2);
        //ventana8.setTitle("Houg");
        /*
         Rect bbox = new Rect((int)Math.abs(x-r), (int)Math.abs(y-r), (int)2*r, (int)2*r);
         Mat croped_image = new Mat(color, bbox);
         Imgproc.resize(croped_image, croped_image, new Size(200,200));
         circlesList.add(croped_image);
         Ventana ventana9 = new Ventana(convertir(croped_image),1,2);
         */
    }

    return data;
}

From source file: classes.BlobsFinder.java

public void findBlobContours() {

    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    ImageUtils.saveImage(grayImage, outImageName + "_grayImage.png", request);

    Mat gaussianImage = new Mat();
    Imgproc.GaussianBlur(grayImage, gaussianImage, new Size(0, 0), 3);
    Core.addWeighted(grayImage, 1.5, gaussianImage, -1, 0, gaussianImage);
    ImageUtils.saveImage(gaussianImage, outImageName + "_gaussianGrayImage.png", request);

    Mat binaryImage = new Mat();
    Imgproc.adaptiveThreshold(gaussianImage, binaryImage, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY_INV, 15, 4);
    ImageUtils.saveImage(binaryImage, outImageName + "_binaryImage.png", request);

    Mat erodedImage = new Mat();

    binaryImage.copyTo(erodedImage);

    Mat structuringElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Point anchor = new Point(-1, -1);

    Imgproc.morphologyEx(erodedImage, erodedImage, Imgproc.MORPH_CLOSE, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedImage, outImageName + "_erodedImage.png", request);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.findContours(erodedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    Mat originalContoursImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Scalar contourColor = new Scalar(255);
    int thickness = -1; // A negative thickness makes drawContours fill the contours
    Imgproc.drawContours(originalContoursImage, contours, -1, contourColor, thickness); // Drawing all the contours found
    ImageUtils.saveImage(originalContoursImage, outImageName + "_originalContoursImage.png", request);

    Mat erodedContoursImage = new Mat();
    Imgproc.erode(originalContoursImage, erodedContoursImage, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedContoursImage, outImageName + "_erodedContoursImage.png", request);

    ArrayList<MatOfPoint> finalContours = new ArrayList<MatOfPoint>();
    Mat finalContourImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Imgproc.findContours(erodedContoursImage, finalContours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    for (int i = 0; i < finalContours.size(); i++) {
        MatOfPoint currentContour = finalContours.get(i);
        double area = Imgproc.contourArea(currentContour);
        if (area > MIN_AREA) {

            validContours.add(currentContour);

            String fabricPath = generateFabricPathString(currentContour);
            contourPaths.add(fabricPath);

            Rect boundingRect = Imgproc.boundingRect(currentContour);
            topLeftCorners.add(boundingRect.tl());

            contoursAreas.add(area);
        }
    }

    // Drawing ALL the valid contours
    Imgproc.drawContours(finalContourImage, validContours, -1, contourColor, thickness);
    ImageUtils.saveImage(finalContourImage, outImageName + "_finalContourImage.png", request);

}

From source file: classes.FaceDetector.java

public static String detectFace(String filePath) {

    //        String dirName = "C:/Users/ggm/Documents/NetBeansProjects/MyWebApplication";
    String dirName = "C:/Users/Gonzalo/Documents/NetBeansProjects/MyWebApplication";
    //        String dirName = "/Users/ggmendez/Development/MyWebApplication";

    System.out.println(dirName);

    String frontalfaceFile = dirName + "/data/lbpcascades/lbpcascade_frontalface.xml";

    System.out.println(frontalfaceFile);

    CascadeClassifier faceDetector = new CascadeClassifier(frontalfaceFile);

    Mat image = Highgui.imread(filePath);

    System.out.println(image);

    // Detect faces in the image 
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    // Save the visualized detection.            
    Date date = new Date();
    Format formatter = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss"); // yyyy/HH avoid the week-year and 12-hour pitfalls of YYYY/hh
    String filename = dirName + "/imgs/out_" + formatter.format(date) + ".png";

    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);

    Gson gson = new Gson();
    String jsonResponse = gson.toJson(faceDetections.toArray());
    jsonResponse = jsonResponse.replaceAll("x", "left").replaceAll("y", "top");

    return jsonResponse;

}

From source file: classes.FloodFiller.java

private void fillFrom(Point seed, int lo, int up, Scalar backgroundColor, Scalar contourFillingColor) {

    Mat object = ObjectGenerator.extract(image, seed.x, seed.y, 10, 10);
    this.meanColor = Core.mean(object);

    Rect ccomp = new Rect();
    Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1);

    int connectivity = 4;
    int newMaskVal = 255;
    int ffillMode = 1;

    int flags = connectivity + (newMaskVal << 8) + (ffillMode == 1 ? Imgproc.FLOODFILL_FIXED_RANGE : 0);

    Scalar newVal = new Scalar(0.299, 0.587, 0.114);

    Imgproc.threshold(mask, mask, 1, 128, Imgproc.THRESH_BINARY);

    filledArea = Imgproc.floodFill(image.clone(), mask, seed, newVal, ccomp, new Scalar(lo, lo, lo),
            new Scalar(up, up, up), flags);

    //        Highgui.imwrite("mask.png", mask);
    ImageUtils.saveImage(mask, "mask.png", request);

    morphologicalImage = new Mat(image.size(), CvType.CV_8UC3);

    Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));

    ArrayList<Mat> mask3 = new ArrayList<Mat>();
    mask3.add(mask);
    mask3.add(mask);
    mask3.add(mask);
    Core.merge(mask3, mask);

    // Applying morphological filters
    Imgproc.erode(mask, morphologicalImage, element);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element,
            new Point(-1, -1), 9);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1),
            2);
    Imgproc.resize(morphologicalImage, morphologicalImage, image.size());

    //        Highgui.imwrite("morphologicalImage.png", morphologicalImage);
    ImageUtils.saveImage(morphologicalImage, "morphologicalImage.png", request);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Core.split(mask, mask3);
    Mat binarymorphologicalImage = mask3.get(0);

    Imgproc.findContours(binarymorphologicalImage.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_NONE);

    contoursImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor);

    int thickness = -1; // A negative thickness makes drawContours fill the contours
    Imgproc.drawContours(contoursImage, contours, -1, contourFillingColor, thickness); // Drawing all the contours found
    //        Highgui.imwrite("allContoursImage.png", contoursImage);
    ImageUtils.saveImage(contoursImage, "allContoursImage.png", request);

    if (contours.size() > 1) {

        int minContourWith = 20;
        int minContourHeight = 20;
        int maxContourWith = 6400 / 2;
        int maxContourHeight = 4800 / 2;

        contours = filterContours(contours, minContourWith, minContourHeight, maxContourWith, maxContourHeight);
    }

    if (contours.size() > 0) {

        MatOfPoint biggestContour = contours.get(0); // getting the biggest contour
        contourArea = Imgproc.contourArea(biggestContour);

        if (contours.size() > 1) {
            biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there are more than one
        }

        Point[] points = biggestContour.toArray();
        path = "M " + (int) points[0].x + " " + (int) points[0].y + " ";
        for (int i = 1; i < points.length; ++i) {
            Point v = points[i];
            path += "L " + (int) v.x + " " + (int) v.y + " ";
        }
        path += "Z";

        biggestContourImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor);

        Imgproc.drawContours(biggestContourImage, contours, 0, contourFillingColor, thickness);

        //            Highgui.imwrite("biggestContourImage.png", biggestContourImage);
        ImageUtils.saveImage(biggestContourImage, "biggestContourImage.png", request);

        Mat maskForColorExtraction = biggestContourImage.clone();

        if (isWhite(backgroundColor)) {
            Imgproc.dilate(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3);
        } else {
            Imgproc.erode(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3);
        }

        //            Highgui.imwrite("maskForColorExtraction.png", maskForColorExtraction);
        ImageUtils.saveImage(maskForColorExtraction, "maskForColorExtraction.png", request);

        Mat extractedColor = new Mat();

        if (isBlack(backgroundColor) && isWhite(contourFillingColor)) {
            Core.bitwise_and(maskForColorExtraction, image, extractedColor);

        } else {
            Core.bitwise_or(maskForColorExtraction, image, extractedColor);
        }

        //            Highgui.imwrite("extractedColor.png", extractedColor);
        ImageUtils.saveImage(extractedColor, "extractedColor.png", request);

        computedSearchWindow = Imgproc.boundingRect(biggestContour);
        topLeftCorner = computedSearchWindow.tl();

        Rect croppingRect = new Rect(computedSearchWindow.x, computedSearchWindow.y,
                computedSearchWindow.width - 1, computedSearchWindow.height - 1);

        Mat imageForTextRecognition = new Mat(extractedColor.clone(), croppingRect);
        //            Highgui.imwrite(outImageName, imageForTextRecognition);
        ImageUtils.saveImage(imageForTextRecognition, outImageName, request);

        //            
        //
        //            Mat data = new Mat(imageForTextRecognition.size(), CvType.CV_8UC3, backgroundColor);
        //            imageForTextRecognition.copyTo(data);
        //            data.convertTo(data, CvType.CV_8UC3);
        //
        //            // The meanColor variable represents the color in the GBR space, the following line transforms this to the RGB color space, which
        //            // is assumed in the prepareImage method of the TextRecognitionPreparer class
        //            Scalar userColor = new Scalar(meanColor.val[2], meanColor.val[1], meanColor.val[0]);
        //
        //            ArrayList<String> recognizableImageNames = TextRecognitionPreparer.generateRecognizableImagesNames(data, backgroundColor, userColor);
        //            for (String imageName : recognizableImageNames) {
        //
        //                try {
        //                    // First recognition step
        //                    String recognizedText = TextRecognizer.recognize(imageName, true).trim();
        //                    if (recognizedText != null && !recognizedText.isEmpty()) {
        //                        recognizedStrings.add(recognizedText);
        //                    }
        //                    // Second recognition step
        //                    recognizedText = TextRecognizer.recognize(imageName, false).trim();
        //                    if (recognizedText != null && !recognizedText.isEmpty()) {
        //                        recognizedStrings.add(recognizedText);
        //                    }
        //                    
        //                } catch (Exception e) {
        //                }
        //            }
        //            
        ////            ArrayList<BufferedImage> recognizableBufferedImages = TextRecognitionPreparer.generateRecognizableBufferedImages(data, backgroundColor, userColor);
        ////            for (BufferedImage bufferedImage : recognizableBufferedImages) {
        ////                try {
        ////                    // First recognition step
        ////                    String recognizedText = TextRecognizer.recognize(bufferedImage, true).trim();
        ////                    if (recognizedText != null && !recognizedText.isEmpty()) {
        ////                        recognizedStrings.add(recognizedText);
        ////                    }
        ////                    // Second recognition step
        ////                    recognizedText = TextRecognizer.recognize(bufferedImage, false).trim();
        ////                    if (recognizedText != null && !recognizedText.isEmpty()) {
        ////                        recognizedStrings.add(recognizedText);
        ////                    }
        ////                    
        ////                } catch (Exception e) {
        ////                }
        ////            }
        //
        //            
        //            

        // compute all moments
        Moments mom = Imgproc.moments(biggestContour);
        massCenter = new Point(mom.get_m10() / mom.get_m00(), mom.get_m01() / mom.get_m00());

        // draw black dot
        Core.circle(contoursImage, massCenter, 4, contourFillingColor, 8);
    }

}

From source file: classes.FloodFillFacade.java

public Mat fill(Mat image, Mat mask, int x, int y, Scalar newVal) {

    Point seedPoint = new Point(x, y);

    Rect rect = new Rect();

    //        Scalar newVal = isColored() ? new Scalar(b, g, r) : new Scalar(r * 0.299 + g * 0.587 + b * 0.114);
    Scalar lowerDifference = new Scalar(lowerDiff, lowerDiff, lowerDiff);
    Scalar upperDifference = new Scalar(upperDiff, upperDiff, upperDiff);
    if (range == NULL_RANGE) {
        lowerDifference = new Scalar(0, 0, 0);
        upperDifference = new Scalar(0, 0, 0);
    }
    int flags = connectivity + (newMaskVal << 8)
            + ((range == FIXED_RANGE ? Imgproc.FLOODFILL_FIXED_RANGE : 0) | 0);//Imgproc.FLOODFILL_MASK_ONLY);
    int area = 0;
    if (masked) {
        area = Imgproc.floodFill(image, mask, seedPoint, newVal, rect, lowerDifference, upperDifference, flags);
    } else {
        area = Imgproc.floodFill(image, new Mat(), seedPoint, newVal, rect, lowerDifference, upperDifference,
                flags);
    }

    //        Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\image_after_flood_" + cont + ".png", image);
    //        Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\mask_" + cont + ".png", mask);

    //        System.out.println("area: " + area);

    cont++;

    return image;

}

From source file: classes.ObjectFinder.java

private void applyMorphologicalFilters() {
    Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));
    Imgproc.erode(thresholdedBackprojection, morphologicalImage, element);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element,
            new Point(-1, -1), 2);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1),
            2);
}

From source file: classes.ObjectFinder.java

private void computeSearchWindow() {

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    // a vector of contours
    // retrieve the external contours
    // all pixels of each contour
    Imgproc.findContours(this.morphologicalImage.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_NONE);

    // Draw black contours on a white image
    this.contoursImage = new Mat(morphologicalImage.size(), CvType.CV_8U, new Scalar(255));

    if (contours.size() > 1) {

        int minContourWith = 20;
        int minContourHeight = 20;
        int maxContourWith = 6400 / 2;
        int maxContourHeight = 4800 / 2;

        contours = filterContours(contours, minContourWith, minContourHeight, maxContourWith, maxContourHeight);
    }

    if (contours.size() > 1) {
        Collections.sort(contours, new ContourComparator()); // Sorting the contours to take ONLY the biggest one
    }

    computedSearchWindow = new Rect();
    massCenter = new Point(-1, -1);

    if (contours.size() > 0) {

        this.firstContour = contours.get(0);

        Mat contournedImage = this.firstContour;

        // draw the first contour (the one kept after filtering and sorting) in black with a thickness of 2
        Scalar color = new Scalar(0);
        int thickness = 2;
        Imgproc.drawContours(contoursImage, contours, 0, color, thickness);

        // testing the bounding box
        computedSearchWindow = Imgproc.boundingRect(this.firstContour);

        topLeftCorner = computedSearchWindow.tl();

        // compute all moments
        Moments mom = Imgproc.moments(contournedImage);

        massCenter = new Point(mom.get_m10() / mom.get_m00(), mom.get_m01() / mom.get_m00());

        // draw black dot
        Core.circle(contoursImage, massCenter, 4, color, 8);
    }
}