Example usage for the org.opencv.core.Point constructor Point(double x, double y)

List of usage examples for the org.opencv.core.Point constructor Point(double x, double y)

Introduction

On this page you can find example usage for the org.opencv.core.Point constructor Point(double x, double y).

Prototype

public Point(double x, double y) 
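
A minimal usage sketch of this constructor (the names and values below are illustrative only, not taken from the examples that follow):

    Point p = new Point(3.5, 7.0);           // construct a point at (3.5, 7.0)
    System.out.println(p.x + ", " + p.y);    // x and y are public double fields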

Usage

From source file:org.vinesrobotics.bot.utils.opencv.ColorBlobDetector.java

License:Open Source License

public void process(Mat rgbaImage) {
    Imgproc.pyrDown(rgbaImage, mPyrDownMat);
    Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

    Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);

    Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
    Imgproc.dilate(mMask, mDilatedMask, new Mat());

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter contours by area and resize to fit the original image size
    double maxArea = 0;
    Iterator<MatOfPoint> each = contours.iterator();

    while (each.hasNext()) {
        MatOfPoint wrapper = each.next();
        double area = Imgproc.contourArea(wrapper);
        if (area > maxArea)
            maxArea = area;
    }

    mContours.clear();
    each = contours.iterator();
    while (each.hasNext()) {
        MatOfPoint contour = each.next();
        if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
            Core.multiply(contour, new Scalar(4, 4), contour);
            mContours.add(contour);
        }
    }

    Imgproc.drawContours(rgbaImage, mContours, -1, mBaseColor);

    each = mContours.iterator();
    ArrayList<Point> centers = new ArrayList<>();
    while (each.hasNext()) {
        MatOfPoint wrapper = each.next();
        Moments moments = Imgproc.moments(wrapper);
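        // Centroid of the contour from its spatial moments: (m10/m00, m01/m00)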
        centers.add(new Point(moments.m10 / moments.m00, moments.m01 / moments.m00));
    }
    colorCenterPoints = centers;

    Point avg = new Point(0, 0);
    for (Point p : centers)
        avg.set(new double[] { avg.x + p.x, avg.y + p.y });
    avg.set(new double[] { avg.x / centers.size(), avg.y / centers.size() });
    centerOfAll = avg;

}

From source file:qupath.opencv.features.DelaunayTriangulation.java

License:Open Source License

void computeDelaunay(final List<PathObject> pathObjectList, final double pixelWidth, final double pixelHeight) {

    if (pathObjectList.size() <= 2)
        return;

    this.vertexMap = new HashMap<>(pathObjectList.size(), 1f);

    // Extract the centroids
    double minX = Double.POSITIVE_INFINITY;
    double minY = Double.POSITIVE_INFINITY;
    double maxX = Double.NEGATIVE_INFINITY;
    double maxY = Double.NEGATIVE_INFINITY;
    List<Point> centroids = new ArrayList<>(pathObjectList.size());
    for (PathObject pathObject : pathObjectList) {
        ROI pathROI = null;

        // First, try to get a nucleus ROI if we have a cell - otherwise just get the normal ROI
        pathROI = getROI(pathObject);

        // Check if we have a ROI at all
        if (pathROI == null) {
            centroids.add(null);
            continue;
        }
        double x = pathROI.getCentroidX();
        double y = pathROI.getCentroidY();
        if (Double.isNaN(x) || Double.isNaN(y)) {
            centroids.add(null);
            continue;
        }
        if (x < minX)
            minX = x;
        else if (x > maxX)
            maxX = x;
        if (y < minY)
            minY = y;
        else if (y > maxY)
            maxY = y;

        centroids.add(new Point(x, y));
    }

    // Create Delaunay triangulation, updating vertex map
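    // Subdiv2D2 is presumably a project-local subclass of OpenCV's Subdiv2D (assumption; its definition is not shown here)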
    Subdiv2D subdiv = new Subdiv2D2();
    Rect bounds = new Rect((int) minX - 1, (int) minY - 1, (int) (maxX - minX) + 100,
            (int) (maxY - minY) + 100);
    subdiv.initDelaunay(bounds);
    for (int i = 0; i < centroids.size(); i++) {
        Point p = centroids.get(i);
        if (p == null)
            continue;
        int v = subdiv.insert(p);
        vertexMap.put(v, pathObjectList.get(i));
    }

    updateNodeMap(subdiv, pixelWidth, pixelHeight);

    //      // Connect only the closest paired nodes
    //      Map<DelaunayNode, Double> medianDistances = new HashMap<>();
    //      for (DelaunayNode node : nodeMap.values()) {
    //         medianDistances.put(node, node.medianDistance());
    //      }
    //      
    //      for (DelaunayNode node : nodeMap.values()) {
    //         if (node.nNeighbors() <= 2)
    //            continue;
    //         double distance = medianDistances.get(node);
    //         Iterator<DelaunayNode> iter = node.nodeList.iterator();
    //         while (iter.hasNext()) {
    //            DelaunayNode node2 = iter.next();
    //            if (distance(node, node2) >= distance) {
    //               node2.nodeList.remove(node);
    //               iter.remove();
    //            }
    //         }
    //      }

    //      // Optionally require a minimum number of connected nodes
    //      List<DelaunayNode> toRemove = new ArrayList<>();
    //      for (DelaunayNode node : nodeMap.values()) {
    //         if (node.nNeighbors() <= 2) {
    //            toRemove.add(node);
    //         }
    //      }
    //      for (DelaunayNode node : toRemove) {
    //         for (DelaunayNode node2 : node.nodeList)
    //            node2.nodeList.remove(node);
    //         node.nodeList.clear();
    //      }
    //      for (DelaunayNode node : nodeMap.values()) {
    //         node.ensureDistancesUpdated();
    //         node.ensureTrianglesCalculated();
    //      }
}

From source file:qupath.opencv.processing.OpenCVTools.java

License:Open Source License

public static void labelImage(Mat matBinary, Mat matLabels, int contourType) {
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(matBinary, contours, hierarchy, contourType, Imgproc.CHAIN_APPROX_SIMPLE);
    // It's convoluted, but drawing contours this way is *much* faster than passing the full list (which is copied by the OpenCV 2.4.9 Java code)
    List<MatOfPoint> temp = new ArrayList<>(1);
    int i = 2;
    int ind = 0;
    for (MatOfPoint contour : contours) {
        temp.clear();
        temp.add(contour);
        Imgproc.drawContours(matLabels, temp, 0, new Scalar(i++), -1, 8, hierarchy.col(ind), 2,
                new Point(0, 0));
        //         Imgproc.drawContours(matLabels, temp, 0, new Scalar(i++), -1);
        ind++;
    }
}

From source file:qupath.opencv.processing.OpenCVTools.java

License:Open Source License

public static Mat getCircularStructuringElement(int radius) {
    Mat strel = new Mat(radius * 2 + 1, radius * 2 + 1, CvType.CV_8UC1, new Scalar(0));
    Imgproc.circle(strel, new Point(radius, radius), radius, new Scalar(1), -1);
    return strel;
}

From source file:readnumber.ReadNumber.java

/**
 * detectFaces
 */
private void detectFaces() {
    // Save video to image           
    Imgcodecs.imwrite(filePath, webcamMatImage);

    // Detect faces in the image
    imagefase = Imgcodecs.imread(filePath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(imagefase, faceDetections);

    inform.setText("Information:");
    String cauntfase = "total face: " + faceDetections.toArray().length;
    caunttext.setText(cauntfase);

    // Draw a bounding box around each face
    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(imagefase, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    // Save video to image
    Imgcodecs.imwrite(filePath, imagefase);
    message.setText("Detect faces ...");

    // Output image to form (JLabel)
    imageView.setBounds(330, 61, 320, 240);
    newImage = Imgcodecs.imread(filePath);
    Image loadedImage = imageProcessor.toBufferedImage(newImage);
    ImageIcon imgIcon = new ImageIcon(loadedImage, "img");
    imageView.setIcon(imgIcon);
    textImage.setBounds(340, 295, 120, 30);
}

From source file:Recognizer.Recognizer.java

public Image TemplateMatching(Image imQuery, Image imDB, int match_method) {
    System.out.println("Running Template Matching ...");

    //Mat img = Highgui.imread(inFile); // Image in which area has to be searched
    //Mat template_img = Highgui.imread(templateFile); // Search Image

    Mat matQuery = imQuery.Image3CtoMat_CV();
    Mat matDB = imDB.Image3CtoMat_CV();

    Mat hsvQ = new Mat(), hsvDB = new Mat();

    Imgproc.cvtColor(matQuery, hsvQ, COLOR_RGB2HSV);
    Imgproc.cvtColor(matDB, hsvDB, COLOR_RGB2HSV);

    // Create result image matrix
    int resultImg_cols = matDB.cols() - matQuery.cols() + 1;
    int resultImg_rows = matDB.rows() - matQuery.rows() + 1;

    Mat matRes = new Mat(resultImg_rows, resultImg_cols, CvType.CV_32FC1);

    // Template Matching with Normalization
    Imgproc.matchTemplate(hsvDB, hsvQ, matRes, match_method);
    Core.normalize(matRes, matRes, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // Localizing the best match with minMaxLoc
    Core.MinMaxLocResult Location_Result = Core.minMaxLoc(matRes);
    Point matchLocation;

    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
        matchLocation = Location_Result.minLoc;
    } else {
        matchLocation = Location_Result.maxLoc;
    }

    // Display Area by Rectangle
    Core.rectangle(matDB, matchLocation,
            new Point(matchLocation.x + matQuery.cols(), matchLocation.y + matQuery.rows()),
            new Scalar(0, 255, 0));

    Image imOut = new Image(matDB.width(), matDB.height());
    //Image imOut = new Image(matQuery.cols(), matQuery.rows());

    //Mat m = new Mat(matDB);

    //m =//matDB.submat((int)matchLocation.y, (int)matchLocation.y + matQuery.rows(),(int)matchLocation.x, (int)matchLocation.x + matQuery.cols());

    imOut.Mat_CVtoImage3C(matDB);

    System.out.println("Location: " + Location_Result.minLoc.x + " " + Location_Result.minLoc.y + "   "
            + Location_Result.maxLoc.x + " " + Location_Result.maxLoc.y);

    return imOut;
}

From source file:Recognizer.Recognizer.java

public Image HistMatch(Image imQuery, Image imDB) {
    Image imOut = new Image(352, 288);

    Mat srcQ, srcDB;
    Mat hsvQ = new Mat(), hsvDB = new Mat();

    srcQ = imQuery.Image3CtoMat_CV();
    srcDB = imDB.Image3CtoMat_CV();

    //Convert To HSV
    Imgproc.cvtColor(srcQ, hsvQ, Imgproc.COLOR_RGB2HSV);
    Imgproc.cvtColor(srcDB, hsvDB, Imgproc.COLOR_RGB2HSV);

    java.util.List<Mat> matlistQ = Arrays.asList(hsvQ);
    java.util.List<Mat> matlistDB = Arrays.asList(hsvDB);

    // Use 360 bins for hue and 4 for saturation
    int h_bins = 360, s_bins = 4;
    int[] histsize = { h_bins, s_bins };
    MatOfInt histSize = new MatOfInt(histsize);

    MatOfFloat Ranges = new MatOfFloat(0, 180, 0, 256);

    int[] channels = { 0, 1 };
    MatOfInt CH = new MatOfInt(channels);

    Mat hist_Q = new Mat();
    Mat hist_DB = new Mat();

    Imgproc.calcHist(matlistQ, CH, new Mat(), hist_Q, histSize, Ranges);
    Core.normalize(hist_Q, hist_Q, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    float res;

    Mat[] hsvaLev1 = new Mat[4];
    Mat[] hsvaLev2 = new Mat[16];
    Mat[] hsvaLev3 = new Mat[64];
    // Mat[] hsvaLev4 = new Mat[256];

    float[] iaLev1 = new float[4];
    float[] iaLev2 = new float[16];
    float[] iaLev3 = new float[64];
    //float[] iaLev4 = new float[256];

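    // Split hsvDB (352x288) into 2x2, 4x4 and 8x8 grids of sub-blocks used for the multi-level histogram comparison below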
    for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 2; j++) {
            hsvaLev1[i * 2 + j] = hsvDB.submat(0 + i * 288 / 2, 143 + i * 288 / 2, 0 + j * 352 / 2,
                    175 + j * 352 / 2);
        }
    }

    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
            hsvaLev2[i * 4 + j] = hsvDB.submat(0 + i * 288 / 4, 71 + i * 288 / 4, 0 + j * 352 / 4,
                    87 + j * 352 / 4);
        }
    }

    for (int i = 0; i < 8; i++) {
        for (int j = 0; j < 8; j++) {
            hsvaLev3[i * 8 + j] = hsvDB.submat(0 + i * 288 / 8, 35 + i * 288 / 8, 0 + j * 352 / 8,
                    43 + j * 352 / 8);
        }
    }

    System.out.println("Lev_1");
    for (int m = 0; m < 4; m++) {
        matlistDB = Arrays.asList(hsvaLev1[m]);
        Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
        Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        res = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_BHATTACHARYYA);

        System.out.println("Res: " + res);
        iaLev1[m] = res;
    }

    System.out.println("Lev_2");
    for (int m = 0; m < 16; m++) {
        matlistDB = Arrays.asList(hsvaLev2[m]);
        Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
        Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        res = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_BHATTACHARYYA);

        System.out.println("Res: " + res);
        iaLev2[m] = res;
    }

    System.out.println("Lev_3");
    for (int m = 0; m < 64; m++) {
        matlistDB = Arrays.asList(hsvaLev3[m]);
        Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
        Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        res = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_BHATTACHARYYA);

        System.out.println("Res: " + res);
        iaLev3[m] = res;
    }

    int x = MinIndex(iaLev1);
    int i = x % 2;
    int j = x / 2;
    Core.rectangle(srcDB, new Point(0 + j * 352 / 2, 0 + i * 288 / 2),
            new Point(175 + j * 352 / 2, 143 + i * 288 / 2), new Scalar(0, 255, 0));

    x = MinIndex(iaLev2);
    i = x % 4;
    j = x / 4;
    Core.rectangle(srcDB, new Point(0 + j * 352 / 4, 0 + i * 288 / 4),
            new Point(87 + j * 352 / 4, 71 + i * 288 / 4), new Scalar(0, 0, 255));

    x = MinIndex(iaLev3);
    i = x % 8;
    j = x / 8;
    Core.rectangle(srcDB, new Point(0 + j * 352 / 8, 0 + i * 288 / 8),
            new Point(43 + j * 352 / 8, 35 + i * 288 / 8), new Scalar(255, 0, 0));

    imOut.Mat_CVtoImage3C(srcDB);

    return imOut;
}

From source file:Recognizer.Recognizer.java

public Image HistBlockCompare(Image imQuery, Image imDB, int m, int n) // single block size m x n, e.g. 88x72 -> m = 88, n = 72
{
    // Initializations
    Image imOut = new Image(352, 288);

    Mat srcQ, srcDB;
    Mat hsvQ = new Mat(), hsvDB = new Mat();

    srcQ = imQuery.Image3CtoMat_CV();
    srcDB = imDB.Image3CtoMat_CV();

    //Convert To HSV
    Imgproc.cvtColor(srcQ, hsvQ, Imgproc.COLOR_RGB2HSV);
    Imgproc.cvtColor(srcDB, hsvDB, Imgproc.COLOR_RGB2HSV);

    java.util.List<Mat> matlistQ = Arrays.asList(hsvQ);
    java.util.List<Mat> matlistDB = Arrays.asList(hsvDB);

    // Use 180 bins for hue and 2 for saturation
    int h_bins = 180, s_bins = 2;
    int[] histsize = { h_bins, s_bins };
    MatOfInt histSize = new MatOfInt(histsize);

    MatOfFloat Ranges = new MatOfFloat(0, 180, 0, 256);

    int[] channels = { 0, 1 };
    MatOfInt CH = new MatOfInt(channels);

    Mat hist_Q = new Mat();
    Mat hist_DB = new Mat();

    Imgproc.calcHist(matlistQ, CH, new Mat(), hist_Q, histSize, Ranges);
    Core.normalize(hist_Q, hist_Q, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    float[][] CompareHistResult = new float[352 - m][288 - n];
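    // Slide an m x n window over the 352x288 DB image and compare its histogram against hist_Q at every offset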

    for (int i = 0; i < (352 - m); i++) // width
    {
        for (int j = 0; j < (288 - n); j++) // height
        {
            // Get the individual m x n submatrix at (i, j) for matching purposes
            hist_DB = hsvDB.submat(j, (j + n), i, (i + m));
            // Now Compare Histogram using OpenCV functions
            matlistDB = Arrays.asList(hist_DB);
            Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
            Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
            CompareHistResult[i][j] = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_CHISQR);
        }
    }

    // Search min from result
    float min = CompareHistResult[0][0];
    int minIndex_i = 0;
    int minIndex_j = 0;
    for (int i = 0; i < (352 - m); i++) // width
    {
        for (int j = 0; j < (288 - n); j++) // height
        {
            if (CompareHistResult[i][j] < min) {
                min = CompareHistResult[i][j];
                minIndex_i = i;
                minIndex_j = j;
            }
        }
    }
    //
    Core.rectangle(srcDB, new Point(minIndex_i, minIndex_j), new Point(minIndex_i + m, minIndex_j + n),
            new Scalar(0, 255, 0));

    System.out.println("Result: " + CompareHistResult[minIndex_i][minIndex_j]);
    imOut.Mat_CVtoImage3C(srcDB);

    return imOut;
}

From source file:Reconhecimento.Circulo.java

/**
 * Segments the circle in order to track its movement.
 **/
public static void segmentarCirculo(int minRaio, int maxRaio, int minThreshold, int maxThreshold,
        int medianBlurKernel) {

    class threadSegmentar extends Thread {

        public boolean closed = false;
        public double CentroX;
        public double CentroY;

        @Override
        public void run() {
            int contador = 0;

            File folder = new File("imagens/frames");
            if (folder.exists() == false) {
                folder.mkdir();
            }
            for (String file : folder.list()) {
                new File(folder, file).delete();
            }

            ind = (char) ((int) ind + 1);

            JFrame frame = new JFrame();
            JLabel label = new JLabel();
            frame.add(label);
            frame.setBounds(10, 10, 640, 480);
            label.setSize(640, 480);
            frame.setLocation(250, 250);
            frame.setVisible(true);
            closed = false;

            frame.addWindowListener(new WindowAdapter() {
                @Override
                public void windowClosing(WindowEvent e) {
                    closed = true;
                }
            });

            Mat img = new Mat();
            Mat circles = new Mat();
            Mat grayImg = new Mat();
            Mat gravar = new Mat();
            Mat element = new Mat();

            VideoCapture cap = new VideoCapture(Video.videoAtual);

            // capture the first frame of the video
            cap.read(img);

            Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_BGR2GRAY);

            Imgproc.medianBlur(grayImg, grayImg, 5);

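            // HoughCircles arguments: dp = 1, minDist = 100, Canny threshold = 220, accumulator threshold = 10, min/max radius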
            Imgproc.HoughCircles(grayImg, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 100, 220, 10, minRaio,
                    maxRaio);

            double Circle[] = circles.get(0, 0);

            Point center = new Point(Math.round(Circle[0]), Math.round(Circle[1]));

            int radius = (int) Math.round(Circle[2]);

            CentroX = center.x;
            CentroY = center.y;

            cap.read(img);

            boolean continuar = true;

            while (continuar) {

                // convert the image to grayscale
                Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_BGR2GRAY);

                // thresholding
                Imgproc.threshold(grayImg, grayImg, minThreshold, maxThreshold, THRESH_BINARY_INV);

                Core.bitwise_not(grayImg, grayImg);

                // median blur filter
                Imgproc.medianBlur(grayImg, grayImg, medianBlurKernel);

                // edge detection (Canny)
                Imgproc.Canny(grayImg, grayImg, 100, 255);

                // apply the circular Hough transform
                Imgproc.HoughCircles(grayImg, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 100, 220, 9, minRaio,
                        maxRaio);

                try {
                    for (int x = 0; x < circles.cols(); x++) {
                        double vCircle[] = circles.get(0, x);

                        center = new Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
                        radius = (int) Math.round(vCircle[2]);

                        // check the distance between the circle in the previous frame and in the current one
                        if (((center.x <= CentroX) || (center.x - CentroX <= 5))
                                && (Math.sqrt(CentroX * CentroX + CentroY * CentroY)
                                        - Math.sqrt(center.x * center.x + center.y * center.y) <= 70.0)
                                && (Math.sqrt(CentroX * CentroX + CentroY * CentroY)
                                        - Math.sqrt(center.x * center.x + center.y * center.y) >= -70.0)) {

                            Core.circle(img, center, radius, new Scalar(0, 0, 255), 3, 8, 0);

                            CentroX = center.x;
                            CentroY = center.y;
                        }
                    }
                } catch (Exception e) {
                }

                Imgproc.resize(img, gravar, new Size(640, 480));
                Highgui.imwrite("imagens/frames/houghcircles" + contador + ind + ".jpg", gravar);

                label.setIcon(new ImageIcon("imagens/frames/houghcircles" + contador + ind + ".jpg"));

                contador++;

                continuar = cap.read(img) && !closed;
            }
        }
    }

    if (Video.videoAtual == null) {
        JOptionPane.showMessageDialog(null, "Selecione um arquivo de video!", "Nenhum vídeo selecionado",
                JOptionPane.WARNING_MESSAGE);
        Video.abrirVideo();
    }

    threadSegmentar t = new threadSegmentar();
    t.start();
}

From source file:Reconhecimento.Regua.java

public static void segmentarRegua() {

    long tempoInicio = System.currentTimeMillis();

    // coordinates of the selection rectangle
    int x0 = TelaSegmentarRegua.localizarReguaPanel1.x0;
    int y0 = TelaSegmentarRegua.localizarReguaPanel1.y0;
    int x = TelaSegmentarRegua.localizarReguaPanel1.xf;
    int y = TelaSegmentarRegua.localizarReguaPanel1.yf;

    if (x0 > x) {
        int aux = x0;
        x0 = x;
        x = aux;
    }

    if (y0 > y) {
        int aux = y0;
        y0 = y;
        y = aux;
    }

    Mat bigImage = Highgui.imread(TelaSegmentarRegua.localizarReguaPanel1.imagem);
    // crop the image according to the selection
    Mat img = new Mat(bigImage, new Rect(x0, y0, x - x0, y - y0));

    Mat grayImg = new Mat();
    // convert the image to grayscale
    Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_BGR2GRAY);
    // thresholding
    Imgproc.threshold(grayImg, grayImg, 190, 255, THRESH_BINARY_INV);
    Core.bitwise_not(grayImg, grayImg);

    List<Point> pontos = new ArrayList<Point>();

    // add every point belonging to the ruler to a list
    for (int i = 0; i < grayImg.rows(); i++) {
        for (int j = 0; j < grayImg.cols(); j++) {
            if (Arrays.toString(grayImg.get(i, j)).equals("[255.0]")) {
                pontos.add(new Point(j, i));
                Core.line(img, new Point(j, i), new Point(j, i), new Scalar(255, 0, 0));
            }
        }
    }

    String filename = "imagens/regua_segmentada" + Math.random() * 1000 + ".jpg";

    Mat img2 = new Mat();
    Imgproc.resize(img, img2, new Size(img.size().width * 3.0, img.size().height * 3.0));
    Highgui.imwrite(filename, img2);

    int xMin = 5000, yMin = 5000;
    int xMax = 0, yMax = 0;

    // extreme points of the ruler
    for (Point ponto : pontos) {
        if (ponto.x > xMax) {
            xMax = (int) ponto.x;
        }
        if (ponto.x < xMin) {
            xMin = (int) ponto.x;
        }
        if (ponto.y > yMax) {
            yMax = (int) ponto.y;
        }
        if (ponto.y < yMin) {
            yMin = (int) ponto.y;
        }
    }

    // ruler in horizontal position
    if (xMax - xMin > yMax - yMin) {
        /*
        the scale of the image used for processing makes it necessary
        to multiply by 2 to keep the measurements in proportion
        */
        larguraPixels = (xMax - xMin) * 2;
    }
    // ruler in vertical position
    else {
        larguraPixels = (yMax - yMin) * 2;
    }

    long tempoFim = System.currentTimeMillis() - tempoInicio;

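    // 30.0 presumably corresponds to the length of the ruler in centimetres (assumption)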
    centimetrosPorPixel = 30.0 / larguraPixels;

    TelaSegmentarRegua2 telaResposta = new TelaSegmentarRegua2();
    telaResposta.jLabel1.setIcon(new ImageIcon(filename));
    telaResposta.jLabel4.setText(larguraPixels + " pixels");
    telaResposta.jLabel5.setText(String.valueOf(centimetrosPorPixel).substring(0, 5));
    telaResposta.jLabel7.setText(tempoFim + " ms");
    telaResposta.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
    telaResposta.setLocation(200, 200);
    telaResposta.setVisible(true);

}