Example usage for org.opencv.core.Point Point(double x, double y)

List of usage examples for the org.opencv.core.Point constructor Point(double x, double y)

Introduction

On this page you can find example usage for the org.opencv.core.Point constructor Point(double x, double y).

Prototype

public Point(double x, double y) 
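
A minimal, self-contained sketch of the constructor before the project listings below (the class name PointDemo is just for illustration; org.opencv.core.Point is a plain Java class in the OpenCV bindings, so this snippet only assumes the OpenCV Java classes are on the classpath):

import org.opencv.core.Point;

public class PointDemo {
    public static void main(String[] args) {
        // Construct a point at pixel coordinates (100.0, 250.5);
        // int arguments are widened to double automatically.
        Point p = new Point(100, 250.5);
        System.out.println("x = " + p.x + ", y = " + p.y); // x and y are public double fields

        // Typical use in the examples below: a pair of corner points
        // passed to a drawing call such as Imgproc.rectangle(...).
        Point topLeft = new Point(100, 100);
        Point bottomRight = new Point(400, 400);
        System.out.println(topLeft + " -> " + bottomRight);
    }
}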

Usage

From source file:edu.soict.hust.k57.mmdb.components.HistogramImageBulder.java

private ImageIcon createImageIcon(Mat hist, int bin, Channel c) {
    int hist_w = 150; // width of the histogram image
    int hist_h = 100; // height of the histogram image
    int bin_w = (int) Math.round(hist_w * 1.0 / bin);

    Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC3, new Scalar(80, 60, 60));
    Mat normalizeHist = hist.clone();
    Core.normalize(normalizeHist, normalizeHist, 0, histImage.rows(), Core.NORM_MINMAX, -1, new Mat());

    Scalar scalar = null;
    switch (c) {
    case B:
        scalar = new Scalar(255, 0, 0);
        break;
    case G:
        scalar = new Scalar(0, 255, 0);
        break;
    case R:
        scalar = new Scalar(0, 0, 255);
    }

    for (int i = 1; i < bin; i++) {
        Imgproc.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])),
                new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])), scalar, 1, 8, 0);
        Imgproc.line(histImage, new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i - 1, 0)[0])),
                new Point(bin_w * (i), hist_h - Math.round(normalizeHist.get(i, 0)[0])), scalar, 1, 8, 0);
    }
    MatOfByte buffer = new MatOfByte();
    Imgcodecs.imencode(".png", histImage, buffer);
    return new ImageIcon(buffer.toArray());
}

From source file:edu.sust.cse.util.Histogram.java

public static Mat getHistogram(Mat image) {

    try {
        Mat src = new Mat(image.height(), image.width(), CvType.CV_8UC2);
        Imgproc.cvtColor(image, src, Imgproc.COLOR_RGB2GRAY);
        ArrayList<Mat> bgr_planes = new ArrayList<>();
        Core.split(src, bgr_planes);

        MatOfInt histSize = new MatOfInt(256);

        final MatOfFloat histRange = new MatOfFloat(0f, 256f);

        boolean accumulate = false;

        Mat b_hist = new Mat();

        Imgproc.calcHist(bgr_planes, new MatOfInt(0), new Mat(), b_hist, histSize, histRange, accumulate);

        int hist_w = 512;
        int hist_h = 600;
        long bin_w;
        bin_w = Math.round((double) hist_w / 256); // cast before dividing so the division is done in floating point

        Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC1);

        Core.normalize(b_hist, b_hist, 3, histImage.rows(), Core.NORM_MINMAX);

        for (int i = 1; i < 256; i++) {

            Core.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(b_hist.get(i - 1, 0)[0])),
                    new Point(bin_w * (i), hist_h - Math.round(b_hist.get(i, 0)[0])),
                    new Scalar(255, 0, 0), 2, 8, 0);

        }

        return histImage;
    } catch (Exception ex) {
        System.out.println("[HISTOGRAM][ERROR][" + ex.getMessage() + "]");
        return null;
    }
}

From source file:edu.ucue.tfc.Modelo.VideoProcessor.java

private void initControlPoints() {
    try {
        controlPoints.add(new Point(80, 100));
        controlPoints.add(new Point(80, frameSize.height - 100));

        controlPoints.add(new Point(frameSize.width / 2, 100));
        controlPoints.add(new Point(frameSize.width / 2, frameSize.height - 100));

        controlPoints.add(new Point(frameSize.width - 80, 100));
        controlPoints.add(new Point(frameSize.width - 80, frameSize.height - 100));

        controlPoints.add(new Point(80, controlPointsHeight));
        controlPoints.add(new Point(frameSize.width - 80, controlPointsHeight));

        controlPoints.add(new Point(frameSize.width / 2, controlPointsHeight));

        System.out.println("Puntos de control inicializados satisfactoriamente!");

    } catch (Exception e) {
        System.out.println(e.getMessage());
    }
}

From source file:edu.wpi.first.wpilibj.examples.axiscamera.Robot.java

License: Open Source License

@Override
public void robotInit() {
    m_visionThread = new Thread(() -> {
        // Get the Axis camera from CameraServer
        AxisCamera camera = CameraServer.getInstance().addAxisCamera("axis-camera.local");
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Setup a CvSource. This will send images back to the Dashboard
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // This cannot be 'true'. The program will never exit if it is. This
        // lets the robot stop this thread when restarting robot code or
        // deploying.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source mat.  If there is an error notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the output the error.
                outputStream.notifyError(cvSink.getError());
                // skip the rest of the current iteration
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400), new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    m_visionThread.setDaemon(true);
    m_visionThread.start();
}

From source file:edu.wpi.first.wpilibj.examples.intermediatevision.Robot.java

License: Open Source License

@Override
public void robotInit() {
    m_visionThread = new Thread(() -> {
        // Get the UsbCamera from CameraServer
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        // Set the resolution
        camera.setResolution(640, 480);

        // Get a CvSink. This will capture Mats from the camera
        CvSink cvSink = CameraServer.getInstance().getVideo();
        // Setup a CvSource. This will send images back to the Dashboard
        CvSource outputStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Mats are very memory expensive. Let's reuse this Mat.
        Mat mat = new Mat();

        // This cannot be 'true'. The program will never exit if it is. This
        // lets the robot stop this thread when restarting robot code or
        // deploying.
        while (!Thread.interrupted()) {
            // Tell the CvSink to grab a frame from the camera and put it
            // in the source mat.  If there is an error notify the output.
            if (cvSink.grabFrame(mat) == 0) {
                // Send the output the error.
                outputStream.notifyError(cvSink.getError());
                // skip the rest of the current iteration
                continue;
            }
            // Put a rectangle on the image
            Imgproc.rectangle(mat, new Point(100, 100), new Point(400, 400), new Scalar(255, 255, 255), 5);
            // Give the output stream a new image to display
            outputStream.putFrame(mat);
        }
    });
    m_visionThread.setDaemon(true);
    m_visionThread.start();
}

From source file:emotion.Eye.java

private void templatingOuterCorner(Mat eyeRegion, boolean rightEyeFlag) {
    //        Mat template=imread("E:\\Studia\\II YEAR\\Team Project\\"
    //                + "Face database\\eyecorners\\rightOuter.jpg",CV_8UC1);
    Mat template = imread("src\\Templates\\rightOuter.jpg", CV_8UC1);
    Mat temp = new Mat(eyeRegion.width(), eyeRegion.height(), CV_8UC1);
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    temp = rightEyeFlag
            ? new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()));
    Mat result = new Mat(eyeRegion.width(), eyeRegion.height(), eyeRegion.type());

    //(9,9)- coordinates of eye outerCorner in the template
    if (rightEyeFlag) {
        imwrite("rightEyeForOuterTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        //(9,9)- coordinates of eye outerCorner in the template
        Point outerCorner = new Point(maxVal.maxLoc.x + 9, maxVal.maxLoc.y + 9);

        //Adjust coordinates according to whole face
        outerCorner.y += Eye.rightRect.y;
        outerCorner.x += Eye.rightRect.x;
        outerCorner.x += temp.width(); //We examine just the right half of the right eye
        ////////////////////////////////////////////
        EyeRegion.rightOuterEyeCorner = outerCorner;
    } else {
        imwrite("leftEyeForOuterTemplating.jpg", temp);
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        Point outerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 9);
        //Adjust coordinates according to whole face
        outerCorner.y += Eye.leftRect.y;
        outerCorner.x += Eye.leftRect.x;
        ////////////////////////////////////////////
        EyeRegion.leftOuterEyeCorner = outerCorner;
    }
    //Mat tempw=reg._face.clone();
    //Face.drawCross(tempw, outerCorner);
    //imwrite("checkcorner.png",tempw);

}

From source file:emotion.Eye.java

private void templatingInnerCorner(Mat eyeRegion, boolean rightEyeFlag) {
    //        Mat template=imread("E:\\Studia\\II YEAR\\Team Project\\"
    //                + "Face database\\eyecorners\\rightInner.jpg",CV_8UC1);
    Mat template = imread("src\\Templates\\rightInner.jpg", CV_8UC1);
    Mat temp = new Mat(eyeRegion.width(), eyeRegion.height(), CV_8UC1);
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    temp = rightEyeFlag ? new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()));
    Mat result = new Mat(eyeRegion.width(), eyeRegion.height(), eyeRegion.type());

    //(4,7)- coordinates of eye innerCorner in the template
    if (rightEyeFlag) {
        imwrite("template4righteye.jpg", template);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        //(4,7)- coordinates of eye innerCorner in the template
        Point innerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 7);

        StaticFunctions.drawCross(temp, innerCorner, StaticFunctions.Features.EYE_CORNERS);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        //Adjust coordinates according to whole face
        innerCorner.y += Eye.rightRect.y;
        innerCorner.x += Eye.rightRect.x;
        //We examine just the left half of the right eye
        ////////////////////////////////////////////
        EyeRegion.rightInnerEyeCorner = innerCorner;
    } else {
        imwrite("leftEyeForInnerTemplating.jpg", temp);
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        Point innerCorner = new Point(maxVal.maxLoc.x + 8, maxVal.maxLoc.y + 7);

        //Adjust coordinates according to whole face
        innerCorner.y += Eye.leftRect.y;
        innerCorner.x += Eye.leftRect.x;
        //We examine just the right half of the left eye
        innerCorner.x += temp.width();
        ////////////////////////////////////////////
        EyeRegion.leftInnerEyeCorner = innerCorner;
    }
}

From source file:emotion.Eye.java

public void examineEyeOpeness(boolean rightEyeFlag) {
    Rect pureEyeRegion;
    //We take just the middle half of the strict eye region determined
    //by the localized eye corners
    if (rightEyeFlag) {
        double regionWidth = EyeRegion.rightOuterEyeCorner.x - EyeRegion.rightInnerEyeCorner.x;
        pureEyeRegion = new Rect((int) (EyeRegion.rightInnerEyeCorner.x + regionWidth / 2 - 2),
                (int) (Eye.rightRect.y), (4), Eye.rightRect.height);
        imwrite("strictEyeRegRight.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        //Setting x coordinates of eyelids
        EyeRegion.rightLowerEyelid.x = (EyeRegion.rightOuterEyeCorner.x + EyeRegion.rightInnerEyeCorner.x) / 2;
        EyeRegion.rightUpperEyelid.x = EyeRegion.rightLowerEyelid.x;
        EyeRegion.rightEyeOpeness = (EyeRegion.rightUpperEyelid.y - EyeRegion.rightLowerEyelid.y);
    } else {
        double regionWidth;
        regionWidth = EyeRegion.leftInnerEyeCorner.x - EyeRegion.leftOuterEyeCorner.x;
        pureEyeRegion = new Rect((int) (regionWidth / 2 + EyeRegion.leftOuterEyeCorner.x - 2),
                (int) (Eye.leftRect.y), (4), Eye.leftRect.height);
        imwrite("leftEyeReg.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        //Setting x coordinates of eyelids
        EyeRegion.leftLowerEyelid.x = (EyeRegion.leftInnerEyeCorner.x + EyeRegion.leftOuterEyeCorner.x) / 2;
        EyeRegion.leftUpperEyelid.x = EyeRegion.leftLowerEyelid.x;
        EyeRegion.leftEyeOpeness = (EyeRegion.leftUpperEyelid.y - EyeRegion.leftLowerEyelid.y);
    }

    Mat strictEyeRegion = new Mat(EyeRegion._face, pureEyeRegion);
    Mat result = new Mat();

    strictEyeRegion.convertTo(strictEyeRegion, CvType.CV_32F);
    Core.pow(strictEyeRegion, 1.27, strictEyeRegion);
    cvtColor(strictEyeRegion, strictEyeRegion, Imgproc.COLOR_BGR2GRAY);
    imwrite("improved.jpg", strictEyeRegion);

    threshold(strictEyeRegion, result, 100, 255, Imgproc.THRESH_BINARY_INV);

    Mat strEl = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 1));
    dilate(result, result, strEl, new Point(1, 0), 3);

    for (int i = 0; i < result.width(); i++) {
        for (int j = 0; j < result.height() * 0.4; j++) {
            result.put(j, i, new double[] { 0, 0, 0 });
        }
    }
    for (int j = result.height() - 1; j >= 0; j--) {
        if (result.get(j, 0)[0] == 255) {
            if (rightEyeFlag) {

                if (EyeRegion.rightLowerEyelid.y == 0) {
                    EyeRegion.rightLowerEyelid.y = j + 3;
                    EyeRegion.rightLowerEyelid.y += Eye.rightRect.y;
                }
                EyeRegion.rightUpperEyelid.y = j;
                EyeRegion.rightUpperEyelid.y += Eye.rightRect.y;
            } else {
                if (EyeRegion.leftLowerEyelid.y == 0) {
                    EyeRegion.leftLowerEyelid.y = j + 3;
                    EyeRegion.leftLowerEyelid.y += Eye.leftRect.y;
                }
                EyeRegion.leftUpperEyelid.y = j;
                EyeRegion.leftUpperEyelid.y += Eye.leftRect.y;
            }
        }
    }
    imwrite("openessResult.jpg", result);
}

From source file:emotion.Eyebrow.java

public static void Harris(Mat img, boolean rightEyeFlag) {
    //Harris point extraction
    Mat harrisTestimg;
    harrisTestimg = img.clone();
    cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY);
    threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV);
    Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    erode(harrisTestimg, harrisTestimg, struct);
    dilate(harrisTestimg, harrisTestimg, struct);
    imwrite("intermediateHaaris.jpg", harrisTestimg);
    harrisTestimg.convertTo(harrisTestimg, CV_8UC1);
    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    //System.out.println("Average Y for contours:");
    float[] averageY = new float[contours.size()];
    for (int i = 0; i < contours.size(); ++i) {
        //We calculate mean of Y coordinates for each contour
        for (int j = 0; j < contours.get(i).total(); ++j) {
            int val = (int) contours.get(i).toArray()[j].y;
            averageY[i] += val;
        }
        averageY[i] /= contours.get(i).total();
        //System.out.println(i+") "+averageY[i]);

        if (averageY[i] <= img.height() / 2 && //We consider just the upper half of the image
                contours.get(i).total() >= img.width()) //and only contours longer than the threshold
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255));
        else
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0));
    }

    MatOfPoint features = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0);

    //We draw just the 2 extreme points - first and last
    Point eyebrowsPoints[] = new Point[2];
    for (int i = 0; i < features.toList().size(); i++) {
        if (i == 0) {
            eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0);
            eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0);
        }
        if (features.toArray()[i].x < eyebrowsPoints[0].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[0] = features.toArray()[i];
        }
        if (features.toArray()[i].x > eyebrowsPoints[1].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[1] = features.toArray()[i];
        }
    }
    StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS);
    StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS);
    imwrite("testHaris.jpg", img);
    if (rightEyeFlag) {
        EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y;

        EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y;
    } else {
        EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y;

        EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y;
    }
}

From source file:emotion.EyeRegion.java

public static void showLegend() {
    StaticFunctions.drawCross(EyeRegion._face, new Point(5, 5), StaticFunctions.Features.EYEBROWS_ENDS);
    Imgproc.putText(EyeRegion._face, "Eyebrows ends", new Point(12, 7), Core.FONT_HERSHEY_SIMPLEX, 0.3,
            new Scalar(255, 255, 255));

    StaticFunctions.drawCross(EyeRegion._face, new Point(5, 15), StaticFunctions.Features.EYE_CORNERS);
    Imgproc.putText(EyeRegion._face, "Eyes' corners", new Point(12, 17), Core.FONT_HERSHEY_SIMPLEX, 0.3,
            new Scalar(255, 255, 255));

    StaticFunctions.drawCross(EyeRegion._face, new Point(5, 25), StaticFunctions.Features.EYELIDS);
    Imgproc.putText(EyeRegion._face, "Eyelids", new Point(12, 27), Core.FONT_HERSHEY_SIMPLEX, 0.3,
            new Scalar(255, 255, 255));
}