Example usage for org.opencv.imgproc Imgproc rectangle

List of usage examples for org.opencv.imgproc Imgproc rectangle

Introduction

On this page you can find example usage for org.opencv.imgproc Imgproc rectangle.

Prototype

public static void rectangle(Mat img, Point pt1, Point pt2, Scalar color) 
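
The following is a minimal, self-contained sketch of calling this overload: it loads an image, draws a green rectangle between two corner points, and writes the result back to disk. The class name RectangleExample and the paths input.jpg and output.jpg are placeholders, and System.loadLibrary assumes the OpenCV native library is available on java.library.path.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class RectangleExample {
    public static void main(String[] args) {
        // Load the OpenCV native library (assumes it is on java.library.path).
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Read the source image; "input.jpg" is a placeholder path.
        Mat image = Imgcodecs.imread("input.jpg");
        if (image.empty()) {
            System.err.println("Could not read input.jpg");
            return;
        }

        // Draw a rectangle from the top-left corner (50, 50) to the
        // bottom-right corner (200, 150). OpenCV uses BGR order, so
        // new Scalar(0, 255, 0) is green.
        Imgproc.rectangle(image, new Point(50, 50), new Point(200, 150), new Scalar(0, 255, 0));

        // Write the annotated image; "output.jpg" is a placeholder path.
        Imgcodecs.imwrite("output.jpg", image);
    }
}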

Usage

From source file:cv.FaceDetector.java

License:Open Source License

public BufferedImage detectFace(String imagePath) {
    // Create a face detector from the cascade file in the resources directory.
    String facePropertiesFilePath = getClass().getResource("/lbpcascade_frontalface.xml").getPath();
    CascadeClassifier faceDetector = new CascadeClassifier(facePropertiesFilePath);
    Mat image = Imgcodecs.imread(imagePath);
    // Detect faces in the image.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));
    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(image, new org.opencv.core.Point(rect.x, rect.y),
                new org.opencv.core.Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    // Return the image
    return matToBufferedImage(image);
}

From source file:drawing.application.FaceDetection.java

License:Open Source License

@Override
public void run() {
    CascadeClassifier faceDetector = new CascadeClassifier("lbpcascade_frontalface.xml");
    Mat image = Imgcodecs.imread(filePath);

    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    System.out.println(String.format("Writing %s", filePath));
    Imgcodecs.imwrite(filePath, image);

    int numFaces = faceDetections.toArray().length;
    JOptionPane.showMessageDialog(null,
            "Detected " + numFaces + (numFaces == 1 ? " face" : " faces"));
}

From source file:emotion.EyeRegion.java

public static void areEyebrowsWrinkles() {
    //setting parameters
    int height = (int) (abs(rightInnerEyebrowsCorner.y - rightInnerEyeCorner.y) * 1.2);
    int width = (int) (rightInnerEyeCorner.x - leftInnerEyeCorner.x);
    int y = (int) (rightInnerEyebrowsCorner.y - height / 2);
    int x = (int) leftInnerEyebrowsCorner.x;

    Rect wrinklesRect = new Rect(x, y, width, height);
    Mat wrinklesArea = new Mat(_face, wrinklesRect).clone();

    wrinklesThreshold = (int) (wrinklesArea.width() * wrinklesArea.height() * 0.085);
    //Wrinkles between eyebrows are vertical
    // 3x3 vertical-gradient kernel, stored row-major
    int[] gradientMask = { -1, 0, 1, -5, 0, 5, -1, 0, 1 };

    wrinklesArea.convertTo(wrinklesArea, CvType.CV_32F);
    Imgproc.cvtColor(wrinklesArea, wrinklesArea, Imgproc.COLOR_BGR2GRAY);
    Core.pow(wrinklesArea, 1.09, wrinklesArea);
    imwrite("wrinklesArea.jpg", wrinklesArea);

    wrinklesArea = StaticFunctions.convolution(gradientMask, wrinklesArea);
    threshold(wrinklesArea, wrinklesArea, 110, 255, Imgproc.THRESH_BINARY);
    imwrite("wrinklesAreaGradiented.jpg", wrinklesArea);

    long wrinklesPoints = 0;
    for (int i = 0; i < wrinklesArea.width(); i++) {
        for (int j = 0; j < wrinklesArea.height(); j++) {
            if (wrinklesArea.get(j, i)[0] == 255) {
                wrinklesPoints++;
            }
        }
    }
    EyeRegion.wrinklesFactor = wrinklesPoints;
    //        System.out.println("Wrinkles factor: "+wrinklesPoints);
    if (wrinklesPoints >= wrinklesThreshold) {
        //            System.out.println("Expression wrinkles detected! Threshold exceeded");
        Imgproc.rectangle(EyeRegion._face, wrinklesRect.br(), wrinklesRect.tl(), new Scalar(0, 50, 205));
    }
}

From source file:facedetection.FaceDetector.java

public void findFaces() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(
            "D:\\opencv\\sources\\data\\lbpcascades\\lbpcascade_frontalface.xml");
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(img, faceDetections);

    System.out.println(faceDetections);

    for (Rect rect : faceDetections.toArray()) {
        faceList.add(rect);
        Imgproc.rectangle(img, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }
}

From source file:facerecognition.sample1.java

private static Rect find_enclosing_rectangle(double[][] puntos, File image_file) {

    Mat image = Imgcodecs.imread(image_file.getAbsolutePath());
    int i = 0;
    Mat img2 = image.clone();
    for (CascadeClassifier faceDetector : faceDetectors) {

        // Detect faces in the image.
        // MatOfRect is a special container class for Rect.
        MatOfRect faceDetections = new MatOfRect();
        faceDetector.detectMultiScale(image, faceDetections);

        System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

        // Draw a bounding box around each face.
        //            double percent = 0.4;
        for (Rect rect : faceDetections.toArray()) {
            Rect piv = rect.clone();
            // expand the detected rectangle
            int h = piv.height, w = piv.width;
            piv.x -= w * percent / 2;
            piv.y -= h * percent / 2;
            piv.height *= (1 + percent);
            piv.width *= (1 + percent);

            //            Mat croped = new Mat(image, rect);
            //             Imgcodecs.imwrite("face"+(++i)+".png", croped);
            Imgproc.rectangle(img2, new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));

            int r = 10;
            boolean dentro = true;
            for (double[] punto : puntos) {
                //                    Imgproc.circle(img2, new Point(rect.x, rect.y), r, new Scalar(0, 255, 0));

                if (piv.contains(new Point(punto)) == false) {
                    dentro = false;
                    //                        break;
                }
            }
            if (dentro) {
                //                    Imgcodecs.imwrite(urlHelen + "\\face" + (Math.random()) + ".png", img2);
                return piv;
            }
        }

    }
    //        Imgcodecs.imwrite( urlHelen + "\\face"+(Math.random())+".png", img2);

    return null;
}

From source file:facerecognition.sample1.java

private static void draw_initial_points() {
    //        PrintWriter pw = null;
    //        try {
    faceDetectors = new CascadeClassifier[] { new CascadeClassifier("haarcascade_frontalface_alt_tree.xml"),
            new CascadeClassifier("haarcascade_frontalface_alt2.xml"),
            new CascadeClassifier("haarcascade_profileface.xml") };
    File[] image_files = get_images();
    int index = 0;
    int contador = 0;
    //            File resumen = new File(urlHelen + "\\summary.sum");
    //            pw = new PrintWriter(resumen);

    double[][] mask = leer_mask();

    for (File image_file : image_files) {
        System.out.println("Analizando imagen " + (++index) + " de " + image_files.length);
        //            BufferedImage img = convert_to_BufferedImage(image_file);
        //                File puntos_file = get_puntos_file(image_file);
        //                double[][] puntos = LWF.leerpuntos(puntos_file);

        Mat image = Imgcodecs.imread(image_file.getAbsolutePath());
        Mat img2 = image.clone();

        for (CascadeClassifier faceDetector : faceDetectors) {

            // Detect faces in the image.
            // MatOfRect is a special container class for Rect.
            MatOfRect faceDetections = new MatOfRect();
            faceDetector.detectMultiScale(image, faceDetections);

            System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

            // Draw a bounding box around each face.
            for (Rect rect : faceDetections.toArray()) {
                Rect piv = rect.clone();
                // expand the detected rectangle
                int h = piv.height, w = piv.width;
                piv.x -= w * percent / 2;
                piv.y -= h * percent / 2;
                piv.height *= (1 + percent);
                piv.width *= (1 + percent);

                //            Mat croped = new Mat(image, rect);
                //             Imgcodecs.imwrite("face"+(++i)+".png", croped);
                Imgproc.rectangle(img2, new Point(piv.x, piv.y),
                        new Point(piv.x + piv.width, piv.y + piv.height), new Scalar(0, 255, 0));

                for (double[] punto : mask) {
                    Imgproc.circle(img2, new Point(piv.x + piv.width * punto[0], piv.y + piv.height * punto[1]),
                            5, new Scalar(0, 255, 0));
                }
            }

        }
        //            pw.close();
        Imgcodecs.imwrite(urlHelen + "\\face" + (Math.random()) + ".png", img2);

    }

}

From source file:io.github.jakejmattson.facialrecognition.FacialRecognition.java

License:Open Source License

private static Mat detectFaces(Mat image, CascadeClassifier faceDetector, ImageFrame frame) {
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    Rect[] faces = faceDetections.toArray();
    boolean shouldSave = frame.shouldSave();
    String name = frame.getFileName();
    Scalar color = frame.getTextColor();

    for (Rect face : faces) {
        Mat croppedImage = new Mat(image, face);

        if (shouldSave)
            saveImage(croppedImage, name);

        Imgproc.putText(image, "ID: " + identifyFace(croppedImage), face.tl(), Font.BOLD, 1.5, color);
        Imgproc.rectangle(image, face.tl(), face.br(), color);
    }

    int faceCount = faces.length;
    String message = faceCount + (faceCount == 1 ? " face" : " faces") + " detected!";
    Imgproc.putText(image, message, new Point(3, 25), Font.BOLD, 2, color);

    return image;
}

From source file:objectdetection.ObjectDetector.java

public void findObjects() {

    preProcessImg();
    Imgproc.findContours(imgCanny, contours, imgCanny, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);

    for (MatOfPoint mop : contours) {
        MatOfPoint2f m2p;
        m2p = new MatOfPoint2f(mop.toArray());
        Double peri = Imgproc.arcLength(m2p, true);
        Imgproc.approxPolyDP(m2p, m2p, 0.02 * peri, true);
        //Imgproc.drawContours(img, contours, -1, new Scalar(0, 0, 255), 2);

        float area = img.width() * img.height();
        Rect rect = Imgproc.boundingRect(mop);
        objList.add(rect);
        //if (rect.height * rect.width > area*5/100) {
        Imgproc.rectangle(img, rect.tl(), rect.br(), new Scalar(255, 0, 0));
        //}
    }
    Collections.sort(objList, new Comparator<Rect>() {
        @Override
        public int compare(Rect r1, Rect r2) {
            return (int) (r2.area() - r1.area());
        }

    });

    List<Rect> arr = objList;

    Rect bigRect = arr.get(0);
    Rect bigRect2 = arr.get(1);

    while (!equals(bigRect, bigRect2)) {
        bigRect2 = bigRect;
        for (int i = 1; i < arr.size(); ++i) {
            if (doOverlap(bigRect, arr.get(i))) {
                bigRect = union(bigRect, arr.get(i));
                arr.remove(i);
                break;
            }
        }

    }

    Imgproc.rectangle(img, bigRect.tl(), bigRect.br(), new Scalar(255, 255, 0));
    mainRect = bigRect;
}

From source file:opencv.CamCapture.java

private void searchForMovement(Mat thresholdImage, Mat frame) {
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    Rect objectBoundingRectangle = new Rect(0, 0, 0, 0);
    for (int i = 0; i < contours.size(); i++) {
        objectBoundingRectangle = Imgproc.boundingRect(contours.get(i));
        if (objectBoundingRectangle.area() > 500)
            Imgproc.rectangle(frame, objectBoundingRectangle.tl(), objectBoundingRectangle.br(),
                    new Scalar(0, 255, 0));
    }
    ImageIcon i2 = new ImageIcon(Mat2bufferedImage(frame));
    jLabel5.setIcon(i2);

}

From source file:org.technowolves.vision.TowerTracker.java

License:Open Source License

/**
 * Reads an image from a live image capture and outputs information to the SmartDashboard or a file.
 */
public static void processImage() {
    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    double x, y, targetX, targetY, distance, azimuth;
    // frame counter
    int FrameCount = 0;
    long before = System.currentTimeMillis();
    // only run for the specified time
    while (FrameCount < 100) {
        contours.clear();
        // capture from the axis camera
        videoCapture.read(matOriginal);
        // captures from a static file for testing
        // matOriginal = Imgcodecs.imread("someFile.png");
        Imgproc.cvtColor(matOriginal, matHSV, Imgproc.COLOR_BGR2HSV);
        Core.inRange(matHSV, LOWER_BOUNDS, UPPER_BOUNDS, matThresh);
        Imgproc.findContours(matThresh, contours, matHeirarchy, Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);
        // make sure the contours that are detected are at least 20x20
        // pixels with an area of 400 and an aspect ratio greater than 1
        for (Iterator<MatOfPoint> iterator = contours.iterator(); iterator.hasNext();) {
            MatOfPoint matOfPoint = (MatOfPoint) iterator.next();
            Rect rec = Imgproc.boundingRect(matOfPoint);
            if (rec.height < 25 || rec.width < 25) {
                iterator.remove();
                continue;
            }
            float aspect = (float) rec.width / (float) rec.height;
            if (aspect < 1.0)
                iterator.remove();
        }
        for (MatOfPoint mop : contours) {
            Rect rec = Imgproc.boundingRect(mop);
            Imgproc.rectangle(matOriginal, rec.br(), rec.tl(), BLACK);
        }
        // if there is only 1 target, then we have found the target we want
        if (contours.size() == 1) {
            Rect rec = Imgproc.boundingRect(contours.get(0));
            // "fun" math brought to you by miss daisy (team 341)!
            y = rec.br().y + rec.height / 2;
            y = -((2 * (y / matOriginal.height())) - 1);
            distance = (TOP_TARGET_HEIGHT - TOP_CAMERA_HEIGHT)
                    / Math.tan((y * VERTICAL_FOV / 2.0 + CAMERA_ANGLE) * Math.PI / 180);
            // angle to target...would not rely on this
            targetX = rec.tl().x + rec.width / 2;
            targetX = (2 * (targetX / matOriginal.width())) - 1;
            azimuth = normalize360(targetX * HORIZONTAL_FOV / 2.0 + 0);
            // drawing info on target
            Point center = new Point(rec.br().x - rec.width / 2 - 15, rec.br().y - rec.height / 2);
            Point centerw = new Point(rec.br().x - rec.width / 2 - 15, rec.br().y - rec.height / 2 - 20);
            Imgproc.putText(matOriginal, "" + (int) distance, center, Core.FONT_HERSHEY_PLAIN, 1, BLACK);
            Imgproc.putText(matOriginal, "" + (int) azimuth, centerw, Core.FONT_HERSHEY_PLAIN, 1, BLACK);
        }
        // output an image for debugging
        Imgcodecs.imwrite("output.png", matOriginal);
        FrameCount++;
    }
    shouldRun = false;
}