Example usage for org.opencv.imgproc Imgproc rectangle

List of usage examples for org.opencv.imgproc Imgproc rectangle

Introduction

On this page you can find example usages of org.opencv.imgproc Imgproc.rectangle.

Prototype

public static void rectangle(Mat img, Point pt1, Point pt2, Scalar color) 

Source Link

Usage

From source file:attendance_system_adder.cv.image.java

/**
 * Detects faces in the given image, draws a bounding box around each detection,
 * and returns a copy of the last detected face region.
 *
 * @param image the input image; modified in place (bounding boxes are drawn on it)
 * @return a clone of the sub-matrix of {@code image} covering the last detected
 *         face, or {@code null} if no face was found
 */
public Mat getFaceDetec(Mat image) {
    Mat face = null;

    System.out.println("\nRunning DetectFaceDemo");

    CascadeClassifier faceDetector = new CascadeClassifier(".\\resource\\haarcascade_frontalface_default.xml");

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    for (Rect rect : faceDetections.toArray()) {
        // Crop (and clone) BEFORE drawing: new Mat(image, rect) is a view over
        // the same pixel buffer, so drawing the rectangle first would paint the
        // box outline into the returned face region.
        face = new Mat(image, rect).clone();
        Imgproc.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 0, 0));
    }
    return face;
}

From source file:attendance_system_adder.cv.image.java

/**
 * Detects faces in the given image and draws a bounding box around each
 * detection, directly on the input.
 *
 * @param image the input image; modified in place
 * @return the same {@code image} instance, annotated with bounding boxes
 */
public Mat FaceDetec(Mat image) {
    System.out.println("\nRunning DetectFaceDemo");

    CascadeClassifier faceDetector = new CascadeClassifier(".\\resource\\haarcascade_frontalface_default.xml");

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 0, 0));
    }
    return image;
}

From source file:com.ibm.streamsx.edgevideo.device.AbstractFaceDetectApp.java

License:Open Source License

/**
 * Draws a green bounding box on the frame for every detected face, renders the
 * annotated frame, and optionally renders the individual face crops.
 */
protected void renderImages(Mat rgbFrame, MatOfRect faceRects, List<Mat> faces) {
    // Outline every detection directly on the frame.
    for (Rect r : faceRects.toArray()) {
        Point topLeft = new Point(r.x, r.y);
        Point bottomRight = new Point(r.x + r.width, r.y + r.height);
        Imgproc.rectangle(rgbFrame, topLeft, bottomRight, new Scalar(0, 255, 0));
    }

    faceDetectPanel.matToBufferedImage(rgbFrame);
    faceDetectPanel.repaint();

    // Optionally show each detected face crop as well.
    if (renderDetections) {
        detectedFacesPanel.clear();
        for (Mat face : faces) {
            // TODO handle rendering multiple detections / images in the panel
            detectedFacesPanel.matToBufferedImage(face);
        }
        detectedFacesPanel.repaint();
    }
}

From source file:com.ibm.streamsx.edgevideo.device.wipRecognition.WIP_NonEdgentFaceDetectApp.java

License:Open Source License

/**
 * Draws a green bounding box on the frame for every detected face, renders the
 * annotated frame, and optionally renders the individual face crops.
 * Recognition predictions are accepted but not yet rendered (see TODO).
 */
protected void renderImages(Mat rgbFrame, MatOfRect faceRects, List<Mat> faces, List<Prediction> predictions) {
    // Outline every detection directly on the frame.
    for (Rect r : faceRects.toArray()) {
        Point topLeft = new Point(r.x, r.y);
        Point bottomRight = new Point(r.x + r.width, r.y + r.height);
        Imgproc.rectangle(rgbFrame, topLeft, bottomRight, new Scalar(0, 255, 0));
    }

    // TODO add recognition prediction info label to image

    faceDetectPanel.matToBufferedImage(rgbFrame);
    faceDetectPanel.repaint();

    // Optionally show each detected face crop as well.
    if (renderDetections) {
        detectedFacesPanel.clear();
        for (Mat face : faces) {
            // TODO handle rendering multiple detections / images in the panel
            detectedFacesPanel.matToBufferedImage(face);
        }
        detectedFacesPanel.repaint();
    }
}

From source file:com.jeremydyer.nifi.ObjectDetectionProcessor.java

License:Apache License

/**
 * Detects objects in {@code image} with the cascade classifier configured in
 * {@code dd} ("opencv_xml_cascade_path"), emits one FlowFile per detection to
 * REL_OBJECT_DETECTED, and recurses into any "children" detection definitions.
 *
 * Per detection the emitted FlowFile contains, depending on the JSON flags:
 *  - "crop" == true: the cropped detection region (JPEG);
 *  - "drawBounds" == true: a clone of the full image with a box drawn (JPEG);
 *  - otherwise: the unmodified full image (JPEG).
 *
 * @return the last cropped detection if any crop was produced, otherwise the
 *         input image (possibly replaced by a child definition's result)
 */
final public Mat detectObjects(final ProcessSession session, FlowFile original, final JSONObject dd,
        final Mat image) {

    CascadeClassifier objectDetector = new CascadeClassifier(dd.getString("opencv_xml_cascade_path"));
    MatOfRect objectDetections = new MatOfRect();
    objectDetector.detectMultiScale(image, objectDetections);

    final AtomicReference<Mat> croppedImageReference = new AtomicReference<>();

    // MatOfRect.toArray() copies the detections out of native memory each time
    // it is called; hoist it instead of calling it twice per loop iteration.
    final Rect[] detectedRects = objectDetections.toArray();

    int counter = 0;
    for (final Rect rect : detectedRects) {
        FlowFile detection = session.write(session.create(original), new OutputStreamCallback() {
            @Override
            public void process(OutputStream outputStream) throws IOException {

                // Should the image be cropped? If so there is no need to draw
                // bounds because that would be the same as the cropping.
                if (dd.getBoolean("crop")) {
                    Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
                    Mat croppedImage = new Mat(image, rectCrop);
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", croppedImage, updatedImage);
                    croppedImageReference.set(croppedImage);
                    outputStream.write(updatedImage.toArray());
                } else if (dd.getBoolean("drawBounds")) {
                    // Draw on a clone so the shared input image stays pristine.
                    Mat imageWithBorder = image.clone();
                    Imgproc.rectangle(imageWithBorder, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height),
                            new Scalar(255, 255, 255));
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", imageWithBorder, updatedImage);
                    outputStream.write(updatedImage.toArray());
                } else {
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", image, updatedImage);
                    outputStream.write(updatedImage.toArray());
                }

            }
        });

        Map<String, String> atts = new HashMap<>();
        atts.put("object.detection.name", dd.getString("name"));
        // Long.toString avoids the deprecated Long(long) boxing constructor.
        atts.put("object.detection.id", Long.toString(System.currentTimeMillis() + counter));

        counter++;

        detection = session.putAllAttributes(detection, atts);
        session.transfer(detection, REL_OBJECT_DETECTED);
    }

    // Children operate on the crop when one was produced, else the full image.
    Mat childResponse = croppedImageReference.get() != null ? croppedImageReference.get() : image;

    if (dd.has("children")) {
        JSONArray children = dd.getJSONArray("children");
        if (children != null) {
            for (int i = 0; i < children.length(); i++) {
                JSONObject ddd = children.getJSONObject(i);
                childResponse = detectObjects(session, original, ddd, childResponse);
            }
        }
    }

    return childResponse;
}

From source file:com.mycompany.facedetection.FaceDetector.java

/**
 * Runs LBP-cascade face detection on {@code img}, records every detected
 * rectangle in {@code faceList}, and outlines it in green on the image.
 */
public void findFaces() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(
            "D:\\opencv\\sources\\data\\lbpcascades\\lbpcascade_frontalface.xml");
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(img, faceDetections);
    for (Rect face : faceDetections.toArray()) {
        faceList.add(face);
        Point upperLeft = new Point(face.x, face.y);
        Point lowerRight = new Point(face.x + face.width, face.y + face.height);
        Imgproc.rectangle(img, upperLeft, lowerRight, new Scalar(0, 255, 0));
    }
}

From source file:com.mycompany.objectdetection.ObjectDetector.java

/**
 * Finds object contours in {@code img}, merges overlapping bounding boxes into
 * larger regions, and records the significant regions in {@code mainObjects}.
 * Side effects: populates {@code objList}, draws contours and bounding boxes
 * onto {@code imgOut}, and updates {@code mainRect}.
 */
public void findObjects() {

    // Pre-process the image, then reduce it to grayscale edges for contouring.
    preProcessImg();

    toGrayScale(imgMeanShifted);
    detectEdges(imgGrayscale);
    // NOTE(review): imgCanny is passed both as the input image and as the
    // hierarchy output parameter — confirm this reuse is intentional.
    Imgproc.findContours(imgCanny, contours, imgCanny, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
    objList = new ArrayList();

    for (MatOfPoint mop : contours) {
        // Approximate each contour to a polygon (2% of perimeter tolerance),
        // then collect and draw its bounding box.
        MatOfPoint2f m2p;
        m2p = new MatOfPoint2f(mop.toArray());
        Double peri = Imgproc.arcLength(m2p, true);
        Imgproc.approxPolyDP(m2p, m2p, 0.02 * peri, true);
        Imgproc.drawContours(imgOut, contours, -1, new Scalar(0, 0, 255), 2);

        float area = img.width() * img.height();
        Rect rect = Imgproc.boundingRect(mop);
        objList.add(rect);
        Imgproc.rectangle(imgOut, rect.tl(), rect.br(), new Scalar(255, 0, 0));
    }

    // Sort bounding boxes by area, largest first.
    Collections.sort(objList, new Comparator<Rect>() {
        @Override
        public int compare(Rect r1, Rect r2) {
            // NOTE(review): int truncation of the area difference — fine for
            // ordering unless two areas differ by less than 1.0; confirm.
            return (int) (r2.area() - r1.area());
        }

    });

    // NOTE(review): arr is an ALIAS of objList, not a copy — the removals below
    // shrink objList itself, so the objList.size() checks further down observe
    // the shrinking list. Confirm this is the intended behaviour.
    List<Rect> arr = objList;

    // Repeatedly take the largest remaining box and grow it by unioning every
    // overlapping box, until it stops changing (fixed point).
    while (arr.size() > 0) {
        Rect bigRect = arr.get(0);
        arr.remove(0);
        Rect bigRect2 = new Rect();

        while (!equals(bigRect, bigRect2)) {
            bigRect2 = bigRect;
            for (int i = 0; i < arr.size(); ++i) {
                if (doOverlap(bigRect, arr.get(i))) {
                    bigRect = union(bigRect, arr.get(i));
                    arr.remove(i);
                    break;
                }
            }

        }

        mainRect = bigRect;

        // Keep the merged region if it covers at least 3% of the image, or
        // unconditionally when few regions remain.
        if (objList.size() > 5 && mainRect.area() >= img.width() * img.height() * 3 / 100) {
            Imgproc.rectangle(imgOut, bigRect.tl(), bigRect.br(), new Scalar(255, 255, 0));
            mainObjects.add(mainRect);
        } else if (objList.size() <= 5) {
            mainObjects.add(mainRect);
        }
    }

}

From source file:com.raulh82vlc.face_detection_sample.opencv.render.FaceDrawerOpenCV.java

License:Apache License

/**
 * Draws a rectangle between the two given match corner points on the frame.
 */
public static void drawMatchedEye(Point matchLocTx, Point matchLocTy, Mat matrixRgba) {
    final Scalar boxColour = new Scalar(255, 255, 0, 255);
    Imgproc.rectangle(matrixRgba, matchLocTx, matchLocTy, boxColour);
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Detect the object inside the scene.
 * We also search the scale of the scene from 20% to 120% scale by steps
 * steps are 10%, with 0.6 accuracy
 * then when a good match is found, we search around by 5% scale steps with 0.7 accuracy
 * then when a good match is found, we search around by 2.5% scale steps with 0.8 accuracy
 *
 * example:
 * first pass: scales are: 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200
 *             good matches are found around 600 and 700
 * second pass: scales are 550, 600, 650, 700, 750
 *             good matches are found at 650
 * third pass: scales are 625, 650, 675
 *
 * The best match is at 675
 *
 * Side effects: on success sets detectedRectangle, rotationAngle and sizeRatio;
 * throws ImageSearchException when no acceptable match is found.
 */
public void detectExactZoneWithScale() {

    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);

    // Matches are appended from worker threads, hence the synchronized list.
    List<TemplateMatchProperties> matches = Collections.synchronizedList(new ArrayList<>());

    // step size (per-mille of scale) -> minimum match threshold for that pass
    Map<Integer, Double> scaleSteps = new LinkedHashMap<>();
    scaleSteps.put(100, 0.6);
    scaleSteps.put(50, 0.7);
    scaleSteps.put(25, 0.8);

    int currentStep = 100;

    // scales already evaluated, so refinement passes never recompute one
    Set<Integer> computedScales = new HashSet<>();

    while (currentStep >= 25) {
        final double currentThreshold = scaleSteps.get(currentStep);

        // first loop
        Set<Integer> localScales = Collections.synchronizedSet(new HashSet<>());
        if (currentStep == 100) {
            // initial coarse pass: scan 20%..120% (values are per-mille)
            for (int scale = 200; scale < 1200; scale += currentStep) {
                localScales.add(scale);
            }
        } else {
            // refinement pass: probe one step either side of every live match
            if (matches.isEmpty()) {
                throw new ImageSearchException("no matches");
            }
            for (TemplateMatchProperties tmpM : matches) {
                if (tmpM.isActive()) {
                    localScales.add(tmpM.getMatchScale() - currentStep);
                    localScales.add(tmpM.getMatchScale() + currentStep);
                }
            }
        }

        ExecutorService executorService = Executors
                .newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        for (int scale : localScales) {
            if (computedScales.contains(scale)) {
                continue;
            }
            computedScales.add(scale);

            // resize to scale factor
            final int localScale = scale;
            // NOTE(review): 'scale' and 'localScale' hold the same value; the
            // mixed usage on the next two lines is harmless but inconsistent.
            Size sz = new Size(sceneImageMat.cols() * scale / 1000.0,
                    sceneImageMat.rows() * localScale / 1000.0);

            // skip if resized image is smaller than object
            if (sz.width < objectImageMat.cols() || sz.height < objectImageMat.rows()) {
                continue;
            }

            executorService.submit(() -> {

                Mat resizeSceneImageMat = new Mat();
                Imgproc.resize(sceneImageMat, resizeSceneImageMat, sz);

                try {
                    TemplateMatchProperties match = detectExactZone2(resizeSceneImageMat, objectImageMat,
                            localScale, currentThreshold);
                    matches.add(match);
                } catch (ImageSearchException e) {
                    // no match at this scale: deliberately ignored (best effort)
                }

            });
        }

        executorService.shutdown();
        try {
            // NOTE(review): the awaitTermination return value is ignored, so a
            // pass exceeding 10s silently continues with partial matches.
            executorService.awaitTermination(10, TimeUnit.SECONDS);
        } catch (Exception e) {
            logger.info("Could not compute scale within 10 seconds", e);
        }

        // shortcut if we find a very good match
        double cleanThreshold = currentThreshold;
        // sort by descending match value (best first)
        matches.sort((TemplateMatchProperties t1,
                TemplateMatchProperties t2) -> -(t1.getMatchValue().compareTo(t2.getMatchValue())));
        if (!matches.isEmpty() && matches.get(0).getMatchValue() > 0.9) {
            cleanThreshold = 0.9;
            currentStep = Math.min(currentStep, 50);
        }
        currentStep = currentStep / 2;

        // clean matches from too low matching values
        for (TemplateMatchProperties t : matches) {
            if (t.getMatchValue() < cleanThreshold) {
                t.setActive(false);
            }
        }
    }

    // get the best match
    matches.sort((TemplateMatchProperties t1,
            TemplateMatchProperties t2) -> -(t1.getMatchValue().compareTo(t2.getMatchValue())));

    if (!matches.isEmpty()) {
        TemplateMatchProperties bestMatch = matches.get(0);
        if (bestMatch.getMatchValue() < 1 - detectionThreshold) {
            throw new ImageSearchException(
                    String.format("No match found for threshold %.2f, match found with value %.2f",
                            1 - detectionThreshold, bestMatch.getMatchValue()));
        }

        // Map the match location back into the original (unscaled) scene.
        // NOTE(review): width is derived from rows() and height from cols() —
        // confirm this rows/cols swap is intentional.
        detectedRectangle = new Rectangle((int) (bestMatch.getMatchLoc().x / bestMatch.getDoubleScale()),
                (int) (bestMatch.getMatchLoc().y / bestMatch.getDoubleScale()),
                (int) (objectImageMat.rows() / bestMatch.getDoubleScale()),
                (int) (objectImageMat.cols() / bestMatch.getDoubleScale()));

        if (debug) {
            try {
                Imgproc.rectangle(sceneImageMat, new Point(detectedRectangle.x, detectedRectangle.y),
                        new Point(detectedRectangle.x + detectedRectangle.width,
                                detectedRectangle.y + detectedRectangle.height),
                        new Scalar(0, 255, 0));

                showResultingPicture(sceneImageMat);
            } catch (IOException e) {
                // debug rendering failure is non-fatal: deliberately ignored
            }
        }
        rotationAngle = 0;
        sizeRatio = detectedRectangle.width / (double) objectImageMat.cols();

    } else {
        throw new ImageSearchException("no matching has been found");
    }

}

From source file:com.trandi.opentld.TLDView.java

License:Apache License

/**
 * Outlines {@code box} on {@code image} in the given colour; no-op when the
 * box is null.
 */
private static void drawBox(Mat image, final Rect box, final Scalar colour) {
    if (box == null) {
        return;
    }
    Imgproc.rectangle(image, box.tl(), box.br(), colour);
}