Example usage for org.opencv.objdetect CascadeClassifier detectMultiScale

List of usage examples for org.opencv.objdetect CascadeClassifier detectMultiScale

Introduction

In this page you can find the example usage for org.opencv.objdetect CascadeClassifier detectMultiScale.

Prototype

public void detectMultiScale(Mat image, MatOfRect objects) 

Source Link

Usage

From source file:Fiji_OpenCV.java

License:Creative Commons License

/**
 * Detects faces in an ImageJ ARGB pixel buffer, draws a green box around
 * each detection, and writes the annotated pixels back into {@code pixels}
 * and {@code this.ip}.
 *
 * @param pixels packed ARGB pixels of a width x height image (row-major).
 */
public void process(int[] pixels) {
    final int channels = 3;

    // Unpack ARGB ints into an interleaved BGR byte buffer for OpenCV.
    byte[] buf = new byte[pixels.length * channels];
    for (int i = 0; i < pixels.length; i++) {
        buf[i * channels] = (byte) (pixels[i] & 0xff);              // blue
        buf[i * channels + 1] = (byte) ((pixels[i] >>> 8) & 0xff);  // green
        buf[i * channels + 2] = (byte) ((pixels[i] >>> 16) & 0xff); // red
    }

    // Mat(rows, cols, type): rows = height, cols = width. The original passed
    // (width, height), which transposes any non-square image.
    Mat image = new Mat(height, width, CvType.CV_8UC3);
    image.put(0, 0, buf);

    // Create a face detector from the cascade file in the resources directory.
    CascadeClassifier faceDetector = new CascadeClassifier(
            getClass().getResource("/opencv/data/haarcascades/haarcascade_frontalface_alt2.xml").getPath());

    // Detect faces in the image. MatOfRect is a container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face (green in BGR order).
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    // Repack BGR bytes into ARGB ints. Each byte must be masked with 0xff
    // BEFORE shifting: the original cast the signed byte directly, so channel
    // values >= 0x80 sign-extended and corrupted the higher channels.
    image.get(0, 0, buf);
    for (int i = 0; i < pixels.length; i++) {
        pixels[i] = 0x80000000
                + ((buf[i * channels + 2] & 0xff) << 16)
                + ((buf[i * channels + 1] & 0xff) << 8)
                + (buf[i * channels] & 0xff);
        // NOTE(review): 0x80000000 sets only the top alpha bit (alpha = 0x80,
        // not fully opaque 0xff); preserved from the original — confirm intent.
    }
    this.ip = new ColorProcessor(width, height, pixels);
}

From source file:M.java

/**
 * Resize the certain image to required size (WIDTH*HEIGHT).
 *
 * @param imgPath the path of the image.
 * @return the path of the resized image.
 * @throws Exception/*from  ww  w.jav  a 2  s. c o  m*/
 */
/**
 * Resize the certain image to required size (WIDTH*HEIGHT): detects faces in
 * the image at {@code imgPath}, crops each detection and scales it, writing
 * the result to {@code img_resized\cut_image.jpg}.
 *
 * @param imgPath the path of the image.
 * @return the path of the resized image, or "" when no face was detected.
 * @throws Exception if the image cannot be read or the crop cannot be written.
 */
public static String resize(String imgPath) throws Exception {
    System.out.println("\nRunning DetectFaceDemo");
    // substring(1) strips the leading '/' from the resource URL path.
    // NOTE(review): this assumes a Windows-style path ("/C:/...") — it would
    // mangle paths on non-Windows JVMs; confirm deployment target.
    String xmlfilePath = FaceDetector.class.getResource("haarcascade_frontalface_alt.xml").getPath()
            .substring(1);
    System.out.println(xmlfilePath);
    CascadeClassifier faceDetector = new CascadeClassifier(xmlfilePath);
    Mat image = Highgui.imread(imgPath);
    System.out.println(imgPath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    Rect[] faces = faceDetections.toArray();
    System.out.println(String.format("Detected %s faces", faces.length));

    String dir = "";
    if (faces.length > 0) {
        // Read the source image once; the original re-read the same file on
        // every loop iteration.
        BufferedImage src = ImageIO.read(new File(imgPath));

        // Each detection overwrites the same output file, so only the last
        // face survives — behavior preserved from the original.
        for (Rect rect : faces) {
            ImageFilter cropFilter = new CropImageFilter(rect.x, rect.y, rect.width, rect.height);
            Image img = Toolkit.getDefaultToolkit()
                    .createImage(new FilteredImageSource(src.getSource(), cropFilter));
            BufferedImage output = new BufferedImage(WIDTH, HEIGHT, BufferedImage.TYPE_INT_RGB);
            Graphics g = output.getGraphics();
            g.drawImage(img, 0, 0, WIDTH, HEIGHT, null);
            g.dispose();
            dir = "img_resized\\cut_image.jpg";
            ImageIO.write(output, "JPEG", new File(dir));
        }
    }
    return dir;
}

From source file:M.java

/**
 * Detects faces in the image at {@code imgPath}, crops each detection and
 * scales it to WIDTH x HEIGHT, writing the result into the
 * {@code photodb_resized} directory named by the student label, andrewId and
 * training-image index.
 *
 * @param imgPath         the path of the image.
 * @param andrewId        identifier used to label the output file.
 * @param trainImageCount index appended to the output file name.
 * @return the path of the last written crop, or "" when no face was detected.
 * @throws Exception if the image cannot be read or the crop cannot be written.
 */
public static String resize(String imgPath, String andrewId, int trainImageCount) throws Exception {
    System.out.println("\nRunning DetectFaceDemo");
    // substring(1) strips the leading '/' from the resource URL path.
    // NOTE(review): Windows-specific — see resize(String).
    String xmlfilePath = FaceDetector.class.getResource("haarcascade_frontalface_alt.xml").getPath()
            .substring(1);
    System.out.println(xmlfilePath);
    CascadeClassifier faceDetector = new CascadeClassifier(xmlfilePath);
    Mat image = Highgui.imread(imgPath);
    System.out.println(imgPath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    Rect[] faces = faceDetections.toArray();
    System.out.println(String.format("Detected %s faces", faces.length));

    String dir = "";
    if (faces.length > 0) {
        // Read the source image once; the original re-read the same file on
        // every loop iteration.
        BufferedImage src = ImageIO.read(new File(imgPath));

        // Each detection overwrites 'dir', so only the last face's path is
        // returned — behavior preserved from the original.
        for (Rect rect : faces) {
            ImageFilter cropFilter = new CropImageFilter(rect.x, rect.y, rect.width, rect.height);
            Image img = Toolkit.getDefaultToolkit()
                    .createImage(new FilteredImageSource(src.getSource(), cropFilter));
            BufferedImage output = new BufferedImage(WIDTH, HEIGHT, BufferedImage.TYPE_INT_RGB);
            Graphics g = output.getGraphics();
            g.drawImage(img, 0, 0, WIDTH, HEIGHT, null);
            g.dispose();
            int st_no = findLabel("photodb_resized\\", andrewId);
            dir = "photodb_resized\\" + st_no + "-" + andrewId + trainImageCount + ".jpg";
            ImageIO.write(output, "JPEG", new File(dir));
        }
    }
    return dir;
}

From source file:attendance_system_adder.cv.image.java

/**
 * Runs Haar-cascade face detection on {@code image}, outlines every
 * detection in blue (BGR), and returns a sub-matrix view of the most
 * recently detected face — or {@code null} when nothing was detected.
 *
 * Note: the input Mat is modified in place by the drawn rectangles.
 */
public Mat getFaceDetec(Mat image) {
    System.out.println("\nRunning DetectFaceDemo");

    // Cascade file shipped alongside the application (Windows-relative path).
    CascadeClassifier classifier = new CascadeClassifier(".\\resource\\haarcascade_frontalface_default.xml");

    MatOfRect detections = new MatOfRect();
    classifier.detectMultiScale(image, detections);

    Mat lastFace = null;
    Rect[] rects = detections.toArray();
    for (int i = 0; i < rects.length; i++) {
        Rect r = rects[i];
        Imgproc.rectangle(image, new Point(r.x, r.y),
                new Point(r.x + r.width, r.y + r.height), new Scalar(255, 0, 0));
        // Keep a view of this detection; later hits overwrite earlier ones.
        lastFace = new Mat(image, r);
    }
    return lastFace;
}

From source file:attendance_system_adder.cv.image.java

/**
 * Detects faces in {@code image} and draws a blue (BGR) bounding box around
 * each one, modifying the input Mat in place.
 *
 * @param image the image to annotate.
 * @return the same (annotated) Mat instance.
 */
public Mat FaceDetec(Mat image) {
    System.out.println("\nRunning DetectFaceDemo");

    CascadeClassifier faceDetector = new CascadeClassifier(".\\resource\\haarcascade_frontalface_default.xml");

    // Detect faces in the image. MatOfRect is a container class for Rect.
    // (The original also declared a 'face' local that was never used; removed.)
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 0, 0));
    }
    return image;
}

From source file:br.com.prj.TelaPrincipal.java

// Swing handler for the "Procurar" (browse/search) button: runs face
// detection on the previously selected image file and adds one label per
// detected face to jPanel1, plus a summary message in totalRostos.
private void btnProcurarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnProcurarActionPerformed

    // Remove all face labels added by a previous run.
    // NOTE(review): Swing usually needs revalidate()/repaint() after
    // removeAll(); presumably adicionarLabel() below triggers it — confirm.
    jPanel1.removeAll();

    // Reset the position where the first face label will be placed.
    boundX = 12;
    boundY = 22;

    // URL_LIB_FACE points at the cascade XML (declared elsewhere in this class).
    CascadeClassifier faceDetector = new CascadeClassifier(URL_LIB_FACE);

    imagemCarregada = Imgcodecs.imread(selectedFile.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    // Destination image for drawing face rectangles (drawing is currently
    // commented out below, so this Mat stays blank).
    imagemDest = new Mat(imagemCarregada.rows(), imagemCarregada.cols(), imagemCarregada.type());
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(imagemCarregada, faceDetections);

    // Expand each detected rectangle by the PAD_LATERAL/PAD_SUPERIOR margins
    // and add a label showing the cropped face.
    // NOTE(review): the expanded rect is not clamped to the image bounds —
    // a face near an edge may make new Mat(imagemCarregada, rect) throw.
    Rect[] faceEncontrada = new Rect[faceDetections.toArray().length];
    int i = 0;
    for (Rect rect : faceDetections.toArray()) {

        faceEncontrada[i] = new Rect(new Point(rect.x - PAD_LATERAL + 5, rect.y - PAD_SUPERIOR + 5),
                new Point(rect.x + rect.width + PAD_LATERAL, (rect.y + rect.height + PAD_SUPERIOR) - 5));

        adicionarLabel(convertMatToImage(new Mat(imagemCarregada, faceEncontrada[i])), faceEncontrada[i]);

        //            ADICIONA RETANGULO DO ROSTO NA IMAGEM  
        //            Imgproc.rectangle(imagemDest,
        //                    new Point(rect.x - PAD_LATERAL, rect.y - PAD_SUPERIOR),
        //                    new Point(rect.x + rect.width + PAD_LATERAL, (rect.y + rect.height + PAD_SUPERIOR) - 5),
        //                    new Scalar(0, 255, 0));
        i++;
    }

    // Status message: either "no face found" or the face count.
    // NOTE(review): the literal below appears mis-encoded (likely
    // "Não foi possível identificar nenhum rosto...") — fix at the source file.
    if (faceDetections.toArray().length == 0) {
        totalRostos.setText("No foi possvel identificar nenhum rosto na imagem selecionada.");
    } else {
        totalRostos
                .setText("Identificamos " + faceDetections.toArray().length + " rosto(s) na imagem carregada.");
    }

}

From source file:by.zuyeu.deyestracker.core.detection.detector.BaseDetector.java

/**
 * Runs the given cascade classifier on a grayscale, histogram-equalized
 * copy of {@code inputframe}.
 *
 * @param inputframe the frame to scan (not modified).
 * @param classifier the cascade to apply.
 * @return the detected object rectangles (empty array when none).
 */
protected Rect[] detectWithClassifier(final Mat inputframe, final CascadeClassifier classifier) {
    LOG.debug("detectWithClassifier - start;");

    // cvtColor reads its source and allocates its destination, so the
    // original's inputframe.copyTo(mGrey) (immediately overwritten) and the
    // intermediate mRgba copy were both redundant and have been removed.
    final Mat mGrey = new Mat();
    Imgproc.cvtColor(inputframe, mGrey, Imgproc.COLOR_BGR2GRAY);
    // Equalize to improve detection under uneven lighting.
    Imgproc.equalizeHist(mGrey, mGrey);

    final MatOfRect detectedObjects = new MatOfRect();
    classifier.detectMultiScale(mGrey, detectedObjects);

    LOG.debug("detectWithClassifier - end;");
    return detectedObjects.toArray();
}

From source file:car_counter.counting.opencv.OpencvCarCounter.java

License:Apache License

@Override
public Collection<DetectedVehicle> processVideo(Path video, DateTime startDateTime) {
    CascadeClassifier carDetector = new CascadeClassifier("/Users/luke/working/car-counter/data/cars3.xml");

    VideoCapture videoCapture = new VideoCapture();
    videoCapture.open("/Users/luke/working/car-counter/data/video1.m4v");

    int index = 0;

    while (true) {
        if (!videoCapture.read(image)) {
            break;
        }//from   w w  w.j  a v a  2 s .  co m

        System.out.print(".");

        //processFrame();

        MatOfRect carDetections = new MatOfRect();
        carDetector.detectMultiScale(image, carDetections);

        System.out.println(String.format("Detected %s cars", carDetections.toArray().length));

        // Draw a bounding box around each hit
        for (Rect rect : carDetections.toArray()) {
            Core.rectangle(image, new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
        }

        String file = String.format("/Users/luke/working/car-counter/data/out/out-%03d.jpg", index++);
        org.opencv.highgui.Highgui.imwrite(file, image);
    }

    return null;
}

From source file:classes.FaceDetector.java

/**
 * Detects faces in the image at {@code filePath}, draws green boxes around
 * them, saves the annotated image under {@code <dirName>/imgs}, and returns
 * the detections serialized as JSON with keys renamed x->left, y->top.
 *
 * @param filePath path of the image to scan.
 * @return JSON array of detected rectangles ({left, top, width, height}).
 */
public static String detectFace(String filePath) {

    //        String dirName = "C:/Users/ggm/Documents/NetBeansProjects/MyWebApplication";
    String dirName = "C:/Users/Gonzalo/Documents/NetBeansProjects/MyWebApplication";
    //        String dirName = "/Users/ggmendez/Development/MyWebApplication";

    System.out.println(dirName);

    String frontalfaceFile = dirName + "/data/lbpcascades/lbpcascade_frontalface.xml";

    System.out.println(frontalfaceFile);

    CascadeClassifier faceDetector = new CascadeClassifier(frontalfaceFile);

    Mat image = Highgui.imread(filePath);

    System.out.println(image);

    // Detect faces in the image. MatOfRect is a container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    // Save the visualized detection.
    // Pattern fixes: "yyyy" (calendar year) instead of "YYYY" (week-based
    // year, wrong around New Year), and "HH" (0-23) instead of "hh" (1-12,
    // which collides AM/PM filenames).
    Date date = new Date();
    Format formatter = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
    String filename = dirName + "/imgs/out_" + formatter.format(date) + ".png";

    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);

    Gson gson = new Gson();
    String jsonResponse = gson.toJson(faceDetections.toArray());
    // Rename only the JSON keys; the original replaceAll("x", "left") /
    // replaceAll("y", "top") rewrote EVERY occurrence of those letters in
    // the serialized output, not just the key names.
    jsonResponse = jsonResponse.replaceAll("\"x\":", "\"left\":").replaceAll("\"y\":", "\"top\":");

    return jsonResponse;

}

From source file:com.jeremydyer.nifi.ObjectDetectionProcessor.java

License:Apache License

/**
 * Runs the cascade described by {@code dd} ("opencv_xml_cascade_path") on
 * {@code image}, emits one child FlowFile per detection to
 * REL_OBJECT_DETECTED (cropped, bordered, or unmodified depending on the
 * "crop"/"drawBounds" flags), then recurses into any "children" detection
 * descriptors using the last cropped image (or the input) as their input.
 *
 * @param session  NiFi session used to create/write/transfer child FlowFiles.
 * @param original parent FlowFile the detections derive from.
 * @param dd       detection descriptor (cascade path, name, crop/drawBounds,
 *                 optional "children" array).
 * @param image    image to scan; not modified (bordered output uses a clone).
 * @return the last cropped detection when cropping occurred, otherwise the
 *         input image (also used as input for child detectors).
 */
final public Mat detectObjects(final ProcessSession session, FlowFile original, final JSONObject dd,
        final Mat image) {

    CascadeClassifier objectDetector = new CascadeClassifier(dd.getString("opencv_xml_cascade_path"));
    MatOfRect objectDetections = new MatOfRect();
    objectDetector.detectMultiScale(image, objectDetections);
    //getLogger().error("Detected " + objectDetections.toArray().length + " " + dd.getString("name") + " objects in the input flowfile");

    // Holds the most recent cropped detection so it can seed child detectors.
    final AtomicReference<Mat> croppedImageReference = new AtomicReference<>();

    int counter = 0;
    for (int i = 0; i < objectDetections.toArray().length; i++) {
        final Rect rect = objectDetections.toArray()[i];
        FlowFile detection = session.write(session.create(original), new OutputStreamCallback() {
            @Override
            public void process(OutputStream outputStream) throws IOException {

                Mat croppedImage = null;

                // Should the image be cropped? If so there is no need to draw
                // bounds because that would be the same as the cropping.
                if (dd.getBoolean("crop")) {
                    Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
                    croppedImage = new Mat(image, rectCrop);
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", croppedImage, updatedImage);
                    croppedImageReference.set(croppedImage);
                    outputStream.write(updatedImage.toArray());
                } else {
                    // Should the image have a border drawn around it?
                    if (dd.getBoolean("drawBounds")) {
                        // Clone so the shared input image is not mutated.
                        Mat imageWithBorder = image.clone();
                        Imgproc.rectangle(imageWithBorder, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height),
                                new Scalar(255, 255, 255));
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", imageWithBorder, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    } else {
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", image, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    }
                }

            }
        });

        Map<String, String> atts = new HashMap<>();
        atts.put("object.detection.name", dd.getString("name"));
        // Long.toString replaces the deprecated new Long(...).toString() boxing.
        atts.put("object.detection.id", Long.toString(System.currentTimeMillis() + counter));

        counter++;

        detection = session.putAllAttributes(detection, atts);
        session.transfer(detection, REL_OBJECT_DETECTED);
    }

    // Child detectors operate on the last crop when one exists, otherwise on
    // the unmodified input image.
    Mat childResponse = null;

    if (croppedImageReference.get() != null) {
        childResponse = croppedImageReference.get();
    } else {
        childResponse = image;
    }

    if (dd.has("children")) {
        JSONArray children = dd.getJSONArray("children");
        if (children != null) {

            for (int i = 0; i < children.length(); i++) {
                JSONObject ddd = children.getJSONObject(i);
                childResponse = detectObjects(session, original, ddd, childResponse);
            }
        }
    }

    return childResponse;
}