Example usage for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usages of the org.opencv.core Scalar constructor Scalar(double v0, double v1, double v2).

Prototype

public Scalar(double v0, double v1, double v2) 
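
The three-argument form fills channels v0, v1, v2 and leaves the fourth channel at its default of 0 (a Scalar can carry up to four values). A minimal sketch of constructing and using one; the class name and image size are illustrative:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ScalarSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // for Mats loaded with imread the channel order is BGR, so this is solid blue
        Mat image = new Mat(100, 100, CvType.CV_8UC3, new Scalar(255, 0, 0));
        // Scalar also works in per-element arithmetic, e.g. brightening every channel
        Core.add(image, new Scalar(10, 10, 10), image);
    }
}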

Usage

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

public List<Point> extractPoints() {
    Mat gray = new Mat(); // alternatively: new Mat(work_width, work_height, CvType.CV_8UC1)
    Mat binary = new Mat();

    Mat kernel = Mat.ones(3, 3, CvType.CV_8UC1);

    debugreset();

    Mat image = load_transformed();
    working_image = image.clone();
    debugsave(image, "source");

    Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
    debugsave(gray, "grayscale");

    Imgproc.GaussianBlur(gray, gray, new Size(15, 15), 0);
    debugsave(gray, "blurred");

    //Imgproc.equalizeHist(gray, gray);
    //debugsave(gray, "equalized");

    Imgproc.adaptiveThreshold(gray, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY_INV,
            129, 5);
    //Imgproc.threshold(gray, binary, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    //Imgproc.threshold(gray, binary, 128, 255, Imgproc.THRESH_BINARY_INV);
    debugsave(binary, "binary");

    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_CLOSE, kernel);
    debugsave(binary, "closed");

    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_OPEN, kernel);
    debugsave(binary, "opened");

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(binary, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE); // note: findContours modifies binary in place
    Imgproc.drawContours(image, contours, -1, new Scalar(0, 0, 255), 3);
    debugsave(image, "contours");

    List<PointAndArea> points = new ArrayList<>();

    for (MatOfPoint cnt : contours) {
        MatOfPoint2f c2f = new MatOfPoint2f();
        c2f.fromArray(cnt.toArray());
        RotatedRect rr = Imgproc.minAreaRect(c2f);

        double area = Imgproc.contourArea(cnt);

        if (rr.size.width / rr.size.height < 3 && rr.size.height / rr.size.width < 3 && rr.size.width < 64
                && rr.size.height < 64 && area > 9 && area < 10000) {
            points.add(new PointAndArea((int) area, rr.center));
        }
    }

    List<Point> final_points = new ArrayList<>();

    Collections.sort(points);
    Collections.reverse(points);
    int prev = -1;
    for (PointAndArea p : points) {
        Log.i("area", Integer.toString(p.area));
        if (prev == -1 || p.area >= prev / 2) {
            prev = p.area;
            Imgproc.circle(image, p.point, 10, new Scalar(0, 255, 0), 5);
            final_points.add(new Point(1 - p.point.y / work_height, 1 - p.point.x / work_width));
        }
    }
    debugsave(image, "circles");

    return final_points;
}
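
One detail worth flagging in this example: the method converts with COLOR_RGB2GRAY, i.e. it treats its frames as RGB, so the new Scalar(0, 0, 255) passed to drawContours renders blue there, not red. A small sketch of the distinction:

import org.opencv.core.Scalar;

public class ChannelOrder {
    // for Mats loaded with Imgcodecs.imread (BGR order), pure red is:
    static final Scalar RED_BGR = new Scalar(0, 0, 255);
    // for frames kept in RGB order (as in extractPoints above), pure red is:
    static final Scalar RED_RGB = new Scalar(255, 0, 0);
}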

From source file:com.github.rosjava_catkin_package_a.ARLocROS.ComputePose.java

License:Apache License

public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker
        // system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add 2D points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            p = new Point(vertex2d[1].x, vertex2d[1].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            final MatOfPoint mop = new MatOfPoint();
            mop.fromList(points2dlist);
            final List<MatOfPoint> pts = new ArrayList<>();
            pts.add(mop);
            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));
            // draw red rectangle around detected marker
            Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                    new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
        }

    }
    // load 2D and 3D points into Mats for solvePnP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP with empty input data (no markers detected)
    if (points2dlist.size() == 0) {
        return false;
    }

    // RANSAC-based pose estimation (can introduce some jitter); inliers
    // receives the indices of the points consistent with the estimated pose
    Mat inliers = new Mat();

    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ARLoc.getLog().info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());
    // avoid publish zero pose if localization failed
    if (inliers.rows() == 0) {
        return false;
    }

    return true;
}
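
If the jitter of the RANSAC variant is unwanted, the plain solver accepts the same point sets. A sketch under the assumption that the surrounding fields (cameraMatrix, distCoeffs as a MatOfDouble, rvec, tvec) are as above:

// non-RANSAC alternative; returns false when no pose could be estimated
boolean ok = Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
if (!ok) {
    return false;
}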

From source file:com.ibm.streamsx.edgevideo.device.AbstractFaceDetectApp.java

License:Open Source License

protected void renderImages(Mat rgbFrame, MatOfRect faceRects, List<Mat> faces) {
    // draw rectangles around the detected faces and render
    Rect[] rectArray = faceRects.toArray();
    for (Rect faceRect : rectArray) {
        Imgproc.rectangle(rgbFrame, new Point(faceRect.x, faceRect.y),
                new Point(faceRect.x + faceRect.width, faceRect.y + faceRect.height), new Scalar(0, 255, 0));
    }
    faceDetectPanel.matToBufferedImage(rgbFrame);
    faceDetectPanel.repaint();

    // render the detected faces
    if (renderDetections) {
        detectedFacesPanel.clear();
        for (Mat face : faces) {
            // TODO handle rendering multiple detections / images in the panel 
            detectedFacesPanel.matToBufferedImage(face);
        }
        detectedFacesPanel.repaint();
    }
}
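
The rectangle call above draws a 1-pixel outline by default; an extra thickness argument changes that, and a negative thickness fills the rectangle. A sketch on a throwaway canvas (names and sizes are illustrative):

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class RectangleThickness {
    static void draw() {
        Mat canvas = Mat.zeros(240, 320, CvType.CV_8UC3);
        // 3-pixel green outline
        Imgproc.rectangle(canvas, new Point(10, 10), new Point(100, 80), new Scalar(0, 255, 0), 3);
        // negative thickness fills the rectangle
        Imgproc.rectangle(canvas, new Point(120, 10), new Point(210, 80), new Scalar(0, 255, 0), -1);
    }
}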

From source file:com.ibm.streamsx.edgevideo.device.wipRecognition.WIP_NonEdgentFaceDetectApp.java

License:Open Source License

protected void renderImages(Mat rgbFrame, MatOfRect faceRects, List<Mat> faces, List<Prediction> predictions) {
    // draw rectangles around the detected faces and render
    Rect[] rectArray = faceRects.toArray();
    for (Rect faceRect : rectArray) {
        Imgproc.rectangle(rgbFrame, new Point(faceRect.x, faceRect.y),
                new Point(faceRect.x + faceRect.width, faceRect.y + faceRect.height), new Scalar(0, 255, 0));
    }

    // TODO add recognition prediction info label to image

    faceDetectPanel.matToBufferedImage(rgbFrame);
    faceDetectPanel.repaint();

    // render the detected faces
    if (renderDetections) {
        detectedFacesPanel.clear();
        for (Mat face : faces) {
            // TODO handle rendering multiple detections / images in the panel 
            detectedFacesPanel.matToBufferedImage(face);
        }
        detectedFacesPanel.repaint();
    }
}

From source file:com.imgprocessor.processor.ImageProcessorImpl.java

@Override
public void process() throws ValidatingException, TruncatingException, ProcessingException {

    // the load order matters here:
    // opencv_java249 provides the features2d implementation
    System.loadLibrary("opencv_java249");
    // opencv_java310 provides fillConvexPoly and everything else
    System.loadLibrary("opencv_java310");

    imageRepresentation = new Representation();

    // run template detection and line detection in another thread
    new Thread() {

        public void run() {

            // object detection
            if (!DETECT_ONLY_WALLS) {
                DetectObject objectDetector = new DetectObject(ImageFile.getAbsolutePath(), thisReff);
                objectDetector.detectAllObject();
            }

            // line detection
            HoughLineDetection houghLineDetection = new HoughLineDetection(DetectObject.TEMPLATE_OUTPUT_PATH,
                    thisReff);
            List<Line> detectedWalls = houghLineDetection.detectLines();
            imageRepresentation.populateWalls(detectedWalls);

            thisReff.appendDetail("Walls detected: " + detectedWalls.size());
            int k = 1;
            for (Line line : detectedWalls) {
                thisReff.appendDetail(k + ". (" + (int) line.x1 + ", " + (int) line.y1 + ") --> " + "("
                        + (int) line.x2 + ", " + (int) line.y2 + ")");
                k++;
            }

            // up to this point the walls have been detected and united

            // now convert the doors & windows as well
            List<Line> theDoors = new ArrayList<>();
            for (Door door : imageRepresentation.getDoors()) {
                Coordinates start = door.getStart();
                Coordinates end = door.getEnd();
                Point s = new Point(start.getX(), start.getY());
                Point e = new Point(end.getX(), end.getY());
                Line lDoor = new Line(s, e);
                lDoor.type = line_type.DOOR;
                theDoors.add(lDoor);
            }

            Mat blackMatrix = Imgcodecs.imread(LineProcessor.drawID + ". lineDetection.png");
            theDoors = LineProcessor.uniteObjectsWithWalls(blackMatrix, detectedWalls, theDoors, 50, true);
            LineProcessor.drawLines(theDoors, blackMatrix, new Scalar(255, 0, 255), 2, false, true, thisReff);

            // try uniting them

            List<Line> theWindows = new ArrayList<>();
            for (Window window : imageRepresentation.getWindows()) {

                Coordinates start = window.getStart();
                Coordinates end = window.getEnd();
                Point s = new Point(start.getX(), start.getY());
                Point e = new Point(end.getX(), end.getY());
                Line lWindow = new Line(s, e);
                lWindow.type = line_type.WINDOW;
                theWindows.add(lWindow);
            }

            blackMatrix = Imgcodecs.imread(LineProcessor.drawID + ". lineDetection.png");
            theWindows = LineProcessor.uniteObjectsWithWalls(blackMatrix, detectedWalls, theWindows, 50, true);
            LineProcessor.drawLines(theWindows, blackMatrix, new Scalar(255, 0, 0), 2, false, true, thisReff);

            // all good; now convert everything back into the representation
            // 1. The DOORS
            imageRepresentation.clearDoors();
            thisReff.appendDetail("The new DOORS coordinates (fixed to the walls): ");
            k = 1;
            for (Line door : theDoors) {

                Coordinates start = new Coordinates((float) door.getStartingPoint().x,
                        (float) door.getStartingPoint().y);
                Coordinates end = new Coordinates((float) door.getEndingPoint().x,
                        (float) door.getEndingPoint().y);
                Door theDoor = new Door(start, end);
                imageRepresentation.addDoor(theDoor);
                thisReff.appendDetail(k + ". (" + (int) start.getX() + ", " + (int) start.getY() + ") -> ("
                        + (int) end.getX() + ", " + (int) end.getY() + ")");
                k++;
            }

            // 2. The WINDOWS
            imageRepresentation.clearWindows();
            thisReff.appendDetail("The new WINDOWS coordinates (fixed to the walls): ");
            k = 1;
            for (Line window : theWindows) {

                Coordinates start = new Coordinates((float) window.getStartingPoint().x,
                        (float) window.getStartingPoint().y);
                Coordinates end = new Coordinates((float) window.getEndingPoint().x,
                        (float) window.getEndingPoint().y);
                Window theWindow = new Window(start, end);
                imageRepresentation.addWindow(theWindow);
                thisReff.appendDetail(k + ". (" + (int) start.getX() + ", " + (int) start.getY() + ") -> ("
                        + (int) end.getX() + ", " + (int) end.getY() + ")");
                k++;
            }

            //xml encode
            thisReff.setProgress(0);
            thisReff.appendDetail("Serializing the representation into 'Representation.xml'...");
            try {

                SAVED_XML_PATH = "Representation.xml";
                XMLEncoder myEncoder = new XMLEncoder(new FileOutputStream(SAVED_XML_PATH));
                myEncoder.writeObject(imageRepresentation);
                myEncoder.flush();
                myEncoder.close();

                thisReff.setProgress(100);
                thisReff.appendDetail("Finished serialization.");

                // RUN THE Graph Module Algorithm
                runGraphALgorithm();

            } catch (FileNotFoundException e) {
                thisReff.appendDetail("FAILED!");
                e.printStackTrace();
            }
        };

    }.start();

}
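
XMLEncoder is AutoCloseable, so the serialization step above can also be written with try-with-resources, which flushes and closes the stream even if writeObject throws. A sketch:

import java.beans.XMLEncoder;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;

class RepresentationWriter {
    static void write(Object representation, String path) throws FileNotFoundException {
        // close() implies flush(), and both happen even on an exception
        try (XMLEncoder encoder = new XMLEncoder(new FileOutputStream(path))) {
            encoder.writeObject(representation);
        }
    }
}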

From source file:com.jeremydyer.nifi.ObjectDetectionProcessor.java

License:Apache License

final public Mat detectObjects(final ProcessSession session, FlowFile original, final JSONObject dd,
        final Mat image) {

    CascadeClassifier objectDetector = new CascadeClassifier(dd.getString("opencv_xml_cascade_path"));
    MatOfRect objectDetections = new MatOfRect();
    objectDetector.detectMultiScale(image, objectDetections);
    //getLogger().error("Detected " + objectDetections.toArray().length + " " + dd.getString("name") + " objects in the input flowfile");

    final AtomicReference<Mat> croppedImageReference = new AtomicReference<>();

    int counter = 0;
    for (int i = 0; i < objectDetections.toArray().length; i++) {
        final Rect rect = objectDetections.toArray()[i];
        FlowFile detection = session.write(session.create(original), new OutputStreamCallback() {
            @Override
            public void process(OutputStream outputStream) throws IOException {

                Mat croppedImage = null;

                //Should the image be cropped? If so there is no need to draw bounds because that would be the same as the cropping
                if (dd.getBoolean("crop")) {
                    Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
                    croppedImage = new Mat(image, rectCrop);
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", croppedImage, updatedImage);
                    croppedImageReference.set(croppedImage);
                    outputStream.write(updatedImage.toArray());
                } else {
                    //Should the image have a border drawn around it?
                    if (dd.getBoolean("drawBounds")) {
                        Mat imageWithBorder = image.clone();
                        Imgproc.rectangle(imageWithBorder, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height),
                                new Scalar(255, 255, 255));
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", imageWithBorder, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    } else {
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", image, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    }
                }

            }
        });

        Map<String, String> atts = new HashMap<>();
        atts.put("object.detection.name", dd.getString("name"));
        atts.put("object.detection.id", new Long(System.currentTimeMillis() + counter).toString());

        counter++;

        detection = session.putAllAttributes(detection, atts);
        session.transfer(detection, REL_OBJECT_DETECTED);
    }

    Mat childResponse = null;

    if (croppedImageReference.get() != null) {
        childResponse = croppedImageReference.get();
    } else {
        childResponse = image;
    }

    if (dd.has("children")) {
        JSONArray children = dd.getJSONArray("children");
        if (children != null) {

            for (int i = 0; i < children.length(); i++) {
                JSONObject ddd = children.getJSONObject(i);
                childResponse = detectObjects(session, original, ddd, childResponse);
            }
        }
    }

    return childResponse;
}
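
A caveat with this pattern: CascadeClassifier fails silently when the XML file cannot be read, and detectMultiScale then simply reports no detections. Checking empty() right after construction surfaces a bad opencv_xml_cascade_path early; a sketch:

CascadeClassifier objectDetector = new CascadeClassifier(dd.getString("opencv_xml_cascade_path"));
if (objectDetector.empty()) {
    throw new IllegalStateException("failed to load cascade: " + dd.getString("opencv_xml_cascade_path"));
}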

From source file:com.kinect.FaceCapture.java

public void saveFace() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    System.out.println("\nRunning FaceDetector");

    CascadeClassifier faceDetector = new CascadeClassifier(
            FaceCapture.class.getResource("haarcascade_frontalface_alt.xml").getPath().substring(1));
    Mat image = Highgui.imread("screancapture.jpg");
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));
    Rect rectCrop = null;
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
        rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
    }
    if (rectCrop != null) { // only crop when at least one face was detected
        Mat image_roi = new Mat(image, rectCrop);
        Highgui.imwrite("screancapture.jpg", image_roi);
    }

    System.out.println("save face...");
}
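
Note that new Mat(image, rectCrop) creates a view sharing pixel data with image; submat does the same, and clone() is needed for an independent copy. A sketch, assuming at least one face was detected:

Mat roiView = image.submat(rectCrop); // equivalent view, shares pixels with image
Mat roiCopy = roiView.clone();        // detached copy, unaffected by later writes to image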

From source file:com.minio.io.alice.MainActivity.java

License:Open Source License

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    if (srcMat != null) {
        srcMat.release();
    }

    srcMat = inputFrame.rgba();

    if (matVideoWriter.isRecording()) {
        matVideoWriter.write(srcMat, webSocket);
    }
    if (serverReply != null) {

        if (serverReply.isReply()) {
            if (XDebug.LOG) {
                Log.i(MainActivity.TAG, " Alice found someone");
            }
            Imgproc.rectangle(srcMat, serverReply.getP1(), serverReply.getP2(), serverReply.getScalar(),
                    serverReply.getThickness());
            if (serverReply.getZoom() != 0)
                mOpenCvCameraView.increaseZoom(serverReply.getZoom());
            return srcMat;
        }

        if (serverReply.getDisplay()) {
            // Wake up if the display is set to true
            if (XDebug.LOG) {
                Log.i(MainActivity.TAG, " Alice Wakes up");
                Log.i(MainActivity.TAG, String.valueOf(serverReply.isReply()));
            }
            return srcMat;
        } else {
            if (XDebug.LOG) {
                Log.i(MainActivity.TAG, "Alice Sleeps");
            }
            // return a black mat when server replies with false for Display.
            blackMat = srcMat.clone();
            blackMat.setTo(new Scalar(0, 0, 0));
            return blackMat;
        }
    } else {
        // return black frame unless woken up explicitly by server.
        blackMat = srcMat.clone();
        blackMat.setTo(new Scalar(0, 0, 0));
        return blackMat;
    }

}
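
Two details in the black-frame branch: cloning the frame only to overwrite every pixel copies data needlessly, and with the three-argument Scalar constructor the fourth (alpha) channel defaults to 0, so an RGBA frame cleared with new Scalar(0, 0, 0) gets zero alpha as well. A sketch of a cheaper, explicitly opaque alternative:

Mat black = Mat.zeros(srcMat.size(), srcMat.type());
// or, when reusing a preallocated Mat, keep the alpha channel opaque:
black.setTo(new Scalar(0, 0, 0, 255));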

From source file:com.mycompany.facedetection.FaceDetector.java

public void findFaces() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(
            "D:\\opencv\\sources\\data\\lbpcascades\\lbpcascade_frontalface.xml");
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(img, faceDetections);
    for (Rect rect : faceDetections.toArray()) {
        faceList.add(rect);
        Imgproc.rectangle(img, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }
}
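
detectMultiScale also has an overload with tuning parameters that trade recall against false positives. A sketch with common starting values (the numbers are illustrative, not taken from this source):

MatOfRect faceDetections = new MatOfRect();
faceDetector.detectMultiScale(img, faceDetections,
        1.1,              // scaleFactor: step between image pyramid scales
        3,                // minNeighbors: higher values suppress false positives
        0,                // flags: unused by modern cascades
        new Size(30, 30), // minSize: ignore detections smaller than this
        new Size());      // maxSize: empty means no upper bound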

From source file:com.mycompany.linedetection.LineDetector.java

public void findLines() {
    Imgproc.Canny(img, edgeDetectedImg, 100, 200, 3, true);
    Mat lines = new Mat();

    int width = img.width();
    int height = img.height();
    double diagonal = Math.sqrt(width * width + height * height);
    int minOfWidthHeight = (width < height) ? width : height;

    Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, minOfWidthHeight * 10 / 100,
            diagonal * 25 / 100, diagonal * 4 / 100);

    int firstN = (lines.rows() < 5) ? lines.rows() : 5;

    for (int x = 0; x < lines.rows(); x++) {
        double[] vec = lines.get(x, 0);
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point startPoint = new Point(x1, y1);
        Point endPoint = new Point(x2, y2);

        double angle_inv = horizontalLine.getAngle(new Line(x1, y1, x2, y2));
        double angle = horizontalLine.getAngle(new Line(x2, y2, x1, y1));
        if ((angle >= diagAngle1 - DIAGONAL_TRESHOLD && angle <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle >= diagAngle2 - DIAGONAL_TRESHOLD && angle <= diagAngle2 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle1 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle2 - DIAGONAL_TRESHOLD
                        && angle_inv <= diagAngle2 + DIAGONAL_TRESHOLD)) {
            diagonalLineList.add(new Line(x1, y1, x2, y2));
            Imgproc.line(img, startPoint, endPoint, new Scalar(255, 255, 0), 4);
        } else {
            lineList.add(new Line(x1, y1, x2, y2));
        }

    }

    Collections.sort(lineList, new Comparator<Line>() {
        @Override
        public int compare(Line l1, Line l2) {
            // sort descending by length; Double.compare avoids int truncation
            return Double.compare(l2.getLength(), l1.getLength());
        }
    });

    ArrayList<Line> arr = new ArrayList<>();

    // draw only when enough lines were detected
    if (lineList.size() >= firstN + 1) {
        for (int i = 0; i < firstN + 1; i++) {
            double x1 = lineList.get(i).getX1(), y1 = lineList.get(i).getY1(), x2 = lineList.get(i).getX2(),
                    y2 = lineList.get(i).getY2();
            Point startPoint = new Point(x1, y1);
            Point endPoint = new Point(x2, y2);
            arr.add(lineList.get(i));
            Imgproc.line(img, startPoint, endPoint, new Scalar(0, 0, 255), 3);
        }
    }
    lineList = arr;
}
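
The last three HoughLinesP arguments above are the accumulator threshold, the minimum segment length, and the maximum gap bridged between collinear segments; deriving them from the image dimensions keeps the detector resolution-independent. The same call with the parameters named, as a sketch:

int threshold = minOfWidthHeight * 10 / 100; // votes required in the Hough accumulator
double minLineLength = diagonal * 25 / 100;  // discard segments shorter than this
double maxLineGap = diagonal * 4 / 100;      // join collinear segments separated by less than this
Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, threshold, minLineLength, maxLineGap);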