Example usage for org.opencv.core Core magnitude

List of usage examples for org.opencv.core Core magnitude

Introduction

In this page you can find the example usage for org.opencv.core Core magnitude.

Prototype

public static void magnitude(Mat x, Mat y, Mat magnitude) 

Source Link

Usage

From source file: at.uniklu.itec.videosummary.Summarize.java

License: GNU General Public License

/**
 * Estimates the sharpness of a single video frame.
 *
 * <p>Loads the frame as grayscale, computes horizontal and vertical Sobel
 * gradients, and returns the sum of the per-pixel gradient magnitudes —
 * a sharper image has stronger edges and therefore a larger sum.
 *
 * @param frame image file to score
 * @return sum of gradient magnitudes over all pixels (channel 0 of the sum)
 */
private double getImageSharpness(File frame) {
    Mat img = Highgui.imread(frame.getAbsolutePath(), 0); // flag 0 = grayscale
    Mat dx = new Mat();
    Mat dy = new Mat();
    Imgproc.Sobel(img, dx, CvType.CV_32F, 1, 0);
    Imgproc.Sobel(img, dy, CvType.CV_32F, 0, 1);
    // Reuse dx as the destination to avoid allocating a third Mat.
    Core.magnitude(dx, dy, dx);
    Scalar sum = Core.sumElems(dx);
    // Release native buffers explicitly: Mat pixel data lives outside the
    // JVM heap, so the garbage collector cannot reclaim it on its own.
    // (The original triple System.gc() hint was redundant and has been removed.)
    img.release();
    dx.release();
    dy.release();
    return sum.val[0];
}

From source file: karthik.Barcode.MatrixBarcode.java

License: Open Source License

/**
 * Computes per-pixel gradient direction and magnitude for the grayscale
 * source image and stores the results in {@code img_details}.
 *
 * <p>Directions are folded into the 0-180 degree range (a gradient and its
 * opposite describe the same edge), thresholded magnitudes become a binary
 * edge map, and edge-free pixels get a sentinel angle so the later histogram
 * step ignores them. Side effects: writes {@code scharr_x}, {@code scharr_y},
 * {@code gradient_direction}, {@code gradient_magnitude}, {@code mask} and
 * {@code edgeDensity} on {@code img_details}, and triggers per-tile
 * histogram calculation.
 */
private void calcGradientDirectionAndMagnitude() {
    // calculates magnitudes and directions of gradients in the image
    // results are stored in appropriate matrices in img_details object

    // Scharr kernels: more rotationally accurate than Sobel for 3x3 gradients.
    Imgproc.Scharr(img_details.src_grayscale, img_details.scharr_x, CvType.CV_32F, 1, 0);
    Imgproc.Scharr(img_details.src_grayscale, img_details.scharr_y, CvType.CV_32F, 0, 1);

    // calc angle using Core.phase function - quicker than using atan2 manually
    // (last arg true = angles in degrees, range 0-360)
    Core.phase(img_details.scharr_x, img_details.scharr_y, img_details.gradient_direction, true);

    // convert angles from 180-360 to 0-180 range and set angles from 170-180 to 0
    // (a gradient and its 180-degree opposite represent the same edge orientation)
    Core.inRange(img_details.gradient_direction, scalarDict.get(180), scalarDict.get(360), img_details.mask);
    Core.add(img_details.gradient_direction, scalarDict.get(-180), img_details.gradient_direction,
            img_details.mask);
    // wrap the 170-180 band onto 0 so near-horizontal edges bin consistently
    Core.inRange(img_details.gradient_direction, scalarDict.get(170), scalarDict.get(180), img_details.mask);
    img_details.gradient_direction.setTo(ZERO_SCALAR, img_details.mask);

    // convert type after modifying angle so that angles above 360 don't get truncated
    // (CV_8U can only hold 0-255, so the fold into 0-180 must happen first)
    img_details.gradient_direction.convertTo(img_details.gradient_direction, CvType.CV_8U);
    if (DEBUG_IMAGES)
        write_Mat("angles.csv", img_details.gradient_direction);

    // calculate magnitude of gradient, normalize and threshold
    Core.magnitude(img_details.scharr_x, img_details.scharr_y, img_details.gradient_magnitude);
    Core.normalize(img_details.gradient_magnitude, img_details.gradient_magnitude, 0, 255, Core.NORM_MINMAX,
            CvType.CV_8U);
    // Otsu picks the threshold automatically; the 50 is ignored when OTSU is set
    Imgproc.threshold(img_details.gradient_magnitude, img_details.gradient_magnitude, 50, 255,
            Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);

    // set angle to DUMMY_ANGLE = 255 at all points where gradient magnitude is 0 i.e. where there are no edges
    // these angles will be ignored in the histogram calculation since that counts only up to 180
    Core.inRange(img_details.gradient_magnitude, ZERO_SCALAR, ZERO_SCALAR, img_details.mask);
    img_details.gradient_direction.setTo(scalarDict.get(DUMMY_ANGLE), img_details.mask);
    // add 1 to gradient directions so that gradients of 0 can be located
    // (shifts real angles to 1-181 so 0 unambiguously means "no data")
    Core.add(img_details.gradient_direction, new Scalar(1), img_details.gradient_direction);

    // calculate integral image for edge density
    img_details.edgeDensity = calcEdgeDensityIntegralImage();

    // calculate histograms for each tile
    calcHistograms();

    if (DEBUG_IMAGES) {
        write_Mat("magnitudes.csv", img_details.gradient_magnitude);
        write_Mat("angles_modified.csv", img_details.gradient_direction);
    }
}

From source file: udp.server.ObjectTracker.java

/**
 * Continuously grabs frames from the camera, isolates the target colour in
 * HSV space, and publishes the resulting angular tracking error to the
 * shared data holder under semaphore protection.
 *
 * <p>Runs forever; intended to execute on a dedicated thread. Side effects:
 * mutates {@code webcam_image}, {@code hsv_image}, {@code thresholded},
 * {@code thresholded2}, {@code lhsv}, {@code distance}, {@code circles},
 * {@code contours}, {@code data}, and writes X/Y error angles into
 * {@code dh}.
 *
 * <p>Fix over the original: {@code semaphore.release()} was previously
 * executed even when {@code acquire()} was interrupted, over-releasing a
 * permit the thread never held; the interrupt status was also swallowed.
 */
private void trackColors() {

    while (true) {
        webcam_image = this.camCap.getFrame().clone();
        if (!webcam_image.empty()) {

            // Adjust brightness/contrast (convertTo with -1 keeps the depth),
            // then blur to suppress sensor noise before thresholding.
            webcam_image.convertTo(webcam_image, -1, brightness, contrast);
            Imgproc.blur(webcam_image, webcam_image, new Size(7, 7));

            // HSV separates hue from intensity, making colour thresholds robust
            // to lighting changes.
            Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
            Core.inRange(hsv_image, hsv_min, hsv_max, thresholded);

            // Morphological open (erode then dilate) removes small speckles
            // while preserving the main blob's size.
            Imgproc.erode(thresholded, thresholded,
                    Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8)));
            Imgproc.dilate(thresholded, thresholded,
                    Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8)));

            Core.split(hsv_image, lhsv); // three single-channel planes: H, S, V
            Mat S = lhsv.get(1);
            Mat V = lhsv.get(2);
            // Invert S and V, then take their joint magnitude: small distance
            // means highly saturated AND bright pixels.
            Core.subtract(array255, S, S);
            Core.subtract(array255, V, V);
            S.convertTo(S, CvType.CV_32F);
            V.convertTo(V, CvType.CV_32F);
            Core.magnitude(S, V, distance);
            Core.inRange(distance, new Scalar(0.0), new Scalar(200.0), thresholded2);

            Imgproc.GaussianBlur(thresholded, thresholded, new Size(9, 9), 0, 0);
            Imgproc.HoughCircles(thresholded, circles, Imgproc.CV_HOUGH_GRADIENT, 2, thresholded.height() / 8,
                    200, 100, 0, 0);
            // NOTE(review): thresholded2 is passed as the hierarchy output here,
            // overwriting the saturation/value mask computed above — looks
            // accidental; confirm against the original author's intent.
            Imgproc.findContours(thresholded, contours, thresholded2, Imgproc.RETR_LIST,
                    Imgproc.CHAIN_APPROX_SIMPLE);

            // Sample the pixel at (210, 210) for debugging/diagnostics.
            data = webcam_image.get(210, 210);

            ArrayList<Float> errorAngles = getTargetError();

            if (errorAngles != null) {
                float eXa = errorAngles.get(0);
                float eYa = errorAngles.get(1);
                try {
                    semaphore.acquire();
                    try {
                        this.dh.setPixyXvalue(eXa);
                        this.dh.setPixyYvalue(eYa);
                    } finally {
                        // Release only after a successful acquire.
                        semaphore.release();
                    }
                } catch (InterruptedException ex) {
                    // Restore the interrupt flag so the owning thread can
                    // observe the shutdown request.
                    Thread.currentThread().interrupt();
                    Logger.getLogger(ObjectTracker.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
}