Example usage for org.opencv.core Core.split

List of usage examples for org.opencv.core Core.split

Introduction

On this page you can find example usage for org.opencv.core Core.split.

Prototype

public static void split(Mat m, List<Mat> mv) 
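
A minimal, self-contained sketch of the call (not taken from any of the sources listed below; the class name SplitExample and the image path "input.jpg" are only illustrative): Core.split fills the supplied list with one single-channel Mat per channel of the input, in channel order, and Core.merge performs the inverse operation.

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;

public class SplitExample {
    public static void main(String[] args) {
        // load the native OpenCV library before calling any OpenCV method
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat bgr = Highgui.imread("input.jpg"); // 3-channel BGR image (hypothetical path)
        List<Mat> channels = new ArrayList<Mat>();
        Core.split(bgr, channels); // channels now holds [B, G, R], each single-channel
        System.out.println("channels: " + channels.size());
        Mat merged = new Mat();
        Core.merge(channels, merged); // recombine the planes into a 3-channel Mat
    }
}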

Usage

From source file:es.ugr.osgiliart.features.opencv.MatchImage.java

License:Open Source License

public MatchImage(String templatePath) {
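    // load the template image, resize it to SIZE x SIZE and split it into one Mat per color channel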
    Mat template = Highgui.imread(templatePath);
    Mat resized = new Mat(SIZE, SIZE, template.type());
    //Mat blurred = new Mat();
    Imgproc.resize(template, resized, new Size(SIZE, SIZE));
    //Imgproc.blur(resized, blurred, new Size(FILTER_SIZE,FILTER_SIZE) );
    templateChannels = new ArrayList<Mat>();
    Core.split(resized, templateChannels);
}

From source file:es.ugr.osgiliart.features.opencv.MatchImage.java

License:Open Source License

public double match(String path) {
    Mat img = Highgui.imread(path);
    Mat resizedImg = new Mat(SIZE, SIZE, img.type());
    //Mat blurredImg = new Mat();
    Imgproc.resize(img, resizedImg, new Size(SIZE, SIZE));
    //Imgproc.blur(resizedImg, blurredImg, new Size(FILTER_SIZE,FILTER_SIZE) );

    ArrayList<Mat> channels = new ArrayList<Mat>();

    Core.split(resizedImg, channels);

    int conta = 0;

    double corrcoef = 0;
    for (int i = 0; i < 1; ++i) {
        /*      
              for(int px = 0; px < SIZE; px++){
                 for(int py = 0; py < SIZE; py++){
                    if(resizedImg.get(px, py)[i]!=0.0){
          double im_orig = templateChannels.get(i).get(px, py)[0];
          double im_indi = resizedImg.get(px, py)[i];
                  
          corrcoef +=  Math.pow(im_orig ,2) - Math.pow(im_indi, 2);
          conta++;
                    }
                            
                            
                 }
              }*/

        Mat result = new Mat();
        Imgproc.matchTemplate(channels.get(i), templateChannels.get(i), result, Imgproc.TM_CCOEFF_NORMED);
        //Imgproc.matchTemplate(channels.get(i), templateChannels.get(i), result, Imgproc.TM_SQDIFF);
        corrcoef += result.get(0, 0)[0];
        //corrcoef += result.get(0, 0)[0];
    }
    corrcoef /= 3.0;
    //return (corrcoef/conta/(255*3));
    return (corrcoef);
}

From source file:fuzzycv.MainFrame.java

private Mat removeBG(Mat frame) {

    Mat hsvImg = new Mat();
    List<Mat> hsvPlanes = new ArrayList<>();
    Mat thresholdImg = new Mat();

    //threshold the image with the histogram average value
    hsvImg.create(frame.size(), CvType.CV_8U);
    Imgproc.cvtColor(frame, hsvImg, Imgproc.COLOR_BGR2HSV);
    Core.split(hsvImg, hsvPlanes);

    double threshValue = getHistoAvg(hsvImg, hsvPlanes.get(0));

    if (inverseCheckBox.isSelected()) {
        Imgproc.threshold(hsvPlanes.get(0), thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY_INV);
    } else {
        Imgproc.threshold(hsvPlanes.get(0), thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY);
    }

    Imgproc.blur(thresholdImg, thresholdImg, new Size(5, 5));

    // dilate to fill gaps, erode to smooth edges
    Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, 1), 6);
    Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, 1), 6);

    Imgproc.threshold(thresholdImg, thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY);

    // create the new image
    Mat foreground = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    frame.copyTo(foreground, thresholdImg);

    return foreground;
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

private void populateHSV() {
    matHSV = imitate(matBGRA);
    Imgproc.cvtColor(matBGRA, matHSV, Imgproc.COLOR_BGR2HSV);
    ArrayList<Mat> channels = new ArrayList<Mat>();
    Core.split(matHSV, channels);

    matH = channels.get(0);
    matS = channels.get(1);
    matV = channels.get(2);
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

private void populateBGRA() {
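    // split the 4-channel BGRA Mat into separate single-channel planes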
    ArrayList<Mat> channels = new ArrayList<Mat>();
    Core.split(matBGRA, channels);
    matB = channels.get(0);
    matG = channels.get(1);
    matR = channels.get(2);
    matA = channels.get(3);
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

public static void ARGBtoBGRA(Mat rgba, Mat bgra) {
    ArrayList<Mat> channels = new ArrayList<Mat>();
    Core.split(rgba, channels);

    ArrayList<Mat> reordered = new ArrayList<Mat>();
    // Starts as ARGB. 
    // Make into BGRA.

    reordered.add(channels.get(3));
    reordered.add(channels.get(2));
    reordered.add(channels.get(1));
    reordered.add(channels.get(0));

    Core.merge(reordered, bgra);
}

From source file:imageprocess.ObjectFinder.java

public Mat getHueHistogram(final Mat image, int minSaturation) {

    Mat hist = new Mat();

    // Convert to HSV color space
    Mat hsv = new Mat();
    Imgproc.cvtColor(image, hsv, CV_BGR2HSV);
    Mat mask = new Mat();
    if (minSaturation > 0) {
        // Splitting the 3 channels into 3 images
        List<Mat> v = new ArrayList<>();
        Core.split(hsv, v);
        // Mask out the low saturated pixels
        Imgproc.threshold(v.get(1), mask, minSaturation, 255, THRESH_BINARY);
    }
    // Compute the histogram of the hue channel (channel 0 of the HSV image)
    Imgproc.calcHist(Arrays.asList(hsv), new MatOfInt(0), // the hue channel is used
            mask, // empty unless low-saturation pixels were masked out above
            hist, // the resulting histogram
            new MatOfInt(256), // number of bins
            new MatOfFloat(0.0f, 180.0f) // hue value range
    );

    return hist;
}

From source file:imageprocess.ObjectFinder.java

public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread("D:\\backup\\opencv\\baboon1.jpg");
    // Define ROI
    Rect rect = new Rect(110, 260, 35, 40);
    Mat imageROI = new Mat(image, rect);
    Core.rectangle(image, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));

    Imshow origIm = new Imshow("Origin");
    origIm.showImage(image);

    ObjectFinder finder = new ObjectFinder(false, 0.2f);

    // Get the Hue histogram
    int minSat = 65;
    Mat hist = finder.getHueHistogram(imageROI, minSat);
    Mat norm = new Mat();
    Core.normalize(hist, norm, 1, 0, NORM_L2);

    finder.setROIHistogram(norm);

    // Convert to HSV space
    Mat hsv = new Mat();
    Imgproc.cvtColor(image, hsv, CV_BGR2HSV);
    // Split the image
    List<Mat> v = new ArrayList<>();
    Core.split(hsv, v);

    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm = new Imshow("Saturation");
    satIm.showImage(v.get(1));
    // Get back-projection of hue histogram
    Mat result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));

    Imshow resultHueIm = new Imshow("Result Hue");
    resultHueIm.showImage(result);

    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm = new Imshow("Result Hue and raw");
    resultHueAndIm.showImage(result);

    // Second image
    Mat image2 = Highgui.imread("D:\\backup\\opencv\\baboon3.jpg");

    // Display image
    Imshow img2Im = new Imshow("Image2");
    img2Im.showImage(image2);

    // Convert to HSV space
    Imgproc.cvtColor(image2, hsv, CV_BGR2HSV);

    // Split the image
    Core.split(hsv, v);

    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm2 = new Imshow("Saturation2");
    satIm2.showImage(v.get(1));

    // Get back-projection of hue histogram
    finder.setThreshold(-1.0f);
    result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));

    Imshow resultHueIm2 = new Imshow("Result Hue2");
    resultHueIm2.showImage(result);

    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm2 = new Imshow("Result Hue and raw2");
    resultHueAndIm2.showImage(result);

    Rect rect2 = new Rect(110, 260, 35, 40);
    Core.rectangle(image2, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));

    TermCriteria criteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 100, 0.01);
    int steps = Video.meanShift(result, rect2, criteria);

    Core.rectangle(image2, new Point(rect2.x, rect2.y),
            new Point(rect2.x + rect2.width, rect2.y + rect2.height), new Scalar(0, 255, 0));

    Imshow meanshiftIm = new Imshow("Meanshift result");
    meanshiftIm.showImage(image2);

}

From source file:javafx1.JavaFX1.java

private Mat doBackgroundRemoval(Mat frame) {
        // init
        Mat hsvImg = new Mat();
        List<Mat> hsvPlanes = new ArrayList<>();
        Mat thresholdImg = new Mat();

        // THRESH_BINARY_INV would produce the inverse mask; plain THRESH_BINARY is used here
        int thresh_type = Imgproc.THRESH_BINARY;

        // threshold the image with the average hue value
        hsvImg.create(frame.size(), CvType.CV_8U);
        Imgproc.cvtColor(frame, hsvImg, Imgproc.COLOR_BGR2HSV);
        Core.split(hsvImg, hsvPlanes);

        // get the average hue value of the image
        double threshValue = this.getHistAverage(hsvImg, hsvPlanes.get(0));

        Imgproc.threshold(hsvPlanes.get(0), thresholdImg, threshValue, 179.0, thresh_type);

        Imgproc.blur(thresholdImg, thresholdImg, new Size(5, 5));

        // dilate to fill gaps, erode to smooth edges
        Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);
        Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 3);

        Imgproc.threshold(thresholdImg, thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY);

        // create the new image
        Mat foreground = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
        frame.copyTo(foreground, thresholdImg);

        return foreground;
    }

From source file:logic.featurepointextractor.MouthFPE.java

/**
 * Detect mouth feature points
 * Algorithm:           Equalize histogram of mouth rect
 *                      Implement Sobel horizontal filter
 *                      Find corners
 *                      Invert color + Binarization
 *                      Find lip up and down points
 * @param mc
 * @return 
 */
@Override
public Point[] detect(MatContainer mc) {
    /**Algorithm
     *                  find pix(i) = (R-G)/R
     *                  normalize: 2arctan(pix(i))/pi
     */

    //find pix(i) = (R-G)/R
    Mat mouthRGBMat = mc.origFrame.submat(mc.mouthRect);
    List<Mat> mouthSplitChannelsList = new ArrayList<Mat>();
    Core.split(mouthRGBMat, mouthSplitChannelsList);
    //extract R-channel
    Mat mouthR = mouthSplitChannelsList.get(2);
    mouthR.convertTo(mouthR, CvType.CV_64FC1);
    //extract G-channel
    Mat mouthG = mouthSplitChannelsList.get(1);
    mouthG.convertTo(mouthG, CvType.CV_64FC1);
    //calculate (R-G)/R
    Mat dst = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);
    mc.mouthProcessedMat = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);

    Core.absdiff(mouthR, mouthG, dst);
    //        Core.divide(dst, mouthR, mc.mouthProcessedMat);
    mc.mouthProcessedMat = dst;
    mc.mouthProcessedMat.convertTo(mc.mouthProcessedMat, CvType.CV_8UC1);
    Imgproc.equalizeHist(mc.mouthProcessedMat, mc.mouthProcessedMat);
    //       Imgproc.blur(mc.mouthProcessedMat, mc.mouthProcessedMat, new Size(4,4));
    //        Imgproc.morphologyEx(mc.mouthProcessedMat, mc.mouthProcessedMat, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4,4)));
    Imgproc.threshold(mc.mouthProcessedMat, mc.mouthProcessedMat, 230, 255, THRESH_BINARY);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mc.mouthProcessedMat, contours, new Mat(), Imgproc.RETR_TREE,
            Imgproc.CHAIN_APPROX_SIMPLE);

    //find the biggest contour
    int maxSize = -1;
    int tmpSize = -1;
    int index = -1;

    Rect centMouthRect = new Rect(mc.mouthRect.x + mc.mouthRect.width / 4,
            mc.mouthRect.y + mc.mouthRect.height / 4, mc.mouthRect.width / 2, mc.mouthRect.height / 2);
    if (contours.size() != 0) {
        maxSize = contours.get(0).toArray().length;
        tmpSize = 0;
        index = 0;
    }

    //find max contour
    for (int j = 0; j < contours.size(); ++j) {
        //if contour is vertical, exclude it 
        Rect boundRect = Imgproc.boundingRect(contours.get(j));
        int centX = mc.mouthRect.x + boundRect.x + boundRect.width / 2;
        int centY = mc.mouthRect.y + boundRect.y + boundRect.height / 2;
        //                LOG.info("Center = " + centX + "; " + centY);
        //                LOG.info("Rect = " + centMouthRect.x + "; " + centMouthRect.y);
        if (!centMouthRect.contains(new Point(centX, centY)))
            continue;

        tmpSize = contours.get(j).toArray().length;

        LOG.info("Contour " + j + "; size = " + tmpSize);

        if (tmpSize > maxSize) {
            maxSize = tmpSize;
            index = j;
        }
    }

    //approximate curve
    Point[] p1 = contours.get(index).toArray();
    MatOfPoint2f p2 = new MatOfPoint2f(p1);
    MatOfPoint2f p3 = new MatOfPoint2f();
    Imgproc.approxPolyDP(p2, p3, 1, true);

    p1 = p3.toArray();

    MatOfInt tmpMatOfPoint = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(p1), tmpMatOfPoint);

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(p1));
    if (boundRect.area() / mc.mouthRect.area() > 0.3)
        return null;

    int size = (int) tmpMatOfPoint.size().height;
    Point[] _p1 = new Point[size];
    int[] a = tmpMatOfPoint.toArray();

    _p1[0] = new Point(p1[a[0]].x + mc.mouthRect.x, p1[a[0]].y + mc.mouthRect.y);
    Core.circle(mc.origFrame, _p1[0], 3, new Scalar(0, 0, 255), -1);
    for (int i = 1; i < size; i++) {
        _p1[i] = new Point(p1[a[i]].x + mc.mouthRect.x, p1[a[i]].y + mc.mouthRect.y);
        Core.circle(mc.origFrame, _p1[i], 3, new Scalar(0, 0, 255), -1);
        Core.line(mc.origFrame, _p1[i - 1], _p1[i], new Scalar(255, 0, 0), 2);
    }
    Core.line(mc.origFrame, _p1[size - 1], _p1[0], new Scalar(255, 0, 0), 2);

    /*        contours.set(index, new MatOfPoint(_p1));

            mc.mouthProcessedMat.setTo(new Scalar(0));

            Imgproc.drawContours(mc.mouthProcessedMat, contours, index, new Scalar(255), -1);
    */
    mc.mouthMatOfPoint = _p1;

    MatOfPoint matOfPoint = new MatOfPoint(_p1);
    mc.mouthBoundRect = Imgproc.boundingRect(matOfPoint);
    mc.features.mouthBoundRect = mc.mouthBoundRect;

    /**extract feature points:  1 most left
     *                          2 most right
     *                          3,4 up
     *                          5,6 down
     */

    //        mc.mouthMatOfPoint = extractFeaturePoints(contours.get(index));

    return null;
}