Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usage for org.opencv.core Scalar Scalar.

Prototype

public Scalar(double v0, double v1, double v2) 

Source Link

Usage

From source file:logic.analyzer.VideoAnalyzer.java

/**
 * Runs the full analysis pipeline on the incoming video stream.
 *
 * Algorithm:       INITIAL LOCALIZATION
 *              Open port
 *              Take frame
 *              Preprocess frame
 *              Localize face
 *              Localize eyepair
 *              Localize mouth
 *                  OBJECT TRACKING
 *              Track eyebrows
 *              Rotate frame
 *              Track mouth
 *                  FEATURE POINT EXTRACTION
 *              Extract mouth features
 */
@Override
public void analyze() {
    // open port; abort if the image source cannot be opened
    if (!this.imLoader.open())
        return;

    // when true, run INITIAL LOCALIZATION on the next frame;
    // reset to true whenever tracking fails so localization restarts
    boolean isRunInit = true;

    // localize ROIs and extract feature points until the user stops us
    retrieve_frame: do {
        // take frame; skip this iteration if the loader produced nothing
        container.origFrame = this.imLoader.loadImage();
        if (container.origFrame == null)
            continue;

        // preprocess frame (grayscale etc.)
        container.grayFrame = Util.preprocessFrame(container.origFrame);

        // INITIAL LOCALIZATION: detect face, eye pair and mouth
        if (isRunInit) {
            if (!detectFaceFeatures()) {
                resetFeatureDeque();
                continue retrieve_frame;
            }
            isRunInit = false;
        }

        // OBJECT TRACKING: rotate frame and track the eyebrow templates
        int rotRes = rotateFrameAndTrackTemplate(container.eyeBrowBaseDst, container.eyePairRect.width,
                container.eyeBrowBoundRectArr, container.eyeBrowTrackingTemplateArr,
                container.eyeBrowCentersPointsArr, new Scalar(0, 0, 255));

        if (rotRes != 1) {
            resetFeatureDeque();
            isRunInit = true;
            continue retrieve_frame;
        }

        // track mouth and nose templates in the preprocessed frame
        container.mouthRect = Util.trackTemplate(container.grayFrame, container.mouthRect, container.mouthMat);
        container.noseRect = Util.trackTemplate(container.grayFrame, container.noseRect, container.noseMat);

        // BUGFIX: the original checked only mouthRect for null but then
        // dereferenced noseRect below; check both trackers before use
        // (the original log message also wrongly mentioned only the nose)
        if (container.mouthRect == null || container.noseRect == null) {
            resetFeatureDeque();
            LOG.warn("Tracking pattern of mouth or nose is out of image scope");
            isRunInit = true;
            continue retrieve_frame;
        }

        // nose centre point used by the feature extraction step
        container.features.noseCenterPoint = new Point(container.noseRect.x + container.noseRect.width / 2,
                container.noseRect.y + container.noseRect.height / 2);

        // visualize nose centre and the tracked rectangles
        Core.circle(container.origFrame, container.features.noseCenterPoint, 5, new Scalar(0, 0, 255), -1);
        Core.rectangle(container.origFrame, container.noseRect.tl(), container.noseRect.br(),
                new Scalar(255, 0, 0), 1);
        Core.rectangle(container.origFrame, container.mouthRect.tl(), container.mouthRect.br(),
                new Scalar(255, 0, 0), 1);

        // FEATURE POINT EXTRACTION: detect mouth feature points and draw them
        mouthFPE.detect(container);

        // all features detected for this frame: mark the set as live and enqueue
        if (container.features.isStocked)
            container.features.isStocked = false;

        container.featuresDeque.add(container.features);

        imShowProc.showImage(container.origFrame);

    } while (!isByKey);
}

/**
 * Marks the current feature set as stale and makes it the only element of
 * the deque, so downstream drawing restarts from scratch after an error.
 */
private void resetFeatureDeque() {
    container.features.isStocked = true;
    container.featuresDeque.clear();
    container.featuresDeque.add(container.features);
}

From source file:logic.featurepointextractor.MouthFPE.java

/**
 * Detect mouth feature points/*from  w  w  w .j  a  va 2  s. co  m*/
 * Algorithm:           Equalize histogram of mouth rect
 *                      Implement Sobel horizontal filter
 *                      Find corners
 *                      Invert color + Binarization
 *                      Find lip up and down points
 * @param mc
 * @return 
 */
@Override
public Point[] detect(MatContainer mc) {
    /**Algorithm
     *                  find pix(i) = (R-G)/R
     *                  normalize: 2arctan(pix(i))/pi
     */

    //find pix(i) = (R-G)/R
    Mat mouthRGBMat = mc.origFrame.submat(mc.mouthRect);
    List mouthSplitChannelsList = new ArrayList<Mat>();
    Core.split(mouthRGBMat, mouthSplitChannelsList);
    //extract R-channel
    Mat mouthR = (Mat) mouthSplitChannelsList.get(2);
    mouthR.convertTo(mouthR, CvType.CV_64FC1);
    //extract G-channel
    Mat mouthG = (Mat) mouthSplitChannelsList.get(1);
    mouthG.convertTo(mouthG, CvType.CV_64FC1);
    //calculate (R-G)/R
    Mat dst = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);
    mc.mouthProcessedMat = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);

    Core.absdiff(mouthR, mouthG, dst);
    //        Core.divide(dst, mouthR, mc.mouthProcessedMat);
    mc.mouthProcessedMat = dst;
    mc.mouthProcessedMat.convertTo(mc.mouthProcessedMat, CvType.CV_8UC1);
    Imgproc.equalizeHist(mc.mouthProcessedMat, mc.mouthProcessedMat);
    //       Imgproc.blur(mc.mouthProcessedMat, mc.mouthProcessedMat, new Size(4,4));
    //        Imgproc.morphologyEx(mc.mouthProcessedMat, mc.mouthProcessedMat, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4,4)));
    Imgproc.threshold(mc.mouthProcessedMat, mc.mouthProcessedMat, 230, 255, THRESH_BINARY);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mc.mouthProcessedMat, contours, new Mat(), Imgproc.RETR_TREE,
            Imgproc.CHAIN_APPROX_SIMPLE);

    //find the biggest contour
    int maxSize = -1;
    int tmpSize = -1;
    int index = -1;

    Rect centMouthRect = new Rect(mc.mouthRect.x + mc.mouthRect.width / 4,
            mc.mouthRect.y + mc.mouthRect.height / 4, mc.mouthRect.width / 2, mc.mouthRect.height / 2);
    if (contours.size() != 0) {
        maxSize = contours.get(0).toArray().length;
        tmpSize = 0;
        index = 0;
    }

    //find max contour
    for (int j = 0; j < contours.size(); ++j) {
        //if contour is vertical, exclude it 
        Rect boundRect = Imgproc.boundingRect(contours.get(j));
        int centX = mc.mouthRect.x + boundRect.x + boundRect.width / 2;
        int centY = mc.mouthRect.y + boundRect.y + boundRect.height / 2;
        //                LOG.info("Center = " + centX + "; " + centY);
        //                LOG.info("Rect = " + centMouthRect.x + "; " + centMouthRect.y);
        if (!centMouthRect.contains(new Point(centX, centY)))
            continue;

        tmpSize = contours.get(j).toArray().length;

        LOG.info("Contour " + j + "; size = " + tmpSize);

        if (tmpSize > maxSize) {
            maxSize = tmpSize;
            index = j;
        }
    }

    //appproximate curve
    Point[] p1 = contours.get(index).toArray();
    MatOfPoint2f p2 = new MatOfPoint2f(p1);
    MatOfPoint2f p3 = new MatOfPoint2f();
    Imgproc.approxPolyDP(p2, p3, 1, true);

    p1 = p3.toArray();

    MatOfInt tmpMatOfPoint = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(p1), tmpMatOfPoint);

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(p1));
    if (boundRect.area() / mc.mouthRect.area() > 0.3)
        return null;

    int size = (int) tmpMatOfPoint.size().height;
    Point[] _p1 = new Point[size];
    int[] a = tmpMatOfPoint.toArray();

    _p1[0] = new Point(p1[a[0]].x + mc.mouthRect.x, p1[a[0]].y + mc.mouthRect.y);
    Core.circle(mc.origFrame, _p1[0], 3, new Scalar(0, 0, 255), -1);
    for (int i = 1; i < size; i++) {
        _p1[i] = new Point(p1[a[i]].x + mc.mouthRect.x, p1[a[i]].y + mc.mouthRect.y);
        Core.circle(mc.origFrame, _p1[i], 3, new Scalar(0, 0, 255), -1);
        Core.line(mc.origFrame, _p1[i - 1], _p1[i], new Scalar(255, 0, 0), 2);
    }
    Core.line(mc.origFrame, _p1[size - 1], _p1[0], new Scalar(255, 0, 0), 2);

    /*        contours.set(index, new MatOfPoint(_p1));
            
            mc.mouthProcessedMat.setTo(new Scalar(0));
                    
            Imgproc.drawContours(mc.mouthProcessedMat, contours, index, new Scalar(255), -1);
                    
    */ mc.mouthMatOfPoint = _p1;

    MatOfPoint matOfPoint = new MatOfPoint(_p1);
    mc.mouthBoundRect = Imgproc.boundingRect(matOfPoint);
    mc.features.mouthBoundRect = mc.mouthBoundRect;

    /**extract feature points:  1 most left
     *                          2 most right
     *                          3,4 up
     *                          5,6 down
     */

    //        mc.mouthMatOfPoint = extractFeaturePoints(contours.get(index));

    return null;
}

From source file:logic.helpclass.Drawer.java

/**
 * Draws both tracked eye centres and a line connecting them on the frame.
 *
 * @param frame image to draw on (modified in place)
 * @param p1    first eye centre
 * @param p2    second eye centre
 */
static public void drawTrackedEyeCenters(Mat frame, Point p1, Point p2) {
    final Scalar red = new Scalar(0, 0, 255);
    final Scalar green = new Scalar(0, 255, 0);
    // mark each centre with a thick red dot
    Core.circle(frame, p1, 3, red, 5);
    Core.circle(frame, p2, 3, red, 5);
    // connect the two centres with a thin green line
    Core.line(frame, p1, p2, green, 1);
}

From source file:logic.imagelocalizator.MouthLocalizator.java

/**Localizes mouth rectangle
 *      Divide face on upRect end botRect
 *      Extract Mat from botRect/*from   w w w  .  j  a  v a2  s . c o m*/
 *      Equalize botMat
 *      Find mouthRect using Haar cascades
 *      Enlarge mouthRect according to face size
 * @param mc
 * @return mouthRect, mouthMat
 */
@Override
public boolean localize(MatContainer mc) {
    /* Mouth Localization by Haar cascades*/
    /*
    //Divide face on upRect end botRect
    int start_x = mc.faceRect.x;
    int start_y = mc.faceRect.y + (int)mc.faceRect.height*2/3;
    int end_x = start_x + mc.faceRect.width;
    int end_y = start_y + (int)mc.faceRect.height*2/3;
            
    end_x = end_x >= mc.grayFrame.width() ? mc.grayFrame.width() : end_x;
    end_y = end_y >= mc.grayFrame.height() ? mc.grayFrame.height() : end_y;
            
    mc.mouthRect = new Rect(new Point(start_x, start_y), new Point(end_x, end_y));
    //Extract Mat from botRect
            
            
    LOG.error(mc.mouthRect.tl() + " " + mc.mouthRect.br());
    LOG.error(mc.grayFrame.size());
            
    mc.mouthMat = mc.grayFrame.submat(mc.mouthRect);
    //Equalize botMat
    Imgproc.equalizeHist(mc.mouthMat, mc.mouthMat);
    //Find mouthRect using Haar cascades
    MatOfRect mouthRectMat = new MatOfRect();
    mouthCascade.detectMultiScale(mc.mouthMat, mouthRectMat);
    Rect rMat[] = mouthRectMat.toArray();
              
    //find real mouth within detected mouth array rMat
    List<Rect> newFacesArr = new ArrayList();
    for(Rect rect:rMat)
    {
    rect = new Rect(rect.x + mc.faceRect.x, rect.y + mc.faceRect.y + mc.faceRect.height*2/3, rect.width, rect.height);
            
    //check if rect out of face
    if(!mc.faceRect.contains(rect.br()) || !mc.faceRect.contains(rect.tl()))
        continue;
            
    newFacesArr.add(rect);
    }
            
    if( newFacesArr.size() !=1 )//need just 1 mouth
    {
    LOG.warn("Detected mouth number: " + newFacesArr.size());
    return false;
    }
            
    //Enlarge mouthRect according to mouth size
    Rect rect = newFacesArr.get(0);
    int enlargeX = (int)Math.round(Parameters.enlargeMouthRect * rect.width);
    int enlargeY = (int)Math.round(Parameters.enlargeMouthRect * rect.height);
    mc.mouthRect  = new Rect(rect.x - enlargeX, rect.y - enlargeY, rect.width + enlargeX*2, rect.height + enlargeY*2);
            
    //save mat for mouth
    mc.mouthMat = mc.grayFrame.submat(mc.mouthRect);
    */

    /*Localization by enlarging:
    mouth rectangle can be defined as third lower part of face rect 
    with enlarging over 1/5 of face lower part:
    Mouth_rect = faceY + faceHeight*3/2 + faceHeight/5
    */

    int heightFace = mc.faceRect.height;
    int mouthHeight = heightFace * 1 / 3 + heightFace / 5;
    int tmp = mc.faceRect.y + heightFace * 2 / 3 + mouthHeight;
    int mouthEndY = tmp < mc.origFrame.width() ? tmp : mc.origFrame.width();

    mc.mouthRect = new Rect(new Point(mc.faceRect.x, mc.faceRect.y + heightFace * 2 / 3),
            new Point(mc.faceRect.x + mc.faceRect.width, mouthEndY));
    mc.mouthMat = mc.grayFrame.submat(mc.mouthRect);
    //show results
    Core.rectangle(mc.origFrame, mc.mouthRect.tl(), mc.mouthRect.br(), new Scalar(0, 0, 255));
    //        Core.rectangle(mc.origFrame, mc.faceRect.tl(), mc.faceRect.br(), new Scalar(0,0,255));
    return true;
}

From source file:mvision.Bhattacharyya.java

/**
 * Computes the hue-channel histogram of the given image file.
 *
 * Note: {@code histImage} is rendered for debugging/visualisation only and
 * is not returned or saved.
 *
 * @param img path of the image file to read
 * @return the normalized single-channel histogram (CV_32F)
 */
public Mat histogram(String img) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread(img);

    // BUGFIX: Highgui.imread returns BGR data, so the correct conversion
    // code is COLOR_BGR2HSV; the original used COLOR_RGB2HSV, which swapped
    // the red and blue channels before the HSV conversion
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2HSV);
    java.util.List<Mat> matList = new LinkedList<Mat>();
    matList.add(image);
    Mat histogram = new Mat();
    // NOTE(review): 255 bins over the range [0, 256) looks like an
    // off-by-one (256 bins is conventional); kept to preserve output shape
    MatOfFloat ranges = new MatOfFloat(0, 256);
    MatOfInt histSize = new MatOfInt(255);
    // channel 0 of HSV = hue
    Imgproc.calcHist(matList, new MatOfInt(0), new Mat(), histogram, histSize, ranges);

    // space for the debug histogram image
    Mat histImage = Mat.zeros(100, (int) histSize.get(0, 0)[0], CvType.CV_8UC1);

    histogram.convertTo(histogram, CvType.CV_32F);

    // normalize so the tallest bin fits the debug image height
    Core.normalize(histogram, histogram, 1, histImage.rows(), Core.NORM_MINMAX, -1, new Mat());
    // draw one vertical line per bin
    for (int i = 0; i < (int) histSize.get(0, 0)[0]; i++) {
        Core.line(histImage, new org.opencv.core.Point(i, histImage.rows()),
                new org.opencv.core.Point(i, histImage.rows() - Math.round(histogram.get(i, 0)[0])),
                new Scalar(255, 255, 255), 1, 8, 0);
    }
    return histogram;
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * Draws the outline of the given rectangle onto the mat with a 2px line.
 *
 * @param rect   rectangle to outline
 * @param mat    image to draw on (modified in place)
 * @param scalar line colour; green is used when null
 */
public static void drawRect(Rect rect, Mat mat, Scalar scalar) {
    if (scalar == null)
        scalar = new Scalar(0, 255, 0);

    // corners in clockwise order starting at the top-left
    Point[] corners = { new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y),
            new Point(rect.x + rect.width, rect.y + rect.height), new Point(rect.x, rect.y + rect.height) };

    // connect each corner to the next, wrapping back to the first
    for (int i = 0; i < corners.length; i++) {
        Core.line(mat, corners[i], corners[(i + 1) % corners.length], scalar, 2);
    }
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * Draws every candidate component onto a copy of the image, colour-coded by
 * type (wheel=red, tail light=green, licence plate=blue), and writes the
 * result to the CBOD temp directory.
 *
 * @param candidateComponents components to draw; may be null
 * @param imageModel          source image and output-naming information
 * @param outputSuffix        suffix appended to the output file name
 */
public static void drawComponentsToImage(List<CandidateComponent> candidateComponents, ImageModel imageModel,
        String outputSuffix) {

    Mat canvas = OpenCV.copyImage(imageModel.getMat());
    // component-type colours (BGR order)
    Scalar blue = new Scalar(255, 0, 0);
    Scalar green = new Scalar(0, 255, 0);
    Scalar red = new Scalar(0, 0, 255);

    if (candidateComponents != null) {
        for (CandidateComponent component : candidateComponents) {
            Rect bounds = component.getRect();
            if (component.getObjectType().equals(EObjectType.WHEEL)) {
                OpenCV.drawRect(bounds, canvas, red);
            } else if (component.getObjectType().equals(EObjectType.TAIL_LIGHT)) {
                OpenCV.drawRect(bounds, canvas, green);
            } else if (component.getObjectType().equals(EObjectType.LICENSE_PLATE)) {
                OpenCV.drawRect(bounds, canvas, blue);
            }
        }
    }

    String outputImagePath = CBODUtil.getCbodTempDirectory().concat("/")
            .concat(imageModel.getRawImageName() + outputSuffix + "." + imageModel.getExtension());
    OpenCV.writeImage(canvas, outputImagePath);
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * Draws candidate components (colour-coded by type), the pivot component in
 * yellow, and optionally the formatted fuzzy result text, onto a copy of the
 * image, then writes it to the CBOD temp directory.
 *
 * @param candidateComponents components to draw; may be null
 * @param pivot               component highlighted in yellow; may be null
 * @param imageModel          source image and output-naming information
 * @param outputSuffix        suffix appended to the output file name
 * @param fuzzyResult         value printed on the image when writeFuzzyResult is true
 * @param writeFuzzyResult    whether to print fuzzyResult on the image
 */
private static void drawComponentsToImage(List<CandidateComponent> candidateComponents,
        CandidateComponent pivot, ImageModel imageModel, String outputSuffix, double fuzzyResult,
        boolean writeFuzzyResult) {

    Mat canvas = OpenCV.copyImage(imageModel.getMat());
    // component-type colours (BGR order)
    Scalar blue = new Scalar(255, 0, 0);
    Scalar green = new Scalar(0, 255, 0);
    Scalar red = new Scalar(0, 0, 255);
    Scalar yellow = new Scalar(0, 255, 255);

    if (candidateComponents != null) {
        for (CandidateComponent component : candidateComponents) {
            Rect bounds = component.getRect();
            if (component.getObjectType().equals(EObjectType.WHEEL)) {
                OpenCV.drawRect(bounds, canvas, red);
            } else if (component.getObjectType().equals(EObjectType.TAIL_LIGHT)) {
                OpenCV.drawRect(bounds, canvas, green);
            } else if (component.getObjectType().equals(EObjectType.LICENSE_PLATE)) {
                OpenCV.drawRect(bounds, canvas, blue);
            }
        }
    }

    // the pivot is drawn last so its outline stays on top
    if (pivot != null) {
        OpenCV.drawRect(pivot.getRect(), canvas, yellow);
    }

    if (writeFuzzyResult) {
        DecimalFormat dFormat = new DecimalFormat("#.####");
        drawText(canvas, new Point(5, 20), dFormat.format(fuzzyResult));
    }

    String outputImagePath = CBODUtil.getCbodTempDirectory().concat("/")
            .concat(imageModel.getRawImageName() + outputSuffix + "." + imageModel.getExtension());
    OpenCV.writeImage(canvas, outputImagePath);
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * Renders the given text on the mat at the given position, using the
 * Hershey-simplex font at scale 0.75 with a 2px violet stroke.
 *
 * @param m    image to draw on (modified in place)
 * @param p    bottom-left corner of the text
 * @param text text to render
 */
public static void drawText(Mat m, Point p, String text) {
    final Scalar violet = new Scalar(238, 130, 238);
    Core.putText(m, text, p, Core.FONT_HERSHEY_SIMPLEX, 0.75, violet, 2);
}

From source file:nz.ac.auckland.lablet.vision.CamShiftTracker.java

License:Open Source License

/**
 * Finds the dominant colour in an image, and returns two values in HSV colour space to represent similar colours,
 * e.g. so you can keep all colours similar to the dominant colour.
 *
 * How the algorithm works:
 *
 * 1. Scale the frame down so that algorithm doesn't take too long.
 * 2. Segment the frame into different colours (number of colours determined by k)
 * 3. Find dominant cluster (largest area) and get its central colour point.
 * 4. Get range (min max) to represent similar colours.
 *
 * @param bgr The input frame, in BGR colour space.
 * @param k The number of segments to use (2 works well).
 * @return The min and max HSV colour values, which represent the colours similar to the dominant colour.
 */
private Pair<Scalar, Scalar> getMinMaxHsv(Mat bgr, int k) {
    // NOTE(review): despite the original "Convert to HSV" comment here, this
    // call converts BGR -> BGRA (COLOR_BGR2BGRA); the only HSV conversion
    // happens further below, on the single dominant cluster-centre colour
    Mat input = new Mat();
    Imgproc.cvtColor(bgr, input, Imgproc.COLOR_BGR2BGRA, 3);

    // Scale the image down (preserving aspect ratio) so k-means stays fast
    Size bgrSize = bgr.size();
    Size newSize = new Size();

    if (bgrSize.width > CamShiftTracker.KMEANS_IMG_SIZE || bgrSize.height > CamShiftTracker.KMEANS_IMG_SIZE) {

        if (bgrSize.width > bgrSize.height) {
            newSize.width = CamShiftTracker.KMEANS_IMG_SIZE;
            newSize.height = CamShiftTracker.KMEANS_IMG_SIZE / bgrSize.width * bgrSize.height;
        } else {
            newSize.width = CamShiftTracker.KMEANS_IMG_SIZE / bgrSize.height * bgrSize.width;
            newSize.height = CamShiftTracker.KMEANS_IMG_SIZE;
        }

        Imgproc.resize(input, input, newSize);
    }

    //Image quantization using k-means, see here for details of k-means algorithm: http://bit.ly/1JIvrlB
    Mat clusterData = new Mat();

    // one row per pixel, channels spread across columns, scaled to [0, 1]
    Mat reshaped = input.reshape(1, input.rows() * input.cols());
    reshaped.convertTo(clusterData, CvType.CV_32F, 1.0 / 255.0);
    Mat labels = new Mat();
    Mat centres = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 50, 1);
    Core.kmeans(clusterData, k, labels, criteria, 1, Core.KMEANS_PP_CENTERS, centres);

    // Count how many pixels fell into each cluster
    int[] counts = new int[k];

    for (int i = 0; i < labels.rows(); i++) {
        int label = (int) labels.get(i, 0)[0];
        counts[label] += 1;
    }

    // Get cluster index with the maximum number of members (dominant colour)
    int maxCluster = 0;
    int index = -1;

    for (int i = 0; i < counts.length; i++) {
        int value = counts[i];

        if (value > maxCluster) {
            maxCluster = value;
            index = i;
        }
    }

    // Cluster-centre colour rescaled back to [0, 255]; columns follow the
    // source image's B, G, R(, A) channel order, so column 2 is red
    int r = (int) (centres.get(index, 2)[0] * 255.0);
    int g = (int) (centres.get(index, 1)[0] * 255.0);
    int b = (int) (centres.get(index, 0)[0] * 255.0);
    int sum = (r + g + b) / 3; // mean brightness of the dominant colour

    // Derive an HSV range around the dominant colour
    Scalar min;
    Scalar max;

    // channel spread: a small spread means a grey-ish (achromatic) colour
    int rg = Math.abs(r - g);
    int gb = Math.abs(g - b);
    int rb = Math.abs(r - b);
    int maxDiff = Math.max(Math.max(rg, gb), rb);

    if (maxDiff < 35 && sum > 120) { // near-white: any hue, low saturation, high value
        min = new Scalar(0, 0, 0);
        max = new Scalar(180, 40, 255);
    } else if (sum < 50 && maxDiff < 35) { // near-black: any hue/saturation, low value
        min = new Scalar(0, 0, 0);
        max = new Scalar(180, 255, 40);
    } else {
        // Chromatic colour: convert the single dominant colour to HSV.
        // NOTE(review): Scalar(r, g, b) places red in channel 0, but
        // COLOR_BGR2HSV expects blue first — this may compute a wrong hue
        // for asymmetric colours; verify against expected behaviour
        Mat bgrColour = new Mat(1, 1, CvType.CV_8UC3, new Scalar(r, g, b));
        Mat hsvColour = new Mat();

        Imgproc.cvtColor(bgrColour, hsvColour, Imgproc.COLOR_BGR2HSV, 3);
        double[] hsv = hsvColour.get(0, 0);

        // If the lower hue bound underflows 0, widen the upper bound instead
        // (OpenCV hue range is [0, 180])
        int addition = 0;
        int minHue = (int) hsv[0] - colourRange;
        if (minHue < 0) {
            addition = Math.abs(minHue);
        }

        int maxHue = (int) hsv[0] + colourRange;

        min = new Scalar(Math.max(minHue, 0), 60, Math.max(35, hsv[2] - 30));
        max = new Scalar(Math.min(maxHue + addition, 180), 255, 255);
    }

    return new Pair<>(min, max);
}