Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usages of the org.opencv.core Scalar constructor.

Prototype

public Scalar(double[] vals) 

Source Link

Usage

From source file:LetsStart.GUI.java

/**
 * Adds both image views to the frame and installs a mouse listener on the
 * first view that, on every press, samples the pixel under the cursor into
 * the {@code color} field, logs it, and refreshes the view.
 *
 * @param frame the frame the image views are added to
 */
private void setupImage(JFrame frame) {

    GridBagConstraints constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.CENTER;
    constraints.gridy = 3;
    constraints.gridx = 0;

    frame.add(imageView);
    frame.add(imageView2);

    imageView.addMouseListener(new MouseAdapter() {
        @Override
        public void mousePressed(MouseEvent e) {
            int x = e.getX();
            int y = e.getY();

            // note: Mat.get takes (row, col), hence (y, x)
            double[] pixel = image.get(y, x);
            color = new Scalar(pixel);

            System.out.println("coords " + x + " " + y);
            System.out.println("the point is " + pixel[0] + " " + pixel[1] + " " + pixel[2]);
            System.out.println(color.toString());

            updateView();
        }
    });

}

From source file:logic.analyzer.AnalyzerIF.java

/**
 * Rotates the frame according to the detected eye irises: compares
 * y-coordinates and computes the angle between the eye centers, calculates a
 * rotation matrix and rotates the target images (color and gray).
 *
 * @param in_baseDist             reference inter-eye distance used for the sanity check
 * @param in_boundRect            bounding-rect size the distance is divided by
 * @param in_out_trackRectArr     tracked rectangles, updated by template tracking
 * @param out_trackTemplateMatArr output tracking-template images
 * @param in_pointLocationArr     tracked center points (overlay drawn when length == 2)
 * @param in_color                color used for debug overlay drawing
 * @return -1 if detected irises are not appropriate or the computed eye-center
 *         angle is wrong, 0 if the rotation matrix is not detected, 1 on success
 */
public int rotateFrameAndTrackTemplate(double in_baseDist, int in_boundRect, Rect[] in_out_trackRectArr,
        Mat[] out_trackTemplateMatArr, Point[] in_pointLocationArr, Scalar in_color) {
    // check if tracked objects lost location: a ratio outside the configured
    // window means the rectangles no longer match the expected geometry
    double tmp = in_baseDist / (double) in_boundRect;

    if (tmp < Parameters.eyeRectAndBaseDiffMinThresh || tmp > Parameters.eyeRectAndBaseDiffMaxThresh) {

        LOG.warn("baseDst: condition FAIL");

        return -1;
    }

    // template-track the rectangles on the current frames; abort on failure
    if (!trackRectArr(container.grayFrame, container.origFrame, in_out_trackRectArr, out_trackTemplateMatArr,
            in_pointLocationArr, in_color))
        return -1;

    //save new centers to feature in container
    container.features.eyeBrowCenterPointArr = in_pointLocationArr;

    if (in_pointLocationArr.length == 2) {
        Drawer.drawRectanglePair(container.origFrame, in_out_trackRectArr[0], in_out_trackRectArr[1], in_color);
        Drawer.drawTrackedEyeCenters(container.origFrame, in_pointLocationArr[0], in_pointLocationArr[1]);
    }

    //rotate images: color for watching, gray for further processing 
    //(eye templates rotate by themselves)
    // NOTE(review): getRotationMat appears to update prevAlpha as a side
    // effect — the two checks below rely on this call having run first.
    container.rotationMat = getRotationMat(container.origFrame, container.faceRect, in_pointLocationArr[0],
            in_pointLocationArr[1], prevAlpha);

    // a negative x in prevAlpha signals an invalid angle; reset and abort
    if (prevAlpha != null && prevAlpha.x < 0) {
        LOG.warn("PrevAlpha: RESET");
        prevAlpha = new Point(0.0, 0.0);
        return -1;
    }

    if (container.rotationMat == null) {
        LOG.warn("Rotation angle is out of +/- " + Parameters.maxIrisAngle);
        return 0;
    }

    //save rot angle to features in container
    container.features.faceRotAngle = prevAlpha.y;

    // warp both frames with the same rotation matrix
    Imgproc.warpAffine(container.origFrame, container.origFrame, container.rotationMat,
            container.origFrame.size(), Imgproc.INTER_LINEAR, Imgproc.BORDER_CONSTANT, new Scalar(0));

    Imgproc.warpAffine(container.grayFrame, container.grayFrame, container.rotationMat,
            container.grayFrame.size(), Imgproc.INTER_LINEAR, Imgproc.BORDER_CONSTANT, new Scalar(0));

    //recompute eyebrow inter-ocular distance (x distance between brow centers)
    container.eyeBrowBaseDst = Math
            .abs(container.eyeBrowBoundRectArr[0].x + container.eyeBrowBoundRectArr[0].width / 2
                    - (container.eyeBrowBoundRectArr[1].x + container.eyeBrowBoundRectArr[1].width / 2));

    LOG.info("eyeBrowBaseDst = " + container.eyeBrowBaseDst);

    return 1;
}

From source file:logic.featurepointextractor.EyeBrowsFPE.java

/**
 * getSkeleton  obtains a thin 1-pixel-wide skeleton from a binary image via
 * iterative morphological erosion (morphological skeletonization).
 *
 * Fix: the previous revision eroded into {@code src} in place
 * ({@code eroded.copyTo(src)}), destroying the caller's input image. The loop
 * now runs on a clone, so {@code src} is left untouched; the returned skeleton
 * is unchanged.
 *
 * @param src   input binary image (not modified)
 * @return      binary skeleton image of the same size
 */
private Mat getSkeleton(Mat src) {
    // work on a copy so the caller's Mat is preserved
    Mat work = src.clone();
    Mat skel = new Mat(work.rows(), work.cols(), CV_8UC1, new Scalar(0));
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    Mat tmp = new Mat();
    Mat eroded = new Mat();
    boolean done = false;

    // peel one erosion layer per pass and accumulate what each pass removes
    do {
        Imgproc.morphologyEx(work, eroded, Imgproc.MORPH_ERODE, element);
        Imgproc.morphologyEx(eroded, tmp, Imgproc.MORPH_DILATE, element);
        Core.subtract(work, tmp, tmp);    // pixels lost by this erosion step
        Core.bitwise_or(skel, tmp, skel); // fold them into the skeleton
        eroded.copyTo(work);

        done = (Core.countNonZero(work) == 0); // stop once fully eroded
    } while (!done);

    return skel;
}

From source file:logic.imagelocalizator.EyeBrowsLocalizator.java

/**
 * Localizes each eyebrow's bounding rectangle inside the previously detected
 * eyebrow search regions and prepares the tracking templates.
 *
 * Fixes over the previous revision:
 *  - a fresh binary Mat is allocated per eyebrow (one shared Mat used to be
 *    stored for both, so the entries aliased the same buffer),
 *  - the binary image is stored at index {@code i} instead of always 0,
 *  - an empty contour list now returns false instead of throwing
 *    IndexOutOfBoundsException from {@code contours.get(-1)}.
 *
 * @param mc container holding the gray frame and eyebrow search rectangles
 * @return true if both eyebrow bounding rectangles were localized
 */
private boolean detectEyeBrowBoundRect(MatContainer mc) {
    //contains eyebrow bounding rectangles
    Rect boundRectArr[] = new Rect[2];

    //for each eyebrow
    for (int i = 0; i < 2; ++i) {
        // fresh Mat per iteration so the two stored binary images do not alias
        Mat binMat = new Mat();

        mc.eyeBrowMatArr[i] = mc.grayFrame.submat(mc.eyeBrowRectArr[i]);
        Scalar meanScalar = Core.mean(mc.eyeBrowMatArr[i]);
        //negate image: shift intensities so the mean lands mid-range
        Core.convertScaleAbs(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i], 1, 255 - meanScalar.val[0]);
        Imgproc.equalizeHist(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i]);
        Imgproc.blur(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i], new Size(4, 4));

        //obtain binary image
        Imgproc.threshold(mc.eyeBrowMatArr[i], binMat, 70, 255, Imgproc.THRESH_BINARY_INV);

        Imgproc.morphologyEx(binMat, binMat, Imgproc.MORPH_OPEN,
                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4, 4)));

        //find contours
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Imgproc.findContours(binMat, contours, new Mat(), Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

        // no contour at all: previously index stayed -1 and contours.get(-1)
        // threw; treat it as a detection failure instead
        if (contours.isEmpty()) {
            LOG.warn("No eyebrow contour detected");
            mc.eyeBrowBoundRectArr[i] = null;
            return false;
        }

        //find the biggest contour
        int maxSize = contours.get(0).toArray().length;
        int index = 0;

        for (int j = 0; j < contours.size(); ++j) {
            //if contour is vertical, exclude it 
            Rect boundRect = Imgproc.boundingRect(contours.get(j));
            if (boundRect.height > boundRect.width)
                continue;

            // a contour nearly as tall as the search window means detection failed
            if ((double) boundRect.height
                    / (double) mc.eyeBrowRectArr[i].height > Parameters.eyebrowBoundRectThresh) {
                LOG.warn("Reset brow rect");
                mc.eyeBrowBoundRectArr[i] = null;
                return false;
            }

            int tmpSize = contours.get(j).toArray().length;

            LOG.info("Contour " + j + "; size = " + tmpSize);

            if (tmpSize > maxSize) {
                maxSize = tmpSize;
                index = j;
            }
        }

        // NOTE(review): binMat is wiped before being stored for skeletonization
        // below; kept as-is to preserve behavior, but this looks suspicious —
        // confirm whether the pre-wipe binary image was meant to be saved.
        binMat.setTo(new Scalar(0));
        boundRectArr[i] = Imgproc.boundingRect(contours.get(index));

        //save eyebrow bounding rectangle (in frame coordinates)
        mc.eyeBrowBoundRectArr[i] = new Rect(mc.eyeBrowRectArr[i].x + boundRectArr[i].x,
                mc.eyeBrowRectArr[i].y + boundRectArr[i].y, boundRectArr[i].width, boundRectArr[i].height);

        //save binary eyebrow Mat for further FP detection (was always slot 0)
        mc.eyeBrowBinMatArr[i] = binMat;

        //define tracking template for eyebrow
        mc.eyeBrowTrackingTemplateArr[i] = mc.grayFrame.submat(mc.eyeBrowBoundRectArr[i]);
    }

    //compute eyebrow inter-ocular distance
    mc.eyeBrowBaseDst = Math.abs(mc.eyeBrowBoundRectArr[0].x + mc.eyeBrowBoundRectArr[0].width / 2
            - (mc.eyeBrowBoundRectArr[1].x + mc.eyeBrowBoundRectArr[1].width / 2));

    LOG.info("eyeBrowBaseDst = " + mc.eyeBrowBaseDst);

    //define new bound rect centers for tracking template
    mc.eyeBrowCentersPointsArr = new Point[2];

    return true;
}

From source file:logic.localizator.EyeBrowsLocalizator.java

/**
 * Localizes each eyebrow's bounding rectangle inside the previously detected
 * eyebrow search regions, prepares tracking templates, and records the two
 * eyebrow center points (global frame coordinates) in the feature container.
 *
 * Fixes over the previous revision:
 *  - a fresh binary Mat is allocated per eyebrow (one shared Mat used to be
 *    stored for both, so the entries aliased the same buffer),
 *  - the binary image is stored at index {@code i} instead of always 0,
 *  - an empty contour list now returns false instead of throwing
 *    IndexOutOfBoundsException from {@code contours.get(-1)}.
 *
 * @param mc container holding the gray frame and eyebrow search rectangles
 * @return true if both eyebrow bounding rectangles were localized
 */
private boolean detectEyeBrowBoundRect(MatContainer mc) {
    //contains eyebrow bounding rectangles
    Rect boundRectArr[] = new Rect[2];

    //for each eyebrow
    for (int i = 0; i < 2; ++i) {
        // fresh Mat per iteration so the two stored binary images do not alias
        Mat binMat = new Mat();

        mc.eyeBrowMatArr[i] = mc.grayFrame.submat(mc.eyeBrowRectArr[i]);
        Scalar meanScalar = Core.mean(mc.eyeBrowMatArr[i]);
        //negate image: shift intensities so the mean lands mid-range
        Core.convertScaleAbs(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i], 1, 255 - meanScalar.val[0]);
        Imgproc.equalizeHist(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i]);
        Imgproc.blur(mc.eyeBrowMatArr[i], mc.eyeBrowMatArr[i], new Size(4, 4));

        //obtain binary image
        Imgproc.threshold(mc.eyeBrowMatArr[i], binMat, 70, 255, Imgproc.THRESH_BINARY_INV);

        Imgproc.morphologyEx(binMat, binMat, Imgproc.MORPH_OPEN,
                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4, 4)));

        //find contours
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Imgproc.findContours(binMat, contours, new Mat(), Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

        // no contour at all: previously index stayed -1 and contours.get(-1)
        // threw; treat it as a detection failure instead
        if (contours.isEmpty()) {
            LOG.warn("No eyebrow contour detected");
            mc.eyeBrowBoundRectArr[i] = null;
            return false;
        }

        //find the biggest contour
        int maxSize = contours.get(0).toArray().length;
        int index = 0;

        for (int j = 0; j < contours.size(); ++j) {
            //if contour is vertical, exclude it 
            Rect boundRect = Imgproc.boundingRect(contours.get(j));
            if (boundRect.height > boundRect.width)
                continue;

            // a contour nearly as tall as the search window means detection failed
            if ((double) boundRect.height
                    / (double) mc.eyeBrowRectArr[i].height > Parameters.eyebrowBoundRectThresh) {
                LOG.warn("Reset brow rect");
                mc.eyeBrowBoundRectArr[i] = null;
                return false;
            }

            int tmpSize = contours.get(j).toArray().length;

            LOG.info("Contour " + j + "; size = " + tmpSize);

            if (tmpSize > maxSize) {
                maxSize = tmpSize;
                index = j;
            }
        }

        // NOTE(review): binMat is wiped before being stored for skeletonization
        // below; kept as-is to preserve behavior, but this looks suspicious —
        // confirm whether the pre-wipe binary image was meant to be saved.
        binMat.setTo(new Scalar(0));
        boundRectArr[i] = Imgproc.boundingRect(contours.get(index));

        //save eyebrow bounding rectangle (in frame coordinates)
        mc.eyeBrowBoundRectArr[i] = new Rect(mc.eyeBrowRectArr[i].x + boundRectArr[i].x,
                mc.eyeBrowRectArr[i].y + boundRectArr[i].y, boundRectArr[i].width, boundRectArr[i].height);

        //save binary eyebrow Mat for further FP detection (was always slot 0)
        mc.eyeBrowBinMatArr[i] = binMat;

        //define tracking template for eyebrow
        mc.eyeBrowTrackingTemplateArr[i] = mc.grayFrame.submat(mc.eyeBrowBoundRectArr[i]); //local rectangle
    }

    //compute eyebrow inter-ocular distance
    mc.eyeBrowBaseDst = Math.abs(mc.eyeBrowBoundRectArr[0].x + mc.eyeBrowBoundRectArr[0].width / 2
            - (mc.eyeBrowBoundRectArr[1].x + mc.eyeBrowBoundRectArr[1].width / 2));

    LOG.info("eyeBrowBaseDst = " + mc.eyeBrowBaseDst);

    //define new bound rect centers for tracking template
    mc.eyeBrowCentersPointsArr = new Point[2];

    //save eyebrow centers (left-right), translated into global frame coordinates
    Point p1 = new Point(
            mc.eyePairGlobalRect.x + mc.eyeBrowBoundRectArr[0].x + mc.eyeBrowBoundRectArr[0].width / 2,
            mc.eyePairGlobalRect.y + mc.eyeBrowBoundRectArr[0].y + mc.eyeBrowBoundRectArr[0].height / 2);

    Point p2 = new Point(
            mc.eyePairGlobalRect.x + mc.eyeBrowBoundRectArr[1].x + mc.eyeBrowBoundRectArr[1].width / 2,
            mc.eyePairGlobalRect.y + mc.eyeBrowBoundRectArr[1].y + mc.eyeBrowBoundRectArr[1].height / 2);

    Point[] pointArr = new Point[2];
    pointArr[0] = p1;
    pointArr[1] = p2;

    mc.features.eyeBrowCenterPointArr = pointArr;

    return true;
}

From source file:opencv.CaptchaDetection.java

/**
 * Splits the source image into 3 color clusters with k-means and returns a
 * CV_8UC1 mask that is 255 for pixels of the middle cluster (clusters ordered
 * by the sum of their center channel values) and 0 elsewhere.
 *
 * @param src source image; assumed 3-channel (the channel loop below is fixed
 *            at 3) — TODO confirm callers always pass 3-channel input
 * @return binary mask of the middle cluster
 */
private static Mat k_means_spilter(Mat src) {
    Mat dst = Mat.zeros(src.size(), CvType.CV_8UC1);

    int width = src.cols();
    int height = src.rows();
    int dims = src.channels();

    // number of k-means clusters
    int clusterCount = 3;

    // one row per pixel, one column per channel, float as required by kmeans
    Mat points = new Mat(width * height, dims, CvType.CV_32F, new Scalar(0));
    Mat centers = new Mat(clusterCount, dims, CvType.CV_32F);
    Mat labels = new Mat(width * height, 1, CvType.CV_32S);

    // flatten the image into the points matrix
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            int index = row * width + col;
            double[] s_data = src.get(row, col);

            for (int channel = 0; channel < 3; channel++) {
                float[] f_buff = new float[1];
                f_buff[0] = (float) s_data[channel];

                points.put(index, channel, f_buff);
            }
        }
    }

    // run k-means: up to 10 iterations or epsilon 0.1, 3 attempts, k-means++ seeding
    TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 10, 0.1);
    Core.kmeans(points, clusterCount, labels, criteria, 3, Core.KMEANS_PP_CENTERS, centers);

    // order cluster labels by the sum of their center channels (a brightness
    // proxy); the TreeMap sorts by that sum, and equal sums are disambiguated
    // by bumping the key until it is free
    Map<Integer, Integer> tmp = new TreeMap<>();
    for (int i = 0; i < clusterCount; i++) {
        int sum = 0;
        for (int j = 0; j < dims; j++) {
            sum += centers.get(i, j)[0];
        }
        while (tmp.containsKey(sum))
            sum++;
        tmp.put(sum, i);
    }

    // label_order holds the cluster indices from smallest to largest center sum
    int count = 0;
    int[] label_order = new int[clusterCount];
    for (Map.Entry<Integer, Integer> iter : tmp.entrySet()) {
        label_order[count++] = iter.getValue();
    }

    // emit 255 for every pixel assigned to the middle cluster
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            int index = row * width + col;
            int label = (int) labels.get(index, 0)[0];

            if (label == label_order[1]) {
                byte[] d_buff = new byte[1];
                d_buff[0] = (byte) 255;
                dst.put(row, col, d_buff);
            }
        }
    }

    return dst;
}

From source file:opencv.CaptchaDetection.java

/***
 * ??, ROI/*from   www. j  a v  a  2s .co m*/
 * @param src
 * @return 
 */
private static List<Mat> find_number(Mat src) {
    Mat src_tmp = src.clone();

    //  
    Imgproc.dilate(src_tmp, src_tmp, new Mat());

    //  ?
    Mat canny_edge = new Mat();
    Imgproc.blur(src_tmp, src_tmp, new Size(3, 3));
    Imgproc.Canny(src_tmp, canny_edge, 50, 150, 3, false);

    //  
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(canny_edge, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    List<Rect> boundRect = new ArrayList<>();

    //  ??, ??
    for (int i = 0; i < contours.size(); i++) {
        MatOfPoint2f tmp_mp2f_1 = new MatOfPoint2f();
        MatOfPoint2f tmp_mp2f_2 = new MatOfPoint2f();

        contours.get(i).convertTo(tmp_mp2f_1, CvType.CV_32FC2);

        Imgproc.approxPolyDP(tmp_mp2f_1, tmp_mp2f_2, 3, true);

        tmp_mp2f_2.convertTo(contours.get(i), CvType.CV_32S);

        Rect rect = Imgproc.boundingRect(contours.get(i));

        //if (rect.area() > 300)
        //out.println("h : " + rect.height + ", w : " + rect.width + ", aera :  " + rect.area());

        if (rect.height >= 21 && rect.width >= 21 && rect.area() >= 700)
            boundRect.add(rect);
    }

    //  ??
    for (Rect rect : boundRect) {
        Scalar color = new Scalar(128);
        Imgproc.rectangle(src_tmp, rect.tl(), rect.br(), color, 2, 8, 0);
    }

    //  ???
    Collections.sort(boundRect, rectSort);

    List<Mat> numRoi = new ArrayList<>();
    for (Rect rect : boundRect)
        numRoi.add(src.submat(rect));

    //for (Mat roi : numRoi) 
    //showResult(roi, "roi");

    return numRoi;
}

From source file:opencv.CaptchaDetection.java

/***
 * ?/* ww w . j a v  a  2  s  .c  om*/
 * @param src
 * @return 
 */
private static String dect_number(List<Mat> src) {
    String answer = "";

    for (Mat numRoi : src) {
        Mat zoomNum = new Mat(numRoi.rows() * 2, numRoi.cols() * 2, CvType.CV_8UC1, new Scalar(0));
        numRoi.copyTo(
                zoomNum.submat(new Rect(numRoi.cols() / 2, numRoi.rows() / 2, numRoi.cols(), numRoi.rows())));

        double matchMin = Double.MAX_VALUE;
        int matchSample = 0;

        for (Map.Entry<Integer, List<Mat>> iter : sampleMat.entrySet()) {
            for (Mat sample : iter.getValue()) {
                int result_cols = zoomNum.cols() - sample.cols() + 1;
                int result_rows = zoomNum.rows() - sample.rows() + 1;

                Mat resultImg = new Mat(result_rows, result_cols, CvType.CV_32FC1);

                Imgproc.matchTemplate(zoomNum, sample, resultImg, Imgproc.TM_SQDIFF);

                Core.MinMaxLocResult mmr = Core.minMaxLoc(resultImg);

                if (matchMin > mmr.minVal) {
                    matchMin = mmr.minVal;
                    matchSample = iter.getKey();
                }
            }
        }
        answer += matchSample / 2;
        //out.println("NumRio\tmatch sample :  " + matchSample + "\tmatch value : " + matchMin);
    }

    //out.println("Answer is : " + answer);
    return answer;
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Cube Color Metrics/*ww w  .jav a  2s . co m*/
 * 
 * Draw a 2D representation of observed tile colors vs.  pre-defined constant rubik tile colors. 
 * Also, right side 1D representation of measured and adjusted luminous.  See ...... for 
 * existing luminous correction.
 * 
 * @param image
 */
private void drawCubeColorMetrics(Mat image) {

    Core.rectangle(image, new Point(0, 0), new Point(570, 720), ColorTileEnum.BLACK.cvColor, -1);

    // Draw simple grid
    Core.rectangle(image, new Point(-256 + 256, -256 + 400), new Point(256 + 256, 256 + 400),
            ColorTileEnum.WHITE.cvColor);
    Core.line(image, new Point(0 + 256, -256 + 400), new Point(0 + 256, 256 + 400),
            ColorTileEnum.WHITE.cvColor);
    Core.line(image, new Point(-256 + 256, 0 + 400), new Point(256 + 256, 0 + 400),
            ColorTileEnum.WHITE.cvColor);

    // Draw measured tile color as solid small circles on both the UV plane and the Y axis.
    for (RubikFace face : stateModel.nameRubikFaceMap.values()) {
        for (int n = 0; n < 3; n++) {
            for (int m = 0; m < 3; m++) {

                double[] measuredTileColor = face.measuredColorArray[n][m];
                //              Log.e(Constants.TAG, "RGB: " + logicalTileArray[n][m].character + "=" + actualTileColor[0] + "," + actualTileColor[1] + "," + actualTileColor[2] + " x=" + x + " y=" + y );
                double[] measuredTileColorYUV = Util.getYUVfromRGB(measuredTileColor);
                //              Log.e(Constants.TAG, "Lum: " + logicalTileArray[n][m].character + "=" + acutalTileYUV[0]);

                double luminousScaled = measuredTileColorYUV[0] * 2 - 256;
                double uChromananceScaled = measuredTileColorYUV[1] * 2;
                double vChromananceScaled = measuredTileColorYUV[2] * 2;

                // Draw solid circle in UV plane
                Core.circle(image, new Point(uChromananceScaled + 256, vChromananceScaled + 400), 10,
                        new Scalar(face.observedTileArray[n][m].cvColor.val), -1);

                // Draw line on OUTSIDE right side for Y axis as directly measured.
                Core.line(image, new Point(522 + 20, luminousScaled + 400),
                        new Point(542 + 20, luminousScaled + 400), face.observedTileArray[n][m].cvColor, 3);
                // Log.e(Constants.TAG, "Lum: " + logicalTileArray[n][m].character + "=" + luminousScaled);
            }
        }
    }

    // Draw predicted tile colors (i.e. "rubikColor" from Constants) as a large circle in UV plane and short solid line in the Y plane.
    for (ColorTileEnum colorTile : ColorTileEnum.values()) {

        if (colorTile.isRubikColor == false)
            continue;

        // Target color we are expecting measurement to be.
        double[] targetColorYUV = Util.getYUVfromRGB(colorTile.rubikColor.val);

        // Draw Color Calibration in UV plane as rectangle
        double x = 2 * targetColorYUV[1] + 256;
        double y = 2 * targetColorYUV[2] + 400;

        // Open large circle in UV plane
        Core.circle(image, new Point(x, y), 15, colorTile.cvColor, +3);

        // Open large circle in Y plane
        Core.circle(image, new Point(512, -256 + 2 * targetColorYUV[0] + 400), 15, colorTile.cvColor, +3);
    }
}

From source file:org.lasarobotics.vision.util.color.Color.java

License:Open Source License

/**
 * Convert this color to a different colorspace and return a scalar.
 *
 * Fixes over the previous revision: the conversion table is fetched once
 * instead of three times per iteration, the native Mats are released in a
 * {@code finally} block so they are not leaked when {@code cvtColor} throws,
 * and the original exception is chained as the cause instead of being dropped.
 *
 * @param to Colorspace to convert to
 * @return Scalar in other colorspace
 * @throws IllegalArgumentException if the conversion is unsupported or fails
 */
public Scalar convertColorScalar(ColorSpace to) {
    if (getColorSpace() == to)
        return getScalar();
    if (!getColorSpace().canConvertTo(to))
        throw new IllegalArgumentException("Cannot convert color to the desired color space.");

    Scalar output = this.getScalar();
    // hoisted: each (conversion, inputDim, outputDim) triple drives one cvtColor step
    int[] conversions = getColorSpace().getConversionsTo(to);

    try {
        for (int i = 0; i < conversions.length; i += 3) {
            int conversion = conversions[i];
            int inputDim = conversions[i + 1];
            int outputDim = conversions[i + 2];

            Mat pointMatTo = new Mat();
            Mat pointMatFrom = new Mat(1, 1, CvType.CV_8UC(inputDim), output);
            try {
                Imgproc.cvtColor(pointMatFrom, pointMatTo, conversion, outputDim);
                output = new Scalar(pointMatTo.get(0, 0));
            } finally {
                // release native memory even if the conversion throws
                pointMatTo.release();
                pointMatFrom.release();
            }
        }
    } catch (Exception e) {
        // preserve the root cause for diagnosis instead of swallowing it
        throw new IllegalArgumentException("Cannot convert color to the desired color space.", e);
    }

    return output;
}