Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usage for org.opencv.core Scalar, specifically the Scalar(double, double, double) constructor.

Prototype

public Scalar(double v0, double v1, double v2) 
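
A Scalar built with this three-argument constructor is most often used as a B, G, R color (OpenCV's default channel order) for drawing calls, or as a per-channel bound or fill value. A minimal standalone sketch, assuming an OpenCV 3.x Java build where the drawing helpers live in Imgproc (in 2.4 they live in Core, as some of the examples below show); the class name is illustrative:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class ScalarColorDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // With three values a Scalar is typically read as a B, G, R color.
        Scalar red = new Scalar(0, 0, 255);
        Scalar green = new Scalar(0, 255, 0);
        // A Scalar can also initialize every pixel of a new Mat (white here).
        Mat canvas = new Mat(240, 320, CvType.CV_8UC3, new Scalar(255, 255, 255));
        Imgproc.rectangle(canvas, new Point(20, 20), new Point(120, 100), red, 2);
        Imgproc.line(canvas, new Point(20, 120), new Point(300, 120), green, 3);
    }
}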

Usage

From source file:com.mycompany.objectdetection.ObjectDetector.java

public void findObjects() {

    //        Imgproc.cvtColor(img, imgGrayscale, Imgproc.COLOR_RGBA2GRAY, 1); 
    //        Core.convertScaleAbs(img, imgGrayscale);
    //        Core.normalize(imgGrayscale, imgMeanShifted, 0.0, 1.0, NORM_MINMAX);
    preProcessImg();

    toGrayScale(imgMeanShifted);
    detectEdges(imgGrayscale);
    // pass a separate Mat for the hierarchy so the Canny edge image is not overwritten
    Imgproc.findContours(imgCanny, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    objList = new ArrayList<>();

    for (MatOfPoint mop : contours) {
        MatOfPoint2f m2p;
        m2p = new MatOfPoint2f(mop.toArray());
        Double peri = Imgproc.arcLength(m2p, true);
        Imgproc.approxPolyDP(m2p, m2p, 0.02 * peri, true);
        Imgproc.drawContours(imgOut, contours, -1, new Scalar(0, 0, 255), 2);

        float area = img.width() * img.height();
        Rect rect = Imgproc.boundingRect(mop);
        objList.add(rect);
        Imgproc.rectangle(imgOut, rect.tl(), rect.br(), new Scalar(255, 0, 0));
    }

    // sort detected rectangles by area, largest first
    Collections.sort(objList, new Comparator<Rect>() {
        @Override
        public int compare(Rect r1, Rect r2) {
            return Double.compare(r2.area(), r1.area());
        }
    });

    List<Rect> arr = objList; // note: arr aliases objList, so removals below also shrink objList

    while (arr.size() > 0) {
        //System.out.println("---->" + arr);
        Rect bigRect = arr.get(0);
        arr.remove(0);
        Rect bigRect2 = new Rect();

        while (!equals(bigRect, bigRect2)) {
            bigRect2 = bigRect;
            for (int i = 0; i < arr.size(); ++i) {
                // System.out.println("elotte"+arr.get(i));
                if (doOverlap(bigRect, arr.get(i))) {
                    //System.out.println("utana"+arr.get(i));
                    bigRect = union(bigRect, arr.get(i));
                    arr.remove(i);
                    break;
                }
            }

        }

        mainRect = bigRect;

        if (objList.size() > 5 && mainRect.area() >= img.width() * img.height() * 3 / 100) {
            Imgproc.rectangle(imgOut, bigRect.tl(), bigRect.br(), new Scalar(255, 255, 0));
            mainObjects.add(mainRect);
        } else if (objList.size() <= 5) {
            mainObjects.add(mainRect);
        }
    }

}
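
The helpers equals, doOverlap and union that findObjects calls are not part of this listing. A hedged sketch of what they might look like for org.opencv.core.Rect values, inferred only from how the method uses them (names and semantics are assumptions, not the original source):

import org.opencv.core.Rect;

//Assumed helper implementations, not taken from ObjectDetector.java.
final class RectUtils {
    //Two axis-aligned rectangles overlap when neither lies entirely to one side of the other.
    static boolean doOverlap(Rect a, Rect b) {
        return a.x < b.x + b.width && b.x < a.x + a.width
                && a.y < b.y + b.height && b.y < a.y + a.height;
    }

    //Smallest rectangle containing both a and b.
    static Rect union(Rect a, Rect b) {
        int x1 = Math.min(a.x, b.x);
        int y1 = Math.min(a.y, b.y);
        int x2 = Math.max(a.x + a.width, b.x + b.width);
        int y2 = Math.max(a.y + a.height, b.y + b.height);
        return new Rect(x1, y1, x2 - x1, y2 - y1);
    }

    //Value equality on position and size, matching the listing's equals(bigRect, bigRect2) check.
    static boolean rectEquals(Rect a, Rect b) {
        return a.x == b.x && a.y == b.y && a.width == b.width && a.height == b.height;
    }
}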

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Divides the lines of each angle group into two location groups according to the
 * sign of their intercept along the group's dominant axis.
 * @param img debug image to draw the classified lines on; pass null to skip drawing
 * @return true on success, false on failure
 */
private boolean divByIntersept(Mat img) {
    int[] sectionNumX = { 0, 0 };
    int[] sectionNumY = { 0, 0 };
    for (LineInfo li : mLineInfo) {
        final int angleNo = li.mLocationFlg == LineInfo.ANGLE0 ? 0 : 1;
        if (Math.abs(li.mLineEq.a) > Math.abs(li.mLineEq.b)) {
            sectionNumX[angleNo]++;
        } else {
            sectionNumY[angleNo]++;
        }
    }

    for (LineInfo li : mLineInfo) {
        final int angleNo = li.mLocationFlg == LineInfo.ANGLE0 ? 0 : 1;
        if (sectionNumX[angleNo] > sectionNumY[angleNo]) {//this angle group is mostly x-dominant: split by the sign of a
            if (li.mLineEq.a > 0.0) {
                li.mLocationFlg |= LineInfo.LOCAT0;
            } else {
                li.mLocationFlg |= LineInfo.LOCAT1;
            }
        } else {
            if (li.mLineEq.b > 0.0) {
                li.mLocationFlg |= LineInfo.LOCAT0;
            } else {
                li.mLocationFlg |= LineInfo.LOCAT1;
            }
        }
        if (img != null) {
            Scalar color[] = new Scalar[4];
            color[0] = new Scalar(0xff, 0x00, 0x00);
            color[1] = new Scalar(0x00, 0xff, 0x00);
            color[2] = new Scalar(0x00, 0x00, 0xff);
            color[3] = new Scalar(0xff, 0x00, 0xff);
            for (LineInfo linfo : mLineInfo) {
                int col = 0;
                if (linfo.mLocationFlg == (LineInfo.LOCAT0 | LineInfo.ANGLE0))
                    col = 0;
                if (linfo.mLocationFlg == (LineInfo.LOCAT0 | LineInfo.ANGLE1))
                    col = 1;
                if (linfo.mLocationFlg == (LineInfo.LOCAT1 | LineInfo.ANGLE0))
                    col = 2;
                if (linfo.mLocationFlg == (LineInfo.LOCAT1 | LineInfo.ANGLE1))
                    col = 3;
                Core.line(img, linfo.mStart, linfo.mEnd, color[col], 5);
            }
        }
    }
    return true;
}

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Selects a representative line equation for each of the 2x2 = 4 groups (angle x location),
 * using weighted averaging with outlier rejection followed by a 2D histogram over (a, b).
 * @param lineEq output line equations (ax+by=1), indexed as [angle][section]
 * @param img debug image to draw on; pass null to skip drawing
 * @return true on success, false if any group ends up empty
 */
private boolean selectLines(StraightLineEquation lineEq[][], Mat img) {
    //lines classified into [angle][location] buckets
    ArrayList<LineInfo>[][] classifiedLines = new ArrayList[2][2];
    LineInfo[][] designateLine = new LineInfo[2][2];

    for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 2; j++) {
            classifiedLines[i][j] = new ArrayList<LineInfo>();
            designateLine[i][j] = null;
        }
    }
    for (LineInfo li : mLineInfo) {
        final int agl = (li.mLocationFlg & LineInfo.ANGLE0) == LineInfo.ANGLE0 ? 0 : 1;
        final int sec = (li.mLocationFlg & LineInfo.LOCAT0) == LineInfo.LOCAT0 ? 0 : 1;
        classifiedLines[agl][sec].add(li);
    }
    final float centerX = (float) (mViewWidth >> 1);
    final float centerY = (float) (mViewHeight >> 1);

    //fail if any of the four groups is empty
    if (classifiedLines[0][0].size() == 0 || classifiedLines[0][1].size() == 0
            || classifiedLines[1][0].size() == 0 || classifiedLines[1][1].size() == 0) {
        return false;
    }

    for (int ang = 0; ang < 2; ang++) {//for each angle group
        for (int sec = 0; sec < 2; sec++) {//for each location group
            //estimate a representative (a, b) of ax+by=1 by weighted averaging with outlier rejection
            final int HIST_DIV_NUM = 50;
            final int OUTLIER_LOOPNUM = 2;
            //outlier rejection threshold for a and b, in standard deviations
            final double SIGMA_MUL = 1.0;
            double aveA = 0.0;
            double aveB = 0.0;
            double stdevA = 0.0;
            double stdevB = 0.0;
            double aMax = -Double.MAX_VALUE;
            double aMin = Double.MAX_VALUE;
            double bMax = -Double.MAX_VALUE;
            double bMin = Double.MAX_VALUE;

            for (int i = 0; i < OUTLIER_LOOPNUM; i++) {
                if (classifiedLines[ang][sec].size() == 0) {
                    return false;
                }
                aveA = 0.0;
                aveB = 0.0;
                stdevA = 0.0;
                stdevB = 0.0;
                double aveL = 0.0;
                for (LineInfo li : classifiedLines[ang][sec]) {
                    aveA += li.mLineEq.a * li.mLength * li.mWidth;
                    aveB += li.mLineEq.b * li.mLength * li.mWidth;
                    aveL += li.mLength * li.mWidth;
                }
                aveA /= aveL;
                aveB /= aveL;
                for (LineInfo li : classifiedLines[ang][sec]) {
                    final double aa = li.mLineEq.a - aveA;
                    final double bb = li.mLineEq.b - aveB;
                    stdevA += aa * aa;
                    stdevB += bb * bb;
                }
                stdevA = Math.sqrt(stdevA / classifiedLines[ang][sec].size()) * SIGMA_MUL;
                stdevB = Math.sqrt(stdevB / classifiedLines[ang][sec].size()) * SIGMA_MUL;
                aMax = aveA + stdevA;
                aMin = aveA - stdevA;
                bMax = aveB + stdevB;
                bMin = aveB - stdevB;
                if (i < OUTLIER_LOOPNUM - 1) {
                    ArrayList<LineInfo> tmp = new ArrayList<LineInfo>();
                    for (LineInfo li : classifiedLines[ang][sec]) {
                        //drop lines whose a or b falls outside mean +/- SIGMA_MUL * stdev
                        if (li.mLineEq.a > aMax || li.mLineEq.a < aMin || li.mLineEq.b > bMax
                                || li.mLineEq.b < bMin)
                            continue;
                        tmp.add(li);
                    }
                    if (tmp.size() > 0) {
                        classifiedLines[ang][sec] = tmp;
                    } else {
                        for (LineInfo li : classifiedLines[ang][sec]) {
                            //if everything was rejected, relax the test: drop a line only when both a and b are out of range
                            if ((li.mLineEq.a > aMax || li.mLineEq.a < aMin)
                                    && (li.mLineEq.b > bMax || li.mLineEq.b < bMin))
                                continue;
                            tmp.add(li);
                        }
                        classifiedLines[ang][sec] = tmp;
                    }
                }
            }
            //recompute the min/max of a and b over the surviving lines
            aMax = -Double.MAX_VALUE; //-MAX_VALUE (not MIN_VALUE) so negative coefficients are handled correctly
            aMin = Double.MAX_VALUE;
            bMax = -Double.MAX_VALUE;
            bMin = Double.MAX_VALUE;
            for (LineInfo li : classifiedLines[ang][sec]) {
                if (li.mLineEq.a > aMax)
                    aMax = li.mLineEq.a;
                if (li.mLineEq.a < aMin)
                    aMin = li.mLineEq.a;
                if (li.mLineEq.b > bMax)
                    bMax = li.mLineEq.b;
                if (li.mLineEq.b < bMin)
                    bMin = li.mLineEq.b;
            }

            final double aDiv = (aMax - aMin) / (double) HIST_DIV_NUM;
            final double bDiv = (bMax - bMin) / (double) HIST_DIV_NUM;

            LineList hist[][] = new LineList[HIST_DIV_NUM][HIST_DIV_NUM];
            for (int i = 0; i < HIST_DIV_NUM; i++) {
                for (int j = 0; j < HIST_DIV_NUM; j++) {
                    hist[i][j] = new LineList();
                }
            }
            int linenum = 0;
            for (LineInfo li : classifiedLines[ang][sec]) {
                int aPos = (int) ((li.mLineEq.a - aMin) / aDiv);
                if (aPos == HIST_DIV_NUM)
                    aPos--;
                int bPos = (int) ((li.mLineEq.b - bMin) / bDiv);
                if (bPos == HIST_DIV_NUM)
                    bPos--;
                hist[aPos][bPos].pushLine(li);
                linenum++;
            }
            if (linenum == 0) {
                return false;
            }
            int maxAPos = 0;
            int maxBPos = 0;
            double maxLen = 0.0;
            for (int a = 0; a < HIST_DIV_NUM; a++) {
                for (int b = 0; b < HIST_DIV_NUM; b++) {
                    if (hist[a][b].getLineListNum() == 0) {
                        continue;
                    }
                    double len = 0.0;
                    for (LineInfo li : hist[a][b].mLineList) {
                        len += li.mLength;
                    }
                    if (maxLen < len) {
                        maxAPos = a;
                        maxBPos = b;
                        maxLen = len;
                    }
                }
            }
            if (linenum == 1) {
                lineEq[ang][sec].a = hist[maxAPos][maxBPos].mLineList.get(0).mLineEq.a;
                lineEq[ang][sec].b = hist[maxAPos][maxBPos].mLineList.get(0).mLineEq.b;
            } else {
                lineEq[ang][sec].a = ((double) maxAPos + 0.5) * aDiv + aMin;
                lineEq[ang][sec].b = ((double) maxBPos + 0.5) * bDiv + bMin;
            }

            if (img != null) {
                final double aa = lineEq[ang][sec].a;
                final double bb = lineEq[ang][sec].b;
                Point pt1 = new Point();
                Point pt2 = new Point();
                if (Math.abs(bb) > Math.abs(aa)) {
                    pt1.x = 0.0;
                    pt1.y = (1.0 - aa * (float) (-centerX)) / bb + (float) centerY;
                    pt2.x = (float) mViewWidth;
                    pt2.y = (1.0 - aa * (float) (centerX)) / bb + (float) centerY;
                } else {
                    pt1.x = (1.0 - bb * (float) (-centerY)) / aa + (float) centerX;
                    pt1.y = 0.0;
                    pt2.x = (1.0 - bb * (float) (centerY)) / aa + (float) centerX;
                    pt2.y = (float) mViewHeight;
                }
                if (MyDebug.DEBUG) {
                    if (Math.abs(bb) > 0.001 && Math.abs(aa / bb) > 0.3 && Math.abs(aa / bb) < 2) {
                        Log.d(LOG_TAG,
                                "ang = " + ang + " sec = " + sec + " max a/b = " + maxAPos + ":" + maxBPos);
                        //Core.line(img, pt1, pt2, new Scalar(0xff, 0x00, 0x00), 5);
                    } else {
                        //Core.line(img, pt1, pt2, new Scalar(0xff, 0xff, 0xff), 5);
                    }
                }

                //draw the classified lines (debug)
                Scalar color[] = new Scalar[4];
                color[0] = new Scalar(0xff, 0x00, 0x00);
                color[1] = new Scalar(0x00, 0xff, 0x00);
                color[2] = new Scalar(0x00, 0x00, 0xff);
                color[3] = new Scalar(0xff, 0x00, 0xff);

                for (LineInfo li : mLineInfo) {
                    int c = 0;
                    if (li.mLocationFlg == (LineInfo.ANGLE0 | LineInfo.LOCAT0)) {
                        c = 0;
                    } else if (li.mLocationFlg == (LineInfo.ANGLE0 | LineInfo.LOCAT1)) {
                        c = 1;
                    }
                    if (li.mLocationFlg == (LineInfo.ANGLE1 | LineInfo.LOCAT0)) {
                        c = 2;
                    } else if (li.mLocationFlg == (LineInfo.ANGLE1 | LineInfo.LOCAT1)) {
                        c = 3;
                    }
                    Core.line(img, li.mStart, li.mEnd, color[c], 1);
                    Core.circle(img, li.mStart, 10, color[0]);
                    Core.circle(img, li.mEnd, 10, color[1]);
                }
            }
        }
    }
    return true;
}

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Computes the four corner points of the board as the intersections of the selected lines.
 * @param lineEq line equations (ax+by=1), indexed as [angle][section]
 * @param points output ArrayList that receives the four corner points
 * @param img debug image to draw the corners on; pass null to skip drawing
 * @return true on success, false if a pair of lines could not be intersected
 */
private boolean calcSquare(StraightLineEquation lineEq[][], ArrayList<Point> points, Mat img) {
    //solve a 2x2 linear system for each pair of (angle 0, angle 1) lines to get their intersection
    Mat mat = new Mat(2, 2, CvType.CV_32F);
    mPointCenterX = 0.0f;
    mPointCenterY = 0.0f;
    int counter = 0;
    for (int ang0sec = 0; ang0sec < 2; ang0sec++) {
        mat.put(0, 0, lineEq[0][ang0sec].a);
        mat.put(0, 1, lineEq[0][ang0sec].b);
        for (int ang1sec = 0; ang1sec < 2; ang1sec++) {
            mat.put(1, 0, lineEq[1][ang1sec].a);
            mat.put(1, 1, lineEq[1][ang1sec].b);
            Mat matAns;
            try {
                matAns = mat.inv();
                if (matAns == null)
                    return false;
            } catch (Exception e) {//inversion failed: the two lines are (nearly) parallel
                e.printStackTrace();
                return false;
            }
            float x = (float) (matAns.get(0, 0)[0] + matAns.get(0, 1)[0] + mCenterX);
            float y = (float) (matAns.get(1, 0)[0] + matAns.get(1, 1)[0] + mCenterY);
            Point p = new Point(x, y);
            points.add(p);
            mPointCenterX += x;
            mPointCenterY += y;
            counter++;
        }
    }
    mPointCenterX /= (float) counter;
    mPointCenterY /= (float) counter;
    //sort the corners into a consistent order around their center
    Collections.sort(points, new PointComparator());
    if (img != null) {
        Scalar color[] = new Scalar[4];
        color[0] = new Scalar(0xff, 0x00, 0x00);
        color[1] = new Scalar(0x00, 0xff, 0x00);
        color[2] = new Scalar(0x00, 0x00, 0xff);
        color[3] = new Scalar(0xff, 0x00, 0xff);

        for (int i = 0; i < 4; i++) {
            Core.circle(img, points.get(i), 30, color[i], 5);
        }
    }
    if (MyDebug.DEBUG) {
        for (int i = 0; i < 4; i++) {
            Log.d(LOG_TAG, "point(" + i + ") = " + points.get(i).x + ":" + points.get(i).y);
        }
    }

    return true;
}
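
calcSquare finds each corner by solving the 2x2 linear system formed by two line equations a1*x + b1*y = 1 and a2*x + b2*y = 1 (in image-center-relative coordinates) through matrix inversion, then shifts the result by the image center. A standalone sketch of just that intersection step, under the same line representation (the class wrapper and names are illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;

public final class LineIntersection {
    //Intersection of a1*x + b1*y = 1 and a2*x + b2*y = 1, i.e. [x; y] = M^-1 * [1; 1].
    //Returns null when the lines are (nearly) parallel and M cannot be inverted.
    static Point intersect(double a1, double b1, double a2, double b2) {
        Mat m = new Mat(2, 2, CvType.CV_32F);
        m.put(0, 0, a1); m.put(0, 1, b1);
        m.put(1, 0, a2); m.put(1, 1, b2);
        if (Math.abs(Core.determinant(m)) < 1e-9) {
            return null; //singular matrix
        }
        Mat inv = m.inv();
        double x = inv.get(0, 0)[0] + inv.get(0, 1)[0];
        double y = inv.get(1, 0)[0] + inv.get(1, 1)[0];
        return new Point(x, y);
    }

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        //x = 1 meets y = 1 at (1, 1).
        System.out.println(intersect(1, 0, 0, 1));
    }
}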

From source file:com.opencv.mouse.MouseMainFrame.java

private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg(); //Utility class to convert Mat to Java's BufferedImage

            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Kamera Ak Deil..!");
            } else
                System.out.println("Kamera Ald --> " + webCam.toString());

            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            Mat hsv_image = new Mat(webcam_image.cols(), webcam_image.rows(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.cols(), webcam_image.rows(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {

                }

                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Web Cam Kapal !");
                    }

                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {

                        }
                        // Mat inRangeResim = webcam_image.clone();
                        /*
                        Mat inRangeResim = webcam_image.clone();
                        matToBufferedImageConverter.setMatrix(inRangeResim, ".jpg");
                        image =matToBufferedImageConverter.getBufferedImage();
                        Highgui.imwrite("D:\\bitirme.jpg", inRangeResim);
                        */

                        //       MatOfRect faceDetections = new MatOfRect();
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        //black HSV range: 0 0 0 - 180 45 100
                        //HSV blue:   Core.inRange(webcam_image, new Scalar(75,63,40), new Scalar(118,255,255), webcam_image);
                        //RGB blue:        // Core.inRange(webcam_image, new Scalar(50,0,0), new Scalar(255,0,0), webcam_image);
                        //orange HSV:      Core.inRange(webcam_image, new Scalar(5,50,50), new Scalar(15,255,255), webcam_image);
                        //Core.inRange(webcam_image, new Scalar(80,50,50), new Scalar(140,255,255), webcam_image);
                        //        Core.inRange(webcam_image, new Scalar(29,0,24), new Scalar(30,155,155), webcam_image);

                        //HSV blue
                        //                       jSliderHmin.setValue(75);
                        //                       jSliderSmin.setValue(63);
                        //                       jSliderVmin.setValue(40);
                        //                       jSliderHmax.setValue(118);
                        //                       jSliderSmax.setValue(255);
                        //                       jSliderVmax.setValue(255);
                        //
                        //                       jSliderHmin.setValue(0);
                        //                       jSliderSmin.setValue(0);
                        //                       jSliderVmin.setValue(0);
                        //                       jSliderHmax.setValue(179);
                        //                       jSliderSmax.setValue(39);
                        //                       jSliderVmax.setValue(120);
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255),
                                thresholded);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);

                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);

                        for (int i = 0; i < contours.size(); i++) {
                            //  System.out.println(Imgproc.contourArea(contours.get(i)));
                            //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            //System.out.println(rect.height);
                            // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                            //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height),
                                    new Scalar(0, 0, 255));

                            //}
                            //}
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }

                        }

                        //   Imgproc.cvtColor(webcam_image, webcam_image, Imgproc.COLOR_HSV2BGR);
                        //  hsv_image.convertTo(hsv_image, CvType.CV_32F);

                        //   Imgproc.Canny(thresholded, thresholded, 10, 20);
                        //   Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //this works

                        //    Imgproc.cvtColor(thresholded, thresholded, Imgproc.COLOR_GRAY2BGR);
                        //  Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //    webcam_image.copyTo(hsv_image, thresholded);
                        //                            System.out.println("<------------------------------>");
                        //                            System.out.println("BGR: " +webcam_image.channels()+"  Size : "+webcam_image.size());
                        //                            System.out.println("HSV :"+hsv_image.channels()+"  Size: "+hsv_image.size());
                        //                            System.out.println("Thresold :"+thresholded.channels()+"  Size : "+thresholded.size());
                        //                            System.out.println("<------------------------------>");
                        //
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");

                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);

                    } else {

                        System.out.println("Grnt yok!");
                        break;
                    }
                }
                //           webCam.release();
            }

        }
    };
    threadDurum = true;
    t.start();
}
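
In this listing and the video-capture example that follows, the Scalar constructor supplies the lower and upper HSV bounds for Core.inRange, which produces a binary mask of the pixels inside that range. A condensed sketch of just that thresholding step, assuming an already-filled BGR frame (the bounds are copied from the listing above and are not generally tuned values; the class wrapper is illustrative):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public final class HsvThreshold {
    //Returns a binary mask of the pixels whose HSV values fall inside [lower, upper].
    //In OpenCV, H ranges over 0..179 and S, V over 0..255.
    static Mat maskRange(Mat bgrFrame) {
        Mat hsv = new Mat();
        Mat mask = new Mat();
        Imgproc.cvtColor(bgrFrame, hsv, Imgproc.COLOR_BGR2HSV);
        Scalar lower = new Scalar(100, 97, 206);
        Scalar upper = new Scalar(120, 255, 255);
        Core.inRange(hsv, lower, upper, mask);
        return mask;
    }
}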

From source file:com.opencv.video.VideoCaptureMain.java

private void jButtonPlayActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonPlayActionPerformed
    Thread t = new Thread() {
        @Override
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg();
            try {
                final VideoCapture videoCapture = new VideoCapture("D:\\colorTest.mp4");
                //          videoCapture = new VideoCapture(0);
                //  Thread.sleep(3000);
                if (!videoCapture.isOpened()) {
                    System.out.println("Video Alamad");
                    return;
                }

                double fps = videoCapture.get(5); // property 5 = CV_CAP_PROP_FPS

                System.out.println("FPS :" + fps);
                frame = new Mat();

                Mat hsv_image = new Mat();
                Mat thresholded = new Mat();

                while (true) {
                    boolean basarili = videoCapture.read(frame);

                    if (!basarili) {
                        System.out.println("Okunamyor");
                        break;
                    }

                    Imgproc.cvtColor(frame, hsv_image, Imgproc.COLOR_BGR2HSV);

                    Core.inRange(hsv_image, new Scalar(170, 150, 60), new Scalar(179, 255, 255), thresholded);
                    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                    Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                            Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                    for (int i = 0; i < contours.size(); i++) {
                        //  System.out.println(Imgproc.contourArea(contours.get(i)));
                        //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                        Rect rect = Imgproc.boundingRect(contours.get(i));
                        kesit = frame.submat(rect);
                        //System.out.println(rect.height);
                        // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                        //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                        Core.rectangle(frame, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
                        // Core.circle(webcam_image, new Point(rect.x+rect.height/2, rect.y+rect.width/2), i, new Scalar(0, 0, 255));
                        //}
                        //}
                    }

                    matToBufferedImageConverter.setMatrix(frame, ".jpg");

                    g.drawImage(matToBufferedImageConverter.getBufferedImage(), 0, 0, 640, 480, null);

                }
            } catch (Exception e) {
                System.out.println("Sorun Burda");
            }

        }
    };

    t.start();

}

From source file:com.projectcs2103t.openglestest.OpenGLES20Activity.java

License:Apache License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    float projection[] = mCameraProjectionAdapter.getProjectionGL();
    Mat CameraMat = mCameraProjectionAdapter.getCVCameraMat();
    Mat DistortionMat = mCameraProjectionAdapter.getCVDistortionMat();
    Mat ModelViewMat = new Mat(4, 4, CvType.CV_64FC1);
    int detected = nl.processFrame(rgba.getNativeObjAddr(), CameraMat.getNativeObjAddr(),
            DistortionMat.getNativeObjAddr(), ModelViewMat.getNativeObjAddr());
    float mGLModelView[] = null;
    if (detected == 1) {
        mGLModelView = new float[16];
        // copy the 4x4 model-view matrix into a flat array (row-major order)
        for (int row = 0; row < 4; row++) {
            for (int col = 0; col < 4; col++) {
                mGLModelView[row * 4 + col] = (float) ModelViewMat.get(row, col)[0];
            }
        }
        //showMatrices(rgba, ModelViewMat);
    }
    mCameraProjectionAdapter.setModelViewGL(mGLModelView);
    Imgproc.putText(rgba, mCameraProjectionAdapter.toString(), new Point(50, 50), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mGLView.toString(), new Point(50, 75), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    return rgba;
}

From source file:com.projectcs2103t.openglestest.OpenGLES20Activity.java

License:Apache License

private void showMatrices(Mat rgba, Mat mat) {
    //String camMatStr = cameraMat.dump();
    // format each row of the 4x4 matrix as "|m0,m1,m2,m3|"
    String[] rows = new String[4];
    for (int r = 0; r < 4; r++) {
        rows[r] = "|" + mat.get(r, 0)[0] + "," + mat.get(r, 1)[0] + "," + mat.get(r, 2)[0] + ","
                + mat.get(r, 3)[0] + "|";
    }
    Imgproc.putText(rgba, "Model-View-Mat:", new Point(50, 100), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    for (int r = 0; r < 4; r++) {
        Imgproc.putText(rgba, rows[r], new Point(50, 125 + 25 * r), Core.FONT_HERSHEY_PLAIN, 1.0,
                new Scalar(0, 255, 0));
    }

}

From source file:com.raspoid.additionalcomponents.camera.opencv.FaceDetector.java

License:Open Source License

/**
 * Creates a new output image from the input image with each detected face surrounded by a green box.
 * @param image the input image previously analyzed.
 * @param faces array of coordinates corresponding to the previously detected faces.
 * @param outputFilename the output file name.
 * @return true in case of success when creating the output file, false in case of failure.
 */
public static boolean surroundFaces(Mat image, Rect[] faces, String outputFilename) {
    if (outputFilename == null || outputFilename.isEmpty())
        throw new RaspoidException("The output filename can't be empty.");

    for (Rect face : faces)
        Core.rectangle(image, new Point(face.x, face.y), new Point(face.x + face.width, face.y + face.height),
                new Scalar(0, 255, 0));

    return Highgui.imwrite(outputFilename, image);
}
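
A hedged example of how surroundFaces might be called together with OpenCV 2.4's CascadeClassifier, consistent with the 2.4-style API (Core.rectangle, Highgui) used in the listing; the file paths and cascade name are placeholders:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.highgui.Highgui;
import org.opencv.objdetect.CascadeClassifier;

import com.raspoid.additionalcomponents.camera.opencv.FaceDetector;

public class FaceBoxDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        //Placeholder paths: point these at a real cascade file and input image.
        CascadeClassifier detector = new CascadeClassifier("haarcascade_frontalface_alt.xml");
        Mat image = Highgui.imread("input.jpg");

        MatOfRect faces = new MatOfRect();
        detector.detectMultiScale(image, faces);

        //Draws a green box (Scalar(0, 255, 0), BGR order) around each face and writes the result.
        boolean ok = FaceDetector.surroundFaces(image, faces.toArray(), "faces_out.jpg");
        System.out.println("Output written: " + ok);
    }
}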

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Computes the rectangle where the searched picture lies and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 * @deprecated Kept here for information, but OpenCV 3 no longer includes SURF in the Java build.
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {

    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}