List of usage examples for the org.opencv.core.Scalar constructor
public Scalar(double v0, double v1, double v2)
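All of the examples below pass this constructor's three values as a colour, a channel bound, or a fill value; for the 8-bit BGR images used throughout, OpenCV reads the values in blue, green, red order. As a point of reference, here is a minimal, self-contained sketch (not taken from any of the projects below) showing a Scalar used both as a fill value and as the result of a per-channel reduction:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ScalarBasics {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A Scalar holding three channel values; for a BGR image this is pure red.
        Scalar red = new Scalar(0, 0, 255);

        // Fill a new 3-channel image with that colour.
        Mat canvas = new Mat(240, 320, CvType.CV_8UC3, red);

        // Scalars also come back out of reduction functions such as Core.mean;
        // here the per-channel mean is simply the fill colour itself.
        Scalar mean = Core.mean(canvas);
        System.out.println("mean per channel: " + mean);
    }
}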
From source file:tv.danmaku.ijk.media.example.activities.VideoActivity.java
License:Apache License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    // return mRgba;
    // iThreshold = 10000;
    // Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(mRgba, mRgba, new org.opencv.core.Size(3, 3), 1, 1);
    // Imgproc.medianBlur(mRgba, mRgba, 3);

    if (!mIsColorSelected)
        return mRgba;

    List<MatOfPoint> contours = mDetector.getContours();
    mDetector.process(mRgba);

    Log.d(TAG, "Contours count: " + contours.size());

    if (contours.size() <= 0) {
        return mRgba;
    }

    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(0).toArray()));

    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;

    for (int i = 1; i < contours.size(); i++) {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(i).toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours.get(boundPos).toArray()));
    Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    Log.d(TAG, " Row start [" + (int) boundRect.tl().y + "] row end [" + (int) boundRect.br().y
            + "] Col start [" + (int) boundRect.tl().x + "] Col end [" + (int) boundRect.br().x + "]");

    int rectHeightThresh = 0;
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;

    Log.d(TAG, " A [" + a + "] br y - tl y = [" + (boundRect.br().y - boundRect.tl().y) + "]");

    // Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
    Imgproc.rectangle(mRgba, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(boundPos).toArray()), pointMat, 3, true);
    contours.set(boundPos, new MatOfPoint(pointMat.toArray()));

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours.get(boundPos).toArray()), hull);

    if (hull.toArray().length < 3)
        return mRgba;

    Imgproc.convexityDefects(new MatOfPoint(contours.get(boundPos).toArray()), hull, convexDefect);

    List<MatOfPoint> hullPoints = new LinkedList<MatOfPoint>();
    List<Point> listPo = new LinkedList<Point>();
    for (int j = 0; j < hull.toList().size(); j++) {
        listPo.add(contours.get(boundPos).toList().get(hull.toList().get(j)));
    }

    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.add(e);

    List<MatOfPoint> defectPoints = new LinkedList<MatOfPoint>();
    List<Point> listPoDefect = new LinkedList<Point>();
    for (int j = 0; j < convexDefect.toList().size(); j = j + 4) {
        Point farPoint = contours.get(boundPos).toList().get(convexDefect.toList().get(j + 2));
        Integer depth = convexDefect.toList().get(j + 3);
        if (depth > iThreshold && farPoint.y < a) {
            listPoDefect.add(contours.get(boundPos).toList().get(convexDefect.toList().get(j + 2)));
        }
        Log.d(TAG, "defects [" + j + "] " + convexDefect.toList().get(j + 3));
    }

    MatOfPoint e2 = new MatOfPoint();
    e2.fromList(listPo);
    defectPoints.add(e2);

    Log.d(TAG, "hull: " + hull.toList());
    Log.d(TAG, "defects: " + convexDefect.toList());

    Imgproc.drawContours(mRgba, hullPoints, -1, CONTOUR_COLOR, 3);

    int defectsTotal = (int) convexDefect.total();
    Log.d(TAG, "Defect total " + defectsTotal);

    this.numberOfFingers = listPoDefect.size();
    if (this.numberOfFingers > 5)
        this.numberOfFingers = 5;

    mHandler.post(mUpdateFingerCountResults);

    for (Point p : listPoDefect) {
        Imgproc.circle(mRgba, p, 6, new Scalar(255, 0, 255));
    }

    return mRgba;
}
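The three-argument constructor appears here only as the magenta new Scalar(255, 0, 255) passed to Imgproc.circle for the convexity-defect points. A minimal sketch of just that drawing step, with a hypothetical frame and defect list standing in for mRgba and listPoDefect:

import java.util.Arrays;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class DrawDefectPoints {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical frame and defect points standing in for mRgba / listPoDefect.
        Mat frame = new Mat(480, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
        List<Point> defects = Arrays.asList(new Point(100, 120), new Point(220, 90));

        // Magenta in BGR order: blue = 255, green = 0, red = 255.
        Scalar magenta = new Scalar(255, 0, 255);
        for (Point p : defects) {
            Imgproc.circle(frame, p, 6, magenta);
        }
    }
}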
From source file:usefull.detectContours.java
License:LGPL
public static void main(String[] args) throws InterruptedException {
    // load the Core OpenCV library by name
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // load two image files
    Mat img1 = Highgui.imread("files/cctv_example1.png");
    Mat img2 = Highgui.imread("files/cctv_example2.png");

    // create a new image object to store image difference
    Mat diff_img = new Mat();

    // compute the difference between the images
    Core.absdiff(img1, img2, diff_img);

    // now convert it to grey and threshold it
    Mat grey = new Mat();
    Imgproc.cvtColor(diff_img, grey, Imgproc.COLOR_BGR2GRAY);
    Imgproc.adaptiveThreshold(grey, diff_img, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY_INV, 7, 10);

    // now clean it up using some morphological operations
    Size ksize = new Size(15, 15);
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, ksize);
    Imgproc.morphologyEx(diff_img, diff_img, Imgproc.MORPH_CLOSE, kernel);

    // find all the contours from the binary image using the edge-to-contour
    // stuff we looked at in lectures
    List<MatOfPoint> contours = new Vector<MatOfPoint>();
    Imgproc.findContours(diff_img, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // draw the contours on image 2 in red
    Imgproc.drawContours(img2, contours, -1, new Scalar(0, 0, 255));

    // find the largest contour by area
    double maxArea = 0;
    int maxAreaIndex = 0;
    for (int i = 0; i < contours.size(); i++) {
        double area = Imgproc.contourArea(contours.get(i), false);
        if (area > maxArea) {
            maxArea = area;
            maxAreaIndex = i;
        }
    }

    // draw the largest contour in green (the Scalar here is B, G, R)
    Imgproc.drawContours(img2, contours, maxAreaIndex, new Scalar(0, 255, 0));

    // create new window objects
    Imshow ims1 = new Imshow("Image 1");
    Imshow ims2 = new Imshow("Image 2");
    Imshow ims_diff = new Imshow("Difference");

    // display images
    ims1.showImage(img1);
    ims2.showImage(img2);
    ims_diff.showImage(diff_img);
}
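With BGR images, new Scalar(0, 0, 255) is red and new Scalar(0, 255, 0) is green, which is why the second drawing comment above now says green. The sketch below condenses the same difference-then-draw idea; it assumes the 3.x-style Imgcodecs API and placeholder file names, and writes the result to disk because Imshow is a helper class specific to this repository:

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class DiffContours {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat img1 = Imgcodecs.imread("frame1.png");   // placeholder paths
        Mat img2 = Imgcodecs.imread("frame2.png");

        // Absolute difference, then a simple binary threshold on the grey version.
        Mat diff = new Mat();
        Core.absdiff(img1, img2, diff);
        Imgproc.cvtColor(diff, diff, Imgproc.COLOR_BGR2GRAY);
        Imgproc.threshold(diff, diff, 40, 255, Imgproc.THRESH_BINARY);

        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Imgproc.findContours(diff, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        // Red outlines in BGR order, 2 px thick.
        Imgproc.drawContours(img2, contours, -1, new Scalar(0, 0, 255), 2);
        Imgcodecs.imwrite("diff_contours.png", img2);
    }
}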
From source file:usefull.hogPeopleDetection.java
License:LGPL
public static void main(String[] args) throws InterruptedException {
    // load the Core OpenCV library by name
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // create video capture device object
    VideoCapture cap = new VideoCapture();

    // try to use the hardware device if present
    int CAM_TO_USE = 0;

    // create a new image object
    Mat matFrame = new Mat();

    // try to open first capture device (0)
    try {
        cap.open(CAM_TO_USE);
    } catch (Exception e1) {
        System.out.println("No webcam attached");
        // otherwise try opening a video file
        try {
            cap.open("files/video.mp4");
        } catch (Exception e2) {
            System.out.println("No video file found");
        }
    }

    // if a video capture source is now open
    if (cap.isOpened()) {
        // create a new window object
        Imshow ims = new Imshow("HOG People Detection");

        boolean keepProcessing = true;

        // create new HoG detection object
        HOGDescriptor HOG = new HOGDescriptor();
        HOG.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());

        // create the output containers for the detections
        MatOfRect foundLocations = new MatOfRect();
        MatOfDouble foundWeights = new MatOfDouble();

        while (keepProcessing) {
            // grab the next frame from video source
            cap.grab();
            // decode and return the grabbed video frame
            cap.retrieve(matFrame);

            // if the frame is valid (not end of video for example)
            if (!(matFrame.empty())) {
                // perform detection
                HOG.detectMultiScale(matFrame, foundLocations, foundWeights, 0, new Size(8, 8),
                        new Size(32, 32), 1.05, 8, false);

                List<Rect> rectangles = foundLocations.toList();
                for (int i = 0; i < rectangles.size(); i++) {
                    Core.rectangle(matFrame, new Point(rectangles.get(i).x, rectangles.get(i).y),
                            new Point(rectangles.get(i).x + rectangles.get(i).width,
                                    rectangles.get(i).y + rectangles.get(i).height),
                            new Scalar(255, 0, 0), 2, 1, 0);
                }

                // display image with a delay of 40ms (i.e. 1000 ms / 25 = 25 fps)
                ims.showImage(matFrame);
                Thread.sleep(40);
            } else {
                keepProcessing = false;
            }
        }
    } else {
        System.out.println("error cannot open any capture source - exiting");
    }

    // close down the camera correctly
    cap.release();
}
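new Scalar(255, 0, 0) here is blue in BGR order, and Core.rectangle is the OpenCV 2.4 Java API (the equivalent call in 3.x is Imgproc.rectangle). A minimal sketch of the detect-and-draw step against a single still image, written for the 3.x-style bindings with a placeholder input path:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.HOGDescriptor;

public class HogBoxes {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat frame = Imgcodecs.imread("people.jpg");   // placeholder path

        // Default people detector shipped with OpenCV.
        HOGDescriptor hog = new HOGDescriptor();
        hog.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());

        MatOfRect found = new MatOfRect();
        MatOfDouble weights = new MatOfDouble();
        hog.detectMultiScale(frame, found, weights, 0, new Size(8, 8), new Size(32, 32), 1.05, 2, false);

        // Blue boxes in BGR order, 2 px thick.
        Scalar blue = new Scalar(255, 0, 0);
        for (Rect r : found.toList()) {
            Imgproc.rectangle(frame, r.tl(), r.br(), blue, 2);
        }
        Imgcodecs.imwrite("people_detected.jpg", frame);
    }
}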
From source file:usefull.LoopImageFiles.java
License:LGPL
public static void main(String[] args) throws InterruptedException { // define the path to my files - clearly and obviously // **** CHANGE THIS TO YOUR OWN DIRECTORY ****/ String IMAGE_FILES_DIRECTORY_PATH = "files/combinations/"; // load the Core OpenCV library by name System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // create a display window using an Imshow object Imshow ims = new Imshow("Next Image ..."); Size frame = new Size(704, 480); List<Mat> backgroundImg = new ArrayList<Mat>(); backgroundImg.add(Highgui.imread("files/1.png")); backgroundImg.add(Highgui.imread("files/2.png")); backgroundImg.add(Highgui.imread("files/3.png")); backgroundImg.add(Highgui.imread("files/4.png")); backgroundImg.add(Highgui.imread("files/5.png")); backgroundImg.add(Highgui.imread("files/6.png")); backgroundImg.add(Highgui.imread("files/7.png")); for (Mat bgImg : backgroundImg) { Mat grey = new Mat(); Imgproc.cvtColor(bgImg, grey, Imgproc.COLOR_BGR2GRAY); Imgproc.adaptiveThreshold(grey, bgImg, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY_INV, 7, 10);//from w w w. j a v a 2 s .c om Size ksize = new Size(21, 21); Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, ksize); Imgproc.morphologyEx(bgImg, bgImg, Imgproc.MORPH_CLOSE, kernel); Size filter = new Size(21, 21); Imgproc.GaussianBlur(bgImg, bgImg, filter, 0, 0, Imgproc.BORDER_DEFAULT); } BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG(); List<Mat> fg_masks = new ArrayList<Mat>(); Imshow imsS = new Imshow("input ... "); Imshow imsF = new Imshow("background"); // get a listing of files in that directory Imshow ims_diff = new Imshow("Difference"); File dir = new File(IMAGE_FILES_DIRECTORY_PATH); File[] directoryListing = dir.listFiles(); if (directoryListing != null) { for (File imgFile : directoryListing) { // if the file name ends with .jpg (JPEG) or .png (Portable Network Graphic) if ((imgFile.getName().endsWith(".png")) || (imgFile.getName().endsWith(".jpg"))) { // load an image from each file (read and decode image file) Mat inputImage = new Mat(); Mat iImg = new Mat(); Size filter = new Size(15, 15); iImg = Highgui.imread(IMAGE_FILES_DIRECTORY_PATH + "/" + imgFile.getName()); Imgproc.resize(iImg, inputImage, frame); Imgproc.GaussianBlur(inputImage, inputImage, filter, 0, 0, Imgproc.BORDER_DEFAULT); //Imgproc.GaussianBlur(inputImage, inputImage, filter, 0, 0, Imgproc.BORDER_DEFAULT); //Imgproc.GaussianBlur(inputImage, inputImage, filter, 0, 0, Imgproc.BORDER_DEFAULT); Imgproc.medianBlur(inputImage, inputImage, 9); Mat fg_mask = new Mat(); Mat fg_mask1 = new Mat(); Mat fg_mask2 = new Mat(); Mat fg_mask3 = new Mat(); Mat fg_mask4 = new Mat(); Mat fg_mask5 = new Mat(); Mat fg_mask6 = new Mat(); Mat fg_mask7 = new Mat(); ArrayList<Integer> countAll = new ArrayList<Integer>(); for (int i = 0; i < 7; i++) { // add it to the background model with a learning rate of 0.1 MoG.apply(inputImage, backgroundImg.get(i), 0.1); // extract the foreground mask (1 = foreground / 0 - background), // and convert/expand it to a 3-channel version of the same Imgproc.cvtColor(backgroundImg.get(i), backgroundImg.get(i), Imgproc.COLOR_GRAY2BGR); } // logically AND it with the original frame to extract colour // pixel only in the foreground regions Core.bitwise_and(inputImage, backgroundImg.get(0), fg_mask1); Core.bitwise_and(inputImage, backgroundImg.get(1), fg_mask2); Core.bitwise_and(inputImage, backgroundImg.get(2), fg_mask3); Core.bitwise_and(inputImage, backgroundImg.get(3), fg_mask4); Core.bitwise_and(inputImage, backgroundImg.get(4), 
fg_mask5); Core.bitwise_and(inputImage, backgroundImg.get(5), fg_mask6); Core.bitwise_and(inputImage, backgroundImg.get(6), fg_mask7); for (int i = 0; i < 7; i++) { countAll.add(countP(backgroundImg.get(i))); } double meanPixels = average(countAll); double[] temp = new double[5]; for (int i = 0; i < 5; i++) { temp[i] = Math.abs(countAll.get(i) - meanPixels); } double min = temp[0]; int mark = 0; for (int i = 1; i < 5; i++) { if (temp[i] < min) { mark = i; min = temp[i]; } } fg_mask = backgroundImg.get(mark); // Create a contour ZONE B top-left Point[] contourPoints1 = new Point[5]; contourPoints1[0] = new Point(55, 22); contourPoints1[1] = new Point(200, 22); contourPoints1[2] = new Point(330, 175); contourPoints1[3] = new Point(200, 430); contourPoints1[4] = new Point(55, 110); // convert it to a java list of OpenCV MatOfPoint // objects as this is what the draw function requires MatOfPoint contour1 = new MatOfPoint(contourPoints1); List<MatOfPoint> contourList1 = new ArrayList<MatOfPoint>(); contourList1.add(contour1); // Create a contour ZONE C top-right Point[] contourPoints2 = new Point[4]; contourPoints2[0] = new Point(55, 130); contourPoints2[1] = new Point(195, 435); contourPoints2[2] = new Point(125, 570); contourPoints2[3] = new Point(55, 340); MatOfPoint contour2 = new MatOfPoint(contourPoints2); List<MatOfPoint> contourList2 = new ArrayList<MatOfPoint>(); contourList2.add(contour2); // Create a contour ZONE A Point[] contourPoints3 = new Point[5]; contourPoints3[0] = new Point(420, 22); contourPoints3[1] = new Point(465, 22); contourPoints3[2] = new Point(463, 295); contourPoints3[3] = new Point(190, 695); contourPoints3[4] = new Point(65, 695); MatOfPoint contour3 = new MatOfPoint(contourPoints3); List<MatOfPoint> contourList3 = new ArrayList<MatOfPoint>(); contourList3.add(contour3); // Create a contour ZONE C button Point[] contourPoints4 = new Point[4]; contourPoints4[0] = new Point(310, 545); contourPoints4[1] = new Point(463, 320); contourPoints4[2] = new Point(463, 695); contourPoints4[3] = new Point(400, 695); MatOfPoint contour4 = new MatOfPoint(contourPoints4); List<MatOfPoint> contourList4 = new ArrayList<MatOfPoint>(); contourList4.add(contour4); // Create a contour ZONE b right Point[] contourPoints5 = new Point[3]; contourPoints5[0] = new Point(300, 555); contourPoints5[2] = new Point(385, 695); contourPoints5[1] = new Point(200, 695); MatOfPoint contour5 = new MatOfPoint(contourPoints5); List<MatOfPoint> contourList5 = new ArrayList<MatOfPoint>(); contourList5.add(contour5); // Create a contour ZONE RACT Point[] contourPoints6 = new Point[4]; contourPoints6[0] = new Point(210, 22); contourPoints6[1] = new Point(285, 22); contourPoints6[2] = new Point(275, 100); contourPoints6[3] = new Point(225, 100); MatOfPoint contour6 = new MatOfPoint(contourPoints6); List<MatOfPoint> contourList6 = new ArrayList<MatOfPoint>(); contourList1.add(contour6); // Create a contour ZONE RACT Point[] contourPoints7 = new Point[4]; contourPoints7[0] = new Point(400, 22); contourPoints7[1] = new Point(465, 22); contourPoints7[2] = new Point(465, 180); contourPoints7[3] = new Point(400, 125); MatOfPoint contour7 = new MatOfPoint(contourPoints7); List<MatOfPoint> contourList7 = new ArrayList<MatOfPoint>(); contourList7.add(contour7); // Create a contour ZONE RACT Point[] contourPoints8 = new Point[3]; contourPoints8[0] = new Point(0, 340); contourPoints8[1] = new Point(0, 695); contourPoints8[2] = new Point(150, 695); MatOfPoint contour8 = new MatOfPoint(contourPoints8); List<MatOfPoint> 
contourList8 = new ArrayList<MatOfPoint>(); contourList8.add(contour8); //put point to the list List<Point> list1 = new ArrayList<Point>(); List<Point> list2 = new ArrayList<Point>(); List<Point> list3 = new ArrayList<Point>(); List<Point> list4 = new ArrayList<Point>(); List<Point> list5 = new ArrayList<Point>(); List<Point> list6 = new ArrayList<Point>(); List<Point> list7 = new ArrayList<Point>(); List<Point> list8 = new ArrayList<Point>(); Point[] contourPoints10 = new Point[5]; contourPoints10[0] = new Point(22, 55); contourPoints10[1] = new Point(22, 200); contourPoints10[2] = new Point(175, 330); contourPoints10[3] = new Point(430, 200); contourPoints10[4] = new Point(110, 55); // convert it to a java list of OpenCV MatOfPoint // objects as this is what the draw function requires MatOfPoint contour10 = new MatOfPoint(contourPoints10); List<MatOfPoint> contourList10 = new ArrayList<MatOfPoint>(); contourList10.add(contour10); // Create a contour ZONE C top-right Point[] contourPoints20 = new Point[4]; contourPoints20[0] = new Point(130, 55); contourPoints20[1] = new Point(435, 195); contourPoints20[2] = new Point(570, 125); contourPoints20[3] = new Point(340, 55); MatOfPoint contour20 = new MatOfPoint(contourPoints20); List<MatOfPoint> contourList20 = new ArrayList<MatOfPoint>(); contourList20.add(contour20); // Create a contour ZONE A Point[] contourPoints30 = new Point[5]; contourPoints30[0] = new Point(22, 420); contourPoints30[1] = new Point(22, 465); contourPoints30[2] = new Point(295, 463); contourPoints30[3] = new Point(695, 190); contourPoints30[4] = new Point(695, 65); MatOfPoint contour30 = new MatOfPoint(contourPoints30); List<MatOfPoint> contourList30 = new ArrayList<MatOfPoint>(); contourList30.add(contour30); // Create a contour ZONE C button Point[] contourPoints40 = new Point[4]; contourPoints40[0] = new Point(545, 310); contourPoints40[1] = new Point(320, 463); contourPoints40[2] = new Point(695, 463); contourPoints40[3] = new Point(695, 400); MatOfPoint contour40 = new MatOfPoint(contourPoints40); List<MatOfPoint> contourList40 = new ArrayList<MatOfPoint>(); contourList40.add(contour40); // Create a contour ZONE b right Point[] contourPoints50 = new Point[3]; contourPoints50[0] = new Point(555, 300); contourPoints50[2] = new Point(695, 385); contourPoints50[1] = new Point(695, 200); MatOfPoint contour50 = new MatOfPoint(contourPoints50); List<MatOfPoint> contourList50 = new ArrayList<MatOfPoint>(); contourList50.add(contour50); // Create a contour ZONE RACT Point[] contourPoints60 = new Point[4]; contourPoints60[0] = new Point(22, 210); contourPoints60[1] = new Point(22, 285); contourPoints60[2] = new Point(100, 275); contourPoints60[3] = new Point(100, 225); MatOfPoint contour60 = new MatOfPoint(contourPoints60); List<MatOfPoint> contourList60 = new ArrayList<MatOfPoint>(); contourList60.add(contour60); // Create a contour ZONE RACT Point[] contourPoints70 = new Point[4]; contourPoints70[0] = new Point(22, 400); contourPoints70[1] = new Point(22, 465); contourPoints70[2] = new Point(180, 465); contourPoints70[3] = new Point(125, 400); MatOfPoint contour70 = new MatOfPoint(contourPoints70); List<MatOfPoint> contourList70 = new ArrayList<MatOfPoint>(); contourList70.add(contour70); // Create a contour ZONE RACT Point[] contourPoints80 = new Point[3]; contourPoints80[0] = new Point(340, 0); contourPoints80[1] = new Point(695, 0); contourPoints80[2] = new Point(695, 150); MatOfPoint contour80 = new MatOfPoint(contourPoints80); List<MatOfPoint> contourList80 = new 
ArrayList<MatOfPoint>(); contourList80.add(contour80); Imgproc.drawContours(fg_mask, contourList10, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList20, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList30, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList40, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList50, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList60, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList70, -1, new Scalar(255, 255, 255), 2); Imgproc.drawContours(fg_mask, contourList80, -1, new Scalar(255, 255, 255), 2); Point point = new Point(); for (int row = 0; row < fg_mask.rows(); row++) { for (int col = 0; col < fg_mask.cols(); col++) { point = new Point(row, col); // perform point in polygon test MatOfPoint2f contourPoint1f = new MatOfPoint2f(contourPoints1); MatOfPoint2f contourPoint2f = new MatOfPoint2f(contourPoints2); MatOfPoint2f contourPoint3f = new MatOfPoint2f(contourPoints3); MatOfPoint2f contourPoint4f = new MatOfPoint2f(contourPoints4); MatOfPoint2f contourPoint5f = new MatOfPoint2f(contourPoints5); MatOfPoint2f contourPoint6f = new MatOfPoint2f(contourPoints6); MatOfPoint2f contourPoint7f = new MatOfPoint2f(contourPoints7); MatOfPoint2f contourPoint8f = new MatOfPoint2f(contourPoints8); if (Imgproc.pointPolygonTest(contourPoint1f, point, false) >= 0) { list1.add(point); } if (Imgproc.pointPolygonTest(contourPoint2f, point, false) >= 0) { list2.add(point); } if (Imgproc.pointPolygonTest(contourPoint3f, point, false) >= 0) { list3.add(point); } if (Imgproc.pointPolygonTest(contourPoint4f, point, false) >= 0) { list4.add(point); } if (Imgproc.pointPolygonTest(contourPoint5f, point, false) >= 0) { list5.add(point); } if (Imgproc.pointPolygonTest(contourPoint6f, point, false) >= 0) { list6.add(point); } if (Imgproc.pointPolygonTest(contourPoint7f, point, false) >= 0) { list7.add(point); } if (Imgproc.pointPolygonTest(contourPoint8f, point, false) >= 0) { list8.add(point); } } } boolean flag[] = { false, false, false, false, false, false, false }; int events = 0; String events_name = ""; int count1 = 0; for (Point p : list1) { int x1 = (int) p.x; int y1 = (int) p.y; double[] n = fg_mask.get(x1, y1); double value1 = n[0]; if (value1 == 255.0) { count1++; } } System.out.println(" 1 " + count1); if (count1 > 2000) { flag[0] = true; } int count2 = 0; for (Point p : list2) { int x2 = (int) p.x; int y2 = (int) p.y; double[] n = fg_mask.get(x2, y2); double value2 = n[0]; if (value2 == 255.0) { count2++; } } System.out.println(" 2 " + count2); if (count2 > 1500) { flag[1] = true; } int count3 = 0; for (Point p : list3) { int x3 = (int) p.x; int y3 = (int) p.y; double[] n = fg_mask.get(x3, y3); double value3 = n[0]; if (value3 == 255.0) { count3++; } } System.out.println(" 3 " + count3); if (count3 > 20000) { flag[4] = true; } if (count3 > 4000) { flag[2] = true; } int count4 = 0; for (Point p : list4) { int x4 = (int) p.x; int y4 = (int) p.y; double[] n = fg_mask.get(x4, y4); double value4 = n[0]; if (value4 == 255.0) { count4++; } } System.out.println(" 4 " + count4); if (count4 > 1700) { flag[1] = true; } int count5 = 0; for (Point p : list5) { int x5 = (int) p.x; int y5 = (int) p.y; double[] n = fg_mask.get(x5, y5); double value5 = n[0]; if (value5 == 255.0) { count5++; } } System.out.println(" 5 " + count5); if (count5 > 1000) { flag[0] = true; } int count6 = 0; for (Point p : list6) { int x6 = (int) p.x; int y6 = 
(int) p.y; double[] n = fg_mask.get(x6, y6); double value6 = n[0]; if (value6 == 255.0) { count6++; } } System.out.println(" 6 " + count6); if (count6 > 750) { flag[3] = true; } int count7 = 0; for (Point p : list7) { int x7 = (int) p.x; int y7 = (int) p.y; double[] n = fg_mask.get(x7, y7); double value7 = n[0]; if (value7 == 255.0) { count7++; } } System.out.println(" 7 " + count7); if (count7 > 1100) { flag[5] = true; } int count8 = 0; for (Point p : list8) { int x8 = (int) p.x; int y8 = (int) p.y; double[] n = fg_mask.get(x8, y8); double value8 = n[0]; if (value8 == 255.0) { count8++; } } System.out.println(" 8 " + count8); if (count8 > 2000) { flag[6] = true; } imsS.showImage(inputImage); imsF.showImage(fg_mask); imsF.showImage(backgroundImg.get(0)); System.out.print(imgFile.getName() + " "); //Identifying events if ((!flag[0]) && (!flag[1]) && (!flag[2]) && (!flag[3]) && (!flag[4])) { System.out.print("Empty "); } else if (flag[4] || (!flag[4] && flag[5]) || (!flag[4] && flag[6])) { System.out.print("Train "); } if (flag[0] && !(flag[4] || (!flag[4] && flag[5]) || (!flag[4] && flag[6]))) { System.out.print("Enter "); } if (flag[1] && !(flag[4] || (!flag[4] && flag[5]) || (!flag[4] && flag[6]))) { System.out.print("Leave "); } if ((flag[2] && (!flag[5]) && (!flag[6])) && !(flag[3] && (!(flag[4] || (!flag[4] && flag[5]) || (!flag[4] && flag[6]))))) { System.out.print("On track "); } if (flag[3] && (!(flag[4] || (!flag[4] && flag[5]) || (!flag[4] && flag[6])))) { System.out.print("Barrier"); } System.out.println(); Thread.sleep(40); } } } else { System.out.println("Could not get listing for directory: " + IMAGE_FILES_DIRECTORY_PATH); } }
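Every new Scalar(255, 255, 255) above draws a zone polygon in white on the foreground mask, and Imgproc.pointPolygonTest then counts foreground pixels inside each zone; note the listing builds its test points as new Point(row, col) and indexes the mask the same way, so the convention only has to be kept consistent. A much smaller sketch of the zone idea, with a made-up polygon, an empty mask, and a straightforward (x, y) = (col, row) convention:

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class ZoneOccupancy {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical single-channel foreground mask (255 = foreground).
        Mat fgMask = Mat.zeros(480, 704, CvType.CV_8UC1);

        // A made-up triangular zone.
        Point[] zone = { new Point(50, 50), new Point(300, 60), new Point(150, 400) };
        MatOfPoint2f zone2f = new MatOfPoint2f(zone);

        // Draw the zone outline in white on a BGR copy, as the listing does.
        Mat vis = new Mat();
        Imgproc.cvtColor(fgMask, vis, Imgproc.COLOR_GRAY2BGR);
        List<MatOfPoint> zoneList = new ArrayList<MatOfPoint>();
        zoneList.add(new MatOfPoint(zone));
        Imgproc.drawContours(vis, zoneList, -1, new Scalar(255, 255, 255), 2);

        // Count foreground pixels that fall inside the zone.
        int count = 0;
        for (int row = 0; row < fgMask.rows(); row++) {
            for (int col = 0; col < fgMask.cols(); col++) {
                // Point is (x, y) = (col, row); Mat.get is (row, col).
                if (Imgproc.pointPolygonTest(zone2f, new Point(col, row), false) >= 0
                        && fgMask.get(row, col)[0] == 255.0) {
                    count++;
                }
            }
        }
        System.out.println("foreground pixels in zone: " + count);
    }
}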
From source file:video.PictureAnalyser.java
public PictureAnalyser() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    this.color = new ArrayList<>();
    this.color.add(new Scalar(0, 0, 0));
    this.color.add(new Scalar(179, 255, 255));
}
From source file:video.PictureAnalyser.java
public void setColor(List<Point> coloranalyse) {
    this.color.add(new Scalar(coloranalyse.get(0).x, coloranalyse.get(1).x, coloranalyse.get(2).x));
    this.color.add(new Scalar(coloranalyse.get(0).y, coloranalyse.get(1).y, coloranalyse.get(2).y));
}
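These two methods keep pairs of Scalars as the lower and upper bounds of an HSV-style colour range (hue up to 179, saturation and value up to 255). The usual companion call is Core.inRange, which turns such a pair into a binary mask; a minimal sketch with assumed example bounds and a placeholder input path:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class ColorRangeMask {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat bgr = Imgcodecs.imread("input.jpg");      // placeholder path
        Mat hsv = new Mat();
        Imgproc.cvtColor(bgr, hsv, Imgproc.COLOR_BGR2HSV);

        // Assumed example bounds: a band of green-ish hues.
        Scalar lower = new Scalar(40, 60, 60);
        Scalar upper = new Scalar(80, 255, 255);

        // Pixels inside [lower, upper] become 255, everything else 0.
        Mat mask = new Mat();
        Core.inRange(hsv, lower, upper, mask);
        Imgcodecs.imwrite("mask.png", mask);
    }
}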
From source file:View.Signature.java
public static int sift(String routeVal, String route, String n_img1, String n_img2, String extension) { String bookObject = routeVal + n_img2 + extension; String bookScene = route + n_img1 + extension; //System.out.println("Iniciando SIFT"); //java.lang.System.out.print("Abriendo imagenes | "); Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR); Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT); //java.lang.System.out.print("Encontrar keypoints con SIFT | "); featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT); //java.lang.System.out.print("Computando descriptores | "); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); // Create the matrix for output image. Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); //java.lang.System.out.print("Dibujando keypoints en imagen base | "); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); // Match object image with the scene image MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); //java.lang.System.out.print("Detectando keypoints en imagen base | "); featureDetector.detect(sceneImage, sceneKeyPoints); //java.lang.System.out.print("Computando descriptores en imagen base | "); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 0); List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); //java.lang.System.out.print("Encontrando matches entre imagenes | "); descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); //java.lang.System.out.println("Calculando buenos matches"); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = 0.7f; java.lang.System.out.println(matches.size()); for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0];/*w w w . ja v a 2 s . com*/ DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } if (goodMatchesList.size() >= 7) { //java.lang.System.out.println("Match enontrado!!! 
Matches: "+goodMatchesList.size()); //if(goodMatchesList.size()>max){ //cambio = 1; //} List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); //System.out.println("Transforming object corners to scene corners..."); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4); //java.lang.System.out.println("Dibujando imagen de coincidencias"); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); String n_outputImage = route + "results\\" + n_img2 + "_outputImage_sift" + extension; String n_matchoutput = route + "results\\" + n_img2 + "_matchoutput_sift" + extension; String n_img = route + "results\\" + n_img2 + "_sift" + extension; Highgui.imwrite(n_outputImage, outputImage); Highgui.imwrite(n_matchoutput, matchoutput); //Highgui.imwrite(n_img, img); java.lang.System.out.println(goodMatches.size().height); double result = goodMatches.size().height * 100 / matches.size(); java.lang.System.out.println((int) result); //double result =goodMatches.size().height; if (result > 100) { return 100; } else if (result <= 100 && result > 85) { return 85; } else if (result <= 85 && result > 50) { return 50; } else if (result <= 50 && result > 25) { return 25; } else { return 0; } } else { //java.lang.System.out.println("Firma no encontrada"); } return 0; //System.out.println("Terminando SIFT"); }
From source file:View.SignatureLib.java
public static int sift(String routeRNV, String routeAdherent) { String bookObject = routeAdherent; String bookScene = routeRNV;/*from w ww .j av a 2s.co m*/ //System.out.println("Iniciando SIFT"); //java.lang.System.out.print("Abriendo imagenes | "); Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR); Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT); //java.lang.System.out.print("Encontrar keypoints con SIFT | "); featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT); //java.lang.System.out.print("Computando descriptores | "); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); // Create the matrix for output image. Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); //java.lang.System.out.print("Dibujando keypoints en imagen base | "); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); // Match object image with the scene image MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); //java.lang.System.out.print("Detectando keypoints en imagen base | "); featureDetector.detect(sceneImage, sceneKeyPoints); //java.lang.System.out.print("Computando descriptores en imagen base | "); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 0); List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); //java.lang.System.out.println(sceneDescriptors); if (sceneDescriptors.empty()) { java.lang.System.out.println("Objeto no encontrado"); return 0; } descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); //java.lang.System.out.println("Calculando buenos matches"); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = 0.7f; for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0]; DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } if (goodMatchesList.size() >= 7) { max = goodMatchesList.size(); List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, 
CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); //System.out.println("Transforming object corners to scene corners..."); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4); //java.lang.System.out.println("Dibujando imagen de coincidencias"); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); String n_outputImage = "../pre/outputImage_sift.jpg"; String n_matchoutput = "../pre/matchoutput_sift.jpg"; String n_img = "../pre/sift.jpg"; Highgui.imwrite(n_outputImage, outputImage); Highgui.imwrite(n_matchoutput, matchoutput); Highgui.imwrite(n_img, img); java.lang.System.out.println(goodMatches.size().height); double result = goodMatches.size().height;//*100/matches.size(); int score = 0; if (result > 26) { score = 100; } else if (result <= 26 && result > 22) { score = 85; } else if (result <= 22 && result > 17) { score = 50; } else if (result <= 17 && result > 11) { score = 25; } else { score = 0; } java.lang.System.out.println("Score: " + score); return score; } else { java.lang.System.out.println("Objeto no encontrado"); return 0; } //System.out.println("Terminando SIFT"); }
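Apart from file paths and score thresholds, this version differs from the previous one mainly in checking sceneDescriptors.empty() before calling knnMatch, which would otherwise fail on an image with no detectable keypoints. A tiny sketch of that guard as a reusable helper (hypothetical method name; assumes the native library is already loaded):

import java.util.LinkedList;
import java.util.List;

import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.features2d.DescriptorMatcher;

public class SafeKnnMatch {
    // Returns the 2-nearest-neighbour matches, or an empty list if either
    // descriptor Mat is empty (knnMatch would otherwise fail).
    static List<MatOfDMatch> safeKnnMatch(Mat objectDescriptors, Mat sceneDescriptors) {
        List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
        if (objectDescriptors.empty() || sceneDescriptors.empty()) {
            return matches;
        }
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
        matcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);
        return matches;
    }
}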
From source file:view.TelaComCaptura.java
public void mostraVideo(JPanel containerVideo, Thread t) { Utils ut = new Utils(); Graphics g = containerVideo.getGraphics(); //Matriz que contem os dados da imagem Mat frame = new Mat(); Mat frameSemRetangulo = new Mat(); if (captura != null) { if (captura.isOpened()) { captura.release();/*from w w w . j a v a 2s .c o m*/ } } try { captura = new VideoCapture(0); } catch (Exception e) { Utils.msgErro("Erro ao iniciar webcam", "Erro"); } ClassificadorFacial.classificador = new CascadeClassifier( "resources\\cascades\\haarcascade_frontalface_alt.xml"); // String cascadeFile = localPath + "\\resources\\cascades\\haarcascade_frontalface_alt.xml"; //String cascadePath = cascadeFile; //CascadeClassifier classificador = new CascadeClassifier(); //classificador.load(cascadeFile); // if (!classificador.load(cascadeFile)) { // System.out.println("Erro ao carregar cascade file"); // // if (!new File(cascadeFile).canRead()) { // new File(cascadeFile).setReadable(true); // System.out.println("Arquivo travado"); // } // // return; // } else { // if (new File(cascadeFile).canRead()) { // System.out.println("Arquivo liberado"); // } // } MatOfRect facesDetectadas = new MatOfRect(); double scaleFactor = 1.1; int minNeighbors = 2; int flags = 0; Size minSize = new Size(30, 30); Size maxSize = new Size(500, 500); Scalar cor = new Scalar(0, 255, 0); int larguraPanel = containerVideo.getWidth(); int alturaPanel = containerVideo.getHeight(); int thickness = 3; while (!t.isInterrupted()) { // try { // Thread.sleep(100); // } catch (InterruptedException ex) { // Logger.getLogger(TelaComCaptura.class.getName()).log(Level.SEVERE, null, ex); // } if (captura.isOpened()) { //Captura um frame captura.read(frame); if (!frame.empty()) { // setSize(frame.width(), frame.height()); Mat imagemColorida = frame; Mat imagemCinza = new Mat(); Imgproc.cvtColor(imagemColorida, imagemCinza, Imgproc.COLOR_BGR2GRAY); //Detecta faces ClassificadorFacial.classificador.detectMultiScale(imagemCinza, facesDetectadas, scaleFactor, minNeighbors, flags, minSize, maxSize); Rect[] faces = facesDetectadas.toArray(); frame.copyTo(frameSemRetangulo); Rect faceRecortada = null; if (lstFacesRecortadas.size() > faces.length) { lstFacesRecortadas.clear(); } for (int i = 0; i < faces.length; i++) { Imgproc.rectangle(frame, faces[i].tl(), faces[i].br(), cor, thickness); int larguraFace = (int) (faces[i].width * 0.8f); int alturaFace = (int) (faces[i].height * 0.8f); faceRecortada = new Rect(faces[i].x + 20, faces[i].y + 20, larguraFace, alturaFace); // faceRecortada = new Rect(faces[i].x, faces[i].y, 150, 160); if (faceRecortada != null) { // if (lstFacesRecortadas.size() < faces.length) { //System.out.println("Pessoa entrou da captura"); try { //lstFacesRecortadas.add(ut.matToBufferedImage(new Mat(frameSemRetangulo, faceRecortada))); lstFacesRecortadas.add(new Mat(frameSemRetangulo, faceRecortada)); } catch (Exception e) { } // } else if (lstFacesRecortadas.size() > faces.length) { // //System.out.println("Pessoa saiu da captura"); // lstFacesRecortadas.clear(); // lstFacesRecortadas.add(ut.matToBufferedImage(new Mat(frameSemRetangulo, faceRecortada))); // } } } BufferedImage frameLimpo = ut.matToBufferedImage(frameSemRetangulo); BufferedImage frameComRetangulos = ut.matToBufferedImage(frame); this.setFrameAtual(frameLimpo); g.drawImage(frameComRetangulos, 0, 0, larguraPanel, alturaPanel, null); //Limpa variveis imagemCinza = null; faceRecortada = null; frameLimpo = null; if (!isSalvandoPGM()) { //lstFacesRecortadas.clear(); } frameComRetangulos = 
null; } } } captura.release(); //new File(cascadeFile). System.out.println("Captura finalizada"); }
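Here cor = new Scalar(0, 255, 0) is the green used for the rectangle drawn around every face returned by the CascadeClassifier. A trimmed-down, single-frame sketch of that detect-and-draw loop, reusing the cascade path from the listing and a placeholder input image:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

public class FaceBoxes {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat frame = Imgcodecs.imread("frame.jpg");     // placeholder frame
        Mat grey = new Mat();
        Imgproc.cvtColor(frame, grey, Imgproc.COLOR_BGR2GRAY);

        CascadeClassifier classifier =
                new CascadeClassifier("resources\\cascades\\haarcascade_frontalface_alt.xml");

        MatOfRect faces = new MatOfRect();
        classifier.detectMultiScale(grey, faces, 1.1, 2, 0, new Size(30, 30), new Size(500, 500));

        // Green rectangles (BGR), 3 px thick, as in the listing.
        Scalar cor = new Scalar(0, 255, 0);
        for (Rect face : faces.toArray()) {
            Imgproc.rectangle(frame, face.tl(), face.br(), cor, 3);
        }
        Imgcodecs.imwrite("faces.jpg", frame);
    }
}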
From source file:vinylsleevedetection.Analyze.java
public void Check() { count = 1;/*w w w . ja va2 s . c om*/ //load openCV library System.loadLibrary(Core.NATIVE_LIBRARY_NAME); //for loop to compare source images to user image for (int j = 1; j < 4; j++) { //source image location (record sleeve) String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j + ".jpg"; //user image location String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg"; //load images Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR); Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR); //use BRISK feature detection MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK); //perform feature detection on source image featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); //use descriptor extractor MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); featureDetector.detect(sceneImage, sceneKeyPoints); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 0); List<MatOfDMatch> matches = new LinkedList<>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = 0.7f; for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0]; DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } //if the number of good mathces is more than 150 a match is found if (goodMatchesList.size() > 150) { System.out.println("Object Found"); List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); 
obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR); //draw a green square around the matched object Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 10); Imgproc.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 10); Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 10); Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 10); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); //output image with match, image of the match locations and keypoints image String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\"; Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage); Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput); Imgcodecs.imwrite(folder + "found.jpg", img); count = j; break; } else { System.out.println("Object Not Found"); count = 0; } } }
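The new Scalar(0, 255, 0) calls draw the green quadrilateral around the matched sleeve once the homography has mapped the object corners into the scene. A condensed sketch of that final projection-and-draw step as a helper method (hypothetical name), assuming the homography has already been estimated, e.g. with Calib3d.findHomography on the good matches:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class DrawProjectedOutline {
    // Draws the object's outline on the scene image, given a 3x3 homography.
    static void drawOutline(Mat objectImage, Mat sceneImage, Mat homography) {
        Mat objCorners = new Mat(4, 1, CvType.CV_32FC2);
        Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
        objCorners.put(0, 0, new double[] { 0, 0 });
        objCorners.put(1, 0, new double[] { objectImage.cols(), 0 });
        objCorners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        objCorners.put(3, 0, new double[] { 0, objectImage.rows() });

        // Map the object corners into scene coordinates.
        Core.perspectiveTransform(objCorners, sceneCorners, homography);

        // Green outline (BGR), 10 px thick, matching the listing.
        Scalar green = new Scalar(0, 255, 0);
        Imgproc.line(sceneImage, new Point(sceneCorners.get(0, 0)), new Point(sceneCorners.get(1, 0)), green, 10);
        Imgproc.line(sceneImage, new Point(sceneCorners.get(1, 0)), new Point(sceneCorners.get(2, 0)), green, 10);
        Imgproc.line(sceneImage, new Point(sceneCorners.get(2, 0)), new Point(sceneCorners.get(3, 0)), green, 10);
        Imgproc.line(sceneImage, new Point(sceneCorners.get(3, 0)), new Point(sceneCorners.get(0, 0)), green, 10);
    }
}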