List of usage examples for org.opencv.core MatOfDMatch toArray
public DMatch[] toArray()
From source file:cn.xiongyihui.webcam.JpegFactory.java
License:Open Source License
/**
 * Camera preview callback: runs one feature-tracking iteration on an NV21 frame
 * and publishes the annotated frame as a JPEG via {@code mJpegOutputStream}.
 *
 * Pipeline (all shared state lives in {@code globalClass}):
 *  1. One-shot (flag == 0): rescale the stored template keypoints from template
 *     coordinates into preview coordinates.
 *  2. Detect ORB features in the gray preview frame and brute-force-Hamming
 *     match them against the template descriptors.
 *  3. Color-gate the matches, accumulate a response-weighted centroid and a
 *     coarse occupancy kernel, and adapt the Hamming threshold up/down
 *     depending on whether a centroid was found.
 *  4. Estimate an RMS bounding radius from the temporally-summed kernel and,
 *     once initialised, draw the averaged centroid circle on the frame.
 *  5. Compress the result to JPEG and wake any thread waiting on the stream.
 *
 * NOTE(review): runs on the camera callback thread; heavy per-frame work here
 * will throttle the preview rate.
 *
 * @param data   raw NV21 preview bytes, mWidth x mHeight
 * @param camera source camera (unused)
 */
public void onPreviewFrame(byte[] data, Camera camera) {
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
    mJpegOutputStream.reset();
    try {
        // Snapshot all tracking parameters from the shared global state.
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        // Template bounding box in display coordinates.
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        // NOTE(review): the next four locals are read but never used below.
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();

        globalClass.setJpegFactoryDimensions(mWidth, mHeight);

        // Average the per-axis ratios between preview size and captured-template
        // size; assumes the two aspect ratios are close — TODO confirm.
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;
        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; // Just to account for any minor variations.

        // One-shot rescale of template keypoints into preview coordinates;
        // flag flips to 1 afterwards so this never runs again.
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;
            for (iterate = 0; iterate < (iterationMax); iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
                // Inner flag check is redundant (flag cannot change inside this loop).
                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
            }
            this.globalClass.setFlag(1);
        }
        // Write the (possibly rescaled) keypoints back into the shared Mat.
        templateKeyPoints.fromArray(templateKeyPointsArray);

        // NOTE(review): the (int) cast truncates the 64-bit millisecond clock;
        // the elapsed-time log below can be wrong when truncation wraps.
        long timeBegin = (int) System.currentTimeMillis();

        // NV21 frame -> RGB -> grayscale. The YUV Mat is height*3/2 rows of bytes.
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);
        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);

        // Detect ORB keypoints and compute their descriptors on the gray frame.
        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();
        Scalar color = new Scalar(0, 0, 0);
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);

        // BRUTEFORCE_HAMMING also reports weak matches, so inliers and outliers
        // must be filtered below by distance and by color.
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);
        DMatch[] matchesArray = matches.toArray();

        double minimumMatchDistance = globalClass.getHammingDistance();
        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;
        double xMatchedPoint, yMatchedPoint;
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS; // NOTE(review): unused (drawing is commented out below).
        Point point;
        // RGB acceptance window for color-gating matched points.
        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();
        double[] colorValue;
        double red, green, blue;
        int[] featureCount; // NOTE(review): declared but never used.
        // Coarse 9x9 occupancy grid ("kernel") over the frame.
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;
        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10; // temporal window length for kernel summation
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax]; // NOTE(review): unused.
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;

        // Accept matches below the Hamming threshold, then color-gate them and
        // accumulate a response-weighted centroid plus kernel-cell votes.
        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                // MatchedPoint: strong match before color feedback.
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;
                // Mat.get is (row, col) == (y, x).
                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);
                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];
                int xKernelFeature, yKernelFeature;
                // Color feedback: keep only points whose RGB falls in the window.
                if ((rLow < red) & (red < rHigh) & (gLow < green) & (green < gHigh) & (bLow < blue)
                        & (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;
                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                        // NOTE(review): silently swallows errors (presumably
                        // out-of-range kernel indices) — best-effort by design?
                    }
                }
            }
        }

        // Binarise the per-frame kernel counts.
        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }

        // Weighted centroid; NaN when no match passed the gates (totalImageResponse == 0).
        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;

        if ((Double.isNaN(xImageCentroid)) | (Double.isNaN(yImageCentroid))) {
            // No centroid: relax the Hamming threshold (increase error tolerance).
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            // Centroid found: tighten the Hamming threshold again.
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);
            // Consider the tracker initialised once enough frames have been seen
            // for both the kernel window and the distance average to be full.
            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }
            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            // Sum this frame's kernel into the rolling temporal window.
            kernelSum = globalClass.computeKernelSum(kernel);
            Log.e(TAG, Arrays.deepToString(kernelSum));
            // Mask = cells occupied in more than a quarter of the window.
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) { // Meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }
            Log.e(TAG, Arrays.deepToString(mask));
            // Mean of masked cell coordinates.
            // NOTE(review): maskedFeatureCount starts at 1, biasing the mean and
            // avoiding division by zero — TODO confirm intentional.
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }
            // NOTE(review): integer division here truncates before the double assignment.
            double xMaskMean = xMaskFeatureSum / maskedFeatureCount;
            double yMaskMean = yMaskFeatureSum / maskedFeatureCount;
            // Spread of the mask around its mean -> RMS radius in pixels.
            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }
            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);

            // NOTE: a disabled block here computed throttle/pitch/yaw PWM commands
            // from the centroid/RMS error versus the template, clamped each to
            // [1000, 2000], and stored them via globalClass.set*PWM(...). It was
            // commented out upstream; restore from history if quadcopter control
            // is re-enabled.

            // Display bounding circle: scale the template box by the ratio of the
            // current RMS radius to the template feature distance.
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;
            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);
            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            displayRadius = displayRadius * 1.4826; // MAD -> sigma consistency factor, presumably — TODO confirm
            displayRadius = displayRadius / numberKernelMax;
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                // Radius undefined this frame; keep distanceAverage at 0.
            } else {
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
            }
            if ((Double.isNaN(xImageCentroid)) | Double.isNaN(yImageCentroid)) {
                // Unreachable here: this branch already established non-NaN centroids.
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }
            if (initialisationFlag == 1) {
                // Draw the temporally averaged centroid with the averaged radius.
                Point pointDisplay = new Point();
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                // NOTE(review): centroidAverage was already called above, so this
                // frame's centroid is folded in twice — TODO confirm intended.
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }
        }
        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");

        // Encode the annotated gray frame as JPEG into the shared stream buffer.
        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }
        // NOTE(review): same truncating (int) cast as timeBegin.
        long timeEnd = (int) System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + "milli-seconds!");
        mJpegData = mJpegOutputStream.toByteArray();
        // Wake consumers blocked waiting for a fresh JPEG.
        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll();
        }
    } catch (Exception e) {
        Log.e(TAG, "JPEG-factory is not working!");
    }
}
From source file:com.oetermann.imageclassifier.MatchFinderWrapper.java
License:Open Source License
public int bestMatch(Mat queryDescriptors, int minMatches) { queryDescriptors.convertTo(queryDescriptors, CvType.CV_32F); MatOfDMatch matches = new MatOfDMatch(); matcher.match(queryDescriptors, matches); queryDescriptors.empty(); // Attempt to stop GC from releasing mat Arrays.fill(matchesPerImage, 0); DMatch[] matchesArray = matches.toArray(); for (DMatch match : matchesArray) { // match.distance; if (match.distance > 1) { match.distance = match.distance / 1000; }/*from ww w .ja v a2 s. co m*/ if (match.distance < 1) { matchesPerImage[match.imgIdx] += 1 - match.distance; } // matchesPerImage[match.imgIdx] += 1; // System.out.println("MatchDistance: "+match.distance + "\t\tImage: "+ imageNames[match.imgIdx]); } int index = 0; for (int i = 0; i < matchesPerImage.length; i++) { // System.out.println(matchesPerImage[i] + "\t\tmatches for image " + imageNames[i]); if (matchesPerImage[i] > matchesPerImage[index]) { index = i; } } // System.out.println("Total Matches: "+matches.size()); if (matchesPerImage[index] >= minMatches) { return index; } return -1; }
From source file:imageanalyzercv.ImageAnalyzerCV.java
/** * @param args the command line arguments *///from w w w . j a v a 2 s .com public static void main(String[] args) { System.out.println("path: " + System.getProperty("java.library.path")); System.loadLibrary("opencv_java300"); Mat m = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0763.JPG"); System.out.println("m = " + m.height()); MatOfKeyPoint points = new MatOfKeyPoint(); FeatureDetector.create(FeatureDetector.SURF).detect(m, points); Mat m2 = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0764.JPG"); System.out.println("m = " + m2.height()); MatOfKeyPoint points2 = new MatOfKeyPoint(); FeatureDetector.create(FeatureDetector.SURF).detect(m2, points2); DescriptorExtractor SurfExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK); Mat imag1Desc = new Mat(); SurfExtractor.compute(m, points, imag1Desc); Mat imag2Desc = new Mat(); SurfExtractor.compute(m2, points2, imag2Desc); MatOfDMatch matches = new MatOfDMatch(); Mat imgd = new Mat(); imag1Desc.copyTo(imgd); System.out.println(imgd.size()); DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING).match(imag2Desc, imag1Desc, (MatOfDMatch) matches); double min_distance = 1000.0; double max_distance = 0.0; DMatch[] matchArr = matches.toArray(); for (int i = 0; i < matchArr.length; i++) { if (matchArr[i].distance > max_distance) max_distance = matchArr[i].distance; if (matchArr[i].distance < min_distance) min_distance = matchArr[i].distance; } ArrayList<DMatch> good_matches = new ArrayList<DMatch>(); System.out.println("Min Distance: " + min_distance + " Max distance: " + max_distance); double totalScore = 0.0; for (int j = 0; j < imag1Desc.rows() && j < matchArr.length; j++) { if ((matchArr[j].distance <= (11 * min_distance)) && (matchArr[j].distance >= min_distance * 1)) { good_matches.add(matchArr[j]); //System.out.println(matchArr[j]); totalScore = totalScore + matchArr[j].distance; } //good_matches.add(matchArr[j]); } 
System.out.println((1 - (totalScore / (good_matches.size() * ((max_distance + min_distance) / 2)))) * 100); // System.out.println(matches.toList().size()); Mat out = new Mat(); MatOfDMatch mats = new MatOfDMatch(); mats.fromList(good_matches); Features2d.drawMatches(m2, points2, m, points, mats, out); Highgui.imwrite("/Users/chintan/Downloads/one2.jpg", out); }
From source file:io.github.jakejmattson.facialrecognition.FacialRecognition.java
License:Open Source License
private static int compareFaces(Mat currentImage, String fileName) { Mat compareImage = Imgcodecs.imread(fileName); ORB orb = ORB.create(); int similarity = 0; MatOfKeyPoint keypoints1 = new MatOfKeyPoint(); MatOfKeyPoint keypoints2 = new MatOfKeyPoint(); orb.detect(currentImage, keypoints1); orb.detect(compareImage, keypoints2); Mat descriptors1 = new Mat(); Mat descriptors2 = new Mat(); orb.compute(currentImage, keypoints1, descriptors1); orb.compute(compareImage, keypoints2, descriptors2); if (descriptors1.cols() == descriptors2.cols()) { MatOfDMatch matchMatrix = new MatOfDMatch(); DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING); matcher.match(descriptors1, descriptors2, matchMatrix); DMatch[] matches = matchMatrix.toArray(); for (DMatch match : matches) if (match.distance <= 50) similarity++;/*from w w w . ja va 2s. com*/ } return similarity; }
From source file:overwatchteampicker.OverwatchTeamPicker.java
public static ReturnValues findImage(String template, String source, int flag) { File lib = null;/* w w w . j av a 2 s . com*/ BufferedImage image = null; try { image = ImageIO.read(new File(source)); } catch (Exception e) { e.printStackTrace(); } String os = System.getProperty("os.name"); String bitness = System.getProperty("sun.arch.data.model"); if (os.toUpperCase().contains("WINDOWS")) { if (bitness.endsWith("64")) { lib = new File("C:\\Users\\POWERUSER\\Downloads\\opencv\\build\\java\\x64\\" + System.mapLibraryName("opencv_java2413")); } else { lib = new File("libs//x86//" + System.mapLibraryName("opencv_java2413")); } } System.load(lib.getAbsolutePath()); String tempObject = "images\\hero_templates\\" + template + ".png"; String source_pic = source; Mat objectImage = Highgui.imread(tempObject, Highgui.CV_LOAD_IMAGE_GRAYSCALE); Mat sceneImage = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_GRAYSCALE); MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SURF); featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); // Create the matrix for output image. 
Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); // Match object image with the scene image MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); featureDetector.detect(sceneImage, sceneKeyPoints); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 25); List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = .78f; for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0]; DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } if (goodMatchesList.size() >= 4) { List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat 
obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_COLOR); Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 255), 4); Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(255, 255, 0), 4); Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); if (new Point(scene_corners.get(0, 0)).x < new Point(scene_corners.get(1, 0)).x && new Point(scene_corners.get(0, 0)).y < new Point(scene_corners.get(2, 0)).y) { System.out.println("found " + template); Highgui.imwrite("points.jpg", outputImage); Highgui.imwrite("matches.jpg", matchoutput); Highgui.imwrite("final.jpg", img); if (flag == 0) { ReturnValues retVal = null; int y = (int) new Point(scene_corners.get(3, 0)).y; int yHeight = (int) new Point(scene_corners.get(3, 0)).y - (int) new Point(scene_corners.get(2, 0)).y; if (y < image.getHeight() * .6) { //if found hero is in upper half of image then return point 3,0 retVal = new ReturnValues(y + (int) (image.getHeight() * .01), yHeight); } else { //if found hero is in lower half of image then return point 2,0 y = (int) new 
Point(scene_corners.get(2, 0)).y; retVal = new ReturnValues(y + (int) (image.getHeight() * .3), yHeight); } return retVal; } else if (flag == 1) { int[] xPoints = new int[4]; int[] yPoints = new int[4]; xPoints[0] = (int) (new Point(scene_corners.get(0, 0)).x); xPoints[1] = (int) (new Point(scene_corners.get(1, 0)).x); xPoints[2] = (int) (new Point(scene_corners.get(2, 0)).x); xPoints[3] = (int) (new Point(scene_corners.get(3, 0)).x); yPoints[0] = (int) (new Point(scene_corners.get(0, 0)).y); yPoints[1] = (int) (new Point(scene_corners.get(1, 0)).y); yPoints[2] = (int) (new Point(scene_corners.get(2, 0)).y); yPoints[3] = (int) (new Point(scene_corners.get(3, 0)).y); ReturnValues retVal = new ReturnValues(xPoints, yPoints); return retVal; } } } return null; }
From source file:Recognizer.Recognizer.java
public void SIFT(Image imQ, Image imDB) { Mat Q = imQ.Image1CtoMat_CV(); Mat DB = imDB.Image1CtoMat_CV(); Mat matQ = new Mat(); Mat matDB = new Mat(); Q.convertTo(matQ, CvType.CV_8U);//from w w w.j a v a 2s . co m DB.convertTo(matDB, CvType.CV_8U); FeatureDetector siftDet = FeatureDetector.create(FeatureDetector.SIFT); DescriptorExtractor siftExt = DescriptorExtractor.create(DescriptorExtractor.SIFT); MatOfKeyPoint kpQ = new MatOfKeyPoint(); MatOfKeyPoint kpDB = new MatOfKeyPoint(); siftDet.detect(matQ, kpQ); siftDet.detect(matDB, kpDB); Mat matDescriptorQ = new Mat(matQ.rows(), matQ.cols(), matQ.type()); Mat matDescriptorDB = new Mat(matDB.rows(), matDB.cols(), matDB.type()); siftExt.compute(matQ, kpQ, matDescriptorQ); siftExt.compute(matDB, kpDB, matDescriptorDB); MatOfDMatch matchs = new MatOfDMatch(); DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); matcher.match(matDescriptorQ, matDescriptorDB, matchs); int N = 10; DMatch[] tmp01 = matchs.toArray(); DMatch[] tmp02 = new DMatch[N]; for (int i = 0; i < tmp02.length; i++) { tmp02[i] = tmp01[i]; } matchs.fromArray(tmp02); Mat matchedImage = new Mat(matQ.rows(), matQ.cols() * 2, matQ.type()); Features2d.drawMatches(matQ, kpQ, matDB, kpDB, matchs, matchedImage); Highgui.imwrite("./descriptedImageBySIFT.jpg", matchedImage); }
From source file:View.Signature.java
public static int sift(String routeVal, String route, String n_img1, String n_img2, String extension) { String bookObject = routeVal + n_img2 + extension; String bookScene = route + n_img1 + extension; //System.out.println("Iniciando SIFT"); //java.lang.System.out.print("Abriendo imagenes | "); Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR); Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT); //java.lang.System.out.print("Encontrar keypoints con SIFT | "); featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT); //java.lang.System.out.print("Computando descriptores | "); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); // Create the matrix for output image. 
Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); //java.lang.System.out.print("Dibujando keypoints en imagen base | "); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); // Match object image with the scene image MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); //java.lang.System.out.print("Detectando keypoints en imagen base | "); featureDetector.detect(sceneImage, sceneKeyPoints); //java.lang.System.out.print("Computando descriptores en imagen base | "); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 0); List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); //java.lang.System.out.print("Encontrando matches entre imagenes | "); descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); //java.lang.System.out.println("Calculando buenos matches"); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = 0.7f; java.lang.System.out.println(matches.size()); for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0];//from w w w . j a va2 s . c o m DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } if (goodMatchesList.size() >= 7) { //java.lang.System.out.println("Match enontrado!!! 
Matches: "+goodMatchesList.size()); //if(goodMatchesList.size()>max){ //cambio = 1; //} List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); //System.out.println("Transforming object corners to scene corners..."); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4); //java.lang.System.out.println("Dibujando imagen de coincidencias"); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, 
objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); String n_outputImage = route + "results\\" + n_img2 + "_outputImage_sift" + extension; String n_matchoutput = route + "results\\" + n_img2 + "_matchoutput_sift" + extension; String n_img = route + "results\\" + n_img2 + "_sift" + extension; Highgui.imwrite(n_outputImage, outputImage); Highgui.imwrite(n_matchoutput, matchoutput); //Highgui.imwrite(n_img, img); java.lang.System.out.println(goodMatches.size().height); double result = goodMatches.size().height * 100 / matches.size(); java.lang.System.out.println((int) result); //double result =goodMatches.size().height; if (result > 100) { return 100; } else if (result <= 100 && result > 85) { return 85; } else if (result <= 85 && result > 50) { return 50; } else if (result <= 50 && result > 25) { return 25; } else { return 0; } } else { //java.lang.System.out.println("Firma no encontrada"); } return 0; //System.out.println("Terminando SIFT"); }
From source file:View.SignatureLib.java
public static int sift(String routeRNV, String routeAdherent) { String bookObject = routeAdherent; String bookScene = routeRNV;/*w ww .j a v a 2s . c o m*/ //System.out.println("Iniciando SIFT"); //java.lang.System.out.print("Abriendo imagenes | "); Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR); Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT); //java.lang.System.out.print("Encontrar keypoints con SIFT | "); featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT); //java.lang.System.out.print("Computando descriptores | "); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); // Create the matrix for output image. 
Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); //java.lang.System.out.print("Dibujando keypoints en imagen base | "); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); // Match object image with the scene image MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); //java.lang.System.out.print("Detectando keypoints en imagen base | "); featureDetector.detect(sceneImage, sceneKeyPoints); //java.lang.System.out.print("Computando descriptores en imagen base | "); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 0); List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED); //java.lang.System.out.println(sceneDescriptors); if (sceneDescriptors.empty()) { java.lang.System.out.println("Objeto no encontrado"); return 0; } descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); //java.lang.System.out.println("Calculando buenos matches"); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = 0.7f; for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0]; DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } if (goodMatchesList.size() >= 7) { max = goodMatchesList.size(); List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); 
i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); //System.out.println("Transforming object corners to scene corners..."); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR); Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4); Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4); //java.lang.System.out.println("Dibujando imagen de coincidencias"); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); String n_outputImage = "../pre/outputImage_sift.jpg"; String n_matchoutput = "../pre/matchoutput_sift.jpg"; String n_img = "../pre/sift.jpg"; Highgui.imwrite(n_outputImage, outputImage); Highgui.imwrite(n_matchoutput, 
matchoutput); Highgui.imwrite(n_img, img); java.lang.System.out.println(goodMatches.size().height); double result = goodMatches.size().height;//*100/matches.size(); int score = 0; if (result > 26) { score = 100; } else if (result <= 26 && result > 22) { score = 85; } else if (result <= 22 && result > 17) { score = 50; } else if (result <= 17 && result > 11) { score = 25; } else { score = 0; } java.lang.System.out.println("Score: " + score); return score; } else { java.lang.System.out.println("Objeto no encontrado"); return 0; } //System.out.println("Terminando SIFT"); }
From source file:vinylsleevedetection.Analyze.java
public void Check() { count = 1;/* w ww .java 2 s .com*/ //load openCV library System.loadLibrary(Core.NATIVE_LIBRARY_NAME); //for loop to compare source images to user image for (int j = 1; j < 4; j++) { //source image location (record sleeve) String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j + ".jpg"; //user image location String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg"; //load images Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR); Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR); //use BRISK feature detection MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint(); FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK); //perform feature detection on source image featureDetector.detect(objectImage, objectKeyPoints); KeyPoint[] keypoints = objectKeyPoints.toArray(); //use descriptor extractor MatOfKeyPoint objectDescriptors = new MatOfKeyPoint(); DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK); descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors); Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR); Scalar newKeypointColor = new Scalar(255, 0, 0); Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0); MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint(); MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint(); featureDetector.detect(sceneImage, sceneKeyPoints); descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors); Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR); Scalar matchestColor = new Scalar(0, 255, 0); List<MatOfDMatch> matches = new LinkedList<>(); DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); 
descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2); LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>(); float nndrRatio = 0.7f; for (int i = 0; i < matches.size(); i++) { MatOfDMatch matofDMatch = matches.get(i); DMatch[] dmatcharray = matofDMatch.toArray(); DMatch m1 = dmatcharray[0]; DMatch m2 = dmatcharray[1]; if (m1.distance <= m2.distance * nndrRatio) { goodMatchesList.addLast(m1); } } //if the number of good mathces is more than 150 a match is found if (goodMatchesList.size() > 150) { System.out.println("Object Found"); List<KeyPoint> objKeypointlist = objectKeyPoints.toList(); List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList(); LinkedList<Point> objectPoints = new LinkedList<>(); LinkedList<Point> scenePoints = new LinkedList<>(); for (int i = 0; i < goodMatchesList.size(); i++) { objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt); scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt); } MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f(); objMatOfPoint2f.fromList(objectPoints); MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f(); scnMatOfPoint2f.fromList(scenePoints); Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3); Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2); Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2); obj_corners.put(0, 0, new double[] { 0, 0 }); obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 }); obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() }); obj_corners.put(3, 0, new double[] { 0, objectImage.rows() }); Core.perspectiveTransform(obj_corners, scene_corners, homography); Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR); //draw a green square around the matched object Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 10); Imgproc.line(img, new Point(scene_corners.get(1, 0)), new 
Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 10); Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 10); Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 10); MatOfDMatch goodMatches = new MatOfDMatch(); goodMatches.fromList(goodMatchesList); Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches, matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2); //output image with match, image of the match locations and keypoints image String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\"; Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage); Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput); Imgcodecs.imwrite(folder + "found.jpg", img); count = j; break; } else { System.out.println("Object Not Found"); count = 0; } } }