List of usage examples for org.opencv.core.MatOfKeyPoint
public MatOfKeyPoint()
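Before the project examples, a minimal sketch of the constructor's typical lifecycle. Imports are omitted, like in the examples below; the ORB detector choice and the image path are placeholder assumptions, not taken from any project on this page.

    // Minimal usage sketch (assumed setup: OpenCV 2.4/3.x Java bindings with FeatureDetector available).
    Mat image = Highgui.imread("some_image.jpg", Highgui.CV_LOAD_IMAGE_GRAYSCALE); // hypothetical path
    MatOfKeyPoint keypoints = new MatOfKeyPoint();  // empty container, filled by the detector
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    detector.detect(image, keypoints);              // the detector writes keypoints into the Mat
    List<KeyPoint> keyPointList = keypoints.toList(); // convert to a Java list for iteration
    System.out.println("Detected " + keyPointList.size() + " keypoints");

The same pattern recurs in every example below: construct an empty MatOfKeyPoint, pass it to a detector, then read it back via toList() or toArray().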
From source file:OctoEye.java
License:Open Source License
private void detectPupil() {
    // min and max pupil radius
    int r_min = 2;
    int r_max = 45;
    // min and max pupil diameter
    int d_min = 2 * r_min;
    int d_max = 2 * r_max;
    // min and max pupil area
    double area;
    double a_min = Math.PI * r_min * r_min;
    double a_max = Math.PI * r_max * r_max;
    // histogram stuff
    List<Mat> images;
    MatOfInt channels;
    Mat mask;
    Mat hist;
    MatOfInt mHistSize;
    MatOfFloat mRanges;
    // contour and circle stuff
    Rect rect = null;
    Rect rectMin;
    Rect rectMax;
    List<MatOfPoint> contours;
    MatOfPoint3 circles;
    // pupil center
    Point p;
    // ellipse test points
    Point v;
    Point r;
    Point s;
    // rect points
    Point tl;
    Point br;
    // pupil edge detection
    Vector<Point> pointsTest;
    Vector<Point> pointsEllipse;
    Vector<Point> pointsRemoved;
    // temporary variables
    double distance;
    double rad;
    double length;
    int x;
    int y;
    int tmp;
    byte buff[];

    // step 1: blur the image to reduce noise
    Imgproc.medianBlur(src, tmp1, 25);

    // step 2: locate the pupil with feature detection and compute a histogram for each feature;
    // the best feature will be used as rough pupil location (rectMin)
    int score = 0;
    int winner = 0;
    // feature detection
    MatOfKeyPoint matOfKeyPoints = new MatOfKeyPoint();
    FeatureDetector blobDetector = FeatureDetector.create(FeatureDetector.MSER); // Maximally Stable Extremal Regions
    blobDetector.detect(tmp1, matOfKeyPoints);
    List<KeyPoint> keyPoints = matOfKeyPoints.toList();
    // histogram calculation
    for (int i = 0; i < keyPoints.size(); i++) {
        x = (int) keyPoints.get(i).pt.x;
        y = (int) keyPoints.get(i).pt.y;
        tl = new Point(x - 5 >= 0 ? x - 5 : 0, y - 5 >= 0 ? y - 5 : 0);
        br = new Point(x + 5 < WIDTH ? x + 5 : WIDTH - 1, y + 5 < HEIGHT ? y + 5 : HEIGHT - 1);
        images = new ArrayList<Mat>();
        images.add(tmp1.submat(new Rect(tl, br)));
        channels = new MatOfInt(0);
        mask = new Mat();
        hist = new Mat();
        mHistSize = new MatOfInt(256);
        mRanges = new MatOfFloat(0f, 256f);
        Imgproc.calcHist(images, channels, mask, hist, mHistSize, mRanges);
        tmp = 0;
        for (int j = 0; j < 256 / 3; j++) {
            tmp += (256 / 3 - j) * (int) hist.get(j, 0)[0];
        }
        if (tmp >= score) {
            score = tmp;
            winner = i;
            rect = new Rect(tl, br);
        }
        if (debug) {
            // show features (orange)
            Core.circle(dbg, new Point(x, y), 3, ORANGE);
        }
    }
    if (rect == null) {
        return;
    }
    rectMin = rect.clone();
    if (debug) {
        // show rectMin (red)
        Core.rectangle(dbg, rectMin.tl(), rect.br(), RED, 1);
    }

    // step 3: compute a rectMax (blue) which is larger than the pupil
    int margin = 32;
    rect.x = rect.x - margin;
    rect.y = rect.y - margin;
    rect.width = rect.width + 2 * margin;
    rect.height = rect.height + 2 * margin;
    rectMax = rect.clone();
    if (debug) {
        // show rectMax (blue)
        Core.rectangle(dbg, rectMax.tl(), rectMax.br(), BLUE);
    }

    // step 4: blur the image again
    Imgproc.medianBlur(src, tmp1, 7);
    Imgproc.medianBlur(tmp1, tmp1, 3);
    Imgproc.medianBlur(tmp1, tmp1, 3);
    Imgproc.medianBlur(tmp1, tmp1, 3);

    // step 5: detect edges
    Imgproc.Canny(tmp1, tmp2, 40, 50);

    // step 6: from the pupil center to the rectMax borders, find all edge points, compute a first ellipse
    p = new Point(rectMin.x + rectMin.width / 2, rectMin.y + rectMin.height / 2);
    pointsTest = new Vector<Point>();
    pointsEllipse = new Vector<Point>();
    pointsRemoved = new Vector<Point>();
    buff = new byte[tmp2.rows() * tmp2.cols()];
    tmp2.get(0, 0, buff);
    length = Math.min(p.x - rectMax.x - 3, p.y - rectMax.y - 3);
    length = Math.sqrt(2 * Math.pow(length, 2));
    Point z = new Point(p.x, p.y - length);
    for (int i = 0; i < 360; i += 15) {
        rad = Math.toRadians(i);
        x = (int) (p.x + Math.cos(rad) * (z.x - p.x) - Math.sin(rad) * (z.y - p.y));
        y = (int) (p.y + Math.sin(rad) * (z.x - p.x) - Math.cos(rad) * (z.y - p.y));
        pointsTest.add(new Point(x, y));
    }
    if (debug) {
        for (int i = 0; i < pointsTest.size(); i++) {
            Core.line(dbg, p, pointsTest.get(i), GRAY, 1);
            Core.rectangle(dbg, rectMin.tl(), rectMin.br(), GREEN, 1);
            Core.rectangle(dbg, rectMax.tl(), rectMax.br(), BLUE, 1);
        }
        Core.rectangle(dbg, rectMin.tl(), rectMin.br(), BLACK, -1);
        Core.rectangle(dbg, rectMin.tl(), rectMin.br(), RED, 1);
        Core.rectangle(dbg, rectMax.tl(), rectMax.br(), BLUE);
    }
    // p: origin (the "center" of the ellipse)
    // v: target point (test point)
    // r: direction vector PV
    for (int i = 0; i < pointsTest.size(); i++) {
        v = new Point(pointsTest.get(i).x, pointsTest.get(i).y);
        r = new Point(v.x - p.x, v.y - p.y);
        length = Math.sqrt(Math.pow(p.x - v.x, 2) + Math.pow(p.y - v.y, 2));
        boolean found = false;
        for (int j = 0; j < Math.round(length); j++) {
            s = new Point(Math.rint(p.x + (double) j / length * r.x),
                    Math.rint(p.y + (double) j / length * r.y));
            s.x = Math.max(1, Math.min(s.x, WIDTH - 2));
            s.y = Math.max(1, Math.min(s.y, HEIGHT - 2));
            tl = new Point(s.x - 1, s.y - 1);
            br = new Point(s.x + 1, s.y + 1);
            buff = new byte[3 * 3];
            rect = new Rect(tl, br);
            try {
                (tmp2.submat(rect)).get(0, 0, buff);
                for (int k = 0; k < 3 * 3; k++) {
                    if (Math.abs(buff[k]) == 1) {
                        pointsEllipse.add(s);
                        found = true;
                        break;
                    }
                }
            } catch (Exception e) {
                break;
            }
            if (found) {
                break;
            }
        }
    }
    double e_min = Double.POSITIVE_INFINITY;
    double e_max = 0;
    double e_med = 0;
    for (int i = 0; i < pointsEllipse.size(); i++) {
        v = pointsEllipse.get(i);
        length = Math.sqrt(Math.pow(p.x - v.x, 2) + Math.pow(p.y - v.y, 2));
        e_min = (length < e_min) ? length : e_min;
        e_max = (length > e_max) ? length : e_max;
        e_med = e_med + length;
    }
    e_med = e_med / pointsEllipse.size();
    if (pointsEllipse.size() >= 5) {
        Point[] points1 = new Point[pointsEllipse.size()];
        for (int i = 0; i < pointsEllipse.size(); i++) {
            points1[i] = pointsEllipse.get(i);
        }
        MatOfPoint2f points2 = new MatOfPoint2f();
        points2.fromArray(points1);
        pupil = Imgproc.fitEllipse(points2);
    }
    if (pupil.center.x == 0 && pupil.center.y == 0) {
        // something went wrong, return null
        reset();
        return;
    }
    if (debug) {
        Core.ellipse(dbg, pupil, PURPLE, 2);
    }

    // step 7: remove some outlier points and compute the ellipse again
    try {
        for (int i = 1; i <= 4; i++) {
            distance = 0;
            int remove = 0;
            for (int j = pointsEllipse.size() - 1; j >= 0; j--) {
                v = pointsEllipse.get(j);
                length = Math.sqrt(Math.pow(v.x - pupil.center.x, 2) + Math.pow(v.y - pupil.center.y, 2));
                if (length > distance) {
                    distance = length;
                    remove = j;
                }
            }
            v = pointsEllipse.get(remove);
            pointsEllipse.removeElementAt(remove);
            pointsRemoved.add(v);
        }
    } catch (Exception e) {
        // something went wrong, return null
        reset();
        return;
    }
    if (pointsEllipse.size() >= 5) {
        Point[] points1 = new Point[pointsEllipse.size()];
        for (int i = 0; i < pointsEllipse.size(); i++) {
            points1[i] = pointsEllipse.get(i);
        }
        MatOfPoint2f points2 = new MatOfPoint2f();
        points2.fromArray(points1);
        pupil = Imgproc.fitEllipse(points2);
        Point[] vertices = new Point[4];
        pupil.points(vertices);
        double d1 = Math.sqrt(Math.pow(vertices[1].x - vertices[0].x, 2)
                + Math.pow(vertices[1].y - vertices[0].y, 2));
        double d2 = Math.sqrt(Math.pow(vertices[2].x - vertices[1].x, 2)
                + Math.pow(vertices[2].y - vertices[1].y, 2));
        if (d1 >= d2) {
            pupilMajorAxis = (int) (d1 / 2);
            pupilMinorAxis = (int) (d2 / 2);
            axisA = new Point(vertices[1].x + (vertices[2].x - vertices[1].x) / 2,
                    vertices[1].y + (vertices[2].y - vertices[1].y) / 2);
            axisB = new Point(vertices[0].x + (vertices[1].x - vertices[0].x) / 2,
                    vertices[0].y + (vertices[1].y - vertices[0].y) / 2);
        } else {
            pupilMajorAxis = (int) (d2 / 2);
            pupilMinorAxis = (int) (d1 / 2);
            axisB = new Point(vertices[1].x + (vertices[2].x - vertices[1].x) / 2,
                    vertices[1].y + (vertices[2].y - vertices[1].y) / 2);
            axisA = new Point(vertices[0].x + (vertices[1].x - vertices[0].x) / 2,
                    vertices[0].y + (vertices[1].y - vertices[0].y) / 2);
        }
    }
    double ratio = (double) pupilMinorAxis / (double) pupilMajorAxis;
    if (ratio < 0.75 || 2 * pupilMinorAxis <= d_min || 2 * pupilMajorAxis >= d_max) {
        // something went wrong, return null
        reset();
        return;
    }

    // pupil found
    if (debug) {
        Core.ellipse(dbg, pupil, GREEN, 2);
        Core.line(dbg, pupil.center, axisA, RED, 2);
        Core.line(dbg, pupil.center, axisB, BLUE, 2);
        Core.circle(dbg, pupil.center, 1, GREEN, 0);
        x = 5;
        y = 5;
        Core.rectangle(dbg, new Point(x, y), new Point(x + 80 + 4, y + 10), BLACK, -1);
        Core.rectangle(dbg, new Point(x + 2, y + 2), new Point(x + 2 + pupilMajorAxis, y + 4), RED, -1);
        Core.rectangle(dbg, new Point(x + 2, y + 6), new Point(x + 2 + pupilMinorAxis, y + 8), BLUE, -1);
        for (int i = pointsEllipse.size() - 1; i >= 0; i--) {
            Core.circle(dbg, pointsEllipse.get(i), 2, ORANGE, -1);
        }
        for (int i = pointsRemoved.size() - 1; i >= 0; i--) {
            Core.circle(dbg, pointsRemoved.get(i), 2, PURPLE, -1);
        }
    }
    Core.ellipse(dst, pupil, GREEN, 2);
    Core.circle(dst, pupil.center, 1, GREEN, 0);
}
From source file:OCV_FeatureDetection.java
License:Open Source License
@Override
public void run(ImageProcessor ip) {
    // QueryImage
    int[] arr_query = (int[]) imp_query.getChannelProcessor().getPixels();
    int imw_query = imp_query.getWidth();
    int imh_query = imp_query.getHeight();
    Mat mat_query = new Mat(imh_query, imw_query, CvType.CV_8UC3);
    OCV__LoadLibrary.intarray2mat(arr_query, mat_query, imw_query, imh_query);

    // TrainImage
    int[] arr_train = (int[]) imp_train.getChannelProcessor().getPixels();
    int imw_train = imp_train.getWidth();
    int imh_train = imp_train.getHeight();
    Mat mat_train = new Mat(imh_train, imw_train, CvType.CV_8UC3);
    OCV__LoadLibrary.intarray2mat(arr_train, mat_train, imw_train, imh_train);

    // KeyPoint
    MatOfKeyPoint key_query = new MatOfKeyPoint();
    MatOfKeyPoint key_train = new MatOfKeyPoint();
    detector.detect(mat_query, key_query);
    detector.detect(mat_train, key_train);

    // Descriptor
    DescriptorExtractor extractor = DescriptorExtractor.create(type_ext);
    Mat desc_query = new Mat();
    Mat desc_train = new Mat();
    extractor.compute(mat_query, key_query, desc_query);
    extractor.compute(mat_train, key_train, desc_train);

    // Matcher
    DescriptorMatcher matcher = DescriptorMatcher.create(TYPE_VAL_MATCH[ind_match]);
    MatOfDMatch dmatch = new MatOfDMatch();
    matcher.match(desc_query, desc_train, dmatch);
    dmatch = showData(key_query, key_train, dmatch);

    // Output
    if (enDrawMatches) {
        Mat mat_dst = new Mat();
        Features2d.drawMatches(mat_query, key_query, mat_train, key_train, dmatch, mat_dst);
        String title_dst = WindowManager.getUniqueName("FeatureDetection");
        int imw_dst = mat_dst.cols();
        int imh_dst = mat_dst.rows();
        ImagePlus imp_dst = new ImagePlus(title_dst, new ColorProcessor(imw_dst, imh_dst));
        int[] arr_dst = (int[]) imp_dst.getChannelProcessor().getPixels();
        OCV__LoadLibrary.mat2intarray(mat_dst, arr_dst, imw_dst, imh_dst);
        imp_dst.show();
    }
}
From source file:bikecalibration.ROIDetection.java
/**
 * This function processes the image that contains the ROIs. It returns the
 * array of nodes found in the image.
 *
 * @param image
 * @return Array of nodes
 */
public Node[] processImage(Mat image) {
    Node[] outputNodes = new Node[ROIs.size()];

    // convert the scene mat to gray scale
    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);

    // create a feature detector
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.SURF);
    List<MatOfKeyPoint> keypoints_objects = new ArrayList<>();
    MatOfKeyPoint keypoints_scene = new MatOfKeyPoint();
    detector.detect(ROIs, keypoints_objects);
    detector.detect(grayImage, keypoints_scene);

    // create a descriptor extractor
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    List<Mat> descriptor_objects = new ArrayList<>();
    Mat descriptor_scene = new Mat();
    extractor.compute(ROIs, keypoints_objects, descriptor_objects);
    extractor.compute(grayImage, keypoints_scene, descriptor_scene);

    // create a descriptor matcher and match each ROI descriptor against the scene
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    List<MatOfDMatch> matches = new ArrayList<>();
    descriptor_objects.stream().map((descriptor_object) -> {
        MatOfDMatch match = new MatOfDMatch();
        matcher.match(descriptor_object, descriptor_scene, match);
        return match;
    }).forEach((match) -> {
        matches.add(match);
    });
    ArrayList<ArrayList<DMatch>> matchesList = new ArrayList<>();
    matches.stream().forEach((match) -> {
        matchesList.add((ArrayList<DMatch>) match.toList());
    });
    double max_dist = 100;
    double min_dist = 0;

    // note: the original source stops here; max_dist/min_dist are never computed
    // and the matches are never turned into nodes
    return null;
}
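As the final comment notes, the method above is unfinished in its source project: the distance bounds are declared but never filled in, and it always returns null. A minimal sketch of the distance-based filtering step that usually follows, kept hypothetical since the original Node construction is not shown; the threshold heuristic mirrors the ImageDetector and ImageAnalyzerCV examples further down this page.

    // Hypothetical continuation: compute min/max match distance per ROI and keep "good" matches.
    for (ArrayList<DMatch> roiMatches : matchesList) {
        double minDist = Double.MAX_VALUE;
        double maxDist = 0;
        for (DMatch m : roiMatches) {
            minDist = Math.min(minDist, m.distance);
            maxDist = Math.max(maxDist, m.distance);
        }
        List<DMatch> goodMatches = new ArrayList<>();
        for (DMatch m : roiMatches) {
            if (m.distance <= Math.max(2 * minDist, 0.02)) { // common heuristic threshold
                goodMatches.add(m);
            }
        }
        // a Node would be built from goodMatches here (not shown in the source)
    }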
From source file:cn.xiongyihui.webcam.JpegFactory.java
License:Open Source License
public void onPreviewFrame(byte[] data, Camera camera) {
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
    mJpegOutputStream.reset();

    try {
        //Log.e(TAG, "Beginning to read values!");
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();
        //Log.e(TAG, "Ended reading values!");
        globalClass.setJpegFactoryDimensions(mWidth, mHeight);
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;
        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; // just to account for any minor variations
        //Log.e(TAG, "Scaling ratio:" + String.valueOf(scalingRatio));
        //Log.e("Test", "Captured Bitmap's dimensions: (" + templateCapturedBitmapHeight + "," + templateCapturedBitmapWidth + ")");

        // Scale the actual features of the image (once, on the first frame)
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;
            for (iterate = 0; iterate < (iterationMax); iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
            }
            this.globalClass.setFlag(1);
        }
        templateKeyPoints.fromArray(templateKeyPointsArray);
        //Log.e(TAG, "Template-features have been scaled successfully!");

        long timeBegin = (int) System.currentTimeMillis();

        // Convert the NV21 preview frame to RGB, then to gray scale
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);
        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);

        // Detect ORB features and compute their descriptors on the current frame
        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();
        Scalar color = new Scalar(0, 0, 0);
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);

        // BRUTEFORCE_HAMMING apparently finds even the suspicious feature-points!
        // So, inliers and outliers can turn out to be a problem.
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);
        //Log.e("Prasad", String.valueOf(mWidth) + "," + String.valueOf(mHeight));
        DMatch[] matchesArray = matches.toArray();
        double minimumMatchDistance = globalClass.getHammingDistance();
        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;
        double xMatchedPoint, yMatchedPoint;
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
        Point point;
        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();
        double[] colorValue;
        double red, green, blue;
        int[] featureCount;
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;
        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10;
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax];
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;

        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                // MatchedPoint: Awesome match without color feedback
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;
                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);
                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];
                int xKernelFeature, yKernelFeature;
                // Color feedback
                if ((rLow < red) & (red < rHigh) & (gLow < green) & (green < gHigh) & (bLow < blue)
                        & (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;
                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                    }
                }
                //Log.e(TAG, iterateDescriptor + ": (" + xMatchedPoint + "," + yMatchedPoint + ")");
            }
        }

        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }
        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;

        if ((Double.isNaN(xImageCentroid)) | (Double.isNaN(yImageCentroid))) {
            //Log.e(TAG, "Centroid is not getting detected! Increasing hamming distance (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            //Log.e(TAG, "Centroid is getting detected! Decreasing and optimising hamming (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);
            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }
            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            kernelSum = globalClass.computeKernelSum(kernel);
            Log.e(TAG, Arrays.deepToString(kernelSum));
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) { // meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }
            Log.e(TAG, Arrays.deepToString(mask));
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }
            double xMaskMean = xMaskFeatureSum / maskedFeatureCount;
            double yMaskMean = yMaskFeatureSum / maskedFeatureCount;
            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }
            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);

            /* Command the quadcopter and send PWM values to Arduino
            double throttlePWM = 1500, yawPWM = 1500, pitchPWM = 1500;
            double deltaThrottle = 1, deltaYaw = 1, deltaPitch = 1;
            throttlePWM = globalClass.getThrottlePWM();
            pitchPWM = globalClass.getPitchPWM();
            yawPWM = globalClass.getYawPWM();
            deltaThrottle = globalClass.getThrottleDelta();
            deltaPitch = globalClass.getPitchDelta();
            deltaYaw = globalClass.getYawDelta();
            if (yImageCentroid > yTemplateCentroid) { throttlePWM = throttlePWM + deltaThrottle; }
            else { throttlePWM = throttlePWM - deltaThrottle; }
            if (RMSimage > distanceTemplateFeatures) { pitchPWM = pitchPWM + deltaPitch; }
            else { pitchPWM = pitchPWM - deltaPitch; }
            if (xImageCentroid > xTemplateCentroid) { yawPWM = yawPWM + deltaYaw; }
            else { yawPWM = yawPWM - deltaYaw; }
            if (1000 > throttlePWM) { throttlePWM = 1000; }
            if (2000 < throttlePWM) { throttlePWM = 2000; }
            if (1000 > pitchPWM) { pitchPWM = 1000; }
            if (2000 < pitchPWM) { pitchPWM = 2000; }
            if (1000 > yawPWM) { yawPWM = 1000; }
            if (2000 < yawPWM) { yawPWM = 2000; }
            globalClass.setPitchPWM(pitchPWM);
            globalClass.setYawPWM(yawPWM);
            globalClass.setThrottlePWM(throttlePWM); */

            // Display bounding circle
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;
            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);
            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            displayRadius = displayRadius * 1.4826;
            displayRadius = displayRadius / numberKernelMax;
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                //Log.e(TAG, "displayRadius is NaN!");
            } else {
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
                //Log.e(TAG, "Average distance: " + distanceAverage);
            }
            if ((Double.isNaN(xImageCentroid)) | Double.isNaN(yImageCentroid)) {
                //Log.e(TAG, "Centroid is NaN!");
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }
            if (initialisationFlag == 1) {
                //int displayRadius = 50;
                Point pointDisplay = new Point();
                //pointDisplay.x = xImageCentroid;
                //pointDisplay.y = yImageCentroid;
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }
        }
        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");
        /*try {
            //Features2d.drawKeypoints(result, keypointsImage, result, color, flagDraw);
            Features2d.drawKeypoints(result, templateKeyPoints, result, color, flagDraw);
        } catch (Exception e) {}*/
        //Log.e(TAG, "High (R,G,B): (" + rHigh + "," + gHigh + "," + bHigh + ")");
        //Log.e(TAG, "Low (R,G,B): (" + rLow + "," + gLow + "," + bLow + ")");
        //Log.e(TAG, Arrays.toString(matchesArray));
        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            //Utils.matToBitmap(mRgb, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }
        long timeEnd = (int) System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + " milli-seconds!");
        mJpegData = mJpegOutputStream.toByteArray();
        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll();
        }
    } catch (Exception e) {
        Log.e(TAG, "JPEG-factory is not working!");
    }
}
From source file:cn.xiongyihui.webcam.setup.java
License:Open Source License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_setup);
    this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

    final Button cameraButton = (Button) findViewById(R.id.cameraButton);
    final Button selectButton = (Button) findViewById(R.id.selectButton);
    final Button templateButton = (Button) findViewById(R.id.templateButton);
    final Button instructionButton = (Button) findViewById(R.id.instructionButton);
    final ImageView imageView = (ImageView) findViewById(R.id.imageView);

    try {
        int NUMBER_OF_CORES = Runtime.getRuntime().availableProcessors();
        // note: passing an int to makeText treats it as a string resource id, hence the try/catch
        Toast.makeText(this, NUMBER_OF_CORES, Toast.LENGTH_SHORT).show();
    } catch (Exception e) {
        Log.e(TAG, "Processor-cores are not getting detected!");
    }

    try {
        final Toast toast = Toast.makeText(this, "Please capture image; \n" + "select image; \n"
                + "Drag-and-drop, swipe on the desired region and confirm template!", Toast.LENGTH_LONG);
        final TextView v = (TextView) toast.getView().findViewById(android.R.id.message);
        instructionButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                if (v != null)
                    v.setGravity(Gravity.CENTER);
                toast.show();
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Instructions are not getting displayed!");
    }

    try {
        cameraButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
                startActivityForResult(intent, requestCode);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Camera is not working!");
    }

    try {
        selectButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                Intent i = new Intent(Intent.ACTION_PICK,
                        android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
                startActivityForResult(i, requestCode);
                bitmap = BitmapFactory.decodeFile(filePath);
                imageView.setImageBitmap(bitmap);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Selection is not working!");
    }

    try {
        templateButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                if (imageView.getDrawable() == null) {
                    Log.e(TAG, "Null ImageView!");
                }
                Log.e(TAG, "Button is working.");
                try {
                    bitmap = BitmapFactory.decodeFile(filePath);
                    bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
                    Mat frame = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC4);
                    Utils.bitmapToMat(bitmap, frame);
                    GlobalClass globalVariable = (GlobalClass) getApplicationContext();
                    globalVariable.setTemplateCapturedBitmapHeight(bitmapHeight);
                    globalVariable.setTemplateCapturedBitmapWidth(bitmapWidth);
                    Log.e(TAG, "Bitmap has been set successfully; Template is being generated!");
                    rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
                    Utils.matToBitmap(frame, bitmap);
                    if (x0final < x1final) {
                        x0display = x0final;
                        x1display = x1final;
                    }
                    if (x0final > x1final) {
                        x1display = x0final;
                        x0display = x1final;
                    }
                    if (y0final < y1final) {
                        y0display = y0final;
                        y1display = y1final;
                    }
                    if (y0final > y1final) {
                        y1display = y0final;
                        y0display = y1final;
                    }
                    long timeBegin = (int) System.currentTimeMillis();
                    bitmap = Bitmap.createBitmap(bitmap, x0display, y0display, x1display - x0display,
                            y1display - y0display);
                    /*String path = Environment.getExternalStorageDirectory().toString();
                    Log.e(TAG, "File is about to be written!");
                    //File file = new File(path, "TraQuad");
                    //bitmap.compress(Bitmap.CompressFormat.PNG, 100, fOutputStream);
                    //Log.e(TAG, "Stored image successfully!");
                    //fOutputStream.flush();
                    //fOutputStream.close();
                    //MediaStore.Images.Media.insertImage(getContentResolver(), file.getAbsolutePath(), file.getName(), file.getName());*/
                    /*Prominent colors code; this is not working in Android (OpenCV assertion error)
                    Log.e(TAG, "Retrieved image successfully!");
                    Imgproc.medianBlur(frame, frame, 3);
                    Log.e(TAG, "Filtered image successfully!");
                    try {
                        Mat mask = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC1);
                        MatOfFloat range = new MatOfFloat(0f, 255f);
                        Mat hist = new Mat();
                        MatOfInt mHistSize = new MatOfInt(256);
                        List<Mat> lHsv = new ArrayList<Mat>(3);
                        Mat hsv = new Mat();
                        Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_RGB2HSV);
                        Core.split(frame, lHsv);
                        Mat mH = lHsv.get(0);
                        Mat mS = lHsv.get(1);
                        Mat mV = lHsv.get(2);
                        ArrayList<Mat> ListMat = new ArrayList<Mat>();
                        ListMat.add(mH);
                        Log.e(TAG, String.valueOf(ListMat));
                        MatOfInt channels = new MatOfInt(0, 1);
                        Imgproc.calcHist(Arrays.asList(mH), channels, mask, hist, mHistSize, range);
                        ListMat.clear();
                    } catch (Exception e) {
                        Log.e(TAG, "Prominent colors are not getting detected!");
                    }*/
                    Mat colorFrame = frame;
                    colorFrame = frame.clone();
                    Utils.bitmapToMat(bitmap, frame);
                    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGB2GRAY);
                    Log.e(TAG, "Converted color successfully!");
                    int detectorType = FeatureDetector.ORB;
                    //int detectorType = FeatureDetector.SIFT; // SIFT and SURF are not working!
                    //int detectorType = FeatureDetector.SURF;
                    FeatureDetector featureDetector = FeatureDetector.create(detectorType);
                    Log.e(TAG, "Feature detection has begun!");
                    MatOfKeyPoint keypoints = new MatOfKeyPoint();
                    featureDetector.detect(frame, keypoints);
                    Log.e(TAG, "Feature detection has ended successfully!");
                    /*if (!featureDetector.empty()) {
                        // Draw the detected keypoints
                        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
                        Features2d.drawKeypoints(frame, keypoints, frame, color, flagDraw);
                        Utils.matToBitmap(frame, bitmap);
                    }*/
                    imageView.setImageBitmap(bitmap);
                    Log.e(TAG, "Final bitmap has been loaded!");
                    KeyPoint[] referenceKeypoints = keypoints.toArray();
                    Log.e(TAG, "Number of keypoints detected is " + String.valueOf(referenceKeypoints.length));
                    int iterationMax = referenceKeypoints.length;
                    int iterate = 0;
                    double xFeaturePoint, yFeaturePoint;
                    double xSum = 0, ySum = 0;
                    double totalResponse = 0;
                    double keyPointResponse = 0;
                    double xTemplateCentroid = 0, yTemplateCentroid = 0;
                    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
                    Mat templateDescriptor = new Mat();
                    descriptorExtractor.compute(frame, keypoints, templateDescriptor);
                    // Response-weighted centroid of the template features
                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;
                        if (keyPointResponse > 0) {
                            xSum = xSum + keyPointResponse * xFeaturePoint;
                            ySum = ySum + keyPointResponse * yFeaturePoint;
                            totalResponse = totalResponse + keyPointResponse;
                            //Log.e(TAG, "Feature " + String.valueOf(iterate) + ":" + String.valueOf(referenceKeypoints[iterate]));
                        }
                    }
                    xTemplateCentroid = xSum / totalResponse;
                    yTemplateCentroid = ySum / totalResponse;
                    Log.e(TAG, "Finished conversion of features to points!");
                    Log.e(TAG, "Centroid location is: (" + xTemplateCentroid + "," + yTemplateCentroid + ")");
                    double xSquareDistance = 0, ySquareDistance = 0;
                    double distanceTemplateFeatures = 0;
                    int numberOfPositiveResponses = 0;
                    double[] colorValue;
                    double rSum = 0, gSum = 0, bSum = 0;
                    double rCentral, gCentral, bCentral;
                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;
                        colorValue = colorFrame.get((int) yFeaturePoint, (int) xFeaturePoint);
                        rSum = rSum + colorValue[0];
                        gSum = gSum + colorValue[1];
                        bSum = bSum + colorValue[2];
                        if (keyPointResponse > 0) {
                            xSquareDistance = xSquareDistance
                                    + (xFeaturePoint - xTemplateCentroid) * (xFeaturePoint - xTemplateCentroid);
                            ySquareDistance = ySquareDistance
                                    + (yFeaturePoint - yTemplateCentroid) * (yFeaturePoint - yTemplateCentroid);
                            numberOfPositiveResponses++;
                        }
                    }
                    rCentral = rSum / iterationMax;
                    gCentral = gSum / iterationMax;
                    bCentral = bSum / iterationMax;
                    double deltaColor = 21;
                    double rLow = rCentral - deltaColor;
                    double rHigh = rCentral + deltaColor;
                    double gLow = rCentral - deltaColor; // note: rCentral here (and below) looks like a copy-paste slip in the original source
                    double gHigh = rCentral + deltaColor;
                    double bLow = rCentral - deltaColor;
                    double bHigh = rCentral + deltaColor;
                    Log.e(TAG, "Prominent color (R,G,B): (" + rCentral + "," + gCentral + "," + bCentral + ")");
                    distanceTemplateFeatures = Math
                            .sqrt((xSquareDistance + ySquareDistance) / numberOfPositiveResponses);
                    KeyPoint[] offsetCompensatedKeyPoints = keypoints.toArray();
                    double xMaxNormalisation, yMaxNormalisation;
                    xMaxNormalisation = x1display - x0display;
                    yMaxNormalisation = y1display - y0display;
                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        offsetCompensatedKeyPoints[iterate].pt.x = offsetCompensatedKeyPoints[iterate].pt.x
                                / xMaxNormalisation;
                        offsetCompensatedKeyPoints[iterate].pt.y = offsetCompensatedKeyPoints[iterate].pt.y
                                / yMaxNormalisation;
                        //Log.e(TAG, "Compensated: (" + String.valueOf(offsetCompensatedKeyPoints[iterate].pt.x) + "," + String.valueOf(offsetCompensatedKeyPoints[iterate].pt.y) + ")");
                    }
                    double xCentroidNormalised, yCentroidNormalised;
                    xCentroidNormalised = (xTemplateCentroid - x0display) / xMaxNormalisation;
                    yCentroidNormalised = (yTemplateCentroid - y0display) / yMaxNormalisation;
                    Log.e(TAG, "Normalised Centroid: (" + String.valueOf(xCentroidNormalised) + ","
                            + String.valueOf(yCentroidNormalised) + ")");
                    long timeEnd = (int) System.currentTimeMillis();
                    Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + " milli-seconds!");
                    Log.e(TAG, "RMS distance is: " + distanceTemplateFeatures);
                    globalVariable.setDistanceTemplateFeatures(distanceTemplateFeatures);
                    globalVariable.setX0display(x0display);
                    globalVariable.setY0display(y0display);
                    globalVariable.setX1display(x1display);
                    globalVariable.setY1display(y1display);
                    globalVariable.setKeypoints(keypoints);
                    globalVariable.setXtemplateCentroid(xTemplateCentroid);
                    globalVariable.setYtemplateCentroid(yTemplateCentroid);
                    globalVariable.setTemplateDescriptor(templateDescriptor);
                    globalVariable.setNumberOfTemplateFeatures(iterationMax);
                    globalVariable.setNumberOfPositiveTemplateFeatures(numberOfPositiveResponses);
                    globalVariable.setRhigh(rHigh);
                    globalVariable.setRlow(rLow);
                    globalVariable.setGhigh(gHigh);
                    globalVariable.setGlow(gLow);
                    globalVariable.setBhigh(bHigh);
                    globalVariable.setBlow(bLow);
                    globalVariable.setXnormalisedCentroid(xCentroidNormalised);
                    globalVariable.setYnormalisedCentroid(yCentroidNormalised);
                    globalVariable.setNormalisedTemplateKeyPoints(offsetCompensatedKeyPoints);
                    Log.e(TAG, "Finished setting the global variables!");
                } catch (Exception e) {
                    Log.e(TAG, "Please follow instructions!");
                }
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Template is not working!");
    }
}
From source file:com.oetermann.imageclassifier.DescriptorExtractorWrapper.java
License:Open Source License
public Mat detectAndCompute(Mat image) {
    MatOfKeyPoint keypoint = new MatOfKeyPoint();
    featureDetector.detect(image, keypoint);
    Mat descriptor = new Mat();
    descriptorExtractor.compute(image, keypoint, descriptor);
    keypoint.release(); // keypoints are only needed to compute the descriptors, so free the native memory
    return descriptor;
}
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Computes the rectangle where the searched picture is and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 *
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF anymore for the java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);
    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }
    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }
    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);
    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();
    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();
    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }
    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));
    // points of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left
    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });
            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);
            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);
    // rework the scene points: now we are sure the object rotation is 0, 90, 180 or 270 degrees
    reworkOnScenePoints(p1, p2, p3, p4);
    // check that the aspect ratios of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);
    recordDetectedRectangle(p1, p2, p3, p4);
}
From source file:eu.fpetersen.robobrain.behavior.followobject.OrbObjectDetector.java
License:Open Source License
private MatOfKeyPoint detectInImage(Mat image) {
    MatOfKeyPoint keypoints = new MatOfKeyPoint();
    featureDetector.detect(image, keypoints);
    return keypoints;
}
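A minimal sketch of a hypothetical caller for detectInImage, assuming an ORB detector as the class name suggests; the file names are placeholders, and Features2d.drawKeypoints is used only to visualise the returned MatOfKeyPoint:

    // Hypothetical caller (OpenCV 2.4-era API, matching the other examples on this page).
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.ORB);
    Mat image = Highgui.imread("object.png", Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    MatOfKeyPoint keypoints = new MatOfKeyPoint();
    featureDetector.detect(image, keypoints);        // same call detectInImage wraps
    Mat annotated = new Mat();
    Features2d.drawKeypoints(image, keypoints, annotated);
    Highgui.imwrite("object_keypoints.png", annotated);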
From source file:fi.conf.tabare.ARDataProvider.java
private void detect() {
    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;
    //List<Mat> channels = new LinkedList<>();

    // Loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;
                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);
                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);
                //Imgproc.equalizeHist(input_image, input_image);
                input_image.convertTo(input_image, -1, params.contrast, params.brightness); // image*contrast[1.0-3.0] + brightness[0-255]
                doBlur(input_image, input_image, params.blur, params.blurAmount);
                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {
                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);
                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);
                    if (undistorted_image == null)
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);
                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);
                    input_image = undistorted_image.clone();
                    if (selectedView == View.dist)
                        preview_image = input_image.clone();
                }

                // if (background == null) background = input_image.clone();
                // if (recaptureBg) {
                //     backgSubstractor.apply(background, background);
                //     System.out.println(background.channels() + " " + background.size());
                //     System.out.println(input_image.channels() + " " + input_image.size());
                //     recaptureBg = false;
                // }
                // if (dynamicBGRemoval) {
                //     //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //     //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //     //Core.subtract(input_image, background, input_image);
                //     //Core.bitwise_xor(input_image, background, input_image);
                //     doBlur(input_image, background, Blur.normal_7x7, 0); // blur a little, to get nicer result when substracting
                //     backgSubstractor.apply(background, background, dynamicBGAmount);
                // }
                // if (background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();
                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));
                    Size kernelSize = null;
                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }
                    int kernelType = -1;
                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }
                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }
                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);
                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();
                    blobs_image.release();
                }

                if (params.tripTracking) {
                    Mat trips_image = undistorted_image.clone();
                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, 5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));
                        }
                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }
                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);
                    for (int i = 0; i < circles.cols(); i++) {
                        double[] coords = circles.get(0, i);
                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue; // if the circle is off the limits, or too small, don't process it
                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);
                        if (tc.isValid())
                            tripcodeData.add(tc);
                    }
                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();
                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }
            } else {
                System.out.println("frame/cam failiure!");
            }
        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        // FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }
    }

    // De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}
From source file:imageanalyzercv.ImageAnalyzerCV.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.out.println("path: " + System.getProperty("java.library.path"));
    System.loadLibrary("opencv_java300");

    Mat m = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0763.JPG");
    System.out.println("m = " + m.height());
    MatOfKeyPoint points = new MatOfKeyPoint();
    FeatureDetector.create(FeatureDetector.SURF).detect(m, points);

    Mat m2 = Highgui.imread("/Users/chintan/Downloads/software/image_analyis/mydata/SAM_0764.JPG");
    System.out.println("m = " + m2.height());
    MatOfKeyPoint points2 = new MatOfKeyPoint();
    FeatureDetector.create(FeatureDetector.SURF).detect(m2, points2);

    // note: despite the variable name, a BRISK extractor is used here
    DescriptorExtractor SurfExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
    Mat imag1Desc = new Mat();
    SurfExtractor.compute(m, points, imag1Desc);
    Mat imag2Desc = new Mat();
    SurfExtractor.compute(m2, points2, imag2Desc);

    MatOfDMatch matches = new MatOfDMatch();
    Mat imgd = new Mat();
    imag1Desc.copyTo(imgd);
    System.out.println(imgd.size());
    DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING).match(imag2Desc, imag1Desc, matches);

    double min_distance = 1000.0;
    double max_distance = 0.0;
    DMatch[] matchArr = matches.toArray();
    for (int i = 0; i < matchArr.length; i++) {
        if (matchArr[i].distance > max_distance)
            max_distance = matchArr[i].distance;
        if (matchArr[i].distance < min_distance)
            min_distance = matchArr[i].distance;
    }

    ArrayList<DMatch> good_matches = new ArrayList<DMatch>();
    System.out.println("Min Distance: " + min_distance + " Max distance: " + max_distance);
    double totalScore = 0.0;
    for (int j = 0; j < imag1Desc.rows() && j < matchArr.length; j++) {
        if ((matchArr[j].distance <= (11 * min_distance)) && (matchArr[j].distance >= min_distance * 1)) {
            good_matches.add(matchArr[j]);
            //System.out.println(matchArr[j]);
            totalScore = totalScore + matchArr[j].distance;
        }
        //good_matches.add(matchArr[j]);
    }
    System.out.println((1 - (totalScore / (good_matches.size() * ((max_distance + min_distance) / 2)))) * 100);
    //System.out.println(matches.toList().size());

    Mat out = new Mat();
    MatOfDMatch mats = new MatOfDMatch();
    mats.fromList(good_matches);
    Features2d.drawMatches(m2, points2, m, points, mats, out);
    Highgui.imwrite("/Users/chintan/Downloads/one2.jpg", out);
}