List of usage examples for the org.opencv.core.TermCriteria constructor
public TermCriteria(int type, int maxCount, double epsilon)
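The type flag selects which stopping conditions apply: TermCriteria.COUNT (alias MAX_ITER) caps the number of iterations, TermCriteria.EPS stops once the change per iteration falls below epsilon, and the two flags can be combined. A minimal sketch of the three variants (the values are illustrative, not taken from any of the source files below):

import org.opencv.core.TermCriteria;

public class TermCriteriaDemo {
    public static void main(String[] args) {
        // Stop after exactly 100 iterations; epsilon is ignored
        TermCriteria byCount = new TermCriteria(TermCriteria.COUNT, 100, 0);

        // Stop once the per-iteration change drops below 1e-3; maxCount is ignored
        TermCriteria byEps = new TermCriteria(TermCriteria.EPS, 0, 1e-3);

        // Stop at whichever condition is met first (the usual pattern in the examples below)
        TermCriteria both = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 30, 0.1);

        System.out.println(both);
    }
}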
From source file:classes.ObjectFinder.java
private void computeTrackBox() {
    trackBox = new RotatedRect();
    if (computedSearchWindow.size().width > 0 && computedSearchWindow.size().height > 0
            && computedSearchWindow.area() > 1) {
        // 2 | 1 == TermCriteria.EPS | TermCriteria.COUNT: stop after 10 iterations
        // or when the window centre moves by less than 1 pixel
        trackBox = Video.CamShift(thresholdedBackprojection, computedSearchWindow,
                new TermCriteria(2 | 1, 10, 1));
    }
    if (trackBox.size.width > 0 && trackBox.size.height > 0 && trackBox.size.area() > 1) {
        Core.ellipse(inputFrame, trackBox, new Scalar(0, 0, 255), 2);
    }
}
From source file:classes.TextRecognitionPreparer.java
public static Scalar cluster(Scalar userColor, Mat cutout, int k) {
    Mat samples = cutout.reshape(1, cutout.cols() * cutout.rows());
    Mat samples32f = new Mat();
    samples.convertTo(samples32f, CvType.CV_32F, 1.0 / 255.0);

    Mat labels = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 100, 1);
    Mat centers = new Mat();
    Core.kmeans(samples32f, k, labels, criteria, 1, Core.KMEANS_PP_CENTERS, centers);

    Scalar fillingColor = getFillingColor(userColor, cutout, labels, centers);
    return fillingColor;
}
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
private Mat applyKmeans(Mat source) {
    Mat dest = new Mat();
    // Note: converts the source in place to 32-bit float, scaled to [0, 1]
    source.convertTo(source, CvType.CV_32F, 1.0 / 255.0);

    Mat centers = new Mat();
    Mat labels = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 20, 0.1);
    Core.kmeans(source, 4, labels, criteria, 10, Core.KMEANS_PP_CENTERS, centers);

    List<Mat> mats = showClusters(source, labels, centers);
    //mats.get(0).convertTo(dest, CvType.CV_8UC3);
    Core.merge(mats, dest);
    //centers.convertTo(dest, CvType.CV_8UC3);
    return dest;
}
From source file:com.mycompany.objectdetection.ObjectDetector.java
public void preProcessImg() {
    // Spatial window radius: 3% of the average image dimension
    int spatialWindowRadius = ((img.width() + img.height()) / 2);
    spatialWindowRadius = spatialWindowRadius * 3 / 100;
    //System.out.println(spatialWindowRadius);
    TermCriteria termCriteria = new TermCriteria(COUNT + EPS, COUNT_VALUE, EPS_VALUE);
    Imgproc.pyrMeanShiftFiltering(img, imgMeanShifted, spatialWindowRadius, COLOR_WINDOW_RADIUS,
            MAX_LEVEL, termCriteria);
}
From source file:com.trandi.opentld.tld.LKTracker.java
License:Apache License
LKTracker() {
termCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, MAX_COUNT, EPSILON);
}
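MAX_COUNT and EPSILON are constants defined elsewhere in LKTracker. For context, here is a hedged sketch of how such a criteria object is typically consumed by the pyramidal Lucas-Kanade tracker; the helper, the window size, and the values 20 and 0.03 are illustrative assumptions, not taken from this source:

import org.opencv.core.*;
import org.opencv.video.Video;

class LKSketch {
    // Hypothetical helper: track prevPts from prevGray into the next grayscale frame.
    static MatOfPoint2f track(Mat prevGray, Mat gray, MatOfPoint2f prevPts) {
        MatOfPoint2f nextPts = new MatOfPoint2f();
        MatOfByte status = new MatOfByte();  // 1 where a point was tracked, 0 where lost
        MatOfFloat err = new MatOfFloat();   // per-point tracking error
        // Same COUNT + EPS pattern as LKTracker above
        TermCriteria term = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 20, 0.03);
        Video.calcOpticalFlowPyrLK(prevGray, gray, prevPts, nextPts, status, err,
                new Size(21, 21), 3, term, 0, 1e-4);
        return nextPts;
    }
}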
From source file:imageprocess.ObjectFinder.java
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread("D:\\backup\\opencv\\baboon1.jpg");

    // Define ROI
    Rect rect = new Rect(110, 260, 35, 40);
    Mat imageROI = new Mat(image, rect);
    Core.rectangle(image, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));
    Imshow origIm = new Imshow("Origin");
    origIm.showImage(image);

    ObjectFinder finder = new ObjectFinder(false, 0.2f);

    // Get the Hue histogram
    int minSat = 65;
    Mat hist = finder.getHueHistogram(imageROI, minSat);
    Mat norm = new Mat();
    Core.normalize(hist, norm, 1, 0, NORM_L2);
    finder.setROIHistogram(norm);

    // Convert to HSV space
    Mat hsv = new Mat();
    Imgproc.cvtColor(image, hsv, CV_BGR2HSV);

    // Split the image
    List<Mat> v = new ArrayList<>();
    Core.split(hsv, v);

    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm = new Imshow("Saturation");
    satIm.showImage(v.get(1));

    // Get back-projection of hue histogram
    Mat result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));
    Imshow resultHueIm = new Imshow("Result Hue");
    resultHueIm.showImage(result);

    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm = new Imshow("Result Hue and raw");
    resultHueAndIm.showImage(result);

    // Second image
    Mat image2 = Highgui.imread("D:\\backup\\opencv\\baboon3.jpg");

    // Display image
    Imshow img2Im = new Imshow("Image2");
    img2Im.showImage(image2);

    // Convert to HSV space
    Imgproc.cvtColor(image2, hsv, CV_BGR2HSV);

    // Split the image
    Core.split(hsv, v);

    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm2 = new Imshow("Saturation2");
    satIm2.showImage(v.get(1));

    // Get back-projection of hue histogram
    finder.setThreshold(-1.0f);
    result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));
    Imshow resultHueIm2 = new Imshow("Result Hue2");
    resultHueIm2.showImage(result);

    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm2 = new Imshow("Result Hue and raw2");
    resultHueAndIm2.showImage(result);

    Rect rect2 = new Rect(110, 260, 35, 40);
    Core.rectangle(image2, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));

    // Stop mean shift after 100 iterations or a window shift smaller than 0.01
    TermCriteria criteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 100, 0.01);
    int steps = Video.meanShift(result, rect2, criteria);

    Core.rectangle(image2, new Point(rect2.x, rect2.y),
            new Point(rect2.x + rect2.width, rect2.y + rect2.height), new Scalar(0, 255, 0));
    Imshow meanshiftIm = new Imshow("Meanshift result");
    meanshiftIm.showImage(image2);
}
From source file:nz.ac.auckland.lablet.vision.CamShiftTracker.java
License:Open Source License
/**
 * Finds the dominant colour in an image, and returns two values in HSV colour space to represent similar
 * colours, e.g. so you can keep all colours similar to the dominant colour.
 *
 * How the algorithm works:
 *
 * 1. Scale the frame down so that the algorithm doesn't take too long.
 * 2. Segment the frame into different colours (number of colours determined by k).
 * 3. Find the dominant cluster (largest area) and get its central colour point.
 * 4. Get a range (min/max) to represent similar colours.
 *
 * @param bgr The input frame, in BGR colour space.
 * @param k The number of segments to use (2 works well).
 * @return The min and max HSV colour values, which represent the colours similar to the dominant colour.
 */
private Pair<Scalar, Scalar> getMinMaxHsv(Mat bgr, int k) {
    // Copy the frame to BGRA (the HSV conversion happens later, on the dominant cluster centre)
    Mat input = new Mat();
    Imgproc.cvtColor(bgr, input, Imgproc.COLOR_BGR2BGRA, 3);

    // Scale image down so k-means runs in a reasonable time
    Size bgrSize = bgr.size();
    Size newSize = new Size();

    if (bgrSize.width > CamShiftTracker.KMEANS_IMG_SIZE || bgrSize.height > CamShiftTracker.KMEANS_IMG_SIZE) {
        if (bgrSize.width > bgrSize.height) {
            newSize.width = CamShiftTracker.KMEANS_IMG_SIZE;
            newSize.height = CamShiftTracker.KMEANS_IMG_SIZE / bgrSize.width * bgrSize.height;
        } else {
            newSize.width = CamShiftTracker.KMEANS_IMG_SIZE / bgrSize.height * bgrSize.width;
            newSize.height = CamShiftTracker.KMEANS_IMG_SIZE;
        }
        Imgproc.resize(input, input, newSize);
    }

    // Image quantization using k-means; see here for details of the algorithm: http://bit.ly/1JIvrlB
    Mat clusterData = new Mat();
    Mat reshaped = input.reshape(1, input.rows() * input.cols());
    reshaped.convertTo(clusterData, CvType.CV_32F, 1.0 / 255.0);
    Mat labels = new Mat();
    Mat centres = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 50, 1);
    Core.kmeans(clusterData, k, labels, criteria, 1, Core.KMEANS_PP_CENTERS, centres);

    // Get num hits for each category
    int[] counts = new int[k];
    for (int i = 0; i < labels.rows(); i++) {
        int label = (int) labels.get(i, 0)[0];
        counts[label] += 1;
    }

    // Get cluster index with maximum number of members
    int maxCluster = 0;
    int index = -1;
    for (int i = 0; i < counts.length; i++) {
        int value = counts[i];
        if (value > maxCluster) {
            maxCluster = value;
            index = i;
        }
    }

    // Get the cluster centre point in BGR (columns are in BGRA order)
    int r = (int) (centres.get(index, 2)[0] * 255.0);
    int g = (int) (centres.get(index, 1)[0] * 255.0);
    int b = (int) (centres.get(index, 0)[0] * 255.0);
    int sum = (r + g + b) / 3;

    // Get colour range
    Scalar min;
    Scalar max;

    int rg = Math.abs(r - g);
    int gb = Math.abs(g - b);
    int rb = Math.abs(r - b);
    int maxDiff = Math.max(Math.max(rg, gb), rb);

    if (maxDiff < 35 && sum > 120) { // white
        min = new Scalar(0, 0, 0);
        max = new Scalar(180, 40, 255);
    } else if (sum < 50 && maxDiff < 35) { // black
        min = new Scalar(0, 0, 0);
        max = new Scalar(180, 255, 40);
    } else {
        Mat bgrColour = new Mat(1, 1, CvType.CV_8UC3, new Scalar(r, g, b));
        Mat hsvColour = new Mat();
        Imgproc.cvtColor(bgrColour, hsvColour, Imgproc.COLOR_BGR2HSV, 3);
        double[] hsv = hsvColour.get(0, 0);

        int addition = 0;
        int minHue = (int) hsv[0] - colourRange;
        if (minHue < 0) {
            addition = Math.abs(minHue);
        }
        int maxHue = (int) hsv[0] + colourRange;

        min = new Scalar(Math.max(minHue, 0), 60, Math.max(35, hsv[2] - 30));
        max = new Scalar(Math.min(maxHue + addition, 180), 255, 255);
    }

    return new Pair<>(min, max);
}
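The bounds returned by getMinMaxHsv are meant to feed a masking step. A hedged sketch of the typical follow-up; Core.inRange is standard OpenCV, but this exact call and the helper name are assumptions, not shown in the source:

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;

class MaskSketch {
    // Hypothetical helper: keep only pixels similar to the dominant colour.
    static Mat maskDominantColour(Mat bgrFrame, Scalar minHsv, Scalar maxHsv) {
        Mat hsvFrame = new Mat();
        Imgproc.cvtColor(bgrFrame, hsvFrame, Imgproc.COLOR_BGR2HSV);
        Mat mask = new Mat();  // 255 where the pixel falls inside [minHsv, maxHsv], 0 elsewhere
        Core.inRange(hsvFrame, minHsv, maxHsv, mask);
        return mask;
    }
}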
From source file:objectdetection.ObjectDetector.java
public void preProcessImg() {
    TermCriteria termCriteria = new TermCriteria(COUNT + EPS, COUNT_VALUE, EPS_VALUE);
    Imgproc.pyrMeanShiftFiltering(img, imgMeanShifted, SPATIAL_WINDOW_RADIUS, COLOR_WINDOW_RADIUS,
            MAX_LEVEL, termCriteria);
    Imgproc.cvtColor(imgMeanShifted, imgGrayscale, COLOR_BGR2GRAY);
    Imgproc.Canny(imgGrayscale, imgCanny, THRESHOLD1, THRESHOLD2, APERTURE_SIZE, true);
}
From source file:opencv.CaptchaDetection.java
private static Mat k_means_spilter(Mat src) {
    Mat dst = Mat.zeros(src.size(), CvType.CV_8UC1);

    int width = src.cols();
    int height = src.rows();
    int dims = src.channels();

    // Number of clusters
    int clusterCount = 3;

    Mat points = new Mat(width * height, dims, CvType.CV_32F, new Scalar(0));
    Mat centers = new Mat(clusterCount, dims, CvType.CV_32F);
    Mat labels = new Mat(width * height, 1, CvType.CV_32S);

    // Copy every pixel into the sample matrix, one row per pixel
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            int index = row * width + col;
            double[] s_data = src.get(row, col);

            for (int channel = 0; channel < 3; channel++) {
                float[] f_buff = new float[1];
                f_buff[0] = (float) s_data[channel];
                points.put(index, channel, f_buff);
            }
        }
    }

    // k-means parameters: stop after 10 iterations or when centres move less than 0.1
    TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 10, 0.1);
    Core.kmeans(points, clusterCount, labels, criteria, 3, Core.KMEANS_PP_CENTERS, centers);

    // Order the cluster labels by the sum of their centre values (ascending)
    Map<Integer, Integer> tmp = new TreeMap<>();
    for (int i = 0; i < clusterCount; i++) {
        int sum = 0;
        for (int j = 0; j < dims; j++) {
            sum += centers.get(i, j)[0];
        }
        while (tmp.containsKey(sum))
            sum++;
        tmp.put(sum, i);
    }

    int count = 0;
    int[] label_order = new int[clusterCount];
    for (Map.Entry<Integer, Integer> iter : tmp.entrySet()) {
        label_order[count++] = iter.getValue();
    }

    // Mark pixels belonging to the middle-brightness cluster as white
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            int index = row * width + col;
            int label = (int) labels.get(index, 0)[0];

            if (label == label_order[1]) {
                byte[] d_buff = new byte[1];
                d_buff[0] = (byte) 255;
                dst.put(row, col, d_buff);
            }
        }
    }

    return dst;
}
From source file:org.usfirst.frc.team2084.CMonster2016.vision.CameraCalibration.java
License:Open Source License
/**
 * Draws checkerboard corners on an image.
 *
 * @param image the image to process
 * @param addToCalibration if true, add this image to the corner list
 */
public void process(Mat image, boolean addToCalibration) {
    boolean patternFound = Calib3d.findChessboardCorners(image, boardSize, boardCorners,
            Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_NORMALIZE_IMAGE
                    | Calib3d.CALIB_CB_FAST_CHECK);

    if (patternFound) {
        // Refine corner positions to be more accurate
        Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
        Imgproc.cornerSubPix(grayImage, boardCorners, new Size(6, 6), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));

        if (addToCalibration) {
            calibrationCorners.add(boardCorners);
        }
    }

    image.copyTo(boardImage);
    Calib3d.drawChessboardCorners(boardImage, boardSize, boardCorners, patternFound);

    if (!addToCalibration) {
        debugImage("Board", boardImage);
    }

    Imgproc.undistort(image, undistortImage, cameraMatrix, distCoeffs);
    undistortImage.copyTo(image);

    Imgproc.putText(image, "Error: " + error, new Point(20, 20), Core.FONT_HERSHEY_PLAIN, 1.5,
            new Scalar(0, 255, 0));
}