List of usage examples for org.opencv.core TermCriteria EPS
int EPS
To view the source code for org.opencv.core TermCriteria.EPS, click the Source Link below.
From source file:com.trandi.opentld.tld.LKTracker.java
License:Apache License
LKTracker() {
termCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, MAX_COUNT, EPSILON);
}
From source file:imageprocess.ObjectFinder.java
/**
 * Demo of histogram back-projection object tracking: builds a hue histogram
 * from a ROI of a first image, back-projects it onto a second image, and
 * locates the object there with mean-shift.
 *
 * NOTE(review): reads hard-coded Windows paths ("D:\\backup\\opencv\\...") —
 * the images must exist there for this demo to run.
 */
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread("D:\\backup\\opencv\\baboon1.jpg");
    // Define ROI containing the object to track and outline it in red
    Rect rect = new Rect(110, 260, 35, 40);
    Mat imageROI = new Mat(image, rect);
    Core.rectangle(image, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));
    Imshow origIm = new Imshow("Origin");
    origIm.showImage(image);
    ObjectFinder finder = new ObjectFinder(false, 0.2f);
    // Get the Hue histogram of the ROI, ignoring low-saturation pixels
    int minSat = 65;
    Mat hist = finder.getHueHistogram(imageROI, minSat);
    Mat norm = new Mat();
    Core.normalize(hist, norm, 1, 0, NORM_L2);
    finder.setROIHistogram(norm);
    // Convert to HSV space
    Mat hsv = new Mat();
    Imgproc.cvtColor(image, hsv, CV_BGR2HSV);
    // Split the image into its H, S, V planes
    List<Mat> v = new ArrayList<>();
    Core.split(hsv, v);
    // Eliminate pixels with low saturation (hue is unreliable there)
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm = new Imshow("Saturation");
    satIm.showImage(v.get(1));
    // Get back-projection of hue histogram over the full hue range [0, 180)
    Mat result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));
    Imshow resultHueIm = new Imshow("Result Hue");
    resultHueIm.showImage(result);
    // Mask back-projection by the saturation mask
    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm = new Imshow("Result Hue and raw");
    resultHueAndIm.showImage(result);
    // Second image: locate the same object in a different frame
    Mat image2 = Highgui.imread("D:\\backup\\opencv\\baboon3.jpg");
    // Display image
    Imshow img2Im = new Imshow("Imgage2");
    img2Im.showImage(image2);
    // Convert to HSV space
    Imgproc.cvtColor(image2, hsv, CV_BGR2HSV);
    // Split the image
    Core.split(hsv, v);
    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm2 = new Imshow("Saturation2");
    satIm2.showImage(v.get(1));
    // Get back-projection of hue histogram (threshold -1 disables clamping)
    finder.setThreshold(-1.0f);
    result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));
    Imshow resultHueIm2 = new Imshow("Result Hue2");
    resultHueIm2.showImage(result);
    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm2 = new Imshow("Result Hue and raw2");
    resultHueAndIm2.showImage(result);
    // Start mean-shift from the object's position in the first image
    Rect rect2 = new Rect(110, 260, 35, 40);
    Core.rectangle(image2, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));
    // Stop after 100 iterations or when the window moves less than 0.01
    TermCriteria criteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 100, 0.01);
    int steps = Video.meanShift(result, rect2, criteria);
    // Draw the converged window in green
    Core.rectangle(image2, new Point(rect2.x, rect2.y),
            new Point(rect2.x + rect2.width, rect2.y + rect2.height), new Scalar(0, 255, 0));
    Imshow meanshiftIm = new Imshow("Meanshift result");
    meanshiftIm.showImage(image2);
}
From source file:opencv.CaptchaDetection.java
private static Mat k_means_spilter(Mat src) { Mat dst = Mat.zeros(src.size(), CvType.CV_8UC1); int width = src.cols(); int height = src.rows(); int dims = src.channels(); // //from w w w .j a va2 s . c o m int clusterCount = 3; Mat points = new Mat(width * height, dims, CvType.CV_32F, new Scalar(0)); Mat centers = new Mat(clusterCount, dims, CvType.CV_32F); Mat labels = new Mat(width * height, 1, CvType.CV_32S); // points for (int row = 0; row < height; row++) { for (int col = 0; col < width; col++) { int index = row * width + col; double[] s_data = src.get(row, col); for (int channel = 0; channel < 3; channel++) { float[] f_buff = new float[1]; f_buff[0] = (float) s_data[channel]; points.put(index, channel, f_buff); } } } // knn ? TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 10, 0.1); Core.kmeans(points, clusterCount, labels, criteria, 3, Core.KMEANS_PP_CENTERS, centers); // ??? label index Map<Integer, Integer> tmp = new TreeMap<>(); for (int i = 0; i < clusterCount; i++) { int sum = 0; for (int j = 0; j < dims; j++) { sum += centers.get(i, j)[0]; } while (tmp.containsKey(sum)) sum++; tmp.put(sum, i); } int count = 0; int[] label_order = new int[clusterCount]; for (Map.Entry<Integer, Integer> iter : tmp.entrySet()) { label_order[count++] = iter.getValue(); } for (int row = 0; row < height; row++) { for (int col = 0; col < width; col++) { int index = row * width + col; int label = (int) labels.get(index, 0)[0]; if (label == label_order[1]) { byte[] d_buff = new byte[1]; d_buff[0] = (byte) 255; dst.put(row, col, d_buff); } } } return dst; }
From source file:org.usfirst.frc.team2084.CMonster2016.vision.CameraCalibration.java
License:Open Source License
/** * Draws checkerboard corners on an image. * //from ww w. ja v a 2 s. c o m * @param image the image to process * @param addToCalibration if true, add this image to the corner list */ public void process(Mat image, boolean addToCalibration) { boolean patternFound = Calib3d.findChessboardCorners(image, boardSize, boardCorners, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_NORMALIZE_IMAGE | Calib3d.CALIB_CB_FAST_CHECK); if (patternFound) { // Refine corner positions to be more accurate Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY); Imgproc.cornerSubPix(grayImage, boardCorners, new Size(6, 6), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1)); if (addToCalibration) { calibrationCorners.add(boardCorners); } } image.copyTo(boardImage); Calib3d.drawChessboardCorners(boardImage, boardSize, boardCorners, patternFound); if (!addToCalibration) { debugImage("Board", boardImage); } Imgproc.undistort(image, undistortImage, cameraMatrix, distCoeffs); undistortImage.copyTo(image); Imgproc.putText(image, "Error: " + error, new Point(20, 20), Core.FONT_HERSHEY_PLAIN, 1.5, new Scalar(0, 255, 0)); }
From source file:org.usfirst.frc.team2084.CMonster2016.vision.Target.java
License:Open Source License
/**
 * Creates a new possible target based on the specified blob and calculates
 * its score.
 *
 * @param contour the shape of the possible target
 * @param grayImage grayscale frame used for sub-pixel corner refinement
 */
public Target(MatOfPoint contour, Mat grayImage) {
    // Simplify contour to make the corner finding algorithm work better
    MatOfPoint2f fContour = new MatOfPoint2f();
    contour.convertTo(fContour, CvType.CV_32F);
    Imgproc.approxPolyDP(fContour, fContour, VisionParameters.getGoalApproxPolyEpsilon(), true);
    fContour.convertTo(contour, CvType.CV_32S);
    this.contour = contour;

    // Check area, and don't do any calculations if it is not valid
    if (validArea = validateArea()) {
        // Find a bounding rectangle
        RotatedRect rect = Imgproc.minAreaRect(fContour);
        Point[] rectPoints = new Point[4];
        rect.points(rectPoints);
        // Snap each rectangle corner to the nearest actual contour point
        for (int j = 0; j < rectPoints.length; j++) {
            Point rectPoint = rectPoints[j];
            double minDistance = Double.MAX_VALUE;
            Point point = null;
            for (int i = 0; i < contour.rows(); i++) {
                Point contourPoint = new Point(contour.get(i, 0));
                double dist = distance(rectPoint, contourPoint);
                if (dist < minDistance) {
                    minDistance = dist;
                    point = contourPoint;
                }
            }
            rectPoints[j] = point;
        }
        MatOfPoint2f rectMat = new MatOfPoint2f(rectPoints);

        // Refine the corners to improve accuracy
        Imgproc.cornerSubPix(grayImage, rectMat, new Size(4, 10), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        rectPoints = rectMat.toArray();

        // Identify each corner: bucket points by x coordinate, sorted
        // ascending; the two leftmost become the left pair, etc.
        // NOTE(review): points sharing an exact x land in the same bucket,
        // which the iteration below handles in insertion order.
        SortedMap<Double, List<Point>> x = new TreeMap<>();
        Arrays.stream(rectPoints).forEach((p) -> {
            List<Point> points;
            if ((points = x.get(p.x)) == null) {
                x.put(p.x, points = new LinkedList<>());
            }
            points.add(p);
        });
        int i = 0;
        for (Iterator<List<Point>> it = x.values().iterator(); it.hasNext();) {
            List<Point> s = it.next();
            for (Point p : s) {
                switch (i) {
                case 0:
                    topLeft = p;
                    break;
                case 1:
                    bottomLeft = p;
                    break;
                case 2:
                    topRight = p;
                    break;
                case 3:
                    bottomRight = p;
                }
                i++;
            }
        }
        // Organize corners: swap within each side so "top" really is the
        // smaller y (image coordinates grow downward)
        if (topLeft.y > bottomLeft.y) {
            Point p = bottomLeft;
            bottomLeft = topLeft;
            topLeft = p;
        }
        if (topRight.y > bottomRight.y) {
            Point p = bottomRight;
            bottomRight = topRight;
            topRight = p;
        }

        // Create corners for centroid calculation
        corners = new MatOfPoint2f(rectPoints);
        // Calculate center via image moments
        Moments moments = Imgproc.moments(corners);
        center = new Point(moments.m10 / moments.m00, moments.m01 / moments.m00);

        // Put the points in the correct order for solvePNP
        rectPoints[0] = topLeft;
        rectPoints[1] = topRight;
        rectPoints[2] = bottomLeft;
        rectPoints[3] = bottomRight;
        // Recreate corners in the new order
        corners = new MatOfPoint2f(rectPoints);

        // Average opposite edges to get the target's apparent size
        widthTop = distance(topLeft, topRight);
        widthBottom = distance(bottomLeft, bottomRight);
        width = (widthTop + widthBottom) / 2.0;
        heightLeft = distance(topLeft, bottomLeft);
        heightRight = distance(topRight, bottomRight);
        height = (heightLeft + heightRight) / 2.0;

        Mat tvec = new Mat();
        // Calculate target's location with the P3P solver
        Calib3d.solvePnP(OBJECT_POINTS, corners, CAMERA_MAT, DISTORTION_MAT, rotation, tvec, false,
                Calib3d.CV_P3P);

        // =======================================
        // Position and Orientation Transformation
        // =======================================
        double armAngle = VisionResults.getArmAngle();
        // Flip y axis to point upward
        Core.multiply(tvec, SIGN_NORMALIZATION_MATRIX, tvec);
        // Shift origin to arm pivot point, on the robot's centerline
        CoordinateMath.translate(tvec, CAMERA_X_OFFSET, CAMERA_Y_OFFSET, ARM_LENGTH);
        // Align axes with ground
        CoordinateMath.rotateX(tvec, -armAngle);
        Core.add(rotation, new MatOfDouble(armAngle, 0, 0), rotation);
        // Shift origin to robot center of rotation
        CoordinateMath.translate(tvec, 0, ARM_PIVOT_Y_OFFSET, -ARM_PIVOT_Z_OFFSET);

        double xPosFeet = tvec.get(0, 0)[0];
        double yPosFeet = tvec.get(1, 0)[0];
        double zPosFeet = tvec.get(2, 0)[0];

        // Old less effective aiming heading and distance calculation
        // double pixelsToFeet = TARGET_WIDTH / width;
        // distance = (TARGET_WIDTH * HighGoalProcessor.IMAGE_SIZE.width
        // / (2 * width *
        // Math.tan(VisionParameters.getFOVAngle() / 2)));
        // double xPosFeet = (center.x - (HighGoalProcessor.IMAGE_SIZE.width
        // / 2)) * pixelsToFeet;
        // double yPosFeet = -(center.y -
        // (HighGoalProcessor.IMAGE_SIZE.height / 2)) * pixelsToFeet;

        // Ground-plane distance and aiming angles
        distance = Math.sqrt(xPosFeet * xPosFeet + zPosFeet * zPosFeet);
        position = new Point3(xPosFeet, yPosFeet, zPosFeet);
        xGoalAngle = Math.atan(xPosFeet / zPosFeet);
        yGoalAngle = Math.atan(yPosFeet / zPosFeet);

        validate();
        score = calculateScore();
    } else {
        valid = false;
    }
}
From source file:qupath.opencv.classify.ParameterizableOpenCvClassifier.java
License:Open Source License
/** * Create TermCriteria using either or both of specified maxIterations and termEPS, or return null if both <= 0. * /* w w w . j a v a 2 s .c om*/ * @param maxIterations * @param termEPS * @return */ static TermCriteria createTerminationCriteria(final int maxIterations, final double termEPS) { if (maxIterations > 0) { if (termEPS > 0) return new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, maxIterations, termEPS); else return new TermCriteria(TermCriteria.COUNT, maxIterations, 0); } else if (termEPS > 0) return new TermCriteria(TermCriteria.EPS, 50, termEPS); return null; }
From source file:qupath.opencv.classify.RTreesClassifier.java
License:Open Source License
@Override protected void setPredictedClass(final RTrees classifier, final List<PathClass> pathClasses, final Mat samples, final Mat results, final PathObject pathObject) { if (pathClasses.size() == 2 && termCriteria != null && ((TermCriteria.EPS & termCriteria.type) == 0) && termCriteria.maxCount > 0) { double prediction = classifier.predict(samples, results, RTrees.PREDICT_SUM) / termCriteria.maxCount; int index = (int) Math.round(prediction); // Round the prediction // Convert to a probability based on the number of trees double probability = prediction; if (index == 0) probability = 1 - probability; // Set the class & probability PathClass pathClass = pathClasses.get(index); pathObject.setPathClass(pathClass, probability); } else//w w w .jav a2 s . c om super.setPredictedClass(classifier, pathClasses, samples, results, pathObject); }
From source file:se.hb.jcp.bindings.opencv.ClassifierBase.java
License:Open Source License
protected TermCriteria readTerminationCriteria() { if (_jsonParameters.has("termination_criteria")) { JSONObject termination = _jsonParameters.getJSONObject("termination_criteria"); int criteria = 0; int max_iter = 0; double epsilon = 0.0; if (termination.has("max_count")) { criteria += TermCriteria.MAX_ITER; max_iter = termination.getInt("max_iter"); }/* w w w . java2 s .c o m*/ if (termination.has("epsilon")) { criteria += TermCriteria.EPS; epsilon = termination.getDouble("epsilon"); } return new TermCriteria(criteria, max_iter, epsilon); } else { return null; } }
From source file:se.hb.jcp.bindings.opencv.SVMClassifier.java
License:Open Source License
private CvSVMParams readParameters() { CvSVMParams parameters = new CvSVMParams(); // Default SVM parameters. parameters = new CvSVMParams(); parameters.set_svm_type(CvSVM.C_SVC); parameters.set_kernel_type(CvSVM.RBF); parameters.set_degree(3);/*from www.j av a2 s . c o m*/ parameters.set_gamma(1.0 / 2); // FIXME: Should be 1/#classes parameters.set_coef0(0); parameters.set_nu(0.5); parameters.set_C(1); parameters.set_term_crit(new TermCriteria(TermCriteria.EPS, 0, 1e-3)); parameters.set_p(0.1); if (_jsonParameters != null) { if (_jsonParameters.has("svm_type")) { String type = _jsonParameters.getString("svm_type"); if (type.equals("C_SVC")) { parameters.set_svm_type(CvSVM.C_SVC); } else if (type.equals("NU_SVC")) { parameters.set_svm_type(CvSVM.NU_SVC); } else if (type.equals("ONE_CLASS")) { parameters.set_svm_type(CvSVM.ONE_CLASS); } else if (type.equals("EPSILON_SVR")) { parameters.set_svm_type(CvSVM.EPS_SVR); } else if (type.equals("NU_SVR")) { parameters.set_svm_type(CvSVM.NU_SVR); } else { throw new IllegalArgumentException( "se.hb.jcp.bindings.opencv.SVMClassifier: " + "Unknown svm_type '" + type + "'."); } } if (_jsonParameters.has("kernel_type")) { String type = _jsonParameters.getString("kernel_type"); if (type.equals("LINEAR")) { parameters.set_kernel_type(CvSVM.LINEAR); } else if (type.equals("POLY")) { parameters.set_kernel_type(CvSVM.POLY); } else if (type.equals("RBF")) { parameters.set_kernel_type(CvSVM.RBF); } else if (type.equals("SIGMOID")) { parameters.set_kernel_type(CvSVM.SIGMOID); } else { throw new IllegalArgumentException( "se.hb.jcp.bindings.opencv.SVMClassifier: " + "Unknown kernel_type '" + type + "'."); } } if (_jsonParameters.has("degree")) { parameters.set_degree(_jsonParameters.getDouble("degree")); } if (_jsonParameters.has("gamma")) { parameters.set_gamma(_jsonParameters.getDouble("gamma")); } if (_jsonParameters.has("coef0")) { parameters.set_coef0(_jsonParameters.getDouble("coef0")); } if (_jsonParameters.has("C")) { 
parameters.set_C(_jsonParameters.getDouble("C")); } if (_jsonParameters.has("nu")) { parameters.set_nu(_jsonParameters.getDouble("nu")); } if (_jsonParameters.has("p")) { parameters.set_p(_jsonParameters.getDouble("p")); } TermCriteria criteria = readTerminationCriteria(); if (criteria != null) { parameters.set_term_crit(criteria); } } return parameters; }
From source file:syncleus.dann.data.video.LKTracker.java
License:Apache License
public LKTracker() { termCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, MAX_COUNT, EPSILON); }