List of usage examples for the org.opencv.core.Point(double x, double y) constructor
public Point(double x, double y)
From source file:by.zuyeu.deyestracker.core.detection.model.DetectFaceSample.java
/**
 * Creates a sample whose regions and pupil points are all zero-initialized.
 *
 * <p>NOTE(review): the {@code fillWithZero} flag is currently ignored — every
 * field is zeroed regardless of its value. Confirm whether a non-zero
 * initialization path was intended.
 *
 * @param fillWithZero unused; all fields are zero-filled either way
 */
public DetectFaceSample(final boolean fillWithZero) {
    this.face = new Rect(0, 0, 0, 0);
    this.leftEye = new Rect(0, 0, 0, 0);
    this.rightEye = new Rect(0, 0, 0, 0);
    this.leftPupil = new Point(0, 0);
    this.rightPupil = new Point(0, 0);
}
From source file:by.zuyeu.deyestracker.core.detection.tracker.ScreenPointBorder.java
/**
 * Returns the geometric center of the border rectangle, i.e. the point at
 * half the border's width and half its height.
 */
public Point getCenter() {
    final double centerX = getWidth() / 2;
    final double centerY = getHeight() / 2;
    return new Point(centerX, centerY);
}
From source file:by.zuyeu.deyestracker.core.detection.tracker.ScreenPointTracker.java
/**
 * Computes the midpoint between the median points of two samples.
 *
 * @param s1 first face sample
 * @param s2 second face sample
 * @return point halfway between the two per-sample medians
 */
private Point defineMedian(DetectFaceSample s1, DetectFaceSample s2) {
    final Point first = defineMedian(s1);
    final Point second = defineMedian(s2);
    LOG.debug("p1 = {}, p2 = {}", first, second);
    final double midX = (first.x + second.x) / 2;
    final double midY = (first.y + second.y) / 2;
    return new Point(midX, midY);
}
From source file:by.zuyeu.deyestracker.core.detection.tracker.ScreenPointTracker.java
/**
 * Derives a single median point for a sample by averaging two estimates:
 * the midpoint of the two pupils, and the midpoint of the two eye-rectangle
 * centers.
 *
 * @param sample detection sample providing pupil points and eye rectangles
 * @return point averaging the pupil midpoint and the eye-center midpoint
 */
private Point defineMedian(DetectFaceSample sample) {
    final Point leftPupil = sample.getLeftPupil();
    final Point rightPupil = sample.getRightPupil();
    final double pupilXMedian = (leftPupil.x + rightPupil.x) / 2;
    final double pupilYMedian = (leftPupil.y + rightPupil.y) / 2;
    // Eye-rectangle fields are ints, so the half-width/half-height terms use
    // integer division — kept exactly as in the original arithmetic.
    final Rect leftEye = sample.getLeftEye();
    final Rect rightEye = sample.getRightEye();
    final double eyesXMedian = (leftEye.x + leftEye.width / 2 + rightEye.x + rightEye.width / 2) / 2;
    final double eyesYMedian = (leftEye.y + leftEye.height / 2 + rightEye.y + rightEye.height / 2) / 2;
    LOG.debug("pupilXMedian = {}, eyesXMedian = {}", pupilXMedian, eyesXMedian);
    LOG.debug("pupilYMedian = {}, eyesYMedian = {}", pupilYMedian, eyesYMedian);
    return new Point((pupilXMedian + eyesXMedian) / 2, (pupilYMedian + eyesYMedian) / 2);
}
From source file:by.zuyeu.deyestracker.core.detection.tracker.ScreenPointTracker.java
private Point calculateViewPointBoundToBorders(Point nextMedian) { LOG.debug("calculateViewPointBoundToBorders() - start;"); // shift from left border double width = pointBorder.getWidth(); double xShift = nextMedian.x - pointBorder.getLeftX(); width = Math.max(width, xShift); double viewX = screenSize.width / width * xShift; //shift from top border double height = pointBorder.getHeight(); double yShift = nextMedian.y - pointBorder.getTopY(); height = Math.max(height, yShift); double viewY = screenSize.height / height * yShift; Point screenViewPoint = new Point(viewX, viewY); LOG.debug("calculateViewPointBoundToBorders() - end: point = {}", screenViewPoint); return screenViewPoint; }
From source file:by.zuyeu.deyestracker.core.util.CVCoreUtils.java
/**
 * Translates eye rectangles from submat-local coordinates back into the
 * parent image's coordinate space by adding the submat's top-left offset.
 * Mutates the rectangles in place.
 *
 * @param eyes    rectangles detected inside the submat; shifted in place
 * @param subRect the submat's region in the parent image, or {@code null}
 *                for a zero offset (no shift)
 */
public static void fixRectTLFromSubmat(Rect[] eyes, Rect subRect) {
    final Point offset = (subRect == null) ? new Point(0, 0) : subRect.tl();
    for (Rect eye : eyes) {
        eye.x += offset.x;
        eye.y += offset.y;
    }
}
From source file:car_counter.counting.opencv.OpencvCarCounter.java
License:Apache License
@Override public Collection<DetectedVehicle> processVideo(Path video, DateTime startDateTime) { CascadeClassifier carDetector = new CascadeClassifier("/Users/luke/working/car-counter/data/cars3.xml"); VideoCapture videoCapture = new VideoCapture(); videoCapture.open("/Users/luke/working/car-counter/data/video1.m4v"); int index = 0; while (true) { if (!videoCapture.read(image)) { break; }/* w ww. j ava 2 s . c o m*/ System.out.print("."); //processFrame(); MatOfRect carDetections = new MatOfRect(); carDetector.detectMultiScale(image, carDetections); System.out.println(String.format("Detected %s cars", carDetections.toArray().length)); // Draw a bounding box around each hit for (Rect rect : carDetections.toArray()) { Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0)); } String file = String.format("/Users/luke/working/car-counter/data/out/out-%03d.jpg", index++); org.opencv.highgui.Highgui.imwrite(file, image); } return null; }
From source file:ch.hslu.pren.t37.camera.BildAuswertungKorb.java
public int bildAuswerten() { //Bild in dem gesucht werden soll String inFile = "../camera.jpg"; //das Bild dass im infile gesucht wird String templateFile = "../Bilder/korb.jpg"; //Lsung wird in diesem Bild prsentiert String outFile = "../LoesungsBild.jpg"; //berprfungswert wird gesetzt int match_method = Imgproc.TM_CCOEFF_NORMED; //das original Bild und das zu suchende werden geladen Mat img = Highgui.imread(inFile, Highgui.CV_LOAD_IMAGE_COLOR); Mat templ = Highgui.imread(templateFile, Highgui.CV_LOAD_IMAGE_COLOR); // Lsungsmatrix generieren int result_cols = img.cols() - templ.cols() + 1; int result_rows = img.rows() - templ.rows() + 1; Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1); // Suchen und normalisieren Imgproc.matchTemplate(img, templ, result, match_method); Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat()); // Mit MinMax Logik wird der beste "Match" gesucht Core.MinMaxLocResult mmr = Core.minMaxLoc(result); Point matchLoc;// w w w. ja v a2s. 
c om if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) { matchLoc = mmr.minLoc; } else { matchLoc = mmr.maxLoc; } // Darstellen Core.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()), new Scalar(0, 255, 0), 10); // Alle 4 Eckpunkte speichern Point topLeft = new Point(matchLoc.x, matchLoc.y); Point topRight = new Point(matchLoc.x + templ.cols(), matchLoc.y); Point downLeft = new Point(matchLoc.x, matchLoc.y + templ.rows()); Point downRight = new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()); // Lsungsbild speichern Highgui.imwrite(outFile, img); //Mittelpunkt berechnen double mittePicture; double mitteKorb; double differnez; Mat sol = Highgui.imread(outFile, Highgui.CV_LOAD_IMAGE_COLOR); mittePicture = sol.width() / 2; mitteKorb = (topRight.x - topLeft.x) / 2; mitteKorb = topLeft.x + mitteKorb; differnez = mitteKorb - mittePicture; logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Korb: " + mitteKorb); logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Bild: " + mittePicture); logger.log(PrenLogger.LogLevel.DEBUG, "Differenz: " + differnez + "\nWenn Differnez negativ, nach rechts drehen"); return (int) differnez; }
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatOperation.java
License:Open Source License
/**
 * Draws the face rectangle onto the preview image and returns the rectangle's
 * top-left corner as drawn. For the front camera, the x coordinates are
 * mirrored across the image width so the overlay matches the mirrored preview.
 *
 * @param img          preview frame to draw on (mutated)
 * @param face         detected face rectangle in detector coordinates
 * @param front_camera whether the frame comes from the front camera
 * @return top-left corner of the rectangle that was drawn
 */
public static Point drawRectangleOnPreview(Mat img, Rect face, boolean front_camera) {
    if (!front_camera) {
        Imgproc.rectangle(img, face.tl(), face.br(), FACE_RECT_COLOR, THICKNESS);
        return face.tl();
    }
    // Mirror the horizontal coordinates across the image width.
    int mirroredLeftX = (int) (img.cols() - (face.tl().x + face.width));
    int mirroredRightX = (int) (img.cols() - (face.br().x) + face.width);
    Point topLeft = new Point(mirroredLeftX, face.tl().y);
    Point bottomRight = new Point(mirroredRightX, face.br().y);
    Imgproc.rectangle(img, topLeft, bottomRight, FACE_RECT_COLOR, THICKNESS);
    return topLeft;
}
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatOperation.java
License:Open Source License
public static Rect[] rotateFaces(Mat img, Rect[] faces, int angle) { Point center = new Point(img.cols() / 2, img.rows() / 2); Mat rotMat = Imgproc.getRotationMatrix2D(center, angle, 1); rotMat.convertTo(rotMat, CvType.CV_32FC1); float scale = img.cols() / img.rows(); for (Rect face : faces) { Mat m = new Mat(3, 1, CvType.CV_32FC1); m.put(0, 0, face.x);//from w w w . j av a 2 s . co m m.put(1, 0, face.y); m.put(2, 0, 1); Mat res = Mat.zeros(2, 1, CvType.CV_32FC1); Core.gemm(rotMat, m, 1, new Mat(), 0, res, 0); face.x = (int) res.get(0, 0)[0]; face.y = (int) res.get(1, 0)[0]; if (angle == 270 || angle == -90) { face.x = (int) (face.x * scale - face.width); face.x = face.x + face.width / 4; face.y = face.y + face.height / 4; } else if (angle == 180 || angle == -180) { face.x = face.x - face.width; face.y = face.y - face.height; } else if (angle == 90 || angle == -270) { face.y = (int) (face.y * scale - face.height); face.x = face.x - face.width / 4; face.y = face.y - face.height / 4; } } return faces; }