List of usage examples for the org.opencv.core.Point constructor
public Point(double x, double y)
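Before the real-world examples, here is a minimal, self-contained sketch of constructing Point objects and passing them to a drawing call. The coordinates, canvas size, and class name PointDemo are illustrative only, and the snippet assumes the OpenCV 3.x Java bindings.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class PointDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library before calling into the bindings.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Point(double x, double y): x is the column, y is the row.
        Point topLeft = new Point(10.0, 20.0);
        Point bottomRight = new Point(110.0, 80.0);

        // Points are consumed directly by the drawing API.
        Mat canvas = Mat.zeros(200, 200, CvType.CV_8UC3);
        Imgproc.rectangle(canvas, topLeft, bottomRight, new Scalar(0, 255, 0), 2);
    }
}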
From source file:de.hu_berlin.informatik.spws2014.ImagePositionLocator.TriangleImagePositionLocator.java
License:Open Source License
/**
 * Builds ProjectionTriangles from triangulated markers.
 * Requires OpenCV!
 */
public void newMarkerAdded(List<Marker> markers) {
    if (markers.size() < 2)
        return;

    if (markers.size() == 2) {
        // Guess third marker
        projs = new ArrayList<ProjectionTriangle>();
        projs.add(new ProjectionTriangle(markers.get(0), markers.get(1)));
    } else {
        Subdiv2D subdiv = new Subdiv2D();
        subdiv.initDelaunay(new Rect(0, 0, imageSize.x, imageSize.y));

        for (Marker m : markers)
            System.out.println("-> " + m.realpoint.longitude + " / " + m.realpoint.latitude);

        for (Marker m : markers)
            subdiv.insert(new Point(m.imgpoint.x, m.imgpoint.y));

        MatOfFloat6 mafloat = new MatOfFloat6();
        subdiv.getTriangleList(mafloat);
        float[] tmparray = mafloat.toArray();

        ArrayList<ProjectionTriangle> tmplist = new ArrayList<ProjectionTriangle>();
        for (int i = 0; i < tmparray.length; i += 6) {
            Marker m1 = findMarkerByPoint(markers, tmparray[i], tmparray[i + 1]);
            Marker m2 = findMarkerByPoint(markers, tmparray[i + 2], tmparray[i + 3]);
            Marker m3 = findMarkerByPoint(markers, tmparray[i + 4], tmparray[i + 5]);

            if (m1 != null && m2 != null && m3 != null)
                tmplist.add(new ProjectionTriangle(m1, m2, m3, settings.getMaxDissimilarityPercent(),
                        settings.getBadTriWeightPenalty(), settings.getMinTriAngleSize()));
        }

        for (ProjectionTriangle mainPt : tmplist) {
            for (ProjectionTriangle subPt : tmplist) {
                if (mainPt != subPt)
                    mainPt.tryAddToProjGroup(subPt);
            }
        }

        projs = tmplist;
    }
}
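For orientation, here is a trimmed-down sketch of the Subdiv2D pattern the method above relies on: points are inserted into a Delaunay subdivision and the resulting triangles come back as six floats each. The marker coordinates, the 640x480 bounds, and the class name DelaunayDemo are made up for illustration, assuming the OpenCV 3.x Java bindings.

import org.opencv.core.Core;
import org.opencv.core.MatOfFloat6;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.imgproc.Subdiv2D;

public class DelaunayDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Subdiv2D subdiv = new Subdiv2D();
        subdiv.initDelaunay(new Rect(0, 0, 640, 480));   // triangulation bounds
        subdiv.insert(new Point(100, 100));              // one Point per marker
        subdiv.insert(new Point(500, 120));
        subdiv.insert(new Point(320, 400));

        MatOfFloat6 triangles = new MatOfFloat6();
        subdiv.getTriangleList(triangles);               // 6 floats per triangle: x1,y1,x2,y2,x3,y3
        float[] t = triangles.toArray();
        for (int i = 0; i < t.length; i += 6) {
            System.out.printf("triangle: (%f,%f) (%f,%f) (%f,%f)%n",
                    t[i], t[i + 1], t[i + 2], t[i + 3], t[i + 4], t[i + 5]);
        }
    }
}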
From source file:de.hu_berlin.informatik.spws2014.mapever.entzerrung.CornerDetector.java
License:Open Source License
/**
 * Finds the intersection point of 2 given lines. Uses a very ugly formula stolen from Wikipedia,
 * feel free to improve the code below ;-)
 * Doesn't do proper error handling and returns {-1,-1} for parallel lines.
 * Yes, I know it's awful, I was tired and not necessarily extremely motivated ;-)
 *
 * @param l0 The first line, that is two points on it, saved like this: {x0,y0,x1,y1}
 * @param l1 The second line, that is two points on it, saved like this: {x0,y0,x1,y1}
 * @return The intersection point between those lines as an OpenCV Point
 **/
private static Point find_intercept_point(double[] l0, double[] l1) {
    double denominator = (l0[0] - l0[2]) * (l1[1] - l1[3]) - (l0[1] - l0[3]) * (l1[0] - l1[2]);

    if (denominator == 0)
        return new Point(-1, -1);

    double l0_factor = l0[0] * l0[3] - l0[1] * l0[2];
    double l1_factor = l1[0] * l1[3] - l1[1] * l1[2];

    double x = (l0_factor * (l1[0] - l1[2]) - l1_factor * (l0[0] - l0[2])) / denominator;
    double y = (l0_factor * (l1[1] - l1[3]) - l1_factor * (l0[1] - l0[3])) / denominator;

    // ugly as hell ;_;
    return new Point(x, y);
}
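For reference, this is the standard two-point form of the line-line intersection that the method evaluates, with l0 passing through (x_1, y_1), (x_2, y_2) and l1 through (x_3, y_3), (x_4, y_4). The shared denominator D is the determinant checked against zero to detect parallel lines:

P_x = \frac{(x_1 y_2 - y_1 x_2)(x_3 - x_4) - (x_1 - x_2)(x_3 y_4 - y_3 x_4)}{D}, \qquad
P_y = \frac{(x_1 y_2 - y_1 x_2)(y_3 - y_4) - (y_1 - y_2)(x_3 y_4 - y_3 x_4)}{D},

\text{where } D = (x_1 - x_2)(y_3 - y_4) - (y_1 - y_2)(x_3 - x_4).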
From source file:de.vion.eyetracking.cameracalib.calibration.opencv.CameraCalibrator.java
private void renderFrame(Mat rgbaFrame) {
    drawPoints(rgbaFrame);

    Core.putText(rgbaFrame, "Captured: " + this.mCornersBuffer.size(),
            new Point(rgbaFrame.cols() / 3 * 2, rgbaFrame.rows() * 0.1),
            Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
}
From source file:digimesh.xbee.gui.SensorMap.java
private void drawSensor(SmartSensor sensor) {
    Point sensorLocation = new Point(sensor.getPositionXY().positionX - 50,
            sensor.getPositionXY().positionY + 30);
    int measNr = sensor.getMeasurmentToDraw();
    String id = sensor.getId();
    String sTextName = "Sensor ID : " + id.substring(id.length() - 4, id.length());
    int fontFace = Core.FONT_HERSHEY_PLAIN;
    double fontScale = 0.7;
    Imgproc.putText(map, sTextName, sensorLocation, fontFace, fontScale, Scalar.all(255));

    Scalar circleColor = new Scalar(255, 255, 255);
    if (sensor.hasMeasurements) {
        double value = sensor.m_measurments.get(measNr).value;
        String measName = sensor.m_measurments.get(measNr).name;
        String unit = sensor.m_measurments.get(measNr).unit;
        String sTextMeas = measName + " = " + value + "[" + unit + "]";
        Imgproc.putText(map, sTextMeas,
                new Point(sensor.getPositionXY().positionX - 50, (sensor.getPositionXY().positionY + 50)),
                fontFace, fontScale, Scalar.all(255));

        double upperLimit = sensor.m_measurments.get(measNr).upperLimit;
        double lowerLimit = sensor.m_measurments.get(measNr).lowerLimit;
        // Check the value against the sensor's limits and pick the circle color accordingly.
        if (value > upperLimit) {
            circleColor = new Scalar(0, 0, 255);
        } else if (value < lowerLimit) {
            circleColor = new Scalar(255, 0, 0);
        } else {
            circleColor = new Scalar(0, 255, 0);
        }
    } else {
        Imgproc.putText(map, HUB_LABEL,
                new Point(sensor.getPositionXY().positionX - 50, (sensor.getPositionXY().positionY + 12)),
                fontFace, fontScale, Scalar.all(255));
    }

    // Note: fontFace is passed where the lineType argument of circle() is expected.
    Imgproc.circle(map, sensorLocation, 10, circleColor, 2, fontFace, 0);
}
From source file:digitalassistant.Panel.java
public void face_detect(Mat image) {
    // System.out.println("\nRunning DetectFaceDemo");

    // Create a face detector from the cascade file in the resources directory.
    String test1 = getClass().getResource("lbpcascade_frontalface.xml").getPath();
    test1 = test1.replace("/C:", "C:");
    CascadeClassifier faceDetector = new CascadeClassifier(test1);

    /*
    String test = getClass().getResource("lena.png").getPath();
    test = test.replace("/C:", "C:");
    System.out.println(test);
    Mat image = Highgui.imread(test);
    */

    // Detect faces in the image. MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    // System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    // Save the visualized detection.
    // String filename = "faceDetection.png";
    // System.out.println(String.format("Writing %s", filename));
    // Highgui.imwrite(filename, image);
}
From source file:Domain.ImgProcess.java
public Mat DetectarFace(Canvas c, Mat image) throws IOException {
    System.out.println("Rodando DetectFace");

    // Select the classifier used for face identification.
    CascadeClassifier faceDetector = new CascadeClassifier(
            "C:\\Users\\Gertrude\\Documents\\NetBeansProjects\\OWL_Sight\\src\\Resources\\lbpcascade_frontalface.xml");

    // Matrix of rectangles representing the faces found.
    MatOfRect faceDetections = new MatOfRect();
    // Detect multiple faces in the supplied image.
    faceDetector.detectMultiScale(image, faceDetections);
    System.out.printf("Detected %s faces", faceDetections.toArray().length);

    Mat corte = null;
    for (Rect rect : faceDetections.toArray()) {
        // Draw the rectangles.
        Core.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 0));
        // Highgui.imwrite("C:/TCC-FacialRecognize/src/resources/TESTECORTE.jpg", utl.corte(image, rect));
        corte = new Mat(image, rect);
    }

    Size sz = new Size(120, 120);

    // Convert the image matrix into a byte matrix to load into the main form's canvas.
    MatOfByte bytemat = new MatOfByte();
    Highgui.imencode(".jpg", corte, bytemat);
    byte[] bytes = bytemat.toArray();
    InputStream in = new ByteArrayInputStream(bytes);
    BufferedImage render = ImageIO.read(in);
    Graphics g = c.getGraphics();
    g.drawImage(render, 1, 1, c.getHeight(), c.getWidth(), c);

    return corte;
}
From source file:dr.Interface.java
private void detect_BtnMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_detect_BtnMouseClicked
    if (file != null) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        CascadeClassifier faceDetector = new CascadeClassifier(HAAR_FILE_PATH);
        CascadeClassifier eyeDetector = new CascadeClassifier("src//dr//haarcascade_eye.xml");
        Mat image = Highgui.imread(file.getAbsolutePath());
        MatOfRect faceDetections = new MatOfRect();
        MatOfRect eyeDetections = new MatOfRect();

        faceDetector.detectMultiScale(image, faceDetections, scale, minN, 0, min, max);
        for (Rect rect : faceDetections.toArray()) {
            Core.rectangle(image, new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
        }

        if (eye_detection.isSelected()) {
            eyeDetector.detectMultiScale(image, eyeDetections, e_scale, e_minN, 0, e_min, e_max);
            for (Rect rect : eyeDetections.toArray()) {
                Core.circle(image, new Point(rect.x + rect.width / 2, rect.y + rect.width / 2),
                        rect.width / 2, new Scalar(255, 0, 255));
                Core.circle(image, new Point(rect.x + rect.width / 2, rect.y + rect.width / 2),
                        rect.width / 2 + 1, new Scalar(255, 0, 255));
                Core.circle(image, new Point(rect.x + rect.width / 2, rect.y + rect.width / 2),
                        rect.width / 2 + 1, new Scalar(255, 0, 255));
            }
        }

        String filename = file.getAbsolutePath().substring(0, file.getAbsolutePath().lastIndexOf("\\"))
                + "\\out.png";
        Highgui.imwrite(filename, image);

        BufferedImage i;
        try {
            if ((i = ImageIO.read(new File(filename))) != null) {
                current_image = i;
                loadImage_Lbl.setIcon(new ImageIcon(resize_image(i)));
            } else {
                // "Nu s-a detectat nimic!" = "Nothing was detected!"
                JOptionPane.showMessageDialog(this, "Nu s-a detectat nimic!", "Info",
                        JOptionPane.INFORMATION_MESSAGE);
            }
        } catch (IOException ex) {
            Logger.getLogger(Interface.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
From source file:drawing.application.FaceDetection.java
License:Open Source License
@Override
public void run() {
    CascadeClassifier faceDetector = new CascadeClassifier("lbpcascade_frontalface.xml");
    Mat image = Imgcodecs.imread(filePath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    System.out.println(String.format("Writing %s", filePath));
    Imgcodecs.imwrite(filePath, image);

    int numFaces = faceDetections.toArray().length;
    JOptionPane.showMessageDialog(null,
            "Detected " + faceDetections.toArray().length + (numFaces == 1 ? " face" : " faces"));
}
From source file:edu.fiu.cate.breader.BaseSegmentation.java
/**
 * Finds the bounding box for the book on the stand using the high resolution image.
 * @param src High resolution image of the book
 * @return Rectangle delineating the book
 */
public Rect highRes(Mat src) {
    Mat dst = src.clone();
    Imgproc.blur(src, dst, new Size(100.0, 100.0), new Point(-1, -1), Core.BORDER_REPLICATE);
    Imgproc.threshold(dst, dst, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 8);
    }
    new IViewer("HighRes Contours ", BReaderTools.bufferedImageFromMat(color));

    Point center = new Point(src.cols() / 2, src.rows() / 2);
    // Check hierarchy tree
    int[] res = polySearch(center, hierarchy, contours, 0);
    while (res[0] != 1 && res[2] != -1) {
        res = polySearch(center, hierarchy, contours, res[2]);
        if (res[0] == 1)
            break;
    }

    MatOfInt tHull = new MatOfInt();
    int index = 0;
    if (res[1] != -1) {
        index = res[1];
    }
    Imgproc.convexHull(contours.get(index), tHull);

    // Get bounding box
    MatOfPoint cont = contours.get(index);
    Point[] points = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        points[i] = new Point(cont.get(pIndex, 0));
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(points));
    return out;
}
From source file:edu.fiu.cate.breader.BaseSegmentation.java
/**
 * Finds the bounding box for the book on the stand using the depth average image.
 * @param src The depth average image
 * @return Rectangle delineating the book
 */
public Rect lowResDist(Mat src) {
    Mat dst = src.clone();
    Imgproc.blur(src, dst, new Size(5, 5), new Point(-1, -1), Core.BORDER_REPLICATE);
    // Imgproc.threshold(dst, dst, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);
    // Canny(src, dst, 20, 60, 3);

    // Find contours
    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 1);
    }
    new IViewer("LowRes Contours ", BReaderTools.bufferedImageFromMat(color));

    for (int k = 0; k < contours.size(); k++) {
        MatOfPoint2f tMat = new MatOfPoint2f();
        Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(k).toArray()), tMat, 5, true);
        contours.set(k, new MatOfPoint(tMat.toArray()));
    }

    List<Point> points = new LinkedList<Point>();
    for (int i = 0; i < contours.size(); i++) {
        points.addAll(contours.get(i).toList());
    }

    MatOfInt tHull = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(points.toArray(new Point[points.size()])), tHull);

    // Get bounding box
    Point[] tHullPoints = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        tHullPoints[i] = points.get(pIndex);
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(tHullPoints));
    return out;
}
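To close, a reduced sketch of the hull-to-bounding-box step that both highRes and lowResDist end with: Point objects are collected into a MatOfPoint and handed to boundingRect. The four hull vertices and the class name HullBoxDemo are placeholders, assuming the OpenCV 3.x Java bindings.

import org.opencv.core.Core;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;

public class HullBoxDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Placeholder convex hull vertices built with Point(double x, double y).
        Point[] hullPoints = {
                new Point(10, 15), new Point(120, 12), new Point(118, 90), new Point(8, 88)
        };

        // boundingRect returns the smallest upright rectangle containing the points.
        Rect box = Imgproc.boundingRect(new MatOfPoint(hullPoints));
        System.out.println("Bounding box: " + box);
    }
}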