List of usage examples for the org.opencv.core.Point constructor
public Point(double x, double y)
From source file: team492.GripPipeline.java
License: Open Source License

/**
 * This is the primary method that runs the entire pipeline and updates the
 * outputs.
 */
public void process(Mat source0) {
    // Step HSV_Threshold0:
    Mat hsvThresholdInput = source0;
    double[] hsvThresholdHue = { 50.17985611510791, 99.69696969696967 };
    double[] hsvThresholdSaturation = { 135.29676258992805, 255.0 };
    double[] hsvThresholdValue = { 169.69424460431654, 255.0 };
    hsvThreshold(hsvThresholdInput, hsvThresholdHue, hsvThresholdSaturation, hsvThresholdValue,
            hsvThresholdOutput);

    // Step CV_erode0:
    Mat cvErodeSrc = hsvThresholdOutput;
    Mat cvErodeKernel = new Mat();
    Point cvErodeAnchor = new Point(-1, -1);
    double cvErodeIterations = 10.0;
    int cvErodeBordertype = Core.BORDER_CONSTANT;
    Scalar cvErodeBordervalue = new Scalar(-1);
    cvErode(cvErodeSrc, cvErodeKernel, cvErodeAnchor, cvErodeIterations, cvErodeBordertype, cvErodeBordervalue,
            cvErodeOutput);

    // Step Find_Contours0:
    Mat findContoursInput = cvErodeOutput;
    boolean findContoursExternalOnly = false;
    findContours(findContoursInput, findContoursExternalOnly, findContoursOutput);
}
From source file: team492.GripPipeline.java
License: Open Source License

/**
 * Expands areas of lower value in an image.
 *
 * @param src         the image to erode.
 * @param kernel      the kernel for erosion.
 * @param anchor      the center of the kernel.
 * @param iterations  the number of times to perform the erosion.
 * @param borderType  pixel extrapolation method.
 * @param borderValue value to be used for a constant border.
 * @param dst         output image.
 */
private void cvErode(Mat src, Mat kernel, Point anchor, double iterations, int borderType, Scalar borderValue,
        Mat dst) {
    if (kernel == null) {
        kernel = new Mat();
    }
    if (anchor == null) {
        anchor = new Point(-1, -1);
    }
    if (borderValue == null) {
        borderValue = new Scalar(-1);
    }
    Imgproc.erode(src, dst, kernel, anchor, (int) iterations, borderType, borderValue);
}
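The wrapper above falls back to new Point(-1, -1) when no anchor is supplied. As a minimal, self-contained sketch (ErodeAnchorExample is a hypothetical class, not part of GripPipeline), the same default anchor can be passed straight to Imgproc.erode, where (-1, -1) tells OpenCV to use the kernel center:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class ErodeAnchorExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Mat.ones(100, 100, CvType.CV_8UC1);   // placeholder binary image
        Mat dst = new Mat();
        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
        // anchor (-1, -1) means "use the kernel center"; 10 iterations, constant border
        Imgproc.erode(src, dst, kernel, new Point(-1, -1), 10,
                Core.BORDER_CONSTANT, new Scalar(-1));
    }
}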
From source file: templatematching.ProcessFrame.java

public Image processFrame(VideoCapture capture) {
    while (k < 2) {
        capture.grab();
        capture.retrieve(frame);
        capture.retrieve(frame);
        // treat each frame of the capture individually;
        // retrieves the grabbed frame into the Mat object
        int frame_width = frame.rows();
        int frame_height = frame.cols();
        MatOfRect faces1 = new MatOfRect();
        Mat frame_gray = new Mat();
        Mat ImageROI;
        // convert the frame to gray-scale
        Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);
        // optional histogram equalization
        // Imgproc.equalizeHist(frame_gray, frame_gray);
        // run the face classifier
        faceHaar.detectMultiScale(frame_gray, faces1, 1.1, 2, 2, new Size(30, 30), new Size());
        Rect[] faces = faces1.toArray();
        for (int i = 0; i < faces.length; i++) {
            // System.out.println("Processing faces");
            Point center = new Point(faces[i].x + faces[i].width * 0.5, faces[i].y + faces[i].height * 0.5);
            Imgproc.ellipse(frame, center, new Size(faces[i].width * 0.5, faces[i].height * 0.5), 0, 0, 360,
                    new Scalar(0, 0, 255), 4, 8, 0);
            Mat faceROI = frame_gray.submat(faces[i]);
            MatOfRect eyes1 = new MatOfRect();
            eyesHaar.detectMultiScale(faceROI, eyes1, 1.15, 2, 2, new Size(30, 30), new Size());
            // eyesHaar.detectMultiScale(faceROI, eyes1, 1.1, 2,
            //         Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
            //         new Size(30, 30), new Size());
            Rect[] eyes = eyes1.toArray();
            // System.out.println("Processing eyes");
            for (int j = 0; j < eyes.length; j++) {
                // index with j (the eye), not i (the face); the eye rectangles are relative to faceROI
                Mat eyeROI = faceROI.submat(eyes[j]);
                Point center1 = new Point(faces[i].x + eyes[j].x + eyes[j].width * 0.5,
                        faces[i].y + eyes[j].y + eyes[j].height * 0.5);
                int radius = (int) ((eyes[j].width + eyes[j].height) * 0.005);
                Imgproc.circle(frame, center1, radius, new Scalar(255, 0, 0), 4, 8, 0);
                Pupilx = (int) center1.x;
                Pupily = (int) center1.y;
                ROIwidth = eyes[j].width;
                ROIheight = eyes[j].height;
                Point centerX[] = new Point[2];
                centerX[k] = center1;
                // scale the coordinates to fit the screen dimensions
                if (k == 0) {
                    scaledPupilx = Pupilx;
                    scaledPupily = Pupily;
                    k++;
                } else {
                    System.out.println("In else part");
                    deltax = (int) Math.abs((centerX[k].x - centerX[k - 1].x));
                    deltay = (int) Math.abs((centerX[k].y - centerX[k - 1].y));
                    scaled_deltax = (deltax * (65535 / ROIwidth));
                    scaled_deltay = (deltay * (65535 / ROIheight));
                    scaledPupilx = centerX[k - 1].x + scaled_deltax;
                    scaledPupily = centerX[k - 1].y + scaled_deltay;
                }
                if (k == 2)
                    k = 0;
                // set the cursor position to the scaled coordinates
                try {
                    Robot robot = new Robot();
                    robot.mouseMove((int) (1366 - scaledPupilx), (int) (768 - scaledPupily));
                } catch (AWTException ex) {
                }
            }
            // define a window for displaying the frame
            // release window if any key is hit
        }
        MatOfByte mem = new MatOfByte();
        Imgcodecs.imencode(".bmp", frame, mem);
        Image im = null;
        try {
            im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));
        } catch (IOException ex) {
            ex.printStackTrace();
        }
        return im;
    }
    return null;
}
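Because the eye rectangles come from detecting on the face ROI, their coordinates are relative to that ROI, which is why the Point constructor above adds the face offset back in. A minimal sketch of that mapping as a hypothetical helper (eyeCenterInFrame is not part of ProcessFrame; Rect and Point are the org.opencv.core types):

// Hypothetical helper: converts an eye Rect detected inside a face ROI into an
// eye-center Point expressed in full-frame coordinates.
static Point eyeCenterInFrame(Rect face, Rect eyeInFaceRoi) {
    return new Point(face.x + eyeInFaceRoi.x + eyeInFaceRoi.width * 0.5,
            face.y + eyeInFaceRoi.y + eyeInFaceRoi.height * 0.5);
}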
From source file: trclib.TrcOpenCvDetector.java
License: Open Source License

/**
 * This method is called to overlay rectangles on an image to the video output.
 *
 * @param image specifies the frame to be rendered to the video output.
 * @param detectedObjectRects specifies the detected object rectangles.
 * @param color specifies the color of the rectangle outline.
 * @param thickness specifies the thickness of the rectangle outline.
 */
public void drawRectangles(Mat image, Rect[] detectedObjectRects, Scalar color, int thickness) {
    //
    // Overlay a rectangle on each detected object.
    //
    synchronized (image) {
        if (detectedObjectRects != null) {
            for (Rect r : detectedObjectRects) {
                //
                // Draw a rectangle around the detected object.
                //
                Imgproc.rectangle(image, new Point(r.x, r.y), new Point(r.x + r.width, r.y + r.height), color,
                        thickness);
            }
        }
        videoSource.putFrame(image);
    }
}
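For reference, the two corner Points built above are equivalent to the Rect convenience accessors, so the same call could be written as this one-line sketch (reusing the image, r, color, and thickness variables from the loop above):

// r.tl() and r.br() return the top-left and bottom-right corners as Points
Imgproc.rectangle(image, r.tl(), r.br(), color, thickness);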
From source file: tv.danmaku.ijk.media.example.activities.VideoActivity.java
License: Apache License

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    // return mRgba;
    // iThreshold = 10000;

    // Imgproc.blur(mRgba, mRgba, new Size(5, 5));
    Imgproc.GaussianBlur(mRgba, mRgba, new org.opencv.core.Size(3, 3), 1, 1);
    // Imgproc.medianBlur(mRgba, mRgba, 3);

    if (!mIsColorSelected)
        return mRgba;

    List<MatOfPoint> contours = mDetector.getContours();
    mDetector.process(mRgba);
    Log.d(TAG, "Contours count: " + contours.size());

    if (contours.size() <= 0) {
        return mRgba;
    }

    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(0).toArray()));

    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;

    for (int i = 1; i < contours.size(); i++) {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(i).toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours.get(boundPos).toArray()));
    Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);
    Log.d(TAG, " Row start [" + (int) boundRect.tl().y + "] row end [" + (int) boundRect.br().y
            + "] Col start [" + (int) boundRect.tl().x + "] Col end [" + (int) boundRect.br().x + "]");

    int rectHeightThresh = 0;
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;

    Log.d(TAG, " A [" + a + "] br y - tl y = [" + (boundRect.br().y - boundRect.tl().y) + "]");

    // Core.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0);
    Imgproc.rectangle(mRgba, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(boundPos).toArray()), pointMat, 3, true);
    contours.set(boundPos, new MatOfPoint(pointMat.toArray()));

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours.get(boundPos).toArray()), hull);

    if (hull.toArray().length < 3)
        return mRgba;

    Imgproc.convexityDefects(new MatOfPoint(contours.get(boundPos).toArray()), hull, convexDefect);

    List<MatOfPoint> hullPoints = new LinkedList<MatOfPoint>();
    List<Point> listPo = new LinkedList<Point>();
    for (int j = 0; j < hull.toList().size(); j++) {
        listPo.add(contours.get(boundPos).toList().get(hull.toList().get(j)));
    }

    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.add(e);

    List<MatOfPoint> defectPoints = new LinkedList<MatOfPoint>();
    List<Point> listPoDefect = new LinkedList<Point>();
    for (int j = 0; j < convexDefect.toList().size(); j = j + 4) {
        Point farPoint = contours.get(boundPos).toList().get(convexDefect.toList().get(j + 2));
        Integer depth = convexDefect.toList().get(j + 3);
        if (depth > iThreshold && farPoint.y < a) {
            listPoDefect.add(contours.get(boundPos).toList().get(convexDefect.toList().get(j + 2)));
        }
        Log.d(TAG, "defects [" + j + "] " + convexDefect.toList().get(j + 3));
    }

    MatOfPoint e2 = new MatOfPoint();
    e2.fromList(listPo);
    defectPoints.add(e2);

    Log.d(TAG, "hull: " + hull.toList());
    Log.d(TAG, "defects: " + convexDefect.toList());

    Imgproc.drawContours(mRgba, hullPoints, -1, CONTOUR_COLOR, 3);

    int defectsTotal = (int) convexDefect.total();
    Log.d(TAG, "Defect total " + defectsTotal);

    this.numberOfFingers = listPoDefect.size();
    if (this.numberOfFingers > 5)
        this.numberOfFingers = 5;

    mHandler.post(mUpdateFingerCountResults);

    for (Point p : listPoDefect) {
        Imgproc.circle(mRgba, p, 6, new Scalar(255, 0, 255));
    }

    return mRgba;
}
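The defect loop above walks the MatOfInt4 in strides of four because OpenCV packs each convexity defect as [startIndex, endIndex, farthestPointIndex, fixedPointDepth], with the depth stored as distance * 256. A minimal sketch of that extraction as a hypothetical standalone helper (farDefectPoints is not part of VideoActivity):

// Hypothetical helper: collects the "far" Point of every convexity defect whose
// fixed-point depth exceeds minDepth. Indices refer back into the contour's point list.
private static List<Point> farDefectPoints(MatOfPoint contour, MatOfInt4 convexityDefects, int minDepth) {
    List<Point> contourPoints = contour.toList();
    List<Integer> d = convexityDefects.toList();
    List<Point> result = new LinkedList<Point>();
    for (int j = 0; j + 3 < d.size(); j += 4) {
        int farIndex = d.get(j + 2);    // index of the point farthest from the hull edge
        int depth = d.get(j + 3);       // distance * 256 (fixed-point)
        if (depth > minDepth) {
            result.add(contourPoints.get(farIndex));
        }
    }
    return result;
}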
From source file: udp.server.ObjectTracker.java

private ArrayList<Float> getTargetError() {
    ArrayList<Float> angles = null;
    int x = getX(contours);
    int y = getY(contours);
    contours.clear();
    angles = new ArrayList<>();
    if (x > 0) {
        Core.circle(webcam_image, new Point(x, y), 4, new Scalar(50, 49, 0, 255), 4);
        float centerX = webcam_image.width() / 2;
        float centerY = webcam_image.height() / 2;
        // center circle
        Core.circle(webcam_image, new Point(centerX, centerY), 4, new Scalar(50, 49, 0, 255), 4);
        // Setup camera angles (from producer)
        float cameraAngleX = 60.0f; // 70.42f;
        float cameraAngleY = 43.30f;
        // Calculate difference from x and y to center
        float pixErrorX = x - centerX;
        float pixErrorY = -y + centerY;
        // Calculate angle error in x and y direction
        angleErrorX = (pixErrorX / centerX) * cameraAngleX;
        angleErrorY = (pixErrorY / centerY) * cameraAngleY;
        Core.line(webcam_image, new Point(x, y), new Point(centerX, centerY),
                new Scalar(150, 150, 100) /* CV_BGR(100,10,10) */, 3);
        // angles = new float[]{angleErrorX, angleErrorY};
        // angles.add(angleErrorX);
        // angles.add(angleErrorY);
    } else {
        if (angleErrorX > 0) {
            angleErrorX = 255;
            angleErrorY = 255;
        } else {
            angleErrorX = -255;
            angleErrorY = -255;
        }
    }
    angles.add(angleErrorX);
    angles.add(angleErrorY);
    return angles;
}
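The mapping above converts a pixel offset from the frame center into an angular error by scaling with half the frame size and the camera's field-of-view angles. A small worked sketch with assumed example numbers (a 640x480 frame and the same 60.0 and 43.30 degree angles):

// Assumed example: frame center (320, 240), target detected at (480, 120)
float centerX = 320f, centerY = 240f;
float cameraAngleX = 60.0f, cameraAngleY = 43.30f;
float pixErrorX = 480 - centerX;                           // +160 px right of center
float pixErrorY = -120 + centerY;                          // +120 px above center (image y grows downward)
float angleErrorX = (pixErrorX / centerX) * cameraAngleX;  // 30.0 degrees
float angleErrorY = (pixErrorY / centerY) * cameraAngleY;  // 21.65 degrees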
From source file: uk.ac.horizon.artcodes.drawer.MarkerThumbnailDrawer.java
License: Open Source License

@Override
public Mat drawMarker(Marker marker, ArrayList<MatOfPoint> contours, Mat hierarchy, Rect boundingRect,
        Mat imageToDrawOn) {
    if (imageToDrawOn == null) {
        if (boundingRect == null) {
            boundingRect = Imgproc.boundingRect(contours.get(marker.markerIndex));
        }

        Mat output = new Mat(boundingRect.size(), CvType.CV_8UC4, BACKGROUND);
        Imgproc.drawContours(output, contours, marker.markerIndex, COLOR, Core.FILLED, Core.LINE_8, hierarchy,
                2, new Point(-boundingRect.tl().x, -boundingRect.tl().y));
        return output;
    } else {
        Imgproc.drawContours(imageToDrawOn, contours, marker.markerIndex, COLOR, Core.FILLED, Core.LINE_8,
                hierarchy, 2, new Point(0, 0));
        return imageToDrawOn;
    }
}
From source file: uom.research.thalassemia.logic.BloodCellData.java

private void process(final Mat currentSelection) {
    sgf = 0;
    getMinMaxDiameter(currentSelection);
    if (minDiameter != 0) {
        // calculate shape geometric factor
        sgf = maxDiameter / minDiameter;
    }
    double[] circles;
    for (int a = 0; a < currentSelection.cols(); a++) {
        Map<Point, Double> points = getPallorBloodCellsPointList();
        areaPreparation = 0;
        circles = currentSelection.get(0, a);
        x = circles[0];
        y = circles[1];
        r = Math.round(circles[2]);
        // get area value
        area = calculateArea(r);
        // get perimeter value
        perimeter = calculatePerimeter(r);
        // get diameter value
        diameter = calculateDiameter(area, perimeter);
        // calculate deviational value
        deviationValue = sgf / area;
        Point point = new Point(x, y);
        if (points.containsKey(point)) {
            areaPreparation = calculateArea(points.get(point)) / area;
        }
        getAbnormalCellTypes();
    }
}
From source file: uom.research.thalassemia.logic.BloodCellData.java

/**
 * Gets the pallor blood cell point list.
 *
 * @return a Map from each circle's center Point to its radius.
 */
private Map<Point, Double> getPallorBloodCellsPointList() {
    double[] circles;
    Map<Point, Double> points = new HashMap<>();
    double xx, yy, rr;
    for (int a = 0; a < pallorBloodCells.cols(); a++) {
        circles = pallorBloodCells.get(0, a);
        xx = circles[0];
        yy = circles[1];
        rr = Math.round(circles[2]);
        points.put(new Point(xx, yy), rr);
    }
    return points;
}
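Using Point as a HashMap key, as above and in the lookups elsewhere in this class, works because org.opencv.core.Point overrides equals() and hashCode(); note, though, that a lookup only hits when both double coordinates match exactly. A minimal sketch of that behavior (the coordinates and radius are made up for illustration):

Map<Point, Double> points = new HashMap<>();
points.put(new Point(10.0, 20.0), 5.0);
boolean hit = points.containsKey(new Point(10.0, 20.0));       // true: exact coordinate match
boolean miss = points.containsKey(new Point(10.0, 20.000001)); // false: the doubles differ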
From source file: uom.research.thalassemia.logic.BloodCellDataProcessor.java

/**
 * Process circular blood cell data, including both pallor and red blood
 * cells.
 *
 * @param currentSelection Mat of selected
 */
public void circularBloodCellsProcesser(final Mat currentSelection) {
    if (currentSelection != null) {
        if (currentSelection == circularBloodCells) {
            circularBloodCellsArray = new ArrayList<>();
        } else if (currentSelection == pallorBloodCells) {
            pallorBloodCellsArray = new ArrayList<>();
        }
        getMinMaxDiameter(currentSelection);
        double sgf = 0;
        if (minDiameter != 0) {
            // calculate shape geometric factor
            sgf = maxDiameter / minDiameter;
        }
        Map<Point, Double> points = getPallorBloodCellsPointList();
        double[] circles;
        double x, y, r, area, perimeter, diameter, deviationValue, areaPreparation;
        for (int a = 0; a < currentSelection.cols(); a++) {
            areaPreparation = 0;
            circles = currentSelection.get(0, a);
            x = circles[0];
            y = circles[1];
            r = Math.round(circles[2]);
            // get area value
            area = calculateArea(r);
            // get perimeter value
            perimeter = calculatePerimeter(r);
            // get diameter value
            diameter = calculateDiameter(area, perimeter);
            // calculate deviational value
            deviationValue = sgf / area;
            Point point = new Point(x, y);
            if (points.containsKey(point)) {
                areaPreparation = calculateArea(points.get(point)) / area;
            }
            Object[] ob = { (a + 1), x, y, r, Validator.formatDouble(perimeter), Validator.formatDouble(area),
                    Validator.formatDouble(diameter), Validator.formatDouble(deviationValue),
                    Validator.formatDouble(areaPreparation) };
            if (currentSelection == circularBloodCells) {
                totalBloodCellArea += area;
                circularBloodCellsArray.add(ob);
            } else if (currentSelection == pallorBloodCells) {
                totalPallarArea += area;
                pallorBloodCellsArray.add(ob);
            } else {
                totalEllipseArea += area;
            }
        }
    }
}
/** * process circular blood cell data including both pallor and red blood * cells.//from w ww . jav a2 s . c o m * * @param currentSelection Mat of selected */ public void circularBloodCellsProcesser(final Mat currentSelection) { if (currentSelection != null) { if (currentSelection == circularBloodCells) { circularBloodCellsArray = new ArrayList<>(); } else if (currentSelection == pallorBloodCells) { pallorBloodCellsArray = new ArrayList<>(); } getMinMaxDiameter(currentSelection); double sgf = 0; if (minDiameter != 0) { //calculate shape geometric factor sgf = maxDiameter / minDiameter; } Map<Point, Double> points = getPallorBloodCellsPointList(); double[] circles; double x, y, r, area, perimeter, diameter, deviationValue, areaPreparation; for (int a = 0; a < currentSelection.cols(); a++) { areaPreparation = 0; circles = currentSelection.get(0, a); x = circles[0]; y = circles[1]; r = Math.round(circles[2]); //get area value area = calculateArea(r); //get perimeter value perimeter = calculatePerimeter(r); //get diameter value diameter = calculateDiameter(area, perimeter); // calculate deviational value deviationValue = sgf / area; Point point = new Point(x, y); if (points.containsKey(point)) { areaPreparation = calculateArea(points.get(point)) / area; } Object[] ob = { (a + 1), x, y, r, Validator.formatDouble(perimeter), Validator.formatDouble(area), Validator.formatDouble(diameter), Validator.formatDouble(deviationValue), Validator.formatDouble(areaPreparation) }; if (currentSelection == circularBloodCells) { totalBloodCellArea += area; circularBloodCellsArray.add(ob); } else if (currentSelection == pallorBloodCells) { totalPallarArea += area; pallorBloodCellsArray.add(ob); } else { totalEllipseArea += area; } } } }