Usage examples for the org.opencv.core.Point constructor:
    public Point(double x, double y)
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/** * Applies morphological dilation./* w ww. jav a 2s . c o m*/ * * @param src - source image. * @param radius - radius of structure element. * @return dilated image. */ public static Mat dilate(Mat src, int radius) { Mat dest = new Mat(); int kernelSize = radius * 2 + 1; Mat kernel = getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(kernelSize, kernelSize), new Point(radius, radius)); Imgproc.dilate(src, dest, kernel); return dest; }
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/**
 * Calculates the average intensity of the pixels lying strictly inside the given contour.
 *
 * @param src     source image whose pixel intensities are averaged.
 * @param contour region of interest; only pixels with a strictly positive
 *                {@code pointPolygonTest} result (i.e. strictly inside, boundary excluded)
 *                contribute.
 * @return the average intensity, or a fallback value derived from the bounding-box corner
 *         pixel when the contour encloses no interior pixel.
 */
public static int averageIntensity(Mat src, MatOfPoint contour) {
    int intensitySum = 0;
    int pixelCount = 0;
    Rect bounds = boundingRect(contour);

    // FIX: hoisted out of the pixel loops. The original rebuilt this conversion for every
    // scanned pixel, turning the scan into O(width * height * contourPoints).
    MatOfPoint2f contour2f = new MatOfPoint2f(contour.toArray());

    // FIX: Rect.br() is exclusive (tl + size), so iterate with '<'. The original '<=' scanned
    // one extra row/column that is always outside the contour (same result, wasted work).
    for (int xCoord = (int) bounds.tl().x; xCoord < (int) bounds.br().x; xCoord++) {
        for (int yCoord = (int) bounds.tl().y; yCoord < (int) bounds.br().y; yCoord++) {
            if (pointPolygonTest(contour2f, new Point(xCoord, yCoord), false) > 0) {
                intensitySum += intensity(src, xCoord, yCoord);
                pixelCount++;
            }
        }
    }

    if (pixelCount == 0) {
        // Degenerate contour with no interior pixels: fall back to the bounding-box corner
        // pixel, inverted around mid-gray for single-channel images (original heuristic kept).
        pixelCount = 1;
        intensitySum = intensity(src, bounds.x, bounds.y);
        if (src.channels() == 1) {
            intensitySum = intensitySum > 127 ? 0 : 255;
        }
    }

    return intensitySum / pixelCount;
}
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
@Override public Mat applyMathMorphology(Integer radius) { Mat dest = new Mat(); int instrumentSize = radius * 2 + 1; Mat kernel = getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(instrumentSize, instrumentSize), new Point(radius, radius)); Imgproc.morphologyEx(currentImage, dest, MORPH_CLOSE, kernel, new Point(-1, -1), 1); dest.copyTo(currentImage);//from w w w .j a v a2 s . c o m dest.release(); return currentImage; }
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
/**
 * Legacy astrocyte detector: finds contours in {@code source} and keeps those passing a
 * five-step cascade of size, aspect-ratio, roundness and intensity filters. Accepted
 * contour centers are collected into the {@code astrocytesCenters} field.
 *
 * NOTE(review): the thresholds (160, 15, 1.8, 0.05, 0.30, +20) appear empirically tuned;
 * no derivation is visible here — confirm against the calibration that produced them.
 *
 * @param source          image to scan; converted to grayscale first if it has 3 channels.
 * @param averageRectSize expected bounding-box side length of an astrocyte, in pixels.
 * @param averageArea     expected contour area of an astrocyte.
 * @param intensity       reference intensity; contours brighter than {@code intensity + 20}
 *                        are rejected.
 */
private void detectAstrocytesOld(Mat source, Integer averageRectSize, Double averageArea, int intensity) {
    if (source.channels() == 3) {
        source = CoreOperations.grayscale(source);
    }
    astrocytesCenters = new ArrayList<>();
    List<MatOfPoint> contoursAfterFirstIteration = new ArrayList<>();
    Mat hierarchy = new Mat();

    /* Step 1: extract all contours from the (grayscale) source. */
    findContours(source, contoursAfterFirstIteration, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contoursAfterFirstIteration) {
        Rect boundingRectangle = boundingRect(contour);
        Double contourArea = contourArea(contour);
        Double contourPerimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

        /* Step 2: area filter — lower bound only (upper bound intentionally disabled). */
        if (averageArea - 160 <= contourArea /*&& contourArea <= averageArea + 10*/) {
            /* Step 3: bounding box within ±15 px of the expected size in at least one
             * dimension, and aspect ratio below 1.8 both ways (rejects elongated shapes). */
            if (((averageRectSize - 15 <= boundingRectangle.width)
                    && (boundingRectangle.width <= averageRectSize + 15)
                    || (averageRectSize - 15 <= boundingRectangle.height)
                            && (boundingRectangle.height <= averageRectSize + 15))
                    && (boundingRectangle.width / (float) boundingRectangle.height < 1.8f)
                    && (boundingRectangle.height / (float) boundingRectangle.width < 1.8f)) {
                /* Step 4: roundness filter — area / perimeter^2 in (0.05, 0.30);
                 * a perfect circle scores 1/(4*pi) ~ 0.0796. */
                if (contourArea / (contourPerimeter * contourPerimeter) > 0.05
                        && contourArea / (contourPerimeter * contourPerimeter) < 0.30) {
                    // Intensity is measured on the original sourceImage field,
                    // not on the thresholded 'source' argument.
                    int averageIntensityWithinContour = CoreOperations.averageIntensity(sourceImage, contour);

                    /* Step 5: reject contours brighter than the reference + 20. */
                    if (averageIntensityWithinContour <= intensity + 20) {
                        int xCoordOfAstrocyteCenter = (int) boundingRectangle.tl().x
                                + boundingRectangle.width / 2;
                        int yCoordOfAstrocyteCenter = (int) boundingRectangle.tl().y
                                + boundingRectangle.height / 2;
                        astrocytesCenters.add(new Point(xCoordOfAstrocyteCenter, yCoordOfAstrocyteCenter));
                    }
                }
            }
        }
    }
}
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
/**
 * Renders the detected layer boundaries onto a clone of the source image: each row of
 * {@code layerBounds} holds one boundary's y-coordinates, drawn as 1-px filled circles.
 * The first and last boundaries get a distinct color from the inner ones.
 *
 * @return the annotated clone, or {@code sourceImage} itself when no bounds exist.
 * @deprecated debugging visualization only.
 */
@Deprecated
private Mat drawLayerBounds() {
    if (layerBounds == null) {
        return sourceImage;
    }

    Mat canvas = sourceImage.clone();
    final int lastRow = layerBounds.rows() - 1;

    for (int row = 0; row <= lastRow; row++) {
        boolean isOuterBoundary = (row == 0 || row == lastRow);
        Scalar color = isOuterBoundary ? new Scalar(18, 20, 250) : new Scalar(250, 20, 18);

        for (int col = 0; col < layerBounds.cols(); col++) {
            // layerBounds.get(row, col)[0] is the boundary's y value at column 'col'.
            Imgproc.circle(canvas, new Point(col, layerBounds.get(row, col)[0]), 1, color, -1);
        }
    }

    return canvas;
}
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
private List<Neuron> findNeuronsInStep(Mat source, int stepRadius) { List<Neuron> neurons = new ArrayList<Neuron>(); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Mat hierarchy = new Mat(); findContours(CoreOperations.grayscale(source), contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_TC89_L1); for (MatOfPoint contour : contours) { Rect boundingRectangle = boundingRect(contour); int xCenter = boundingRectangle.x + boundingRectangle.width / 2; int yCenter = boundingRectangle.y + boundingRectangle.height / 2; neurons.add(new Neuron(new Point(xCenter, yCenter), stepRadius)); }//from w ww .jav a 2 s. co m return neurons; }
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
/**
 * Detects astrocytes in the given image by contour roundness and stores their bounding-box
 * centers in the {@code astrocytesCenters} field (previous contents are discarded).
 *
 * @param src image to scan; grayscaled before contour extraction.
 */
private void findAstrocytes(Mat src) {
    astrocytesCenters = new ArrayList<Point>();
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    findContours(CoreOperations.grayscale(src), contours, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contours) {
        Rect box = boundingRect(contour);
        double area = contourArea(contour);
        double perimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

        // Roundness measure area / perimeter^2: a perfect circle scores 1/(4*pi) ~ 0.0796;
        // only shapes in the open interval (0.05, 0.30) are accepted.
        double roundness = area / (perimeter * perimeter);
        if (roundness > 0.05 && roundness < 0.30) {
            astrocytesCenters.add(new Point(box.x + box.width / 2, box.y + box.height / 2));
        }
    }
}
From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java
License:Open Source License
/**
 * Draws every line segment from {@code lines} onto {@code image} in green, 2 px thick.
 * Each row of {@code lines} is read as four doubles [x1, y1, x2, y2] — the layout produced
 * by Hough-style line detectors (assumed; confirm against the caller).
 *
 * @param lines matrix of segments, one per row.
 * @param image destination image, modified in place.
 */
public static void drawLinesOnImage(Mat lines, Mat image) {
    final Scalar green = new Scalar(0, 255, 0);
    for (int row = 0; row < lines.rows(); row++) {
        double[] seg = lines.get(row, 0);
        Point start = new Point(seg[0], seg[1]);
        Point end = new Point(seg[2], seg[3]);
        Imgproc.line(image, start, end, green, 2);
    }
}
From source file:com.compta.firstak.notedefrais.MainActivity.java
public void Opencv(String imageName) { bitmap = BitmapFactory.decodeFile(imageName); Mat imageMat = new Mat(); org.opencv.android.Utils.bitmapToMat(bitmap, imageMat); Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_BGR2GRAY); // 1) Apply gaussian blur to remove noise Imgproc.GaussianBlur(imageMat, imageMat, new Size(9, 9), 0); // 2) AdaptiveThreshold -> classify as either black or white Imgproc.adaptiveThreshold(imageMat, imageMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 5, 2);//from w ww . j av a 2 s . com // 3) Invert the image -> so most of the image is black Core.bitwise_not(imageMat, imageMat); // 4) Dilate -> fill the image using the MORPH_DILATE Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_DILATE, new Size(3, 3), new Point(1, 1)); Imgproc.dilate(imageMat, imageMat, kernel); org.opencv.android.Utils.matToBitmap(imageMat, bitmap); mImageViewer.setImageBitmap(bitmap); ByteArrayOutputStream stream1 = new ByteArrayOutputStream(); bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream1); byteArray = stream1.toByteArray(); }
From source file:com.example.colordetector.CamMainActivity.java
License:Apache License
/**
 * Per-frame camera callback: masks the frame down to the configured HSV color range and,
 * in automatic mode, runs a visual countdown that triggers {@code takePicture()} when the
 * target color stays in view long enough.
 *
 * NOTE(review): this method mutates several fields (timeToElapse, osdSecond) and blocks the
 * camera callback thread with wait(75) — confirm that is acceptable for the frame pipeline.
 *
 * @param inputFrame the frame currently captured by the camera.
 * @return the filtered frame in manual mode, otherwise the (possibly annotated) RGBA frame.
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    // The frame currently captured by the camera, converted to the RGBA color space.
    rgbaFrame = inputFrame.rgba();

    // Convert the frame to HSV to compare against the color thresholds.
    // RGBA cannot be converted to HSV directly, so go through RGB first.
    Imgproc.cvtColor(rgbaFrame, rgbFrame, Imgproc.COLOR_RGBA2RGB);
    Imgproc.cvtColor(rgbFrame, hsvFrame, Imgproc.COLOR_RGB2HSV);

    // Build a mask containing ONLY the zones of the chosen color in the current frame.
    Core.inRange(hsvFrame, thresMin, thresMax, inRangeMask);
    filteredFrame.setTo(new Scalar(0, 0, 0));
    rgbFrame.copyTo(filteredFrame, inRangeMask);

    // Manual shooting mode: just return the color-filtered image.
    if (!methodAuto) {
        return filteredFrame;
    }

    // Automatic mode: check the H channel to see if the searched color is present.
    Core.extractChannel(filteredFrame, hChannel, 0);

    /* Two strategies exist for detecting the color's presence:
     *
     * checkRange: proceed as soon as a single matching pixel is found.
     *   Pro    -> fast.
     *   Versus -> less accurate; false positives possible depending on camera quality.
     *   if (!Core.checkRange(hChannel, true, 0, 1)) { ... }
     *
     * Percentage (used below): count matching pixels and proceed only when at least
     * 0.1% of the frame matches.
     *   Pro    -> more accurate, lower false-positive risk.
     *   Versus -> slower than checkRange.
     *   N.B.: the threshold is deliberately low so small objects are still detected. */
    int perc = Core.countNonZero(hChannel); // Percentage
    if (perc > (frameDim * 0.001)) {
        // 'Immediate' shooting mode: take the photo right away, no countdown.
        if (!countDown) {
            takePicture();
            return rgbaFrame;
        }

        // 'point' is where the countdown is drawn: a quarter of the width/height
        // from the top-left corner.
        Point point = new Point(rgbaFrame.cols() >> 2, rgbaFrame.rows() >> 2);

        // Update the on-screen countdown every 75*8 ms while the color stays present.
        // The 75 ms granularity avoids making the screen/app feel frozen to the user.
        if (timeToElapse % 8 == 0) {
            if (osdSecond.compareTo("") == 0)
                osdSecond = ((Integer) (timeToElapse >> 3)).toString();
            else
                osdSecond = osdSecond.concat(".." + (((Integer) (timeToElapse >> 3)).toString()));
            Core.putText(rgbaFrame, osdSecond, point, 1, 3, Scalar.all(255));
        }
        timeToElapse -= 1;

        // Object framed for more than 3 seconds (24 ticks * 125 ms): shoot the photo.
        if (timeToElapse <= 0) {
            timeToElapse = 24;
            takePicture();
        }
        // Object framed for less than 3 seconds: wait 75 ms before the next tick.
        else {
            try {
                synchronized (this) {
                    wait(75);
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
    // No object of the searched color is framed: reset the countdown and OSD text.
    else {
        timeToElapse = 24;
        osdSecond = "";
    }
    return rgbaFrame;
}