List of usage examples for org.opencv.imgproc.Imgproc#drawContours
public static void drawContours(Mat image, List<MatOfPoint> contours, int contourIdx, Scalar color)
From source file:com.projecttango.examples.java.pointcloud.MainActivity.java
License:Open Source License
/** * Set up the callback listeners for the Tango Service and obtain other parameters required * after Tango connection.//from w ww . jav a 2s. c o m * Listen to updates from the Point Cloud and Tango Events and Pose. */ private void startupTango() { final ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>(); framePairs.add(new TangoCoordinateFramePair(TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE, TangoPoseData.COORDINATE_FRAME_DEVICE)); mTango.connectListener(framePairs, new Tango.TangoUpdateCallback() { @Override public void onPoseAvailable(TangoPoseData pose) { // Passing in the pose data to UX library produce exceptions. if (mTangoUx != null) { mTangoUx.updatePoseStatus(pose.statusCode); } mapPos = pose; /* TANGO POSE UPDATE FOR MAP HERE */ //mapInfo.setCurrentCell(pose); } @Override public void onPointCloudAvailable(TangoPointCloudData pointCloud) { if (mTangoUx != null) { mTangoUx.updatePointCloud(pointCloud); } mPointCloudManager.updatePointCloud(pointCloud); final double currentTimeStamp = pointCloud.timestamp; final double pointCloudFrameDelta = (currentTimeStamp - mPointCloudPreviousTimeStamp) * SECS_TO_MILLISECS; mPointCloudPreviousTimeStamp = currentTimeStamp; final double averageDepth = getAveragedDepth(pointCloud.points, pointCloud.numPoints); mPointCloudTimeToNextUpdate -= pointCloudFrameDelta; if (mPointCloudTimeToNextUpdate < 0.0) { mPointCloudTimeToNextUpdate = UPDATE_INTERVAL_MS; final String pointCountString = Integer.toString(pointCloud.numPoints); runOnUiThread(new Runnable() { @Override public void run() { //mPointCountTextView.setText(pointCountString); //mAverageZTextView.setText(FORMAT_THREE_DECIMAL.format(averageDepth)); } }); } } @Override public void onFrameAvailable(int cameraId) { // We are not using onFrameAvailable for this application. 
if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) { tangoCameraPreview.onFrameAvailable(); bm[0] = tangoCameraPreview.getBitmap(); frameCount++; Log.d("FPSTango", ": " + frameCount); Bitmap openCVBitmap = tangoCameraPreview.getBitmap(); tmp = new Mat(openCVBitmap.getWidth(), openCVBitmap.getHeight(), CvType.CV_8UC4); mDetector.process(tmp); //////////////////////// List<MatOfPoint> contours = mDetector.getContours(); // Log.e("rescue robotics", "Contours count: " + contours.size()); Imgproc.drawContours(tmp, contours, -1, CONTOUR_COLOR); Mat colorLabel = tmp.submat(4, 68, 4, 68); colorLabel.setTo(mBlobColorRgba); Mat spectrumLabel = tmp.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols()); mSpectrum.copyTo(spectrumLabel); if (mDetector.blobsDetected() > 0) { toast("I see a Blob!"); } if (frameCount == 30) { frameCount = 0; scan(tangoCameraPreview.getBitmap()); } } } @Override public void onTangoEvent(TangoEvent event) { if (mTangoUx != null) { mTangoUx.updateTangoEvent(event); } } }); }
From source file:cubesolversimulator.VisualInputForm.java
private void getContour() { Mat dilate = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(7, 7)); Imgproc.dilate(edge, edge, dilate);//ww w .java 2s .c om Imgproc.dilate(edge, edge, dilate); Highgui.imwrite("dilate.jpg", edge); List<MatOfPoint> contours = new ArrayList<>(); Imgproc.findContours(edge, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE); Imgproc.drawContours(contour, contours, -1, new Scalar(255, 255, 255)); Highgui.imwrite("Contour.jpg", contour); img4 = new JLabel(""); img4.setSize(jPanel7.getSize()); image4 = new ImageIcon(new ImageIcon("D:\\Project_work\\CubeSolverSimulator\\dilate.jpg").getImage() .getScaledInstance(img4.getWidth(), img4.getHeight(), Image.SCALE_FAST)); img4.setIcon(image4); img4.repaint(); jPanel7.add(img4); jPanel7.repaint(); findRect(contours); }
From source file:detectiontest.ImageDisplayer.java
public static void drawParticles(Mat image, List<Particle> particles) { for (Particle particle : particles) { ParticleClass pClass = particle.getParticleClass(); Scalar color = colorToScalar(Color.BLACK); if (pClass != null) { color = colorToScalar(pClass.getColor()); }//w ww. j av a 2 s . co m List<MatOfPoint> list = new ArrayList<>(); list.add(particle.getContour()); Imgproc.drawContours(image, list, 0, color); Core.rectangle(image, particle.getBoundingRect().tl(), particle.getBoundingRect().br(), color); } }
From source file:edu.ucue.tfc.Modelo.VideoProcessor.java
/**
 * Processes one frame-pair cycle of the traffic video: frame differencing,
 * thresholding, contour extraction, and car-counting via three control points
 * on a virtual line.
 *
 * @param firstFrame the first frame of a cycle; resized and drawn on in place
 */
private void processFrame(Mat firstFrame) {
    double contourArea = 0;
    int position = 0;
    try {
        // Resize the current frame to the working frame size.
        Imgproc.resize(firstFrame, firstFrame, frameSize);
        // Convert the frame to grayscale.
        Imgproc.cvtColor(firstFrame, firstGrayImage, Imgproc.COLOR_BGR2GRAY);
        // Read the next frame, resize it and convert it to grayscale as well.
        video.read(secondFrame);
        Imgproc.resize(secondFrame, secondFrame, frameSize);
        Imgproc.cvtColor(secondFrame, secondGrayImage, Imgproc.COLOR_BGR2GRAY);
        // Absolute per-pixel difference of the two frames: moving objects remain.
        Core.absdiff(firstGrayImage, secondGrayImage, differenceOfImages);
        // Threshold, blur to merge nearby motion blobs, then threshold again.
        Imgproc.threshold(differenceOfImages, thresholdImage, 25, 255, Imgproc.THRESH_BINARY);
        Imgproc.blur(thresholdImage, thresholdImage, new Size(12, 12));
        Imgproc.threshold(thresholdImage, thresholdImage, 20, 255, Imgproc.THRESH_BINARY);
        // Release the previous cycle's contours before reuse (native memory).
        for (int i = 0; i < contours.size(); ++i) {
            contours.get(i).release();
        }
        contours.clear();
        // The horizontal reference line.
        Imgproc.line(firstFrame, controlPoints.get(6), controlPoints.get(7),
                new Scalar(255, 0, 0), Imgproc.LINE_4);
        Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_TREE,
                Imgproc.CHAIN_APPROX_SIMPLE);
        // Release and rebuild the convex hulls for the new contours.
        for (int i = 0; i < hullPoints.size(); ++i) {
            hullPoints.get(i).release();
        }
        hullPoints.clear();
        for (int i = 0; i < contours.size(); i++) {
            MatOfInt tmp = new MatOfInt();
            Imgproc.convexHull(contours.get(i), tmp, false);
            hullPoints.add(tmp);
        }
        // Find the contour with the greatest area and remember its bounding box.
        if (contours.size() > 0) {
            for (int i = 0; i < contours.size(); i++) {
                if (Imgproc.contourArea(contours.get(i)) > contourArea) {
                    contourArea = Imgproc.contourArea(contours.get(i));
                    position = i;
                    boundingRectangle = Imgproc.boundingRect(contours.get(i));
                }
            }
        }
        // Free per-cycle Mats (fields reused across calls).
        secondFrame.release();
        hierarchy.release();
        secondGrayImage.release();
        firstGrayImage.release();
        thresholdImage.release();
        differenceOfImages.release();
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }
    // Left control point: red segment while covered by the largest contour.
    if (controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 0, 255), 2);
        wasAtLeftPoint = true;
    } else if (!controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 255, 0), 2);
    }
    // Center control point.
    if (controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 0, 255), 2);
        wasAtCenterPoint = true;
    } else if (!controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 255, 0), 2);
    }
    // Right control point.
    if (controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 0, 255), 2);
        wasAtRightPoint = true;
    } else if (!controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 255, 0), 2);
    }
    // All three points covered (possibly across cycles) => one car detected.
    if (wasAtCenterPoint && wasAtLeftPoint && wasAtRightPoint) {
        detectedCarsCount++;
        wasDetected = true;
        wasAtCenterPoint = false;
        wasAtLeftPoint = false;
        wasAtRightPoint = false;
    }
    // Draw the largest contour only if it is big enough to be a vehicle.
    if (contourArea > 3000) {
        Imgproc.drawContours(frame, contours, position, new Scalar(255, 255, 255));
    }
}
From source file:emotion.Eyebrow.java
public static void Harris(Mat img, boolean rightEyeFlag) { //Harris point extraction Mat harrisTestimg;/*from www . ja v a 2 s . c o m*/ harrisTestimg = img.clone(); cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY); threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV); Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3)); erode(harrisTestimg, harrisTestimg, struct); dilate(harrisTestimg, harrisTestimg, struct); imwrite("intermediateHaaris.jpg", harrisTestimg); harrisTestimg.convertTo(harrisTestimg, CV_8UC1); ArrayList<MatOfPoint> contours = new ArrayList<>(); Mat hierarchy = new Mat(); Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE); //System.out.println("Average Y for contours:"); float[] averageY = new float[contours.size()]; for (int i = 0; i < contours.size(); ++i) { //We calculate mean of Y coordinates for each contour for (int j = 0; j < contours.get(i).total(); ++j) { int val = (int) contours.get(i).toArray()[j].y; averageY[i] += val; } averageY[i] /= contours.get(i).total(); //System.out.println(i+") "+averageY[i]); if (averageY[i] <= img.height() / 2 && //We consider just up half of an image contours.get(i).total() >= img.width()) //and longer than threshold Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255)); else Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0)); } MatOfPoint features = new MatOfPoint(); Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0); //We draw just 2 extreme points- first and last Point eyebrowsPoints[] = new Point[2]; for (int i = 0; i < features.toList().size(); i++) { if (i == 0) { eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0); eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0); } if (features.toArray()[i].x < eyebrowsPoints[0].x && features.toArray()[i].y < harrisTestimg.height() / 2) { eyebrowsPoints[0] = 
features.toArray()[i]; } if (features.toArray()[i].x > eyebrowsPoints[1].x && features.toArray()[i].y < harrisTestimg.height() / 2) { eyebrowsPoints[1] = features.toArray()[i]; } } StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS); StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS); imwrite("testHaris.jpg", img); if (rightEyeFlag) { EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0]; EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x; EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y; EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1]; EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x; EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y; } else { EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1]; EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x; EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y; EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0]; EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x; EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y; } }
From source file:eu.fpetersen.robobrain.behavior.followobject.ColorBlobDetector.java
License:Open Source License
public void process(Mat rgbaImage) { Imgproc.pyrDown(rgbaImage, mPyrDownMat); Imgproc.pyrDown(mPyrDownMat, mPyrDownMat); Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL); Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask); Imgproc.dilate(mMask, mDilatedMask, new Mat()); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); // Find max contour area maxArea = 0;/* w w w . j a v a2s.c o m*/ Iterator<MatOfPoint> each = contours.iterator(); Mat biggestContour = null; while (each.hasNext()) { MatOfPoint wrapper = each.next(); double area = Imgproc.contourArea(wrapper); if (area > maxArea) { maxArea = area; biggestContour = wrapper.clone(); } } if (biggestContour != null) { Core.multiply(biggestContour, new Scalar(4, 4), biggestContour); Moments mo = Imgproc.moments(biggestContour); centroidOfMaxArea = new Point(mo.get_m10() / mo.get_m00(), mo.get_m01() / mo.get_m00()); } else { centroidOfMaxArea = null; } // Filter contours by area and resize to fit the original image size mContours.clear(); each = contours.iterator(); while (each.hasNext()) { MatOfPoint contour = each.next(); if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) { Core.multiply(contour, new Scalar(4, 4), contour); mContours.add(contour); } } Imgproc.drawContours(rgbaImage, mContours, -1, CONTOUR_COLOR); }
From source file:fuzzycv.MainFrame.java
private Mat findAndDrawCrust(Mat maskedImage, Mat frame) { List<MatOfPoint> contours = new ArrayList<>(); Mat hierarchy = new Mat(); Imgproc.findContours(maskedImage, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE); //if any contour exist... if (hierarchy.size().height > 0 && hierarchy.size().width > 0) { //for each contour, display it in blue for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) { Imgproc.drawContours(frame, contours, idx, new Scalar(160, 0, 0)); }/*from www . java 2 s . c o m*/ } return frame; }
From source file:hu.unideb.fksz.VideoProcessor.java
License:Open Source License
/**
 * Processes one frame-pair cycle: frame differencing, thresholding, contour
 * extraction, and car-counting via three control points on a virtual line.
 *
 * @param firstFrame the first frame of a cycle; resized and drawn on in place
 */
private void processFrame(Mat firstFrame) {
    double contourArea = 0;
    int position = 0;
    try {
        // Resize {@code firstFrame} to the working {@code frameSize}.
        Imgproc.resize(firstFrame, firstFrame, frameSize);
        // Convert the frame to grayscale colour space.
        Imgproc.cvtColor(firstFrame, firstGrayImage, Imgproc.COLOR_BGR2GRAY);
        // Read the second frame, resize it and convert it to grayscale as well.
        video.read(secondFrame);
        Imgproc.resize(secondFrame, secondFrame, frameSize);
        Imgproc.cvtColor(secondFrame, secondGrayImage, Imgproc.COLOR_BGR2GRAY);
        // Absolute per-pixel difference of the two frames: moving objects remain.
        Core.absdiff(firstGrayImage, secondGrayImage, differenceOfImages);
        // Threshold, blur to merge nearby motion blobs, then threshold again.
        Imgproc.threshold(differenceOfImages, thresholdImage, 25, 255, Imgproc.THRESH_BINARY);
        Imgproc.blur(thresholdImage, thresholdImage, new Size(12, 12));
        Imgproc.threshold(thresholdImage, thresholdImage, 20, 255, Imgproc.THRESH_BINARY);
        // Release the previous cycle's contours before reuse (native memory).
        for (int i = 0; i < contours.size(); ++i) {
            contours.get(i).release();
        }
        contours.clear();
        // The horizontal reference line.
        Imgproc.line(firstFrame, controlPoints.get(6), controlPoints.get(7),
                new Scalar(255, 0, 0), Imgproc.LINE_4);
        Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_TREE,
                Imgproc.CHAIN_APPROX_SIMPLE);
        // Release and rebuild the convex hulls for the new contours.
        for (int i = 0; i < hullPoints.size(); ++i) {
            hullPoints.get(i).release();
        }
        hullPoints.clear();
        for (int i = 0; i < contours.size(); i++) {
            MatOfInt tmp = new MatOfInt();
            Imgproc.convexHull(contours.get(i), tmp, false);
            hullPoints.add(tmp);
        }
        // Search for the contour with the greatest area; remember its index
        // and bounding box.
        if (contours.size() > 0) {
            for (int i = 0; i < contours.size(); i++) {
                if (Imgproc.contourArea(contours.get(i)) > contourArea) {
                    contourArea = Imgproc.contourArea(contours.get(i));
                    position = i;
                    boundingRectangle = Imgproc.boundingRect(contours.get(i));
                }
            }
        }
        // Free per-cycle Mats (fields reused across calls).
        secondFrame.release();
        hierarchy.release();
        secondGrayImage.release();
        firstGrayImage.release();
        thresholdImage.release();
        differenceOfImages.release();
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
    // Left control point: red segment while covered by the largest contour's
    // bounding rectangle, green otherwise.
    if (controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 0, 255), 2);
        wasAtLeftPoint = true;
    } else if (!controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 255, 0), 2);
    }
    // Middle control point.
    if (controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 0, 255), 2);
        wasAtCenterPoint = true;
    } else if (!controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 255, 0), 2);
    }
    // Right control point.
    if (controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 0, 255), 2);
        wasAtRightPoint = true;
    } else if (!controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 255, 0), 2);
    }
    // All three control points covered (possibly across cycles) => one car passed.
    if (wasAtCenterPoint && wasAtLeftPoint && wasAtRightPoint) {
        detectedCarsCount++;
        wasAtCenterPoint = false;
        wasAtLeftPoint = false;
        wasAtRightPoint = false;
        logger.info("Detected " + detectedCarsCount + " car(s)");
    }
    // Draw the largest contour only if it is big enough to be a vehicle.
    if (contourArea > 3000) {
        Imgproc.drawContours(frame, contours, position, new Scalar(255, 255, 255));
    }
}
From source file:in.fabinpaul.sixthsense.ColorBlobDetectionFragment.java
License:Apache License
/**
 * Per-frame camera callback: for every colour that has been selected, runs the
 * corresponding blob detector on the RGBA frame, draws the detected contours,
 * records the blob position, and paints a colour swatch in one of the four
 * frame corners.
 *
 * @param inputFrame current camera frame
 * @return the annotated RGBA frame to display
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    Mat[] colorLabel = new Mat[4];
    org.opencv.core.Point markersXY[] = new org.opencv.core.Point[4];
    for (int i = 0; i < 4; i++) {
        if (mIsColorSelected[i]) {
            mDetector[i].process(mRgba);
            List<MatOfPoint> contours = mDetector[i].getContours();
            Log.e(TAG, "Contours count: " + contours.size());
            Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
            // FIX: the original allocated a fresh Point (and an unconditional
            // new Mat() per iteration) that were immediately overwritten —
            // dead, native-backed allocations removed.
            markersXY[i] = mDetector[i].getXY();
            Log.i(TAG, "Point:X" + markersXY[i].x + " Y:" + markersXY[i].y);
            // Each selected colour gets a 64x64 swatch in its own corner.
            switch (i) {
            case 0: // top-left
                colorLabel[i] = mRgba.submat(4, 68, 4, 68);
                break;
            case 1: // top-right
                colorLabel[i] = mRgba.submat(4, 68, mRgba.cols() - 68, mRgba.cols() - 4);
                break;
            case 2: // bottom-left
                colorLabel[i] = mRgba.submat(mRgba.rows() - 68, mRgba.rows() - 4, 4, 68);
                break;
            case 3: // bottom-right
                colorLabel[i] = mRgba.submat(mRgba.rows() - 68, mRgba.rows() - 4,
                        mRgba.cols() - 68, mRgba.cols() - 4);
                break;
            }
            colorLabel[i].setTo(mBlobColorRgba[i]);
        }
    }
    return mRgba;
}
From source file:opencvdemos.BallGame.java
License:Apache License
private Mat findAndDrawObjects(Mat maskedImage, Mat frame) { // Init// www. j av a2 s . co m List<MatOfPoint> contours = new ArrayList<>(); Mat hierarchy = new Mat(); // Find contours Imgproc.findContours(maskedImage, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE); // If any contour exist... if (hierarchy.size().height > 0 && hierarchy.size().width > 0) { // for each contour, display it in blue for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) { Imgproc.drawContours(frame, contours, idx, new Scalar(250, 0, 0)); } } return frame; }