List of usage examples for org.opencv.imgproc Imgproc getStructuringElement
public static Mat getStructuringElement(int shape, Size ksize)
From source file:ImageReade.java
public static void detectLetter(Mat img) { ArrayList<Rect> boundRect = new ArrayList<>(); Mat img_gray, img_sobel, img_threshold, element; img_gray = new Mat(); img_sobel = new Mat(); img_threshold = new Mat(); element = new Mat(); Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY); imshow("Rec img_gray", img_gray); Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8U, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT); imshow("Rec img_sobel", img_sobel); Imgproc.threshold(img_sobel, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY); imshow("Rec img_threshold", img_threshold); element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(16, 6)); Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element); imshow("Rec img_threshold second", img_threshold); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE); Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1); for (int i = 0; i < contours.size(); i++) { System.out.println(Imgproc.contourArea(contours.get(i))); // if (Imgproc.contourArea(contours.get(i)) > 100) { // //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true ); // Rect rect = Imgproc.boundingRect(contours.get(i)); // System.out.println(rect.height); // if (rect.width > rect.height) { // //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width); // Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255)); // } // // // } if (Imgproc.contourArea(contours.get(i)) > 100) { MatOfPoint2f mMOP2f1 = new MatOfPoint2f(); MatOfPoint2f mMOP2f2 = new MatOfPoint2f(); contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2); Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 3, true); mMOP2f2.convertTo(contours.get(i), CvType.CV_32S); Rect rect = Imgproc.boundingRect(contours.get(i)); if (rect.width > rect.height) { Core.rectangle(img, new Point(rect.x, rect.y), new 
Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255)); }/*from w ww. j a va2 s . c om*/ } } imshow("Rec Detected", img); }
From source file:MainEroding.java
public static void main(String[] args) { try {/*from ww w. j av a 2 s.c om*/ int erosion_size = 2; //int dilation_size = 5; System.loadLibrary(Core.NATIVE_LIBRARY_NAME); Mat source = Highgui.imread("D://teste.png", Highgui.CV_LOAD_IMAGE_COLOR); Mat destination = new Mat(source.rows(), source.cols(), source.type()); destination = source; Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * erosion_size + 1, 2 * erosion_size + 1)); Imgproc.erode(source, destination, element); Highgui.imwrite("D://Erosion.jpg", destination); } catch (Exception e) { System.out.println("Exception: " + e.getMessage()); } }
From source file:ThirdTry.java
public static void detectLetter(Mat img, Mat m2) { ArrayList<Rect> boundRect = new ArrayList<>(); Mat img_gray, img_sobel, img_threshold, element; img_gray = new Mat(); img_sobel = new Mat(); img_threshold = new Mat(); element = new Mat(); Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY); //imshow("Rec img_gray", img_gray); Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8UC1, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT); //imshow("Rec img_sobel", img_sobel); Imgproc.threshold(m2, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY); //imshow("Rec img_threshold", img_threshold); element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 2)); Imgproc.morphologyEx(m2, img_threshold, CV_MOP_CLOSE, element); imshow("Rec img_threshold second", img_threshold); element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(12, 12)); Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element); //imshow("Rec img_threshold second", img_threshold); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE); Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1); for (int i = 0; i < contours.size(); i++) { System.out.println(Imgproc.contourArea(contours.get(i))); // if (Imgproc.contourArea(contours.get(i)) > 100) { // //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true ); // Rect rect = Imgproc.boundingRect(contours.get(i)); // System.out.println(rect.height); // if (rect.width > rect.height) { // //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width); // Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255)); // } // // // } if (Imgproc.contourArea(contours.get(i)) > 100) { MatOfPoint2f mMOP2f1 = new MatOfPoint2f(); MatOfPoint2f mMOP2f2 = new MatOfPoint2f(); contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2); Imgproc.approxPolyDP(mMOP2f1, 
mMOP2f2, 3, true); mMOP2f2.convertTo(contours.get(i), CvType.CV_32S); Rect rect = Imgproc.boundingRect(contours.get(i)); if (rect.width > rect.height) { Core.rectangle(img, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255)); }/*from w w w. j a va 2s. c om*/ } } //imshow("Rec Detected", img); }
From source file:LicenseDetection.java
/**
 * End-to-end license-plate pipeline: load image, find plate-shaped candidate
 * rectangles via edge detection + morphology + contour filtering, de-rotate the
 * single surviving candidate, crop it, OCR it with Tesseract, and look the
 * plate text up in the database. Every intermediate stage is dumped to disk
 * under /Users/BradWilliams/ComputerVisionOut/ for debugging.
 *
 * NOTE(review): all file paths, Tesseract data path, and DB credentials are
 * hard-coded; this only runs on the original author's machine as written.
 */
public void run() {
    // ------------------ set up tesseract for later use ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------ load input and save a copy ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ------------------ binary threshold for the OCR crop (used at the end) ------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ "erosion" step ------------------
    // NOTE(review): the flag is MORPH_DILATE although the original author
    // intended (and observed) an erosion-like effect. With dark text on a light
    // background, dilating the light background does thin the dark strokes —
    // but confirm before relying on this; the op/label mismatch is suspicious.
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    // ------------------ "dilation" step (MORPH_ERODE flag — same mismatch as above) ------------------
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);

    // ------------------ Sobel vertical edge detection ------------------
    // Plates have dense vertical edges (characters), so x-gradient highlights them.
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ binary threshold of the edge image ------------------
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    // ------------------ morphology to fuse edges into candidate blobs ------------------
    // NOTE(review): op code 1 is MORPH_DILATE in OpenCV (MORPH_CLOSE is 3), so
    // despite the original "close" comment this is a plain dilation with a
    // wide 22x8 plate-shaped kernel. Verify intent before changing.
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // DEBUG: draw every contour filled white for visual inspection.
    Mat contourImg = new Mat(imgClose.size(), imgClose.type());
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);

    // Convert to MatOfPoint2f so that minAreaRect can be called.
    List<MatOfPoint2f> newContours = new ArrayList<>();
    for (MatOfPoint mat : contours) {
        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);
    }

    // Compute rotated bounding rects and keep only plate-shaped candidates.
    List<RotatedRect> minAreaRects = new ArrayList<>();
    for (MatOfPoint2f mat : newContours) {
        RotatedRect rect = Imgproc.minAreaRect(mat);
        // Workaround kept from the original author: minAreaRect sometimes
        // reports width/height swapped with the angle off by -90 degrees.
        // Normalize so width >= height and the angle test below is meaningful.
        if (rect.size.width < rect.size.height) {
            double temp;
            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;
        }
        // Filter: aspect ratio in (1, 5), area in (10000, 50000) px,
        // and nearly horizontal (|angle| < 20 degrees).
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000
                && rect.size.width * rect.size.height < 50000 && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }
    }

    // DEBUG: draw the surviving candidate boxes on top of the original image.
    Point[] vertices = new Point[4];
    Mat imageWithBoxes = img;
    for (RotatedRect rect : minAreaRects) {
        rect.points(vertices);
        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }
    }
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);

    // To rotate without cropping: embed the image centered in a square canvas
    // whose side equals the diagonal of the original, then rotate that canvas.
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, 0); // type 0 == CV_8UC1
    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; // x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; // y start of ROI
    // Designate the region of interest and insert the OCR-prepped image there.
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); // holds the temporarily rotated canvas
    Mat rectMat = new Mat(); // holds the extracted candidate-rect contents
    Point[] rectVertices = new Point[4]; // used to build the axis-aligned ROI
    Rect rec = new Rect();
    List<RotatedRect> edgeDensityRects = new ArrayList<>(); // candidates passing the (stubbed) density check
    int count = 0;
    // De-rotate and extract each candidate rect.
    for (RotatedRect rect : minAreaRects) {
        count++;
        // Shift the rect center into canvas coordinates.
        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);
        // Rotate the canvas so this rect becomes axis-aligned, then clear its angle.
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);
        rect.angle = 0;
        rect.points(rectVertices);
        // Build an axis-aligned ROI from two opposite corners and extract it.
        rec = new Rect(rectVertices[1], rectVertices[3]);
        rectMat = rotatedTemp.submat(rec);
        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);
        // Edge-density filtering was never implemented (not needed for the test
        // image), so every candidate passes through unchanged.
        edgeDensityRects.add(rect);
    }

    // DEBUG: dump the rotation canvas.
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);

    // If exactly one candidate survives, treat it as the plate and OCR it.
    if (edgeDensityRects.size() == 1) {
        String result = ""; // holds the OCR result
        BufferedImage bimg;
        Mat cropped;
        // Trim fixed margins (plate frame) before OCR.
        // NOTE(review): rectMat is whatever the LAST loop iteration produced;
        // this is only safe because size() == 1 implies a single iteration.
        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));
        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);
        bimg = matToBufferedImage(cropped);
        BufferedImage image = bimg;
        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }
        for (int i = 0; i < 10; ++i) { // no-op loop kept from the original
        }
        result = result.replace("\n", "");
        System.out.println(result);
        // Look the recognized plate text up in the database.
        CarProfDBImpl db = new CarProfDBImpl();
        db.connect("localhost:3306/computer_vision", "root", "*******");
        CarProf c = db.getCarProf(result);
        System.out.print(c.toString());
        db.close();
    }
}
From source file:MainDilation.java
public static void main(String[] args) { try {/* w ww. j av a 2s.c o m*/ //int erosion_size = 5; int dilation_size = 5; System.loadLibrary(Core.NATIVE_LIBRARY_NAME); Mat source = Highgui.imread("D://teste.png", Highgui.CV_LOAD_IMAGE_COLOR); Mat destination = new Mat(source.rows(), source.cols(), source.type()); destination = source; Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * dilation_size + 1, 2 * dilation_size + 1)); Imgproc.dilate(source, destination, element); Highgui.imwrite("D://Dilation.jpg", destination); } catch (Exception e) { System.out.println("Exception: " + e.getMessage()); } }
From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java
License:Open Source License
private void morphOps(Mat thresh) { //create structuring element that will be used to "dilate" and "erode" image. //the element chosen here is a 3px by 3px rectangle Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3)); //dilate with larger element so make sure object is nicely visible Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8)); Imgproc.erode(thresh, thresh, erodeElement); Imgproc.erode(thresh, thresh, erodeElement); Imgproc.dilate(thresh, thresh, dilateElement); Imgproc.dilate(thresh, thresh, dilateElement); }
From source file:by.zuyeu.deyestracker.core.detection.task.DetectPupilsTask.java
/**
 * Locates the pupil as the darkest point of the smoothed grayscale frame.
 *
 * The frame is converted to grayscale, cleaned with an erode + dilate pass,
 * Gaussian-blurred, and the location of the global minimum intensity is
 * returned as the pupil position. Detection timing is logged at debug level.
 *
 * @return location of the darkest pixel (assumed pupil center)
 */
@Override
public Point call() throws Exception {
    final long begunAt = System.nanoTime();
    // NOTE(review): the original named this "imageHSV", but COLOR_BGR2GRAY
    // produces a single-channel grayscale image, not HSV.
    final Mat gray = new Mat(frame.size(), Core.DEPTH_MASK_8U);
    Imgproc.cvtColor(frame, gray, Imgproc.COLOR_BGR2GRAY);
    // One kernel serves both ops; erode/dilate only read it.
    final Mat rectKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, STRUCT_ELEMENT_SIZE);
    Imgproc.erode(gray, gray, rectKernel);
    Imgproc.dilate(gray, gray, rectKernel);
    Imgproc.GaussianBlur(gray, gray, STRUCT_ELEMENT_SIZE, GAUS_BLUR_DELTA);
    final Core.MinMaxLocResult extrema = Core.minMaxLoc(gray);
    LOG.debug("pupil detected = {}", extrema.minLoc);
    LOG.debug("detection time: {} ms", (float) (System.nanoTime() - begunAt) / 1000000);
    return extrema.minLoc;
}
From source file:classes.BlobsFinder.java
/**
 * Finds blob contours in {@code image} and records, for each contour whose
 * area exceeds MIN_AREA: the contour itself (validContours), its fabric path
 * string (contourPaths), its bounding-box top-left corner (topLeftCorners) and
 * its area (contoursAreas). Every intermediate stage is saved via
 * ImageUtils.saveImage under the outImageName prefix for debugging.
 */
public void findBlobContours() {
    // Grayscale conversion.
    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    ImageUtils.saveImage(grayImage, outImageName + "_grayImage.png", request);

    // Unsharp mask: subtract a Gaussian-blurred copy (weight -1) from a boosted
    // original (weight 1.5) to sharpen edges before thresholding.
    Mat gaussianImage = new Mat();
    Imgproc.GaussianBlur(grayImage, gaussianImage, new Size(0, 0), 3);
    Core.addWeighted(grayImage, 1.5, gaussianImage, -1, 0, gaussianImage);
    ImageUtils.saveImage(gaussianImage, outImageName + "_gaussianGrayImage.png", request);

    // Inverted adaptive threshold: blobs become white on black.
    Mat binaryImage = new Mat();
    Imgproc.adaptiveThreshold(gaussianImage, binaryImage, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY_INV, 15, 4);
    ImageUtils.saveImage(binaryImage, outImageName + "_binaryImage.png", request);

    // Morphological close (despite the "eroded" name) to fill small gaps.
    Mat erodedImage = new Mat();
    binaryImage.copyTo(erodedImage);
    Mat structuringElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Point anchor = new Point(-1, -1); // default anchor == kernel center
    Imgproc.morphologyEx(erodedImage, erodedImage, Imgproc.MORPH_CLOSE, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedImage, outImageName + "_erodedImage.png", request);

    // First contour pass on the cleaned binary image.
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(erodedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Render all first-pass contours filled-white on a black canvas.
    Mat originalContoursImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Scalar contourColor = new Scalar(255);
    int thickness = -1; // thickness below zero means "draw filled contours"
    Imgproc.drawContours(originalContoursImage, contours, -1, contourColor, thickness);
    ImageUtils.saveImage(originalContoursImage, outImageName + "_originalContoursImage.png", request);

    // Erode the filled-contour image to separate touching blobs, then re-find
    // contours on the eroded rendering for the final pass.
    Mat erodedContoursImage = new Mat();
    Imgproc.erode(originalContoursImage, erodedContoursImage, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedContoursImage, outImageName + "_erodedContoursImage.png", request);

    ArrayList<MatOfPoint> finalContours = new ArrayList<MatOfPoint>();
    Mat finalContourImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Imgproc.findContours(erodedContoursImage, finalContours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Keep only contours larger than MIN_AREA; record their derived data.
    for (int i = 0; i < finalContours.size(); i++) {
        MatOfPoint currentContour = finalContours.get(i);
        double area = Imgproc.contourArea(currentContour);
        if (area > MIN_AREA) {
            validContours.add(currentContour);
            String fabricPath = generateFabricPathString(currentContour);
            contourPaths.add(fabricPath);
            Rect boundingRect = Imgproc.boundingRect(currentContour);
            topLeftCorners.add(boundingRect.tl());
            contoursAreas.add(area);
        }
    }

    // Draw ALL the valid contours for the final debug image.
    Imgproc.drawContours(finalContourImage, validContours, -1, contourColor, thickness);
    ImageUtils.saveImage(finalContourImage, outImageName + "_finalContourImage.png", request);
}
From source file:com.example.root.dipproj.MainActivity.java
/**
 * Handles results from the two image-acquisition flows:
 * requestCode 1 — a camera capture saved as "temp.jpg" on external storage:
 * shows it, re-compresses it into the Phoenix/default folder, deletes the temp.
 * requestCode 2 — a gallery pick: resolves the file path via MediaStore,
 * runs an OpenCV edge-enhancement pipeline on it, shows and saves the result.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == RESULT_OK) {
        if (requestCode == 1) {
            // Locate the freshly captured "temp.jpg" in external storage root.
            File f = new File(Environment.getExternalStorageDirectory().toString());
            for (File temp : f.listFiles()) {
                if (temp.getName().equals("temp.jpg")) {
                    f = temp;
                    break;
                }
            }
            try {
                Bitmap bitmap;
                BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
                bitmap = BitmapFactory.decodeFile(f.getAbsolutePath(), bitmapOptions);
                viewImage.setImageBitmap(bitmap);
                // Persist a JPEG copy named by timestamp, then drop the temp file.
                String path = android.os.Environment.getExternalStorageDirectory() + File.separator
                        + "Phoenix" + File.separator + "default";
                f.delete();
                OutputStream outFile = null;
                File file = new File(path, String.valueOf(System.currentTimeMillis()) + ".jpg");
                try {
                    // NOTE(review): if compress/flush throws, outFile is never
                    // closed — a try-with-resources would fix the leak.
                    outFile = new FileOutputStream(file);
                    bitmap.compress(Bitmap.CompressFormat.JPEG, 85, outFile);
                    outFile.flush();
                    outFile.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else if (requestCode == 2) {
            // Resolve the picked gallery image to an absolute file path.
            // NOTE(review): query() may return null and getColumnIndex may
            // return -1; both would crash here — guard before shipping.
            Uri selectedImage = data.getData();
            String[] filePath = { MediaStore.Images.Media.DATA };
            Cursor c = getContentResolver().query(selectedImage, filePath, null, null, null);
            c.moveToFirst();
            int columnIndex = c.getColumnIndex(filePath[0]);
            String picturePath = c.getString(columnIndex);
            c.close();
            Bitmap thumbnail = (BitmapFactory.decodeFile(picturePath));
            Log.w("path of image", picturePath + "");

            // OpenCV pipeline: gray -> blur -> morphological gradient-ish
            // (erode, dilate, Sobel, absdiff) -> Otsu threshold.
            Mat imgMat = new Mat();
            Mat imgMat2 = new Mat();
            Mat imgMat3 = new Mat();
            Utils.bitmapToMat(thumbnail, imgMat);
            Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_RGB2GRAY);
            org.opencv.core.Size s = new Size(3, 3);
            // NOTE(review): the CLAHE instance below is created and discarded —
            // this call has no effect; either apply() it or remove it.
            Imgproc.createCLAHE();
            Imgproc.GaussianBlur(imgMat, imgMat, s, 2);
            Imgproc.erode(imgMat, imgMat2,
                    Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
            Imgproc.dilate(imgMat2, imgMat3,
                    Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
            Imgproc.Sobel(imgMat, imgMat, CvType.CV_8UC1, 1, 0);
            Core.absdiff(imgMat, imgMat3, imgMat);
            Imgproc.threshold(imgMat, imgMat, 123, 255, Imgproc.THRESH_OTSU);
            Utils.matToBitmap(imgMat, thumbnail);
            viewImage.setImageBitmap(thumbnail);
            saveBitmaptoSDCard(thumbnail);
        }
    }
}
From source file:com.example.sarthuak.opencv.MainActivity.java
public Mat onCameraFrame(CvCameraViewFrame inputFrame) { // TODO Auto-generated method stub final int viewMode = mViewMode; switch (viewMode) { case VIEW_MODE_RGBA: // input frame has RBGA format mRgba = inputFrame.rgba();//from ww w . ja v a 2s . com break; case VIEW_MODE_CANNY: // input frame has gray scale format mRgba = inputFrame.rgba(); Imgproc.Canny(inputFrame.gray(), mRgbaF, 80, 100); Imgproc.cvtColor(mRgbaF, mRgba, Imgproc.COLOR_GRAY2RGBA, 4); break; case VIEW_MODE_ocr: startActivity(new Intent(this, ScanLicensePlateActivity.class)); break; case VIEW_MODE_new: Mat mRgba; mRgba = inputFrame.rgba(); drawing = mRgba.clone(); mRgbaT = drawing; Imgproc.cvtColor(drawing, mRgbaT, Imgproc.COLOR_BGR2GRAY); org.opencv.core.Size s = new Size(1, 1); Imgproc.GaussianBlur(mRgbaT, mRgbaT, s, 0, 0); Imgproc.Canny(mRgbaT, mRgbaT, 100, 255); Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5)); Imgproc.dilate(mRgbaT, mRgbaT, element); List<MatOfPoint> contours = new ArrayList<>(); Imgproc.findContours(drawing, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0)); double maxArea = -1; int maxAreaIdx = -1; for (int idx = 0; idx < contours.size(); idx++) { Mat contour = contours.get(idx); double contourarea = Imgproc.contourArea(contour); if (contourarea > maxArea) { maxArea = contourarea; maxAreaIdx = idx; } } Imgproc.drawContours(mRgba, contours, maxAreaIdx, new Scalar(255, 0, 0), 5); } return mRgba; // This function must return }