List of usage examples for org.opencv.core CvType CV_8U
int CV_8U
To view the source code for org.opencv.core CvType CV_8U, click the Source Link.
From source file:ImageReade.java
public static void detectLetter(Mat img) { ArrayList<Rect> boundRect = new ArrayList<>(); Mat img_gray, img_sobel, img_threshold, element; img_gray = new Mat(); img_sobel = new Mat(); img_threshold = new Mat(); element = new Mat(); Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY); imshow("Rec img_gray", img_gray); Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8U, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT); imshow("Rec img_sobel", img_sobel); Imgproc.threshold(img_sobel, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY); imshow("Rec img_threshold", img_threshold); element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(16, 6)); Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element); imshow("Rec img_threshold second", img_threshold); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE); Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1); for (int i = 0; i < contours.size(); i++) { System.out.println(Imgproc.contourArea(contours.get(i))); // if (Imgproc.contourArea(contours.get(i)) > 100) { // //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true ); // Rect rect = Imgproc.boundingRect(contours.get(i)); // System.out.println(rect.height); // if (rect.width > rect.height) { // //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width); // Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255)); // } // // // } if (Imgproc.contourArea(contours.get(i)) > 100) { MatOfPoint2f mMOP2f1 = new MatOfPoint2f(); MatOfPoint2f mMOP2f2 = new MatOfPoint2f(); contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2); Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 3, true); mMOP2f2.convertTo(contours.get(i), CvType.CV_32S); Rect rect = Imgproc.boundingRect(contours.get(i)); if (rect.width > rect.height) { Core.rectangle(img, new Point(rect.x, rect.y), new 
Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255)); }/*from w ww . j a v a2s .c o m*/ } } imshow("Rec Detected", img); }
From source file:angryhexclient.OurVision.java
License:Open Source License
/** * Detects the ground in the image.// w w w. j a va 2 s.co m * @return A list of blocks representing the ground. */ public List<Block> detectGround() { Mat binaryImage = new Mat(new Size(_nWidth, _nHeight), CvType.CV_8U, new Scalar(1)); // We only detect right of this margin. The slingshot has some ground // colors and would partly be detected as ground. This is not what we // want. Trajectories originate at the slingshot, and if there is ground // detected at the slingshot, the agent will think, that none of its // trajectories are valid. Therefore we start with detecting due right // of the slingshot. int startAtX = findSlingshot().x + findSlingshot().width * 2; // Now we create a binary image of the ground areas. White where there // is ground, black otherwise. for (int y = 0; y < _nHeight; y++) { for (int x = 0; x < _nWidth; x++) { if (x > startAtX && isGround(x, y)) binaryImage.put(y, x, 255); else binaryImage.put(y, x, 0); } } Mat smoothedImage = new Mat(new Size(_nWidth, _nHeight), CvType.CV_8U, new Scalar(1)); // This median filter improves the detection tremendously. There are a // whole lot of single pixels that carry ground colors spread all over // the image. We remove them here. Imgproc.medianBlur(binaryImage, smoothedImage, 7); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); // We use OpenCV to find the contours. Contours are lines, that // represent the boundaries of the objects in the binary image. Imgproc.findContours(smoothedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); ArrayList<Block> result = new ArrayList<Block>(); //Now for every contour, we convert it to blocks for communicating them to DLV. for (MatOfPoint mp : contours) { org.opencv.core.Point[] pts = mp.toArray(); for (int i = 0; i < pts.length - 1; i++) { Block b = new Block((int) pts[i].x, (int) pts[i].y); b.add((int) pts[i + 1].x, (int) pts[i + 1].y); result.add(b); } //One block for the first vertex to the last vertex. 
Block b = new Block((int) pts[pts.length - 1].x, (int) pts[pts.length - 1].y); b.add((int) pts[0].x, (int) pts[0].y); result.add(b); } return result; }
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatXml.java
License:Open Source License
public Mat readMat(String tag) { if (isWrite) { System.err.println("Try read from file with write flags"); return null; }/* www. j a va 2s .c o m*/ NodeList nodelist = doc.getElementsByTagName(tag); Mat readMat = null; for (int i = 0; i < nodelist.getLength(); i++) { Node node = nodelist.item(i); if (node.getNodeType() == Node.ELEMENT_NODE) { Element element = (Element) node; String type_id = element.getAttribute("type_id"); if ("opencv-matrix".equals(type_id) == false) { System.out.println("Fault type_id "); } String rowsStr = element.getElementsByTagName("rows").item(0).getTextContent(); String colsStr = element.getElementsByTagName("cols").item(0).getTextContent(); String dtStr = element.getElementsByTagName("dt").item(0).getTextContent(); String dataStr = element.getElementsByTagName("data").item(0).getTextContent(); int rows = Integer.parseInt(rowsStr); int cols = Integer.parseInt(colsStr); int type = CvType.CV_8U; Scanner s = new Scanner(dataStr); if ("f".equals(dtStr)) { type = CvType.CV_32F; readMat = new Mat(rows, cols, type); float fs[] = new float[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { if (s.hasNextFloat()) { fs[0] = s.nextFloat(); } else { fs[0] = 0; System.err.println("Unmatched number of float value at rows=" + r + " cols=" + c); } readMat.put(r, c, fs); } } } else if ("i".equals(dtStr)) { type = CvType.CV_32S; readMat = new Mat(rows, cols, type); int is[] = new int[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { if (s.hasNextInt()) { is[0] = s.nextInt(); } else { is[0] = 0; System.err.println("Unmatched number of int value at rows=" + r + " cols=" + c); } readMat.put(r, c, is); } } } else if ("s".equals(dtStr)) { type = CvType.CV_16S; readMat = new Mat(rows, cols, type); short ss[] = new short[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { if (s.hasNextShort()) { ss[0] = s.nextShort(); } else { ss[0] = 0; System.err.println("Unmatched number of int value at rows=" + r + " 
cols=" + c); } readMat.put(r, c, ss); } } } else if ("b".equals(dtStr)) { readMat = new Mat(rows, cols, type); byte bs[] = new byte[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { if (s.hasNextByte()) { bs[0] = s.nextByte(); } else { bs[0] = 0; System.err.println("Unmatched number of byte value at rows=" + r + " cols=" + c); } readMat.put(r, c, bs); } } } } } return readMat; }
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatXml.java
License:Open Source License
public void writeMat(String tag, Mat mat) { try {// w w w .j a v a 2 s . co m if (isWrite == false) { System.err.println("Try write to file with no write flags"); return; } Element matrix = doc.createElement(tag); matrix.setAttribute("type_id", "opencv-matrix"); rootElement.appendChild(matrix); Element rows = doc.createElement("rows"); rows.appendChild(doc.createTextNode(String.valueOf(mat.rows()))); Element cols = doc.createElement("cols"); cols.appendChild(doc.createTextNode(String.valueOf(mat.cols()))); Element dt = doc.createElement("dt"); String dtStr; int type = mat.type(); if (type == CvType.CV_32F) { // type == CvType.CV_32FC1 dtStr = "f"; } else if (type == CvType.CV_32S) { // type == CvType.CV_32SC1 dtStr = "i"; } else if (type == CvType.CV_16S) { // type == CvType.CV_16SC1 dtStr = "s"; } else if (type == CvType.CV_8U) { // type == CvType.CV_8UC1 dtStr = "b"; } else { dtStr = "unknown"; } dt.appendChild(doc.createTextNode(dtStr)); Element data = doc.createElement("data"); String dataStr = dataStringBuilder(mat); data.appendChild(doc.createTextNode(dataStr)); // append all to matrix matrix.appendChild(rows); matrix.appendChild(cols); matrix.appendChild(dt); matrix.appendChild(data); } catch (Exception e) { e.printStackTrace(); } }
From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatXml.java
License:Open Source License
private String dataStringBuilder(Mat mat) { StringBuilder sb = new StringBuilder(); int rows = mat.rows(); int cols = mat.cols(); int type = mat.type(); if (type == CvType.CV_32F) { float fs[] = new float[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { mat.get(r, c, fs);// w w w . jav a 2 s . c om sb.append(String.valueOf(fs[0])); sb.append(' '); } sb.append('\n'); } } else if (type == CvType.CV_32S) { int is[] = new int[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { mat.get(r, c, is); sb.append(String.valueOf(is[0])); sb.append(' '); } sb.append('\n'); } } else if (type == CvType.CV_16S) { short ss[] = new short[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { mat.get(r, c, ss); sb.append(String.valueOf(ss[0])); sb.append(' '); } sb.append('\n'); } } else if (type == CvType.CV_8U) { byte bs[] = new byte[1]; for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { mat.get(r, c, bs); sb.append(String.valueOf(bs[0])); sb.append(' '); } sb.append('\n'); } } else { sb.append("unknown type\n"); } return sb.toString(); }
From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.BrightnessCorrection.GammaCorrection.java
License:Open Source License
public PreProcessor preprocessImage(PreProcessor preProcessor) { List<Mat> images = preProcessor.getImages(); List<Mat> processed = new ArrayList<Mat>(); for (Mat img : images) { img.convertTo(img, CvType.CV_32F); Core.divide(img, INT_MAX, img);/* w w w . j a v a 2 s .c o m*/ Core.pow(img, gamma, img); Core.multiply(img, INT_MAX, img); img.convertTo(img, CvType.CV_8U); processed.add(img); } preProcessor.setImages(processed); return preProcessor; }
From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.ContrastAdjustment.HistogrammEqualization.java
License:Open Source License
public PreProcessor preprocessImage(PreProcessor preProcessor) { List<Mat> images = preProcessor.getImages(); List<Mat> processed = new ArrayList<Mat>(); for (Mat img : images) { img.convertTo(img, CvType.CV_8U); Imgproc.equalizeHist(img, img);/*w ww.ja v a 2 s.co m*/ processed.add(img); } preProcessor.setImages(processed); return preProcessor; }
From source file:ch.zhaw.facerecognitionlibrary.Recognition.Eigenfaces.java
License:Open Source License
public String recognize(Mat img, String expectedLabel) { // Ignore// www . ja v a 2 s .c o m img = img.reshape(1, 1); // Subtract mean img.convertTo(img, CvType.CV_32F); Core.subtract(img, Psi, img); // Project to subspace Mat projected = getFeatureVector(img); // Save all points of image for tSNE img.convertTo(img, CvType.CV_8U); addImage(projected, expectedLabel, true); //addImage(projected, expectedLabel); Mat distance = new Mat(Omega.rows(), 1, CvType.CV_64FC1); for (int i = 0; i < Omega.rows(); i++) { double dist = Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2); distance.put(i, 0, dist); } Mat sortedDist = new Mat(Omega.rows(), 1, CvType.CV_8UC1); Core.sortIdx(distance, sortedDist, Core.SORT_EVERY_COLUMN + Core.SORT_ASCENDING); // Give back the name of the found person int index = (int) (sortedDist.get(0, 0)[0]); return labelMap.getKey(labelList.get(index)); }
From source file:classes.FloodFiller.java
private void fillFrom(Point seed, int lo, int up, Scalar backgroundColor, Scalar contourFillingColor) { Mat object = ObjectGenerator.extract(image, seed.x, seed.y, 10, 10); this.meanColor = Core.mean(object); Rect ccomp = new Rect(); Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1); int connectivity = 4; int newMaskVal = 255; int ffillMode = 1; int flags = connectivity + (newMaskVal << 8) + (ffillMode == 1 ? Imgproc.FLOODFILL_FIXED_RANGE : 0); Scalar newVal = new Scalar(0.299, 0.587, 0.114); Imgproc.threshold(mask, mask, 1, 128, Imgproc.THRESH_BINARY); filledArea = Imgproc.floodFill(image.clone(), mask, seed, newVal, ccomp, new Scalar(lo, lo, lo), new Scalar(up, up, up), flags); // Highgui.imwrite("mask.png", mask); ImageUtils.saveImage(mask, "mask.png", request); morphologicalImage = new Mat(image.size(), CvType.CV_8UC3); Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1)); ArrayList<Mat> mask3 = new ArrayList<Mat>(); mask3.add(mask);/*from w w w.jav a2 s .com*/ mask3.add(mask); mask3.add(mask); Core.merge(mask3, mask); // Applying morphological filters Imgproc.erode(mask, morphologicalImage, element); Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 9); Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1), 2); Imgproc.resize(morphologicalImage, morphologicalImage, image.size()); // Highgui.imwrite("morphologicalImage.png", morphologicalImage); ImageUtils.saveImage(morphologicalImage, "morphologicalImage.png", request); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Core.split(mask, mask3); Mat binarymorphologicalImage = mask3.get(0); Imgproc.findContours(binarymorphologicalImage.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE); contoursImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor); int thickness = -1; // Thicknes should be lower than zero in order to drawn the 
filled contours Imgproc.drawContours(contoursImage, contours, -1, contourFillingColor, thickness); // Drawing all the contours found // Highgui.imwrite("allContoursImage.png", contoursImage); ImageUtils.saveImage(contoursImage, "allContoursImage.png", request); if (contours.size() > 1) { int minContourWith = 20; int minContourHeight = 20; int maxContourWith = 6400 / 2; int maxContourHeight = 4800 / 2; contours = filterContours(contours, minContourWith, minContourHeight, maxContourWith, maxContourHeight); } if (contours.size() > 0) { MatOfPoint biggestContour = contours.get(0); // getting the biggest contour contourArea = Imgproc.contourArea(biggestContour); if (contours.size() > 1) { biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there are more than one } Point[] points = biggestContour.toArray(); path = "M " + (int) points[0].x + " " + (int) points[0].y + " "; for (int i = 1; i < points.length; ++i) { Point v = points[i]; path += "L " + (int) v.x + " " + (int) v.y + " "; } path += "Z"; biggestContourImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor); Imgproc.drawContours(biggestContourImage, contours, 0, contourFillingColor, thickness); // Highgui.imwrite("biggestContourImage.png", biggestContourImage); ImageUtils.saveImage(biggestContourImage, "biggestContourImage.png", request); Mat maskForColorExtraction = biggestContourImage.clone(); if (isWhite(backgroundColor)) { Imgproc.dilate(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3); } else { Imgproc.erode(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3); } // Highgui.imwrite("maskForColorExtraction.png", maskForColorExtraction); ImageUtils.saveImage(maskForColorExtraction, "maskForColorExtraction.png", request); Mat extractedColor = new Mat(); if (isBlack(backgroundColor) && isWhite(contourFillingColor)) { Core.bitwise_and(maskForColorExtraction, image, extractedColor); } else { 
Core.bitwise_or(maskForColorExtraction, image, extractedColor); } // Highgui.imwrite("extractedColor.png", extractedColor); ImageUtils.saveImage(extractedColor, "extractedColor.png", request); computedSearchWindow = Imgproc.boundingRect(biggestContour); topLeftCorner = computedSearchWindow.tl(); Rect croppingRect = new Rect(computedSearchWindow.x, computedSearchWindow.y, computedSearchWindow.width - 1, computedSearchWindow.height - 1); Mat imageForTextRecognition = new Mat(extractedColor.clone(), croppingRect); // Highgui.imwrite(outImageName, imageForTextRecognition); ImageUtils.saveImage(imageForTextRecognition, outImageName, request); // // // Mat data = new Mat(imageForTextRecognition.size(), CvType.CV_8UC3, backgroundColor); // imageForTextRecognition.copyTo(data); // data.convertTo(data, CvType.CV_8UC3); // // // The meanColor variable represents the color in the GBR space, the following line transforms this to the RGB color space, which // // is assumed in the prepareImage method of the TextRecognitionPreparer class // Scalar userColor = new Scalar(meanColor.val[2], meanColor.val[1], meanColor.val[0]); // // ArrayList<String> recognizableImageNames = TextRecognitionPreparer.generateRecognizableImagesNames(data, backgroundColor, userColor); // for (String imageName : recognizableImageNames) { // // try { // // First recognition step // String recognizedText = TextRecognizer.recognize(imageName, true).trim(); // if (recognizedText != null && !recognizedText.isEmpty()) { // recognizedStrings.add(recognizedText); // } // // Second recognition step // recognizedText = TextRecognizer.recognize(imageName, false).trim(); // if (recognizedText != null && !recognizedText.isEmpty()) { // recognizedStrings.add(recognizedText); // } // // } catch (Exception e) { // } // } // //// ArrayList<BufferedImage> recognizableBufferedImages = TextRecognitionPreparer.generateRecognizableBufferedImages(data, backgroundColor, userColor); //// for (BufferedImage bufferedImage : 
recognizableBufferedImages) { //// try { //// // First recognition step //// String recognizedText = TextRecognizer.recognize(bufferedImage, true).trim(); //// if (recognizedText != null && !recognizedText.isEmpty()) { //// recognizedStrings.add(recognizedText); //// } //// // Second recognition step //// recognizedText = TextRecognizer.recognize(bufferedImage, false).trim(); //// if (recognizedText != null && !recognizedText.isEmpty()) { //// recognizedStrings.add(recognizedText); //// } //// //// } catch (Exception e) { //// } //// } // // // // compute all moments Moments mom = Imgproc.moments(biggestContour); massCenter = new Point(mom.get_m10() / mom.get_m00(), mom.get_m01() / mom.get_m00()); // draw black dot Core.circle(contoursImage, massCenter, 4, contourFillingColor, 8); } }
From source file:classes.ObjectFinder.java
private void applyMorphologicalFilters() { Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1)); Imgproc.erode(thresholdedBackprojection, morphologicalImage, element); Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 2); Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1), 2);//from w w w.j av a 2 s. c om }