List of usage examples for org.opencv.core.Core#addWeighted
public static void addWeighted(Mat src1, double alpha, Mat src2, double beta, double gamma, Mat dst)
From source file:abc.RomanCharacterPicture.java
public int evaluatePicture() { try {//from w w w .j ava 2s .co m ITesseract instance = new Tesseract(); MatToBufImg webcamImageBuff = new MatToBufImg(); webcamImageBuff.setMatrix(webcam_image, ".jpg"); double heightRatio = (double) webcamImageBuff.getBufferedImage().getHeight() / (double) webcam_image.height(); double widthRatio = (double) webcamImageBuff.getBufferedImage().getWidth() / (double) webcam_image.width(); int x1 = this.leftRectangle.getxPos(); int y1 = this.leftRectangle.getyPos(); int x2 = this.rightRectangle.getxPos(); int y2 = this.rightRectangle.getyPos(); Rect rect = new Rect(leftRectangle.getxPos(), leftRectangle.getyPos(), (rightRectangle.getxPos() - leftRectangle.getxPos()), (rightRectangle.getyPos() - leftRectangle.getyPos())); //Rect rect = new Rect(new Point(leftRectangle.getxPos(), leftRectangle.getyPos()), new Point(leftRectangle.getxPos(), rightRectangle.getyPos()), , (rightRectangle.getxPos()-leftRectangle.getxPos())); Mat subImageMat = webcam_image.submat(rect); BufferedImage romanCharacter = webcamImageBuff.getBufferedImage().getSubimage((int) (x1 * widthRatio), (int) (y1 * heightRatio), (int) (widthRatio * (x2 - x1)), (int) (heightRatio * (y2 - y1))); //int[] pixels = ((DataBufferInt) romanCharacter.getRaster().getDataBuffer()).getData(); //Mat subImageMat = new Mat(romanCharacter.getHeight(), romanCharacter.getWidth(), CvType.CV_8UC3); //subImageMat.put(0, 0, pixels); Mat hsv_image = new Mat(); Imgproc.cvtColor(subImageMat, hsv_image, Imgproc.COLOR_BGR2HSV); Mat lower_black_hue_range = new Mat(); Mat upper_black_hue_range = new Mat(); Core.inRange(hsv_image, new Scalar(0, 0, 0), new Scalar(180, 255, 30), lower_black_hue_range); Core.inRange(hsv_image, new Scalar(0, 0, 20), new Scalar(180, 255, 40), upper_black_hue_range); Mat black_hue_image = new Mat(); Core.addWeighted(lower_black_hue_range, 1.0, upper_black_hue_range, 1.0, 0.0, black_hue_image); Imgproc.GaussianBlur(black_hue_image, black_hue_image, new Size(9, 9), 2, 2); 
MatToBufImg blackImageBuff = new MatToBufImg(); blackImageBuff.setMatrix(black_hue_image, ".jpg"); BufferedImage test = blackImageBuff.getBufferedImage(); //ImageIO.write(test, "PNG", new FileOutputStream((Math.round(Math.random()*1000))+"dst.png")); String result = instance.doOCR(test); int counterI = 0; for (int i = 0; i < result.length(); i++) { if (result.charAt(i) == 'I' || result.charAt(i) == 'l' || result.charAt(i) == '1' || result.charAt(i) == 'i' || result.charAt(i) == 'L' || result.charAt(i) == 'j' || result.charAt(i) == 'J') { counterI++; } } int counterV = 0; for (int i = 0; i < result.length(); i++) { if (result.charAt(i) == 'V' || result.charAt(i) == 'v' || result.charAt(i) == 'W' || result.charAt(i) == 'w' || result.contains("\\//")) { counterV++; } } //System.out.println("Result: "+result+ " calc:" + (counterI + (counterV * 5))); return (counterI + (counterV * 5)); } catch (Exception ex) { //System.out.println(ex.getMessage()); ex.printStackTrace(); return 0; } }
From source file:classes.BlobsFinder.java
/**
 * Finds blob contours in {@code image} and records, for every contour whose
 * area exceeds MIN_AREA: the contour itself (validContours), its fabric path
 * string (contourPaths), the top-left corner of its bounding box
 * (topLeftCorners) and its area (contoursAreas). Every intermediate stage is
 * saved through ImageUtils for debugging.
 */
public void findBlobContours() {
    // 1) Grayscale conversion.
    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    ImageUtils.saveImage(grayImage, outImageName + "_grayImage.png", request);

    // 2) Unsharp masking: blend against a Gaussian-blurred copy (weight -1)
    //    to sharpen edges before thresholding.
    Mat gaussianImage = new Mat();
    Imgproc.GaussianBlur(grayImage, gaussianImage, new Size(0, 0), 3);
    Core.addWeighted(grayImage, 1.5, gaussianImage, -1, 0, gaussianImage);
    ImageUtils.saveImage(gaussianImage, outImageName + "_gaussianGrayImage.png", request);

    // 3) Adaptive binarization, inverted so blobs come out white.
    Mat binaryImage = new Mat();
    Imgproc.adaptiveThreshold(gaussianImage, binaryImage, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY_INV, 15, 4);
    ImageUtils.saveImage(binaryImage, outImageName + "_binaryImage.png", request);

    // 4) Morphological close (3x3) to join nearby fragments.
    Mat erodedImage = new Mat();
    binaryImage.copyTo(erodedImage);
    Mat structuringElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Point anchor = new Point(-1, -1);
    Imgproc.morphologyEx(erodedImage, erodedImage, Imgproc.MORPH_CLOSE, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedImage, outImageName + "_erodedImage.png", request);

    // 5) First contour pass: draw all external contours filled.
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(erodedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);
    Mat originalContoursImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Scalar contourColor = new Scalar(255);
    int thickness = -1; // thickness < 0 means the contours are drawn filled
    Imgproc.drawContours(originalContoursImage, contours, -1, contourColor, thickness);
    ImageUtils.saveImage(originalContoursImage, outImageName + "_originalContoursImage.png", request);

    // 6) Erode the filled-contour image to separate touching blobs.
    Mat erodedContoursImage = new Mat();
    Imgproc.erode(originalContoursImage, erodedContoursImage, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedContoursImage, outImageName + "_erodedContoursImage.png", request);

    // 7) Second contour pass on the eroded image; keep only blobs above MIN_AREA.
    ArrayList<MatOfPoint> finalContours = new ArrayList<MatOfPoint>();
    Mat finalContourImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Imgproc.findContours(erodedContoursImage, finalContours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);
    for (int i = 0; i < finalContours.size(); i++) {
        MatOfPoint currentContour = finalContours.get(i);
        double area = Imgproc.contourArea(currentContour);
        if (area > MIN_AREA) {
            validContours.add(currentContour);
            String fabricPath = generateFabricPathString(currentContour);
            contourPaths.add(fabricPath);
            Rect boundingRect = Imgproc.boundingRect(currentContour);
            topLeftCorners.add(boundingRect.tl());
            contoursAreas.add(area);
        }
    }
    // Draw all the valid contours, filled.
    Imgproc.drawContours(finalContourImage, validContours, -1, contourColor, thickness);
    ImageUtils.saveImage(finalContourImage, outImageName + "_finalContourImage.png", request);
}
From source file:classes.TextRecognitionPreparer.java
public static ArrayList<String> generateRecognizableImagesNames(Mat img, Scalar userPickedColor, String imageID, HttpServletRequest request) {//from w w w . j av a 2 s . c o m ArrayList<String> imageNames = new ArrayList<String>(); Mat filledImage = img.clone(); Scalar newVal = new Scalar(userPickedColor.val[2], userPickedColor.val[1], userPickedColor.val[0]); Imgproc.floodFill(filledImage, new Mat(), new Point(0, 0), newVal); String file1 = imageID + "_filledImage.png"; // Highgui.imwrite(file1, filledImage); imageNames.add(ImageUtils.saveImage(filledImage, file1, request)); Mat filledGrayImage = new Mat(); Imgproc.cvtColor(filledImage, filledGrayImage, Imgproc.COLOR_BGR2GRAY); String file2 = imageID + "_filledGrayImage.png"; // Highgui.imwrite(file2, filledGrayImage); imageNames.add(ImageUtils.saveImage(filledGrayImage, file2, request)); Mat gaussianGrayImage = new Mat(); Imgproc.GaussianBlur(filledGrayImage, gaussianGrayImage, new Size(0, 0), 3); Core.addWeighted(filledGrayImage, 3.5, gaussianGrayImage, -1, 0, gaussianGrayImage); String file3 = imageID + "_sharpenedImage.png"; // Highgui.imwrite(file3, gaussianGrayImage); imageNames.add(ImageUtils.saveImage(gaussianGrayImage, file3, request)); // Mat filledBinarizedImage2 = new Mat(); // Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage2, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 75, 10); // String file5 = imageID + "_filledBinarizedImage2.png"; //// Highgui.imwrite(file11, filledBinarizedImage2); // imageNames.add(ImageUtils.saveImage(filledBinarizedImage2, file5)); // // Mat filledBinarizedImage1 = new Mat(); // Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage1, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, 4); // String file4 = imageID + "_filledBinarizedImage1.png"; //// Highgui.imwrite(file4, filledBinarizedImage1); // imageNames.add(ImageUtils.saveImage(filledBinarizedImage1, file4)); return imageNames; }
From source file:classes.TextRecognitionPreparer.java
public static ArrayList<BufferedImage> generateRecognizableBufferedImages(Mat img, Scalar backgroundColor, Scalar userPickedColor) {//from w w w . jav a 2s . c o m ArrayList<BufferedImage> images = new ArrayList<BufferedImage>(); Mat filledImage = img.clone(); Scalar newVal = new Scalar(userPickedColor.val[2], userPickedColor.val[1], userPickedColor.val[0]); Imgproc.floodFill(filledImage, new Mat(), new Point(0, 0), newVal); images.add(Util.mat2Img(filledImage)); Mat filledGrayImage = new Mat(); Imgproc.cvtColor(filledImage, filledGrayImage, Imgproc.COLOR_BGR2GRAY); images.add(Util.mat2Img(filledGrayImage)); Mat gaussianGrayImage = new Mat(); Imgproc.GaussianBlur(filledGrayImage, gaussianGrayImage, new Size(0, 0), 3); Core.addWeighted(filledGrayImage, 3.5, gaussianGrayImage, -1, 0, gaussianGrayImage); images.add(Util.mat2Img(gaussianGrayImage)); Mat filledBinarizedImage2 = new Mat(); Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage2, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 75, 10); images.add(Util.mat2Img(filledBinarizedImage2)); Mat filledBinarizedImage1 = new Mat(); Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage1, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, 4); images.add(Util.mat2Img(filledBinarizedImage1)); return images; }
From source file:classes.TextRecognitionPreparer.java
public static ArrayList<Mat> generateRecognizableImages(Mat img, Scalar backgroundColor, Scalar userPickedColor) {/*from w ww . j a v a2 s .c o m*/ ArrayList<Mat> images = new ArrayList<Mat>(); Mat filledImage = img.clone(); Scalar newVal = new Scalar(userPickedColor.val[2], userPickedColor.val[1], userPickedColor.val[0]); Imgproc.floodFill(filledImage, new Mat(), new Point(0, 0), newVal); String file1 = "filledImage.png"; // Highgui.imwrite(file1, filledImage); images.add(filledImage); Mat filledGrayImage = new Mat(); Imgproc.cvtColor(filledImage, filledGrayImage, Imgproc.COLOR_BGR2GRAY); String file2 = "filledGrayImage.png"; // Highgui.imwrite(file2, filledGrayImage); images.add(filledGrayImage); Mat gaussianGrayImage = new Mat(); Imgproc.GaussianBlur(filledGrayImage, gaussianGrayImage, new Size(0, 0), 3); Core.addWeighted(filledGrayImage, 3.5, gaussianGrayImage, -1, 0, gaussianGrayImage); // Core.addWeighted(filledGrayImage, 2.5, gaussianGrayImage, -0.5, 0, gaussianGrayImage); String file3 = "sharpenedImage.png"; // Highgui.imwrite(file3, gaussianGrayImage); images.add(gaussianGrayImage); Mat filledBinarizedImage = new Mat(); Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, 4); String file4 = "filledBinarizedImage.png"; // Highgui.imwrite(file4, filledBinarizedImage); images.add(filledBinarizedImage); // BackgroundSubtractorMOG2 backgroundSubtractorMOG2 = new BackgroundSubtractorMOG2(); // Mat foregroundMask = new Mat(); // backgroundSubtractorMOG2.apply(img, foregroundMask); // Highgui.imwrite("mFGMask.png", foregroundMask); Scalar fillingColor = cluster(userPickedColor, img, 3); Mat replacedColor = replaceColor(img, backgroundColor, fillingColor); String file5 = "replacedColor.png"; // Highgui.imwrite(file5, replacedColor); images.add(replacedColor); Mat grayImage = new Mat(); Imgproc.cvtColor(replacedColor, grayImage, Imgproc.COLOR_BGR2GRAY); String file6 = "grayImage.png"; // 
Highgui.imwrite(file6, grayImage); images.add(grayImage); Mat binarized = new Mat(); Imgproc.adaptiveThreshold(grayImage, binarized, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, 4); String file7 = "binarized.png"; // Highgui.imwrite(file7, binarized); images.add(binarized); Mat colorReplacedEqualized = equalizeIntensity(replacedColor); String file8 = "colorReplacedEqualized.png"; // Highgui.imwrite(file8, colorReplacedEqualized); images.add(colorReplacedEqualized); Mat colorReducedImage = reduceColor(replacedColor, 64); String file9 = "replacedColorColorReduced.png"; // Highgui.imwrite(file9, colorReducedImage); images.add(colorReducedImage); // Equalizing image Mat colorReducedEqualized = equalizeIntensity(colorReducedImage); String file10 = "colorReducedEqualized.png"; // Highgui.imwrite(file10, colorReducedEqualized); images.add(colorReducedEqualized); return images; }
From source file:com.mitzuli.core.ocr.OcrPreprocessor.java
License:Open Source License
/**
 * Binarizes and cleans the input image for OCR, saving debugging images in the
 * given directory.
 *
 * Pipeline: adaptive binarization; morphological edge map; word-level
 * connected-component extraction and two-stage filtering (absolute limits,
 * then limits relative to the average box); paragraph-level grouping via a
 * 30x30 close; paragraph filtering by word count and distance to the image
 * center; masking of the binary image with the surviving text regions; and
 * finally dewarping through Leptonica.
 *
 * @param input    the input image, which is recycled by this method, so the
 *                 caller should make a defensive copy of it if necessary.
 * @param debugDir the directory to write the debugging images to, or null to
 *                 disable debugging.
 * @return the preprocessed (binarized, masked and dewarped) image.
 */
static Image preprocess(final Image input, final File debugDir) {
    // TODO Temporary workaround to allow to manually enable debugging (the global final variable should be used)
    boolean DEBUG = debugDir != null;

    // Initialization. `mat` is reused as a scratch buffer throughout; `binary`
    // holds the binarized page that is masked at the end.
    final Mat mat = input.toGrayscaleMat();
    final Mat debugMat = DEBUG ? input.toRgbMat() : null;
    input.recycle();
    final Mat aux = new Mat(mat.size(), CvType.CV_8UC1);
    final Mat binary = new Mat(mat.size(), CvType.CV_8UC1);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "1_input.jpg"));

    // Binarize the input image in mat through adaptive Gaussian thresholding
    Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY,
            51, 13);
    // Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 31, 7);

    // Edge detection: average of an open and a close, then a morphological
    // gradient, binarized with Otsu.
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_3X3); // Open
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_3X3); // Close
    Core.addWeighted(mat, 0.5, aux, 0.5, 0, mat); // Average
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_GRADIENT, KERNEL_3X3); // Gradient
    Imgproc.threshold(mat, mat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU); // Edge map
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "2_edges.jpg"));

    // Extract word level connected-components from the dilated edge map
    Imgproc.dilate(mat, mat, KERNEL_3X3);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "3_dilated_edges.jpg"));
    final List<MatOfPoint> wordCCs = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mat, wordCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter word level connected-components individually and calculate their average attributes
    final List<MatOfPoint> individuallyFilteredWordCCs = new ArrayList<MatOfPoint>();
    final List<MatOfPoint> removedWordCCs = new ArrayList<MatOfPoint>();
    double avgWidth = 0, avgHeight = 0, avgArea = 0;
    for (MatOfPoint cc : wordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.height >= 6 // bounding box height >= 6
                && boundingBox.area() >= 50 // bounding box area >= 50
                && (double) boundingBox.width / (double) boundingBox.height >= 0.25 // bounding box aspect ratio >= 1:4
                && boundingBox.width <= 0.75 * mat.width() // bounding box width <= 0.75 image width
                && boundingBox.height <= 0.75 * mat.height()) // bounding box height <= 0.75 image height
        {
            individuallyFilteredWordCCs.add(cc);
            avgWidth += boundingBox.width;
            avgHeight += boundingBox.height;
            avgArea += boundingBox.area();
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    wordCCs.clear();
    // NOTE(review): if every CC was filtered out these divide by zero,
    // yielding NaN averages — presumably acceptable downstream; confirm.
    avgWidth /= individuallyFilteredWordCCs.size();
    avgHeight /= individuallyFilteredWordCCs.size();
    avgArea /= individuallyFilteredWordCCs.size();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, removedWordCCs, -1, BLUE, -1); // removed CCs drawn in blue
        removedWordCCs.clear();
    }

    // Filter word level connected-components in relation to their average attributes
    final List<MatOfPoint> filteredWordCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint cc : individuallyFilteredWordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.width >= 0.125 * avgWidth // bounding box width >= 0.125 average width
                && boundingBox.width <= 8 * avgWidth // bounding box width <= 8 average width
                && boundingBox.height >= 0.25 * avgHeight // bounding box height >= 0.25 average height
                && boundingBox.height <= 4 * avgHeight) // bounding box height <= 4 average height
        {
            filteredWordCCs.add(cc);
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    individuallyFilteredWordCCs.clear();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, filteredWordCCs, -1, GREEN, -1); // kept CCs in green
        Imgproc.drawContours(debugMat, removedWordCCs, -1, PURPLE, -1); // removed CCs in purple
        removedWordCCs.clear();
    }

    // Extract paragraph level connected-components: redraw the surviving word
    // CCs on a black canvas and close with a 30x30 kernel to merge words.
    mat.setTo(BLACK);
    Imgproc.drawContours(mat, filteredWordCCs, -1, WHITE, -1);
    final List<MatOfPoint> paragraphCCs = new ArrayList<MatOfPoint>();
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_30X30);
    Imgproc.findContours(aux, paragraphCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter paragraph level connected-components according to the word level connected-components inside
    final List<MatOfPoint> textCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint paragraphCC : paragraphCCs) {
        // Count the word CCs that fall inside this paragraph by masking.
        final List<MatOfPoint> wordCCsInParagraphCC = new ArrayList<MatOfPoint>();
        aux.setTo(BLACK);
        Imgproc.drawContours(aux, Collections.singletonList(paragraphCC), -1, WHITE, -1);
        Core.bitwise_and(mat, aux, aux);
        Imgproc.findContours(aux, wordCCsInParagraphCC, new Mat(), Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);
        final Rect boundingBox = Imgproc.boundingRect(paragraphCC);
        // Horizontal distance from the paragraph's box to the image center
        // (0 when the box straddles the center).
        final double center = mat.size().width / 2;
        final double distToCenter = center > boundingBox.x + boundingBox.width
                ? center - boundingBox.x - boundingBox.width
                : center < boundingBox.x ? boundingBox.x - center : 0.0;
        if (DEBUG) {
            System.err.println("****************************************");
            System.err.println("\tArea: " + boundingBox.area());
            System.err.println("\tDistance to center: " + distToCenter);
            System.err.println("\tCCs inside: " + wordCCsInParagraphCC.size());
        }
        // Keep paragraphs with enough word CCs (absolute or relative) that are
        // close enough to the image center.
        if ((wordCCsInParagraphCC.size() >= 10
                || wordCCsInParagraphCC.size() >= 0.3 * filteredWordCCs.size())
                && mat.size().width / distToCenter >= 4) {
            textCCs.addAll(wordCCsInParagraphCC);
            if (DEBUG) {
                System.err.println("\tText: YES");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_GREEN, 5);
            }
        } else {
            if (DEBUG) {
                System.err.println("\tText: NO");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_RED, 5);
            }
        }
    }
    filteredWordCCs.clear();
    paragraphCCs.clear();
    // Text mask: black text regions on a white background.
    mat.setTo(WHITE);
    Imgproc.drawContours(mat, textCCs, -1, BLACK, -1);
    textCCs.clear();
    if (DEBUG)
        Image.fromMat(debugMat).write(new File(debugDir, "4_filtering.jpg"));

    // Obtain the final text mask from the filtered connected-components
    Imgproc.erode(mat, mat, KERNEL_15X15);
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_30X30);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "5_text_mask.jpg"));

    // Apply the text mask to the binarized image (non-text regions -> white)
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "6_binary.jpg"));
    binary.setTo(WHITE, mat);
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "7_binary_text.jpg"));

    // Dewarp the text using Leptonica
    Pix pixs = Image.fromMat(binary).toGrayscalePix();
    Pix pixsDewarp = Dewarp.dewarp(pixs, 0, Dewarp.DEFAULT_SAMPLING, 5, true);
    final Image result = Image.fromGrayscalePix(pixsDewarp);
    if (DEBUG)
        result.write(new File(debugDir, "8_dewarp.jpg"));

    // Clean up
    pixs.recycle();
    mat.release();
    aux.release();
    binary.release();
    if (debugMat != null)
        debugMat.release();
    return result;
}
From source file:com.trandi.opentld.tld.PatchGenerator.java
License:Apache License
/**
 * Generates a warped (and optionally blurred and noise-perturbed) patch of
 * {@code image}.
 *
 * @param image     source image
 * @param T         affine transform passed to warpAffine
 * @param patch     OUTPUT — (re)created to patchSize with the image's type
 * @param patchSize size of the generated patch
 * @param rng       random generator used for background, blur size and noise
 */
void generate(final Mat image, final Mat T, Mat patch, Size patchSize, final RNG rng) {
    patch.create(patchSize, image.type());
    if (backgroundMin != backgroundMax) {
        // Random background, then warp with a transparent border so the random
        // background shows through outside the warped image area.
        Core.randu(patch, backgroundMin, backgroundMax);
        // TODO if that null scalar OK or should it be new Scalar(0) ?
        Imgproc.warpAffine(image, patch, T, patchSize, Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, null);
    } else {
        Imgproc.warpAffine(image, patch, T, patchSize, Imgproc.INTER_LINEAR, Core.BORDER_CONSTANT,
                new Scalar(backgroundMin));
    }

    // Optional random Gaussian blur. nextInt()%9 may be negative, so ksize can
    // be negative or zero, in which case no blur is applied (ksize > 0 guard).
    int ksize = randomBlur ? rng.nextInt() % 9 - 5 : 0;
    if (ksize > 0) {
        ksize = ksize * 2 + 1; // GaussianBlur requires an odd kernel size
        Imgproc.GaussianBlur(patch, patch, new Size(ksize, ksize), 0, 0);
    }

    // Optional additive Gaussian noise. The noise is generated centered at
    // `delta` (mid-range for 8/16-bit depths) and the weighted add uses
    // gamma = -delta, so the patch mean is unchanged.
    if (noiseRange > 0) {
        final Mat noise = new Mat(patchSize, image.type());
        int delta = (image.depth() == CvType.CV_8U ? 128 : (image.depth() == CvType.CV_16U ? 32768 : 0));
        Core.randn(noise, delta, noiseRange);
        // TODO this was different !! (the C++ original skipped background
        // pixels when backgroundMin == backgroundMax; this adds noise everywhere)
        Core.addWeighted(patch, 1, noise, 1, -delta, patch);
    }
}
From source file:cpsd.ImageGUI.java
private void enhanceSharpness(double alpha) { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // System.load("/usr/local/share/OpenCV/java/libopencv_java249.so"); Mat source = ImageClass.getInstance().getImage(); Mat destination = new Mat(source.rows(), source.cols(), source.type()); /// GaussianBlur(Mat src, Mat dst, Size ksize, double sigmaX); Imgproc.GaussianBlur(source, destination, new org.opencv.core.Size(0, 0), 10); ///addWeighted(Mat src1, double alpha, Mat src2, double beta, double gamma, Mat dst); Core.addWeighted(source, alpha, destination, -0.5, 0, destination); ImageClass.getInstance().setImage(destination); sharpSlider.setValue((int) (alpha * 10)); }
From source file:dfmDrone.examples.fitEllipseExample.java
private static Mat findAndDrawEllipse(Mat sourceImg) { Mat grayScaleImg = new Mat(); Mat hsvImg = new Mat(); Imgproc.cvtColor(sourceImg, hsvImg, Imgproc.COLOR_BGR2HSV); Mat lower_hue_range = new Mat(); Mat upper_hue_range = new Mat(); Core.inRange(hsvImg, new Scalar(0, 100, 45), new Scalar(15, 255, 255), lower_hue_range); Core.inRange(hsvImg, new Scalar(160, 100, 45), new Scalar(180, 255, 255), upper_hue_range); Mat red_hue_image = new Mat(); Core.addWeighted(lower_hue_range, 1.0, upper_hue_range, 1.0, 0, red_hue_image); Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(24, 24)); Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(10, 10)); Imgproc.blur(red_hue_image, red_hue_image, new Size(11, 11)); // init//ww w . j a va 2s .c o m List<MatOfPoint> contours = new ArrayList<>(); Mat hierarchy = new Mat(); // find contours Imgproc.findContours(red_hue_image, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE); System.out.println("After findcontours"); // if any contour exist... 
if (hierarchy.size().height > 0 && hierarchy.size().width > 0) { // for each contour, display it in blue for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) { System.out.println(idx); // Imgproc.drawContours(frame, contours, idx, new Scalar(250, 0, 0), 3); } } MatOfPoint2f approxCurve = new MatOfPoint2f(); //For each contour found MatOfPoint2f contour2f = null; RotatedRect rotatedrect = null; for (MatOfPoint contour : contours) { //Convert contours(i) from MatOfPoint to MatOfPoint2f if (contour2f == null) contour2f = new MatOfPoint2f(contour.toArray()); if (contour.size().area() > contour2f.size().area()) { contour2f = new MatOfPoint2f(contour.toArray()); } } try { Imgproc.fitEllipse(contour2f); rotatedrect = Imgproc.fitEllipse(contour2f); double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02; Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true); //Convert back to MatOfPoint MatOfPoint points = new MatOfPoint(approxCurve.toArray()); // Get bounding rect of contour Rect rect = Imgproc.boundingRect(points); // draw enclosing rectangle (all same color, but you could use variable i to make them unique) Imgproc.rectangle(sourceImg, rect.tl(), rect.br(), new Scalar(255, 0, 0), 1, 8, 0); Imgproc.ellipse(sourceImg, rotatedrect, new Scalar(255, 192, 203), 4, 8); } catch (CvException e) { e.printStackTrace(); System.out.println("Ingen ellipse fundet"); } return sourceImg; }
From source file:javaapplication1.Ocv.java
public void blendWithGray50(String input, String output) { // load the image and read it into a matrix File f2 = new File(input); Mat image = Highgui.imread(this.input); // clone the image, and convert it to grayscale Mat gray = image.clone();// w w w.j a v a2 s . c o m Imgproc.cvtColor(gray, gray, Imgproc.COLOR_BGR2GRAY, 1); Imgproc.cvtColor(gray, gray, Imgproc.COLOR_GRAY2BGR, 3); // blend the two images (equal weight) into a new matrix and save it Mat dst = new Mat(); Core.addWeighted(image, .5f, gray, .5f, 0.0, dst); Highgui.imwrite(this.output, dst); }