List of usage examples for org.opencv.core Core minMaxLoc
public static MinMaxLocResult minMaxLoc(Mat src)
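Core.minMaxLoc scans a single-channel Mat and reports its smallest and largest element values together with their locations, returned as a Core.MinMaxLocResult with the public fields minVal, maxVal, minLoc and maxLoc. Before the project-specific examples below, here is a minimal sketch of a call; the class name MinMaxLocSketch is made up for illustration, and it assumes the OpenCV native library can be loaded via Core.NATIVE_LIBRARY_NAME.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MinMaxLocSketch {
    public static void main(String[] args) {
        // Load the OpenCV native library (packaging-specific; an assumption here).
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // minMaxLoc expects a single-channel matrix.
        Mat m = Mat.eye(3, 3, CvType.CV_32FC1);

        Core.MinMaxLocResult r = Core.minMaxLoc(m);
        System.out.println("min " + r.minVal + " at " + r.minLoc);
        System.out.println("max " + r.maxVal + " at " + r.maxLoc);
    }
}

Most of the examples below apply minMaxLoc to the output of Imgproc.matchTemplate: with the TM_SQDIFF methods the best match is at minLoc, while with TM_CCOEFF_NORMED it is at maxLoc, which is why some snippets read minVal/minLoc and others maxVal. An overload that takes a mask Mat as a second argument is also available to restrict the search to non-zero mask pixels.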
From source file:imageanalysis.Analyzer.java
public boolean compareRoiAgainstPattern(int xcoord, int ycoord, int width, int height) {
    screen = ImgTools.getImageFromClipboard();

    // Crops roi around chosen mouse point
    Rect roi = new Rect(xcoord - width / 2, ycoord - height / 2, width, height);
    Mat actionButton = screen.submat(roi);

    // Preprocessing
    Imgproc.cvtColor(actionButton, actionButton, Imgproc.COLOR_BGR2GRAY);
    // Imgproc.medianBlur(actionButton, actionButton, 5);

    // Referent pattern
    Mat bonefoodPattern = Patterns.getBonefoodPattern();
    // Imgproc.medianBlur(bonefoodPattern, bonefoodPattern, 5);

    // Match template
    // ... result should be the refPoint
    Mat result = new Mat();
    Imgproc.matchTemplate(actionButton, bonefoodPattern, result, Imgproc.TM_SQDIFF);

    Point p = Core.minMaxLoc(result).minLoc;
    // System.out.println(p.toString());

    return p.equals(refPoint);
}
From source file:imageanalysis.Analyzer.java
public Point checkForX() {
    screen = ImgTools.getImageFromClipboard();
    Mat image = screen.clone();

    // Preprocessing
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY);

    // Referent pattern
    Mat xPattern = Patterns.getXbuttPattern();

    // Match template
    // ... result should be the refPoint
    Mat result = new Mat();
    Imgproc.matchTemplate(image, xPattern, result, Imgproc.TM_SQDIFF);

    Core.MinMaxLocResult mm = Core.minMaxLoc(result);
    Point p = mm.minLoc;
    double val = mm.minVal;

    if (val < 1000000) {
        p.x += 10;
        p.y += 10;
        return p;
    } else {
        return null;
    }
}
From source file:imageprocess.HistogramProcessor.java
public static Mat getHistogramImage(Mat image) {
    // Compute histogram first
    Mat hist = getGrayHistogram(image);

    // Get min and max bin values
    MinMaxLocResult locPeak = Core.minMaxLoc(hist);
    double maxVal = locPeak.maxVal;
    double minVal = locPeak.minVal;

    // Image on which to display histogram
    Mat histImg = new Mat(image.rows(), image.rows(), CV_8U, new Scalar(255));

    // set highest point at 90% of nbins
    int hpt = (int) (0.9 * 256);

    // Draw vertical line for each bin
    for (int h = 0; h < 256; h++) {
        double[] f = hist.get(h, 0);
        float binVal = (float) f[0];
        int intensity = (int) (binVal * hpt / maxVal);
        Core.line(histImg, new Point(h, 256.0d), new Point(h, 256.0d - intensity), Scalar.all(0));
    }

    return histImg;
}
From source file:io.appium.java_client.ScreenshotState.java
License:Apache License
/**
 * Compares two valid java bitmaps and calculates similarity score between them.
 *
 * @param refImage   reference image
 * @param tplImage   template
 * @param resizeMode one of possible enum values. Set it either to <em>TEMPLATE_TO_REFERENCE_RESOLUTION</em> or
 *                   <em>REFERENCE_TO_TEMPLATE_RESOLUTION</em> if given bitmaps have different dimensions
 * @return similarity score value in range (-1.0, 1.0). 1.0 is returned if the images are equal
 * @throws ScreenshotComparisonError if provided images are not valid or have
 *                                   different resolution, but resizeMode has been set to <em>NO_RESIZE</em>
 */
public static double getOverlapScore(BufferedImage refImage, BufferedImage tplImage, ResizeMode resizeMode) {
    Mat ref = prepareImageForComparison(refImage);
    if (ref.empty()) {
        throw new ScreenshotComparisonError("Reference image cannot be converted for further comparison");
    }
    Mat tpl = prepareImageForComparison(tplImage);
    if (tpl.empty()) {
        throw new ScreenshotComparisonError("Template image cannot be converted for further comparison");
    }
    switch (resizeMode) {
    case TEMPLATE_TO_REFERENCE_RESOLUTION:
        tpl = resizeFirstMatrixToSecondMatrixResolution(tpl, ref);
        break;
    case REFERENCE_TO_TEMPLATE_RESOLUTION:
        ref = resizeFirstMatrixToSecondMatrixResolution(ref, tpl);
        break;
    default:
        // do nothing
    }

    if (ref.width() != tpl.width() || ref.height() != tpl.height()) {
        throw new ScreenshotComparisonError(
                "Resolutions of template and reference images are expected to be equal. "
                        + "Try different resizeMode value.");
    }

    Mat res = new Mat(ref.rows() - tpl.rows() + 1, ref.cols() - tpl.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(ref, tpl, res, Imgproc.TM_CCOEFF_NORMED);
    return Core.minMaxLoc(res).maxVal;
}
From source file:logic.helpclass.Util.java
/**
 * Track template within the image
 * @param grayFrame
 * @param rect
 * @param temp
 * @return
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    Mat dst = new Mat(searchRect.width - temp.width() + 1, searchRect.height - temp.height() + 1, CV_32FC1);

    if ((searchRect.x < 0 || searchRect.y < 0)
            || (searchRect.x + searchRect.width > grayFrame.cols()
                    || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);

    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    // check new location: if coordinates change so variously, remain previous location
    if (true) {
        rect.x = (int) (searchRect.x + result.minLoc.x);
        rect.y = (int) (searchRect.y + result.minLoc.y);
        return rect;
    } else {
        return null;
    }
}
From source file:opencv.CaptchaDetection.java
/**
 * Match each digit ROI against the sample templates and build the answer string.
 * @param src list of single-digit image regions
 * @return recognized digits as a string
 */
private static String dect_number(List<Mat> src) {
    String answer = "";

    for (Mat numRoi : src) {
        Mat zoomNum = new Mat(numRoi.rows() * 2, numRoi.cols() * 2, CvType.CV_8UC1, new Scalar(0));
        numRoi.copyTo(
                zoomNum.submat(new Rect(numRoi.cols() / 2, numRoi.rows() / 2, numRoi.cols(), numRoi.rows())));

        double matchMin = Double.MAX_VALUE;
        int matchSample = 0;

        for (Map.Entry<Integer, List<Mat>> iter : sampleMat.entrySet()) {
            for (Mat sample : iter.getValue()) {
                int result_cols = zoomNum.cols() - sample.cols() + 1;
                int result_rows = zoomNum.rows() - sample.rows() + 1;

                Mat resultImg = new Mat(result_rows, result_cols, CvType.CV_32FC1);

                Imgproc.matchTemplate(zoomNum, sample, resultImg, Imgproc.TM_SQDIFF);

                Core.MinMaxLocResult mmr = Core.minMaxLoc(resultImg);

                if (matchMin > mmr.minVal) {
                    matchMin = mmr.minVal;
                    matchSample = iter.getKey();
                }
            }
        }
        answer += matchSample / 2;
        // out.println("NumRio\tmatch sample : " + matchSample + "\tmatch value : " + matchMin);
    }
    // out.println("Answer is : " + answer);
    return answer;
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.PreviewUtil.java
License:Open Source License
@NonNull
public static double[] getDiffLuminosity(Mat mat) {
    // find min and max luminosity
    Core.MinMaxLocResult result = Core.minMaxLoc(mat);

    return new double[] { result.minVal, result.maxVal };
}
From source file:org.ar.rubik.MonoChromatic.java
License:Open Source License
/**
 * Create submatrix using bytearray, then Mat.minmax().
 * This solution consumes about 10 seconds per frame.
 *
 * @param original_image
 * @return
 */
private static Mat monochromaticMedianImageFilterUtilizingOpenCv3(Mat original_image) {

    final Size imageSize = original_image.size();

    Mat monochromatic_image = new Mat(imageSize, CvType.CV_8UC1);
    Mat hsv_image = new Mat(imageSize, CvType.CV_8UC3);

    Imgproc.cvtColor(original_image, hsv_image, Imgproc.COLOR_RGB2HLS);
    // Log.i(Constants.TAG, "HSV Image: " + hsv_image); // CV_8UC3
    // Try RGB below
    // hsv_image = result;

    // Get hue channel into simple byte array for speed efficiency.
    final int numColumns = (int) original_image.size().width;
    final int numRows = (int) original_image.size().height;
    final int span = 7;
    final int accuracy = 5;

    List<Mat> channels = new LinkedList<Mat>();
    Core.split(hsv_image, channels);
    Mat hueMat = channels.get(0);
    Mat lumMat = channels.get(1);
    Mat satMat = channels.get(2);

    final int bufferSize = numColumns * numRows;
    byte[] hueByteArray = new byte[bufferSize];
    byte[] lumByteArray = new byte[bufferSize];
    byte[] satByteArray = new byte[bufferSize];
    hueMat.get(0, 0, hueByteArray); // get all the pixels
    lumMat.get(0, 0, lumByteArray); // get all the pixels
    satMat.get(0, 0, satByteArray); // get all the pixels

    // Output byte array for speed efficiency
    byte[] monochromaticByteArray = new byte[bufferSize];

    Mat subimageMat = new Mat(span, span, CvType.CV_8UC1);
    byte[] subimageByteArray = new byte[span * span];

    for (int row = 0; row < numRows; row++) {

        byte result_pixel = 0;

        for (int col = 0; col < numColumns; col++) {

            if (col < span || (col >= numColumns - span))
                result_pixel = 0; // Just put in black

            else if (row < span || (row >= numRows - span))
                result_pixel = 0; // Just put in black

            else {
                // Copy a row (or column)
                for (int i = 0; i < span; i++) {
                    // copy span bytes from (row + i) * numCol + col
                    int srcPos = (row + i) * numColumns + col;
                    int dstPos = i * span;
                    System.arraycopy(hueByteArray, srcPos, subimageByteArray, dstPos, span);
                }

                subimageMat.put(0, 0, subimageByteArray);

                Core.MinMaxLocResult minMaxResult = Core.minMaxLoc(subimageMat);

                if ((minMaxResult.maxVal - minMaxResult.minVal) < accuracy)
                    // && (lum_max - lum_min < accuracy) && (sat_max - sat_min < accuracy) )
                    result_pixel = (byte) 128;
                else
                    result_pixel = (byte) 0;

                // Log.i(Constants.TAG, String.format("Lum %d %d", lum_min, lum_max));
            } // End of else

            if ((col >= span / 2) && (row >= span / 2))
                monochromaticByteArray[(row - span / 2) * numColumns + (col - span / 2)] = result_pixel;

            // int test = (int) (satByteArray[row * numColumns + col]) & 0xFF;
            // monochromaticByteArray[row * numColumns + (col - span/2)] = (byte) test;

        } // End of column sweep

    } // End of row sweep

    Log.i(Constants.TAG, "Completed MonoChromatic CV");
    monochromatic_image.put(0, 0, monochromaticByteArray);
    return monochromatic_image;
}
From source file:org.ar.rubik.MonoChromatic.java
License:Open Source License
/**
 * Use mask operation and then min max.
 * This solution consumes about 20 minutes per frame!
 *
 * @param original_image
 * @return
 */
@SuppressWarnings("unused")
private static Mat monochromaticMedianImageFilterUtilizingOpenCv2(Mat original_image) {

    final Size imageSize = original_image.size();
    final int numColumns = (int) original_image.size().width;
    final int numRows = (int) original_image.size().height;
    final int bufferSize = numColumns * numRows;
    final int span = 7;
    final int accuracy = 5;

    Mat hsv_image = new Mat(imageSize, CvType.CV_8UC3);
    Imgproc.cvtColor(original_image, hsv_image, Imgproc.COLOR_RGB2HLS);

    List<Mat> channels = new LinkedList<Mat>();
    Core.split(hsv_image, channels);
    Mat hueMat = channels.get(0);
    Mat lumMat = channels.get(1);
    Mat satMat = channels.get(2);

    // Output byte array for speed efficiency
    Mat monochromatic_image = new Mat(imageSize, CvType.CV_8UC1);
    byte[] monochromaticByteArray = new byte[bufferSize];

    Mat mask = Mat.zeros(numRows, numColumns, CvType.CV_8UC1);

    Log.i(Constants.TAG, "Begin MonoChromatic CV");
    for (int row = 0; row < numRows; row++) {

        byte result_pixel = 0;

        for (int col = 0; col < numColumns; col++) {

            if (col < span || (col >= numColumns - span))
                result_pixel = 0; // Just put in black

            else if (row < span || (row >= numRows - span))
                result_pixel = 0; // Just put in black

            else {
                // Log.i(Constants.TAG, "Creating Mask at " + row + "," + col);
                Core.rectangle(mask, new Point(row, col), new Point(row + span, col + span),
                        new Scalar(1, 1, 1));

                // Core.MinMaxLocResult minMaxResult = Core.minMaxLoc(hueMat, mask);
                Mat subset = new Mat();
                hueMat.copyTo(subset, mask);
                Core.MinMaxLocResult minMaxResult = Core.minMaxLoc(subset);

                if ((minMaxResult.maxVal - minMaxResult.minVal) < accuracy)
                    // && (lum_max - lum_min < accuracy) && (sat_max - sat_min < accuracy) )
                    result_pixel = (byte) 128;
                else
                    result_pixel = (byte) 0;

                // Log.i(Constants.TAG, "Completed Mask at " + row + "," + col);
                Core.rectangle(mask, new Point(row, col), new Point(row + span, col + span),
                        new Scalar(0, 0, 0));
            }

            if ((col >= span / 2) && (row >= span / 2))
                monochromaticByteArray[(row - span / 2) * numColumns + (col - span / 2)] = result_pixel;
        }
        Log.i(Constants.TAG, "Completed Row: " + row);
    }

    monochromatic_image.put(0, 0, monochromaticByteArray);
    Log.i(Constants.TAG, "Completed MonoChromatic CV");
    // System.exit(0);

    return monochromatic_image;
}
From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java
License:Open Source License
/**
 * Attempt to find matches of the given template within the current camera
 * frame. Matches are returned as TemplateMatch objects which contain
 * a Location in Camera coordinates. The results are sorted best score
 * to worst score.
 *
 * @param template
 * @return
 */
public List<TemplateMatch> getTemplateMatches(BufferedImage template) {
    // TODO: ROI
    BufferedImage image = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    template = OpenCvUtils.convertBufferedImage(template, BufferedImage.TYPE_BYTE_GRAY);
    image = OpenCvUtils.convertBufferedImage(image, BufferedImage.TYPE_BYTE_GRAY);

    Mat templateMat = OpenCvUtils.toMat(template);
    Mat imageMat = OpenCvUtils.toMat(image);
    Mat resultMat = new Mat();

    Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);

    Mat debugMat = null;
    if (logger.isDebugEnabled()) {
        debugMat = imageMat.clone();
    }

    MinMaxLocResult mmr = Core.minMaxLoc(resultMat);
    double maxVal = mmr.maxVal;

    // TODO: Externalize?
    double threshold = 0.7f;
    double corr = 0.85f;

    double rangeMin = Math.max(threshold, corr * maxVal);
    double rangeMax = maxVal;

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    for (Point point : matMaxima(resultMat, rangeMin, rangeMax)) {
        TemplateMatch match = new TemplateMatch();
        int x = point.x;
        int y = point.y;
        match.score = resultMat.get(y, x)[0] / maxVal;

        if (logger.isDebugEnabled()) {
            Core.rectangle(debugMat, new org.opencv.core.Point(x, y),
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    new Scalar(255));
            Core.putText(debugMat, "" + match.score,
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255));
        }

        Location offsets = getPixelCenterOffsets(x + (templateMat.cols() / 2),
                y + (templateMat.rows() / 2));
        match.location = camera.getLocation().subtract(offsets);
        matches.add(match);
    }

    Collections.sort(matches, new Comparator<TemplateMatch>() {
        @Override
        public int compare(TemplateMatch o1, TemplateMatch o2) {
            return ((Double) o2.score).compareTo(o1.score);
        }
    });

    saveDebugImage("template", templateMat);
    saveDebugImage("camera", imageMat);
    saveDebugImage("result", resultMat);
    saveDebugImage("debug", debugMat);

    return matches;
}