Example usage for org.opencv.imgproc Imgproc matchTemplate

List of usage examples for org.opencv.imgproc Imgproc matchTemplate

Introduction

On this page you can find example usage for org.opencv.imgproc Imgproc matchTemplate.

Prototype

public static void matchTemplate(Mat image, Mat templ, Mat result, int method) 

Usage
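
Before the project-specific snippets below, here is a minimal, self-contained sketch of the typical matchTemplate flow: load a scene image and a template, run the match, and locate the best score with Core.minMaxLoc. The file names scene.png and logo.png are placeholders, and the snippet assumes an OpenCV 2.4-style Java binding (Highgui.imread), in line with the examples on this page.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

public class MatchTemplateDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Placeholder paths; flag 0 loads both images as single-channel grayscale.
        Mat image = Highgui.imread("scene.png", 0);
        Mat templ = Highgui.imread("logo.png", 0);

        // matchTemplate allocates the (W-w+1) x (H-h+1) result matrix itself.
        Mat result = new Mat();
        Imgproc.matchTemplate(image, templ, result, Imgproc.TM_CCOEFF_NORMED);

        // For TM_CCOEFF_NORMED the best match is the maximum; for TM_SQDIFF* it is the minimum.
        Core.MinMaxLocResult mmr = Core.minMaxLoc(result);
        Point topLeft = mmr.maxLoc;
        System.out.println("Best match at " + topLeft + ", score " + mmr.maxVal);
    }
}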

From source file:imageanalysis.Analyzer.java

public Point checkForX() {
    screen = ImgTools.getImageFromClipboard();

    Mat image = screen.clone();

    // Preprocessing
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY);

    // Referent pattern
    Mat xPattern = Patterns.getXbuttPattern();

    // Match template
    // ... result should be the refPoint
    Mat result = new Mat();
    Imgproc.matchTemplate(image, xPattern, result, Imgproc.TM_SQDIFF);

    Core.MinMaxLocResult mm = Core.minMaxLoc(result);

    Point p = mm.minLoc;
    double val = mm.minVal;

    if (val < 1000000) {
        p.x += 10;
        p.y += 10;
        return p;
    } else {
        return null;
    }
}

From source file:io.appium.java_client.ScreenshotState.java

License:Apache License

/**
 * Compares two valid java bitmaps and calculates similarity score between them.
 *
 * @param refImage   reference image
 * @param tplImage   template
 * @param resizeMode one of possible enum values. Set it either to <em>TEMPLATE_TO_REFERENCE_RESOLUTION</em> or
 *                   <em>REFERENCE_TO_TEMPLATE_RESOLUTION</em> if given bitmaps have different dimensions
 * @return similarity score in the range [-1.0, 1.0]; 1.0 is returned if the images are equal
 * @throws ScreenshotComparisonError if provided images are not valid or have
 *                                   different resolution, but resizeMode has been set to <em>NO_RESIZE</em>
 */
public static double getOverlapScore(BufferedImage refImage, BufferedImage tplImage, ResizeMode resizeMode) {
    Mat ref = prepareImageForComparison(refImage);
    if (ref.empty()) {
        throw new ScreenshotComparisonError("Reference image cannot be converted for further comparison");
    }
    Mat tpl = prepareImageForComparison(tplImage);
    if (tpl.empty()) {
        throw new ScreenshotComparisonError("Template image cannot be converted for further comparison");
    }
    switch (resizeMode) {
    case TEMPLATE_TO_REFERENCE_RESOLUTION:
        tpl = resizeFirstMatrixToSecondMatrixResolution(tpl, ref);
        break;
    case REFERENCE_TO_TEMPLATE_RESOLUTION:
        ref = resizeFirstMatrixToSecondMatrixResolution(ref, tpl);
        break;
    default:
        // do nothing
    }

    if (ref.width() != tpl.width() || ref.height() != tpl.height()) {
        throw new ScreenshotComparisonError(
                "Resolutions of template and reference images are expected to be equal. "
                        + "Try different resizeMode value.");
    }

    Mat res = new Mat(ref.rows() - tpl.rows() + 1, ref.cols() - tpl.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(ref, tpl, res, Imgproc.TM_CCOEFF_NORMED);
    return Core.minMaxLoc(res).maxVal;
}
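
As a usage sketch (not part of the original source), this helper can gate a UI test on visual similarity. The file paths are placeholders, ResizeMode is assumed to be the enum declared alongside this method, and the surrounding test method is assumed to declare IOException for the ImageIO calls.

    // Compare two screenshots and treat a normalized cross-correlation score
    // close to 1.0 as "visually identical".
    BufferedImage reference = ImageIO.read(new File("expected.png"));
    BufferedImage actual = ImageIO.read(new File("actual.png"));

    double score = getOverlapScore(reference, actual, ResizeMode.TEMPLATE_TO_REFERENCE_RESOLUTION);
    if (score < 0.99) {
        throw new AssertionError("Screens differ, similarity score = " + score);
    }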

From source file:logic.helpclass.Util.java

/**
 * Track the template within the image by matching it inside a search window
 * centered on the previous location.
 * @param grayFrame current grayscale frame
 * @param rect previous template location; updated in place when a match is found
 * @param temp template patch to search for
 * @return the updated rectangle, or null if the search window falls outside the frame
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    // Result size is (search height - template height + 1) x (search width - template width + 1)
    Mat dst = new Mat(searchRect.height - temp.height() + 1, searchRect.width - temp.width() + 1,
            CvType.CV_32FC1);

    if ((searchRect.x < 0 || searchRect.y < 0) || (searchRect.x + searchRect.width > grayFrame.cols()
            || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);

    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    // Check the new location: if the coordinates jump too much, keep the previous
    // location instead (placeholder condition: the new location is always accepted).
    if (true) {
        rect.x = (int) (searchRect.x + result.minLoc.x);
        rect.y = (int) (searchRect.y + result.minLoc.y);
        return rect;
    } else {
        return null;
    }
}
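
A hedged usage sketch (not from the original project): the helper is naturally called once per frame, feeding the previous rectangle back in. Here firstGrayFrame, frames and initialRect are placeholders for whatever supplies the video data.

    // Capture the template patch once from the first frame, then update the
    // rectangle frame by frame; a null return means the search window left the image.
    Mat temp = firstGrayFrame.submat(initialRect).clone();
    Rect current = initialRect;
    for (Mat grayFrame : frames) {
        Rect updated = Util.trackTemplate(grayFrame, current, temp);
        if (updated == null) {
            break;
        }
        current = updated;
    }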

From source file:opencv.CaptchaDetection.java

/***
 * Recognizes the digits in the given list of character regions by template
 * matching against the preloaded digit samples.
 * @param src list of single-character image regions
 * @return the recognized digit string
 */
private static String dect_number(List<Mat> src) {
    String answer = "";

    for (Mat numRoi : src) {
        Mat zoomNum = new Mat(numRoi.rows() * 2, numRoi.cols() * 2, CvType.CV_8UC1, new Scalar(0));
        numRoi.copyTo(
                zoomNum.submat(new Rect(numRoi.cols() / 2, numRoi.rows() / 2, numRoi.cols(), numRoi.rows())));

        double matchMin = Double.MAX_VALUE;
        int matchSample = 0;

        for (Map.Entry<Integer, List<Mat>> iter : sampleMat.entrySet()) {
            for (Mat sample : iter.getValue()) {
                int result_cols = zoomNum.cols() - sample.cols() + 1;
                int result_rows = zoomNum.rows() - sample.rows() + 1;

                Mat resultImg = new Mat(result_rows, result_cols, CvType.CV_32FC1);

                Imgproc.matchTemplate(zoomNum, sample, resultImg, Imgproc.TM_SQDIFF);

                Core.MinMaxLocResult mmr = Core.minMaxLoc(resultImg);

                if (matchMin > mmr.minVal) {
                    matchMin = mmr.minVal;
                    matchSample = iter.getKey();
                }
            }
        }
        answer += matchSample / 2;
        //out.println("NumRio\tmatch sample :  " + matchSample + "\tmatch value : " + matchMin);
    }

    //out.println("Answer is : " + answer);
    return answer;
}

From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java

License:Open Source License

/**
 * Attempt to find matches of the given template within the current camera
 * frame. Matches are returned as TemplateMatch objects which contain
 * a Location in Camera coordinates. The results are sorted best score
 * to worst score.
 * @param template the template image to search for
 * @return the list of matches, sorted by descending score
 */
public List<TemplateMatch> getTemplateMatches(BufferedImage template) {
    // TODO: ROI
    BufferedImage image = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    template = OpenCvUtils.convertBufferedImage(template, BufferedImage.TYPE_BYTE_GRAY);
    image = OpenCvUtils.convertBufferedImage(image, BufferedImage.TYPE_BYTE_GRAY);

    Mat templateMat = OpenCvUtils.toMat(template);
    Mat imageMat = OpenCvUtils.toMat(image);
    Mat resultMat = new Mat();

    Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);

    Mat debugMat = null;
    if (logger.isDebugEnabled()) {
        debugMat = imageMat.clone();
    }

    MinMaxLocResult mmr = Core.minMaxLoc(resultMat);
    double maxVal = mmr.maxVal;

    // TODO: Externalize?
    double threshold = 0.7f;
    double corr = 0.85f;

    double rangeMin = Math.max(threshold, corr * maxVal);
    double rangeMax = maxVal;

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    for (Point point : matMaxima(resultMat, rangeMin, rangeMax)) {
        TemplateMatch match = new TemplateMatch();
        int x = point.x;
        int y = point.y;
        match.score = resultMat.get(y, x)[0] / maxVal;

        if (logger.isDebugEnabled()) {
            Core.rectangle(debugMat, new org.opencv.core.Point(x, y),
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()), new Scalar(255));
            Core.putText(debugMat, "" + match.score,
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255));
        }

        Location offsets = getPixelCenterOffsets(x + (templateMat.cols() / 2), y + (templateMat.rows() / 2));
        match.location = camera.getLocation().subtract(offsets);
        matches.add(match);
    }

    Collections.sort(matches, new Comparator<TemplateMatch>() {
        @Override
        public int compare(TemplateMatch o1, TemplateMatch o2) {
            return ((Double) o2.score).compareTo(o1.score);
        }
    });

    saveDebugImage("template", templateMat);
    saveDebugImage("camera", imageMat);
    saveDebugImage("result", resultMat);
    saveDebugImage("debug", debugMat);

    return matches;
}
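
The matMaxima helper used above is not reproduced in this excerpt. Purely as an illustrative stand-in (not the OpenPnP implementation, which presumably also applies non-maximum suppression), a naive version could collect every result position whose score falls inside [rangeMin, rangeMax]:

    // Naive stand-in for matMaxima: return every result-matrix position whose
    // score lies within [rangeMin, rangeMax]. A real implementation would keep
    // only local maxima so neighbouring pixels do not yield duplicate matches.
    static List<java.awt.Point> matMaxima(Mat result, double rangeMin, double rangeMax) {
        List<java.awt.Point> points = new ArrayList<>();
        for (int y = 0; y < result.rows(); y++) {
            for (int x = 0; x < result.cols(); x++) {
                double score = result.get(y, x)[0];
                if (score >= rangeMin && score <= rangeMax) {
                    points.add(new java.awt.Point(x, y));
                }
            }
        }
        return points;
    }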

From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java

License:Open Source License

@Override
public Point[] locateTemplateMatches(int roiX, int roiY, int roiWidth, int roiHeight, int coiX, int coiY,
        BufferedImage templateImage_) throws Exception {
    BufferedImage cameraImage_ = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    templateImage_ = OpenCvUtils.convertBufferedImage(templateImage_, BufferedImage.TYPE_INT_ARGB);
    cameraImage_ = OpenCvUtils.convertBufferedImage(cameraImage_, BufferedImage.TYPE_INT_ARGB);

    Mat templateImage = OpenCvUtils.toMat(templateImage_);
    Mat cameraImage = OpenCvUtils.toMat(cameraImage_);
    Mat roiImage = new Mat(cameraImage, new Rect(roiX, roiY, roiWidth, roiHeight));

    // http://stackoverflow.com/questions/17001083/opencv-template-matching-example-in-android
    Mat resultImage = new Mat(roiImage.rows() - templateImage.rows() + 1,
            roiImage.cols() - templateImage.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(roiImage, templateImage, resultImage, Imgproc.TM_CCOEFF);

    MinMaxLocResult mmr = Core.minMaxLoc(resultImage);

    org.opencv.core.Point matchLoc = mmr.maxLoc;
    double matchValue = mmr.maxVal;

    // TODO: Figure out certainty and how to filter on it.

    logger.debug(
            String.format("locateTemplateMatches certainty %f at %f, %f", matchValue, matchLoc.x, matchLoc.y));
    locateTemplateMatchesDebug(roiImage, templateImage, matchLoc);

    return new Point[] { new Point(((int) matchLoc.x) + roiX, ((int) matchLoc.y) + roiY) };
}

From source file:org.sikuli.script.Finder.java

License:MIT License

private Mat doFindMatch(Probe probe, Mat base, Mat target) {
    Mat res = new Mat();
    Mat bi = new Mat();
    Mat pi = new Mat();
    if (!probe.img.isPlainColor()) {
        Imgproc.matchTemplate(base, target, res, Imgproc.TM_CCOEFF_NORMED);
    } else {
        if (probe.img.isBlack()) {
            Core.bitwise_not(base, bi);
            Core.bitwise_not(target, pi);
        } else {
            bi = base;
            pi = target;
        }
        Imgproc.matchTemplate(bi, pi, res, Imgproc.TM_SQDIFF_NORMED);
        Core.subtract(Mat.ones(res.size(), CvType.CV_32F), res, res);
    }
    return res;
}

From source file:org.sikuli.script.ImageFind.java

License:MIT License

private Core.MinMaxLocResult doFindMatch(Mat base, Mat probe) {
    Mat res = new Mat();
    Mat bi = new Mat();
    Mat pi = new Mat();
    if (!isPlainColor) {
        Imgproc.matchTemplate(base, probe, res, Imgproc.TM_CCOEFF_NORMED);
    } else {
        if (isBlack) {
            Core.bitwise_not(base, bi);
            Core.bitwise_not(probe, pi);
        } else {
            bi = base;
            pi = probe;
        }
        Imgproc.matchTemplate(bi, pi, res, Imgproc.TM_SQDIFF_NORMED);
        Core.subtract(Mat.ones(res.size(), CvType.CV_32F), res, res);
    }
    return Core.minMaxLoc(res);
}

From source file:Recognizer.Recognizer.java

public Image TemplateMatching(Image imQuery, Image imDB, int match_method) {
    System.out.println("Running Template Matching ...");

    //Mat img = Highgui.imread(inFile); // Image in which area has to be searched
    //Mat template_img = Highgui.imread(templateFile); // Search Image

    Mat matQuery = imQuery.Image3CtoMat_CV();
    Mat matDB = imDB.Image3CtoMat_CV();

    Mat hsvQ = new Mat(), hsvDB = new Mat();

    Imgproc.cvtColor(matQuery, hsvQ, Imgproc.COLOR_RGB2HSV);
    Imgproc.cvtColor(matDB, hsvDB, Imgproc.COLOR_RGB2HSV);

    // Create result image matrix
    int resultImg_cols = matDB.cols() - matQuery.cols() + 1;
    int resultImg_rows = matDB.rows() - matQuery.rows() + 1;

    Mat matRes = new Mat(resultImg_rows, resultImg_cols, CvType.CV_32FC1);

    // Template Matching with Normalization
    Imgproc.matchTemplate(hsvDB, hsvQ, matRes, match_method);
    Core.normalize(matRes, matRes, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // / Localizing the best match with minMaxLoc
    Core.MinMaxLocResult Location_Result = Core.minMaxLoc(matRes);
    Point matchLocation;

    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
        matchLocation = Location_Result.minLoc;
    } else {
        matchLocation = Location_Result.maxLoc;
    }

    // Display Area by Rectangle
    Core.rectangle(matDB, matchLocation,
            new Point(matchLocation.x + matQuery.cols(), matchLocation.y + matQuery.rows()),
            new Scalar(0, 255, 0));

    Image imOut = new Image(matDB.width(), matDB.height());
    //Image imOut = new Image(matQuery.cols(), matQuery.rows());

    //Mat m = new Mat(matDB);

    //m =//matDB.submat((int)matchLocation.y, (int)matchLocation.y + matQuery.rows(),(int)matchLocation.x, (int)matchLocation.x + matQuery.cols());

    imOut.Mat_CVtoImage3C(matDB);

    System.out.println("Location: " + Location_Result.minLoc.x + " " + Location_Result.minLoc.y + "   "
            + Location_Result.maxLoc.x + " " + Location_Result.maxLoc.y);

    return imOut;
}

From source file:syncleus.dann.data.video.LKTracker.java

License:Apache License

/**
 * @return per-point similarity scores (normalized cross-correlation)
 */
private float[] normCrossCorrelation(final Mat lastImg, final Mat currentImg, final Point[] lastPoints,
        final Point[] currentPoints, final byte[] status) {
    final float[] similarity = new float[lastPoints.length];

    final Mat lastPatch = new Mat(CROSS_CORR_PATCH_SIZE, CvType.CV_8U);
    final Mat currentPatch = new Mat(CROSS_CORR_PATCH_SIZE, CvType.CV_8U);
    final Mat res = new Mat(new Size(1, 1), CvType.CV_32F);

    for (int i = 0; i < lastPoints.length; i++) {
        if (status[i] == 1) {
            Imgproc.getRectSubPix(lastImg, CROSS_CORR_PATCH_SIZE, lastPoints[i], lastPatch);
            Imgproc.getRectSubPix(currentImg, CROSS_CORR_PATCH_SIZE, currentPoints[i], currentPatch);
            Imgproc.matchTemplate(lastPatch, currentPatch, res, Imgproc.TM_CCOEFF_NORMED);

            similarity[i] = TLDUtil.getFloat(0, 0, res);
        } else {
            similarity[i] = 0f;
        }
    }

    return similarity;
}