Example usage for org.opencv.imgproc Imgproc matchTemplate

List of usage examples for org.opencv.imgproc Imgproc matchTemplate

Introduction

On this page you can find example usage for org.opencv.imgproc Imgproc matchTemplate.

Prototype

public static void matchTemplate(Mat image, Mat templ, Mat result, int method) 

Source Link

Usage

From source file:OCV_MatchTemplate.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    // Wrap the source image pixels in an OpenCV Mat (8-bit, single channel).
    byte[] srcPixels = (byte[]) imp_src.getChannelProcessor().getPixels();
    int srcW = imp_src.getWidth();
    int srcH = imp_src.getHeight();
    Mat srcMat = new Mat(srcH, srcW, CvType.CV_8UC1);
    srcMat.put(0, 0, srcPixels);

    // Wrap the template image pixels the same way.
    byte[] tmpPixels = (byte[]) imp_tmp.getChannelProcessor().getPixels();
    int tmpW = imp_tmp.getWidth();
    int tmpH = imp_tmp.getHeight();
    Mat tmpMat = new Mat(tmpH, tmpW, CvType.CV_8UC1);
    tmpMat.put(0, 0, tmpPixels);

    // Destination: matchTemplate produces a (W - w + 1) x (H - h + 1)
    // float score map, one score per template placement.
    String dstTitle = WindowManager.getUniqueName(title_src + "_MatchTemplate");
    int dstW = srcW - tmpW + 1;
    int dstH = srcH - tmpH + 1;
    ImagePlus imp_dst = new ImagePlus(dstTitle, new FloatProcessor(dstW, dstH));
    float[] dstPixels = (float[]) imp_dst.getChannelProcessor().getPixels();
    Mat dstMat = new Mat();

    // Run the matching and copy the scores back into the ImageJ buffer.
    Imgproc.matchTemplate(srcMat, tmpMat, dstMat, TYPE_VAL[ind_type]);
    dstMat.get(0, 0, dstPixels);
    imp_dst.show();

    // TM_SQDIFF_NORMED scores are "lower is better"; invert them so the
    // displayed convention matches the other matching methods.
    if (TYPE_VAL[ind_type] == Imgproc.TM_SQDIFF_NORMED) {
        substracted_from_one(dstPixels);
    }

    IJ.run(imp_dst, "Enhance Contrast", "saturated=0.35");

    // Optionally report the match results.
    if (enResult) {
        if (enSearchMax) {
            showData_enSearchMaxPoint(imp_dst, thr_res, tmpW, tmpH);
        } else {
            showData(dstPixels, dstW, dstH, tmpW, tmpH);
        }
    }
}

From source file:ch.hslu.pren.t37.camera.BildAuswertungKorb.java

/**
 * Locates the basket template inside the camera image, draws the best
 * match into an output image, and returns the horizontal offset (in
 * pixels) between the basket centre and the image centre. A negative
 * value means the robot should turn right.
 */
public int bildAuswerten() {

    // Image in which to search
    String inFile = "../camera.jpg";
    // Template image that is searched for inside inFile
    String templateFile = "../Bilder/korb.jpg";
    // The solution is rendered into this image
    String outFile = "../LoesungsBild.jpg";
    // Matching method
    int match_method = Imgproc.TM_CCOEFF_NORMED;

    // Load the scene image and the template
    Mat img = Highgui.imread(inFile, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat templ = Highgui.imread(templateFile, Highgui.CV_LOAD_IMAGE_COLOR);

    // Result matrix: one score per possible template placement
    int result_cols = img.cols() - templ.cols() + 1;
    int result_rows = img.rows() - templ.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    // Match and normalize the scores to [0, 1]
    Imgproc.matchTemplate(img, templ, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // Locate the best match
    Core.MinMaxLocResult mmr = Core.minMaxLoc(result);

    // For SQDIFF-based methods the best match is the minimum; otherwise the maximum
    Point matchLoc;
    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
        matchLoc = mmr.minLoc;
    } else {
        matchLoc = mmr.maxLoc;
    }

    // Draw the match rectangle into the scene image
    Core.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()),
            new Scalar(0, 255, 0), 10);

    // Corners of the matched region (only the top edge is needed below;
    // the unused bottom corners were removed)
    Point topLeft = new Point(matchLoc.x, matchLoc.y);
    Point topRight = new Point(matchLoc.x + templ.cols(), matchLoc.y);

    // Save the annotated image
    Highgui.imwrite(outFile, img);

    // Compute the horizontal offset of the basket centre from the image
    // centre. Use the in-memory image instead of re-reading the file we
    // just wrote; 2.0 avoids integer division for odd widths.
    double mittePicture = img.width() / 2.0;
    double mitteKorb = topLeft.x + (topRight.x - topLeft.x) / 2;
    double differenz = mitteKorb - mittePicture;

    logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Korb: " + mitteKorb);
    logger.log(PrenLogger.LogLevel.DEBUG, "Mitte Bild: " + mittePicture);
    logger.log(PrenLogger.LogLevel.DEBUG,
            "Differenz: " + differenz + "\nWenn Differnez negativ, nach rechts drehen");

    return (int) differenz;
}

From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java

License:Apache License

/**
 * Matches a concrete point of the eye inside {@code area} using template
 * matching with TM_SQDIFF_NORMED (lower score = better match) and draws
 * the matched rectangle into the RGBA frame.
 *
 * @param area          search region inside the gray frame
 * @param builtTemplate previously captured eye template; skipped when empty
 * @param matrixGray    full grayscale frame to search in
 * @param matrixRGBA    full color frame the match is drawn onto
 */
private static void matchEye(Rect area, Mat builtTemplate, Mat matrixGray, Mat matrixRGBA) {
    Point matchLoc;
    try {
        // When there is no built template yet, there is nothing to match.
        if (builtTemplate.cols() == 0 || builtTemplate.rows() == 0) {
            return;
        }
        Mat submatGray = matrixGray.submat(area);
        int cols = submatGray.cols() - builtTemplate.cols() + 1;
        int rows = submatGray.rows() - builtTemplate.rows() + 1;
        // Bug fix: Mat takes (rows, cols), and matchTemplate produces a
        // single-channel float score map, not an 8-bit one.
        Mat outputTemplateMat = new Mat(rows, cols, CvType.CV_32FC1);

        Imgproc.matchTemplate(submatGray, builtTemplate, outputTemplateMat, Imgproc.TM_SQDIFF_NORMED);
        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(outputTemplateMat);
        // TM_SQDIFF_NORMED: the minimum location is the best match.
        matchLoc = minMaxLocResult.minLoc;
        // Translate from search-region coordinates back to full-frame coordinates.
        Point matchLocTx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
        Point matchLocTy = new Point(matchLoc.x + builtTemplate.cols() + area.x,
                matchLoc.y + builtTemplate.rows() + area.y);

        FaceDrawerOpenCV.drawMatchedEye(matchLocTx, matchLocTy, matrixRGBA);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Runs template matching of the object image over the scene image and
 * returns the min/max locations of the resulting score map.
 */
private MinMaxLocResult getBestTemplateMatching(int matchMethod, Mat sceneImageMat, Mat objectImageMat) {
    // The score map has one entry per possible template placement.
    int scoreCols = sceneImageMat.cols() - objectImageMat.cols() + 1;
    int scoreRows = sceneImageMat.rows() - objectImageMat.rows() + 1;
    Mat scores = new Mat(scoreRows, scoreCols, CvType.CV_32FC1);

    // Compute the match scores with the requested method.
    Imgproc.matchTemplate(sceneImageMat, objectImageMat, scores, matchMethod);

    // Let the caller pick min or max depending on the method used.
    return Core.minMaxLoc(scores);
}

From source file:com.sikulix.core.Finder.java

License:Open Source License

// Runs template matching of 'probe' against 'base' and returns the raw score
// map. Both branches end up with a "higher is better" convention: the SQDIFF
// branch is inverted via (1 - score) at the end.
private Mat doFindMatch(Element target, Mat base, Mat probe) {
    if (SX.isNull(probe)) {
        probe = target.getContent(); // fall back to the target's own image
    }
    Mat result = new Mat();
    Mat plainBase = base;
    Mat plainProbe = probe;
    if (!target.isPlainColor()) {
        // Textured target: normalized cross-correlation coefficient.
        Imgproc.matchTemplate(base, probe, result, Imgproc.TM_CCOEFF_NORMED);
    } else {
        if (target.isBlack()) {
            // NOTE(review): plainBase/plainProbe alias base/probe, so these
            // bitwise_not calls invert the caller's Mats in place — confirm
            // that mutating the inputs is intended.
            Core.bitwise_not(base, plainBase);
            Core.bitwise_not(probe, plainProbe);
        }
        // Plain-color target: squared-difference matching works better here.
        Imgproc.matchTemplate(plainBase, plainProbe, result, Imgproc.TM_SQDIFF_NORMED);
        // Convert "lower is better" SQDIFF scores into "higher is better".
        Core.subtract(Mat.ones(result.size(), CvType.CV_32F), result, result);
    }
    return result;
}

From source file:com.trandi.opentld.tld.LKTracker.java

License:Apache License

/**
 * Computes, for every successfully tracked point, the normalized
 * cross-correlation between the patch around its previous position and the
 * patch around its current position.
 *
 * @return one similarity score per point; 0 for points whose tracking failed
 */
private float[] normCrossCorrelation(final Mat lastImg, final Mat currentImg, final Point[] lastPoints,
        final Point[] currentPoints, final byte[] status) {
    final float[] similarity = new float[lastPoints.length];

    final Mat prevPatch = new Mat(CROSS_CORR_PATCH_SIZE, CvType.CV_8U);
    final Mat currPatch = new Mat(CROSS_CORR_PATCH_SIZE, CvType.CV_8U);
    // Patch and template are the same size, so the score map is 1x1.
    final Mat score = new Mat(new Size(1, 1), CvType.CV_32F);

    for (int idx = 0; idx < lastPoints.length; idx++) {
        // Points that failed tracking keep a zero similarity.
        if (status[idx] != 1) {
            similarity[idx] = 0f;
            continue;
        }
        Imgproc.getRectSubPix(lastImg, CROSS_CORR_PATCH_SIZE, lastPoints[idx], prevPatch);
        Imgproc.getRectSubPix(currentImg, CROSS_CORR_PATCH_SIZE, currentPoints[idx], currPatch);
        Imgproc.matchTemplate(prevPatch, currPatch, score, Imgproc.TM_CCOEFF_NORMED);
        similarity[idx] = Util.getFloat(0, 0, score);
    }

    return similarity;
}

From source file:emotion.Eye.java

/**
 * Locates the outer eye corner inside the given eye region by template
 * matching and stores the result (in whole-face coordinates) in
 * {@code EyeRegion.rightOuterEyeCorner} / {@code EyeRegion.leftOuterEyeCorner}.
 */
private void templatingOuterCorner(Mat eyeRegion, boolean rightEyeFlag) {
    // Template of a right outer eye corner; flipped horizontally for the left eye.
    Mat template = imread("src\\Templates\\rightOuter.jpg", CV_8UC1);
    // Bug fix: Mat takes (rows, cols), i.e. (height, width).
    Mat temp = new Mat(eyeRegion.height(), eyeRegion.width(), CV_8UC1);
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    // Only the outer half of the eye region can contain the outer corner.
    temp = rightEyeFlag
            ? new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()));
    // matchTemplate reallocates this to a single-channel float score map,
    // so no manual pre-sizing (with the wrong type) is needed.
    Mat result = new Mat();

    if (rightEyeFlag) {
        imwrite("rightEyeForOuterTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        // (9,9) are the coordinates of the eye corner inside the template.
        Point outerCorner = new Point(maxVal.maxLoc.x + 9, maxVal.maxLoc.y + 9);

        // Translate to whole-face coordinates.
        outerCorner.y += Eye.rightRect.y;
        outerCorner.x += Eye.rightRect.x;
        outerCorner.x += temp.width(); // only the right half of the right eye was examined
        EyeRegion.rightOuterEyeCorner = outerCorner;
    } else {
        imwrite("leftEyeForOuterTemplating.jpg", temp);
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        // (4,9) are the corner coordinates inside the flipped template.
        Point outerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 9);
        // Translate to whole-face coordinates.
        outerCorner.y += Eye.leftRect.y;
        outerCorner.x += Eye.leftRect.x;
        EyeRegion.leftOuterEyeCorner = outerCorner;
    }
}

From source file:emotion.Eye.java

/**
 * Locates the inner eye corner inside the given eye region by template
 * matching and stores the result (in whole-face coordinates) in
 * {@code EyeRegion.rightInnerEyeCorner} / {@code EyeRegion.leftInnerEyeCorner}.
 */
private void templatingInnerCorner(Mat eyeRegion, boolean rightEyeFlag) {
    // Template of a right inner eye corner; flipped horizontally for the left eye.
    Mat template = imread("src\\Templates\\rightInner.jpg", CV_8UC1);
    // Bug fix: Mat takes (rows, cols), i.e. (height, width).
    Mat temp = new Mat(eyeRegion.height(), eyeRegion.width(), CV_8UC1);
    cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY);
    // Only the inner half of the eye region can contain the inner corner.
    temp = rightEyeFlag ? new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height()))
            : new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height()));
    // matchTemplate reallocates this to a single-channel float score map,
    // so no manual pre-sizing (with the wrong type) is needed.
    Mat result = new Mat();

    if (rightEyeFlag) {
        imwrite("template4righteye.jpg", template);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);
        // (4,7) are the coordinates of the eye corner inside the template.
        Point innerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 7);

        StaticFunctions.drawCross(temp, innerCorner, StaticFunctions.Features.EYE_CORNERS);
        imwrite("rightEyeForInnerTemplating.jpg", temp);
        // Translate to whole-face coordinates (only the left half of the
        // right eye was examined, so no extra x offset is needed).
        innerCorner.y += Eye.rightRect.y;
        innerCorner.x += Eye.rightRect.x;
        EyeRegion.rightInnerEyeCorner = innerCorner;
    } else {
        imwrite("leftEyeForInnerTemplating.jpg", temp);
        Core.flip(template, template, 1);
        Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED);
        Core.normalize(result, result, 0, 100, Core.NORM_MINMAX);
        Core.MinMaxLocResult maxVal = Core.minMaxLoc(result);

        // (8,7) are the corner coordinates inside the flipped template.
        Point innerCorner = new Point(maxVal.maxLoc.x + 8, maxVal.maxLoc.y + 7);

        // Translate to whole-face coordinates.
        innerCorner.y += Eye.leftRect.y;
        innerCorner.x += Eye.leftRect.x;
        // Only the right half of the left eye was examined.
        innerCorner.x += temp.width();
        EyeRegion.leftInnerEyeCorner = innerCorner;
    }
}

From source file:es.ugr.osgiliart.features.opencv.MatchImage.java

License:Open Source License

// Computes a similarity score between the image at 'path' and the cached
// per-channel template (templateChannels) using normalized cross-correlation.
public double match(String path) {
    Mat img = Highgui.imread(path);
    // Resize to the fixed comparison size so image and template align.
    Mat resizedImg = new Mat(SIZE, SIZE, img.type());
    //Mat blurredImg = new Mat();
    Imgproc.resize(img, resizedImg, new Size(SIZE, SIZE));
    //Imgproc.blur(resizedImg, blurredImg, new Size(FILTER_SIZE,FILTER_SIZE) );

    ArrayList<Mat> channels = new ArrayList<Mat>();

    // Split into per-channel planes; matching is done channel by channel.
    Core.split(resizedImg, channels);

    // NOTE(review): never incremented on the active code path (only in the
    // commented-out pixel loop below).
    int conta = 0;

    double corrcoef = 0;
    // NOTE(review): the loop only visits channel 0 (i < 1), yet the sum is
    // divided by 3.0 below — either the loop bound or the divisor looks
    // inconsistent; confirm the intended behavior before changing either.
    for (int i = 0; i < 1; ++i) {
        /*      
              for(int px = 0; px < SIZE; px++){
                 for(int py = 0; py < SIZE; py++){
                    if(resizedImg.get(px, py)[i]!=0.0){
          double im_orig = templateChannels.get(i).get(px, py)[0];
          double im_indi = resizedImg.get(px, py)[i];
                  
          corrcoef +=  Math.pow(im_orig ,2) - Math.pow(im_indi, 2);
          conta++;
                    }
                            
                            
                 }
              }*/

        // Image and template are both SIZE x SIZE, so the score map is 1x1.
        Mat result = new Mat();
        Imgproc.matchTemplate(channels.get(i), templateChannels.get(i), result, Imgproc.TM_CCOEFF_NORMED);
        //Imgproc.matchTemplate(channels.get(i), templateChannels.get(i), result, Imgproc.TM_SQDIFF);
        corrcoef += result.get(0, 0)[0];
        //corrcoef += result.get(0, 0)[0];
    }
    corrcoef /= 3.0;
    //return (corrcoef/conta/(255*3));
    return (corrcoef);
}

From source file:imageanalysis.Analyzer.java

/**
 * Grabs the current screen from the clipboard, crops a region of interest
 * centred on the given point, and checks whether template matching against
 * the bone-food pattern locates it exactly at the reference point.
 */
public boolean compareRoiAgainstPattern(int xcoord, int ycoord, int width, int height) {
    screen = ImgTools.getImageFromClipboard();

    // Region of interest centred on the chosen mouse point.
    Rect roi = new Rect(xcoord - width / 2, ycoord - height / 2, width, height);
    Mat candidate = screen.submat(roi);

    // Convert to grayscale before matching.
    Imgproc.cvtColor(candidate, candidate, Imgproc.COLOR_BGR2GRAY);

    // Reference pattern to compare against.
    Mat pattern = Patterns.getBonefoodPattern();

    // TM_SQDIFF: the best match is at the minimum of the score map.
    Mat scores = new Mat();
    Imgproc.matchTemplate(candidate, pattern, scores, Imgproc.TM_SQDIFF);
    Point best = Core.minMaxLoc(scores).minLoc;

    return best.equals(refPoint);
}