Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usages of the org.opencv.core Scalar constructor.

Prototype

public Scalar(double v0, double v1, double v2) 

Source Link

Usage

From source file:org.usfirst.frc.team2084.CMonster2016.vision.VisionParameters.java

License:Open Source License

/**
 * Builds the upper HSV threshold Scalar used for boulder detection, reading
 * each channel's maximum from the vision parameters with the compiled-in
 * defaults as fallback.
 *
 * @return a Scalar of (H max, S max, V max)
 */
public static Scalar getBoulderMaxThreshold() {
    // BUG FIX: the V channel was previously truncated with an (int) cast while
    // H and S were passed as doubles; Scalar takes three doubles, so the cast
    // silently dropped fractional threshold values and was inconsistent.
    return new Scalar(VISION_PARAMETERS.getNumber(BOULDER_H_MAX_KEY, DEFAULT_BOULDER_H_THRESHOLD.getMax()),
            VISION_PARAMETERS.getNumber(BOULDER_S_MAX_KEY, DEFAULT_BOULDER_S_THRESHOLD.getMax()),
            VISION_PARAMETERS.getNumber(BOULDER_V_MAX_KEY, DEFAULT_BOULDER_V_THRESHOLD.getMax()));
}

From source file:org.vinesrobotics.bot.opmodes.VibotAutonomous.java

License:Open Source License

/**
 * Autonomous-specific initialization: presets the claw and jewel-arm servos,
 * starts OpenCV, and configures/registers the red and blue blob detectors.
 */
@Override
public void init_spec() {
    // Preset servo positions before the autonomous period starts.
    clawServos.setPosition(clawServoMax);
    jewelArmServos.setPosition(1);
    /*switch (Position) {
    case BlueBack: {
        leftMotors.reverseDirection();
        rightMotors.reverseDirection();
    }
    break;
    case RedBack: {
    }
    break;
            
    case BlueFront: {
        leftMotors.reverseDirection();
        rightMotors.reverseDirection();
    }
    break;
    case RedFront: {
    }
    break;
    }*/

    cvmanager.initCV();
    // setHsvColor sets each detector's target HSV color; setColorRadius sets
    // the accepted tolerance around that color.
    // NOTE(review): red uses two detectors (redBlobDet / redDarkBlobDet),
    // presumably because the red hue wraps around both ends of the hue axis —
    // confirm against the blob detector implementation.
    redBlobDet.setColorRadius(new Scalar(25, 96, 127));
    redBlobDet.setHsvColor(new Scalar(255, 255, 255));
    redDarkBlobDet.setColorRadius(new Scalar(25, 96, 127));
    redDarkBlobDet.setHsvColor(new Scalar(0, 255, 255));
    blueBlobDet.setColorRadius(new Scalar(15, 96, 127));
    blueBlobDet.setHsvColor(new Scalar(150, 255, 255));
    cvmanager.registerBlobDetector(redDarkBlobDet);
    cvmanager.registerBlobDetector(redBlobDet);
    cvmanager.registerBlobDetector(blueBlobDet);

    /*
    VuforiaManager.init();
            
    telemetry.addLine("Vu inited");
    telemetry.update();*/
}

From source file:overwatchteampicker.OverwatchTeamPicker.java

/**
 * Searches for a hero template inside a screenshot using SURF keypoints,
 * FLANN-based descriptor matching and a RANSAC homography, writing debug
 * images (points.jpg, matches.jpg, final.jpg) when a confident match is found.
 *
 * @param template template name, resolved to images\hero_templates\&lt;template&gt;.png
 * @param source   path to the screenshot that is searched
 * @param flag     0 -&gt; return a (y, height) pair; 1 -&gt; return all four corner coordinates
 * @return the match description, or null when no confident match was found
 */
public static ReturnValues findImage(String template, String source, int flag) {
    File lib = null;
    BufferedImage image = null;
    try {
        image = ImageIO.read(new File(source));
    } catch (Exception e) {
        // Best-effort: only the flag == 0 branch needs 'image' (for getHeight()).
        e.printStackTrace();
    }

    String os = System.getProperty("os.name");
    String bitness = System.getProperty("sun.arch.data.model");

    // Resolve the OpenCV native library for the current platform.
    if (os.toUpperCase().contains("WINDOWS")) {
        if (bitness.endsWith("64")) {
            lib = new File("C:\\Users\\POWERUSER\\Downloads\\opencv\\build\\java\\x64\\"
                    + System.mapLibraryName("opencv_java2413"));
        } else {
            lib = new File("libs//x86//" + System.mapLibraryName("opencv_java2413"));
        }
    }
    // BUG FIX: on non-Windows systems 'lib' previously stayed null and the load
    // below died with an opaque NullPointerException; fail fast with a clear
    // message instead.
    if (lib == null) {
        throw new IllegalStateException("No OpenCV native library configured for OS: " + os);
    }
    System.load(lib.getAbsolutePath());

    String tempObject = "images\\hero_templates\\" + template + ".png";
    String source_pic = source;
    Mat objectImage = Highgui.imread(tempObject, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat sceneImage = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

    // SURF keypoints + descriptors of the template.
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SURF);
    featureDetector.detect(objectImage, objectKeyPoints);
    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Debug image with the template's keypoints drawn on it.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Keypoints + descriptors of the scene.
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    featureDetector.detect(sceneImage, sceneKeyPoints);
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 25);

    // k-NN match with k = 2 so Lowe's ratio test can be applied below.
    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

    // Ratio-test threshold: keep a match only when it is clearly better than
    // the second-best candidate.
    float nndrRatio = .78f;

    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];

        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);
        }
    }

    // A homography needs at least 4 point correspondences.
    if (goodMatchesList.size() >= 4) {

        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();

        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        // Project the template's corners into the scene.
        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        // Draw the projected quadrilateral on a color copy of the scene.
        Mat img = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_COLOR);

        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 255), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(255, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        // Sanity check: corner 0 must be left of corner 1 and above corner 2,
        // i.e. the projected quad is roughly upright.
        if (new Point(scene_corners.get(0, 0)).x < new Point(scene_corners.get(1, 0)).x
                && new Point(scene_corners.get(0, 0)).y < new Point(scene_corners.get(2, 0)).y) {
            System.out.println("found " + template);
            Highgui.imwrite("points.jpg", outputImage);
            Highgui.imwrite("matches.jpg", matchoutput);
            Highgui.imwrite("final.jpg", img);

            if (flag == 0) {
                // NOTE(review): 'image' is null here when ImageIO.read failed
                // above — getHeight() would NPE; confirm inputs are always readable.
                ReturnValues retVal = null;
                int y = (int) new Point(scene_corners.get(3, 0)).y;
                int yHeight = (int) new Point(scene_corners.get(3, 0)).y
                        - (int) new Point(scene_corners.get(2, 0)).y;
                if (y < image.getHeight() * .6) { // hero in upper part of image -> use corner (3,0)
                    retVal = new ReturnValues(y + (int) (image.getHeight() * .01), yHeight);
                } else { // hero in lower part of image -> use corner (2,0)
                    y = (int) new Point(scene_corners.get(2, 0)).y;
                    retVal = new ReturnValues(y + (int) (image.getHeight() * .3), yHeight);
                }
                return retVal;
            } else if (flag == 1) {
                // Return the raw corner coordinates of the projected quad.
                int[] xPoints = new int[4];
                int[] yPoints = new int[4];

                xPoints[0] = (int) (new Point(scene_corners.get(0, 0)).x);
                xPoints[1] = (int) (new Point(scene_corners.get(1, 0)).x);
                xPoints[2] = (int) (new Point(scene_corners.get(2, 0)).x);
                xPoints[3] = (int) (new Point(scene_corners.get(3, 0)).x);

                yPoints[0] = (int) (new Point(scene_corners.get(0, 0)).y);
                yPoints[1] = (int) (new Point(scene_corners.get(1, 0)).y);
                yPoints[2] = (int) (new Point(scene_corners.get(2, 0)).y);
                yPoints[3] = (int) (new Point(scene_corners.get(3, 0)).y);

                return new ReturnValues(xPoints, yPoints);
            }
        }
    }
    return null;
}

From source file:processdata.ExperimentalDataProcessingUI.java

/**
 * Handler for the "Process Image" button: loads the selected video and depth
 * frames, subtracts the depth background, cleans the depth mask, applies a
 * homographic transform, extracts the largest contours, masks the RGB frame
 * with them, and displays every intermediate result in its panel.
 *
 * @param evt the Swing action event (unused)
 */
private void jButtonProcessImageActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonProcessImageActionPerformed
    try {
        // Load the OpenCV native library (repeated on every click; harmless).
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Resolve the input file names from the UI controls.
        folderName = textField2.getText();
        int currentFrameIndex = Integer.parseInt(initialFrameIndexBox.getText()) - 1;
        datasetIndex = Integer.parseInt(textField1.getText());
        String videoImageFileName = "./videoFrames//" + folderName + "//" + "frame_outVideo_"
                + currentFrameIndex + ".jpg";

        String depthFrameFileName = initialImagePath + datasetIndex + "//" + folderName + "//" + "depthData//"
                + "outDepthByte_" + currentFrameIndex;

        rgbFrame = Highgui.imread(videoImageFileName, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

        depthFrame = depthDataProcessingUtilities.processDepthDataFile(depthFrameFileName, jSlider2.getValue(),
                jSlider1.getValue());

        Mat[] backgroundFrames = readBackground();
        rgbBackgroundFrame = backgroundFrames[0];
        depthBackgroundFrame = backgroundFrames[1];

        // Subtract the depth background and binarize the difference.
        Mat depthFrameBackgroundSubtracted = new Mat();
        Core.subtract(depthBackgroundFrame, depthFrame, depthFrameBackgroundSubtracted);
        Imgproc.threshold(depthFrameBackgroundSubtracted, depthFrameBackgroundSubtracted, 0, 255,
                Imgproc.THRESH_BINARY);
        displayImage(Mat2BufferedImage(
                videoProcessingUtilities.resizeImage(depthFrameBackgroundSubtracted, new Size(448, 234))),
                depthBckgSubtractedFrames);

        // Keep only pixels in the 253..255 range of the first channel,
        // removing the red-colored elements and leaving the blue ones.
        Mat depthImageCleaned = new Mat();
        Core.inRange(depthFrameBackgroundSubtracted, new Scalar(253, 0, 0), new Scalar(255, 0, 0),
                depthImageCleaned);

        // Morphological opening (op code 2) with a 3x3 rectangle removes noise.
        Imgproc.morphologyEx(depthImageCleaned, depthImageCleaned, 2,
                Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3)));
        displayImage(
                Mat2BufferedImage(videoProcessingUtilities.resizeImage(depthImageCleaned, new Size(448, 234))),
                depthCleanedFramesPanel);

        // Homographic transform of the cleaned depth mask into 1920x1080.
        Mat hDepthImageCleaned = videoProcessingUtilities.performHomographyTransformation(depthImageCleaned,
                new Size(1920, 1080));

        // Extract contours whose area lies between the two bounds.
        MatOfPoint[] contours = videoProcessingUtilities.extractLargestContours(hDepthImageCleaned, 100000,
                160000);
        System.out.println("Number of contorus extracted " + contours.length);

        // Draw the extracted contours for visual inspection.
        List<MatOfPoint> tempContours = new ArrayList<MatOfPoint>();
        Mat hDepthImageCleanedContours = hDepthImageCleaned.clone();
        for (MatOfPoint cnt : contours) {
            System.out.println("Extracted Contour Area is " + Imgproc.contourArea(cnt));
            tempContours.add(cnt);
        }
        Imgproc.cvtColor(hDepthImageCleanedContours, hDepthImageCleanedContours, Imgproc.COLOR_GRAY2BGR);
        Imgproc.drawContours(hDepthImageCleanedContours, tempContours, -1, new Scalar(0, 0, 255), 5);
        displayImage(
                Mat2BufferedImage(
                        videoProcessingUtilities.resizeImage(hDepthImageCleanedContours, new Size(448, 234))),
                extractedContoursPanel);

        // Final mask: the extracted contours filled with white.
        Mat hDepthImageFilledContours = new Mat(hDepthImageCleaned.rows(), hDepthImageCleaned.cols(),
                hDepthImageCleaned.type());
        Imgproc.drawContours(hDepthImageFilledContours, tempContours, -1, new Scalar(255, 255, 255), -1);
        displayImage(
                Mat2BufferedImage(
                        videoProcessingUtilities.resizeImage(hDepthImageFilledContours, new Size(448, 234))),
                maskedContoursPanel);

        // Pre-mask: RGB frame masked by the (unfilled) cleaned depth image.
        Mat preMaskedRGBFrame = new Mat();
        rgbFrame.copyTo(preMaskedRGBFrame, hDepthImageCleaned);
        displayImage(
                Mat2BufferedImage(videoProcessingUtilities.resizeImage(preMaskedRGBFrame, new Size(448, 234))),
                videoBckgSubtractedFrames);

        // Post-mask: RGB frame masked by the filled contour mask.
        Mat betterMaskedRGBFrame = new Mat();
        rgbFrame.copyTo(betterMaskedRGBFrame, hDepthImageFilledContours);
        displayImage(
                Mat2BufferedImage(
                        videoProcessingUtilities.resizeImage(betterMaskedRGBFrame, new Size(448, 234))),
                videoMaskedPanel);

        // Segment each contour into its own rotated, cropped shoeprint image.
        finalImages.clear();
        javax.swing.JLabel[] jLabArray = { extractedShoePanel1, extractedShoePanel2 };
        int panelIdx = 0;
        for (MatOfPoint contour : tempContours) {
            // BUG FIX: stop when the display panels run out; previously a third
            // contour caused an ArrayIndexOutOfBoundsException on jLabArray.
            if (panelIdx >= jLabArray.length) {
                break;
            }
            MatOfPoint2f newMatOfPoint2fContour = new MatOfPoint2f(contour.toArray());
            RotatedRect finalROI = Imgproc.minAreaRect(newMatOfPoint2fContour);
            Mat newMask = videoProcessingUtilities.getContourMasked(hDepthImageFilledContours.clone(), contour);
            Mat imageROIRegistred = new Mat();
            betterMaskedRGBFrame.copyTo(imageROIRegistred, newMask);
            Mat maskedRGBFrameROI = videoProcessingUtilities.rotateExtractedShoeprint(imageROIRegistred,
                    finalROI, new Size(500, 750), 2);
            finalImages.add(maskedRGBFrameROI);
            displayImage(
                    Mat2BufferedImage(
                            videoProcessingUtilities.resizeImage(maskedRGBFrameROI, new Size(203, 250))),
                    jLabArray[panelIdx]);
            panelIdx++;
        }

    } catch (FileNotFoundException ex) {
        // NOTE(review): only missing-file errors are handled here; number-format
        // and OpenCV failures propagate to the Swing event thread — confirm intended.
        Logger.getLogger(ExperimentalDataProcessingUI.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:readnumber.ReadNumber.java

/**
 * detectFaces/*from w w w .  java2 s  . co m*/
 */
private void detectFaces() {
    // Save video to image           
    Imgcodecs.imwrite(filePath, webcamMatImage);

    // Detect faces in the image
    imagefase = Imgcodecs.imread(filePath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(imagefase, faceDetections);

    inform.setText("Information:");
    String cauntfase = "total face: " + faceDetections.toArray().length;
    caunttext.setText(cauntfase);

    // Draw a bounding box around each face
    for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(imagefase, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    // Save video to image
    Imgcodecs.imwrite(filePath, imagefase);
    message.setText("Detect faces ...");

    // Output image to form (JLabel)
    imageView.setBounds(330, 61, 320, 240);
    newImage = Imgcodecs.imread(filePath);
    Image loadedImage = imageProcessor.toBufferedImage(newImage);
    ImageIcon imgIcon = new ImageIcon(loadedImage, "img");
    imageView.setIcon(imgIcon);
    textImage.setBounds(340, 295, 120, 30);
}

From source file:Recognizer.Recognizer.java

/**
 * Finds the best placement of the query image inside the database image via
 * normalized template matching in HSV space and outlines the matched region
 * with a green rectangle.
 *
 * @param imQuery      image to search for
 * @param imDB         image to search in
 * @param match_method one of the Imgproc.TM_* matching modes
 * @return a copy of the database image with the best match outlined
 */
public Image TemplateMatching(Image imQuery, Image imDB, int match_method) {
    System.out.println("Running Template Matching ...");

    Mat queryMat = imQuery.Image3CtoMat_CV();
    Mat dbMat = imDB.Image3CtoMat_CV();

    // Match in HSV rather than RGB.
    Mat queryHsv = new Mat();
    Mat dbHsv = new Mat();
    Imgproc.cvtColor(queryMat, queryHsv, COLOR_RGB2HSV);
    Imgproc.cvtColor(dbMat, dbHsv, COLOR_RGB2HSV);

    // The response map holds one score per possible top-left placement.
    Mat response = new Mat(dbMat.rows() - queryMat.rows() + 1, dbMat.cols() - queryMat.cols() + 1,
            CvType.CV_32FC1);

    Imgproc.matchTemplate(dbHsv, queryHsv, response, match_method);
    Core.normalize(response, response, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // SQDIFF-style methods score best at the minimum; all others at the maximum.
    Core.MinMaxLocResult extrema = Core.minMaxLoc(response);
    boolean useMin = match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED;
    Point topLeft = useMin ? extrema.minLoc : extrema.maxLoc;

    // Outline the matched region on the database image.
    Core.rectangle(dbMat, topLeft,
            new Point(topLeft.x + queryMat.cols(), topLeft.y + queryMat.rows()),
            new Scalar(0, 255, 0));

    Image imOut = new Image(dbMat.width(), dbMat.height());
    imOut.Mat_CVtoImage3C(dbMat);

    System.out.println("Location: " + extrema.minLoc.x + " " + extrema.minLoc.y + "   "
            + extrema.maxLoc.x + " " + extrema.maxLoc.y);

    return imOut;
}

From source file:Recognizer.Recognizer.java

/**
 * Hierarchical histogram matching: compares the query image's H/S histogram
 * (Bhattacharyya distance) against the database image subdivided into 2x2,
 * 4x4 and 8x8 grids of blocks, and outlines the best-matching block of each
 * level (green = level 1, red = level 2, blue = level 3).
 *
 * @param imQuery query image
 * @param imDB    352x288 database image
 * @return a copy of the database image with the three best blocks outlined
 */
public Image HistMatch(Image imQuery, Image imDB) {
    Image imOut = new Image(352, 288);

    Mat srcQ, srcDB;
    Mat hsvQ = new Mat(), hsvDB = new Mat();

    srcQ = imQuery.Image3CtoMat_CV();
    srcDB = imDB.Image3CtoMat_CV();

    // Convert both images to HSV before computing histograms.
    Imgproc.cvtColor(srcQ, hsvQ, Imgproc.COLOR_RGB2HSV);
    Imgproc.cvtColor(srcDB, hsvDB, Imgproc.COLOR_RGB2HSV);

    java.util.List<Mat> matlistQ = Arrays.asList(hsvQ);
    java.util.List<Mat> matlistDB = Arrays.asList(hsvDB);

    // 360 hue bins x 4 saturation bins over channels 0 (H) and 1 (S).
    // (The original comment claimed 100/100 bins, contradicting the code.)
    int h_bins = 360, s_bins = 4;
    int[] histsize = { h_bins, s_bins };
    MatOfInt histSize = new MatOfInt(histsize);

    MatOfFloat Ranges = new MatOfFloat(0, 180, 0, 256);

    int[] channels = { 0, 1 };
    MatOfInt CH = new MatOfInt(channels);

    Mat hist_Q = new Mat();
    Mat hist_DB = new Mat();

    // Reference histogram of the whole query image.
    Imgproc.calcHist(matlistQ, CH, new Mat(), hist_Q, histSize, Ranges);
    Core.normalize(hist_Q, hist_Q, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    float res;

    // Row-major grids of sub-images: 2x2, 4x4 and 8x8 blocks of the DB image,
    // stored as grid[row * gridSize + col].
    Mat[] hsvaLev1 = new Mat[4];
    Mat[] hsvaLev2 = new Mat[16];
    Mat[] hsvaLev3 = new Mat[64];

    float[] iaLev1 = new float[4];
    float[] iaLev2 = new float[16];
    float[] iaLev3 = new float[64];

    for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 2; j++) {
            hsvaLev1[i * 2 + j] = hsvDB.submat(0 + i * 288 / 2, 143 + i * 288 / 2, 0 + j * 352 / 2,
                    175 + j * 352 / 2);
        }
    }

    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
            hsvaLev2[i * 4 + j] = hsvDB.submat(0 + i * 288 / 4, 71 + i * 288 / 4, 0 + j * 352 / 4,
                    87 + j * 352 / 4);
        }
    }

    for (int i = 0; i < 8; i++) {
        for (int j = 0; j < 8; j++) {
            hsvaLev3[i * 8 + j] = hsvDB.submat(0 + i * 288 / 8, 35 + i * 288 / 8, 0 + j * 352 / 8,
                    43 + j * 352 / 8);
        }
    }

    // Distance of every block's histogram to the query histogram, per level.
    System.out.println("Lev_1");
    for (int m = 0; m < 4; m++) {
        matlistDB = Arrays.asList(hsvaLev1[m]);
        Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
        Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        res = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_BHATTACHARYYA);

        System.out.println("Res: " + res);
        iaLev1[m] = res;
    }

    System.out.println("Lev_2");
    for (int m = 0; m < 16; m++) {
        matlistDB = Arrays.asList(hsvaLev2[m]);
        Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
        Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        res = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_BHATTACHARYYA);

        System.out.println("Res: " + res);
        iaLev2[m] = res;
    }

    System.out.println("Lev_3");
    for (int m = 0; m < 64; m++) {
        matlistDB = Arrays.asList(hsvaLev3[m]);
        Imgproc.calcHist(matlistDB, CH, new Mat(), hist_DB, histSize, Ranges);
        Core.normalize(hist_DB, hist_DB, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        res = (float) Imgproc.compareHist(hist_Q, hist_DB, Imgproc.CV_COMP_BHATTACHARYYA);

        System.out.println("Res: " + res);
        iaLev3[m] = res;
    }

    // Blocks are stored row-major as index = row * gridSize + col, so decode
    // with row = index / gridSize and col = index % gridSize.
    // BUG FIX: the original swapped these (row = x % N, col = x / N), which
    // drew each rectangle at the transposed grid position whenever the best
    // block was off the main diagonal. Fixed for all three levels.
    int x = MinIndex(iaLev1);
    int i = x / 2;
    int j = x % 2;
    Core.rectangle(srcDB, new Point(0 + j * 352 / 2, 0 + i * 288 / 2),
            new Point(175 + j * 352 / 2, 143 + i * 288 / 2), new Scalar(0, 255, 0));

    x = MinIndex(iaLev2);
    i = x / 4;
    j = x % 4;
    Core.rectangle(srcDB, new Point(0 + j * 352 / 4, 0 + i * 288 / 4),
            new Point(87 + j * 352 / 4, 71 + i * 288 / 4), new Scalar(0, 0, 255));

    x = MinIndex(iaLev3);
    i = x / 8;
    j = x % 8;
    Core.rectangle(srcDB, new Point(0 + j * 352 / 8, 0 + i * 288 / 8),
            new Point(43 + j * 352 / 8, 35 + i * 288 / 8), new Scalar(255, 0, 0));

    imOut.Mat_CVtoImage3C(srcDB);

    return imOut;
}

From source file:Recognizer.Recognizer.java

/**
 * Slides an m x n window over the database image and finds the block whose
 * H/S histogram is closest (chi-square distance) to the query image's
 * histogram; the winning block is outlined in green.
 *
 * @param imQuery query image whose histogram is the reference
 * @param imDB    352x288 database image that is scanned
 * @param m       block width in pixels
 * @param n       block height in pixels
 * @return a copy of the database image with the best-matching block outlined
 */
public Image HistBlockCompare(Image imQuery, Image imDB, int m, int n) // SingleBlock Size mxn -> Eg: 88x72 -> m =88; n = 72
{
    Image imOut = new Image(352, 288);

    Mat srcQ = imQuery.Image3CtoMat_CV();
    Mat srcDB = imDB.Image3CtoMat_CV();

    // Histograms are computed in HSV space.
    Mat hsvQ = new Mat();
    Mat hsvDB = new Mat();
    Imgproc.cvtColor(srcQ, hsvQ, Imgproc.COLOR_RGB2HSV);
    Imgproc.cvtColor(srcDB, hsvDB, Imgproc.COLOR_RGB2HSV);

    // 180 hue bins x 2 saturation bins over channels 0 (H) and 1 (S).
    MatOfInt histSize = new MatOfInt(180, 2);
    MatOfFloat ranges = new MatOfFloat(0, 180, 0, 256);
    MatOfInt channels = new MatOfInt(0, 1);

    // Reference histogram of the whole query image.
    Mat histQ = new Mat();
    Imgproc.calcHist(Arrays.asList(hsvQ), channels, new Mat(), histQ, histSize, ranges);
    Core.normalize(histQ, histQ, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // Chi-square distance of every window placement (indexed [left][top]).
    float[][] scores = new float[352 - m][288 - n];
    Mat histBlock = new Mat();

    for (int x = 0; x < (352 - m); x++) { // left edge of the window
        for (int y = 0; y < (288 - n); y++) { // top edge of the window
            // submat takes (rowStart, rowEnd, colStart, colEnd).
            Mat block = hsvDB.submat(y, (y + n), x, (x + m));
            Imgproc.calcHist(Arrays.asList(block), channels, new Mat(), histBlock, histSize, ranges);
            Core.normalize(histBlock, histBlock, 0, 1, Core.NORM_MINMAX, -1, new Mat());
            scores[x][y] = (float) Imgproc.compareHist(histQ, histBlock, Imgproc.CV_COMP_CHISQR);
        }
    }

    // Locate the placement with the smallest distance.
    float best = scores[0][0];
    int bestX = 0;
    int bestY = 0;
    for (int x = 0; x < (352 - m); x++) {
        for (int y = 0; y < (288 - n); y++) {
            if (scores[x][y] < best) {
                best = scores[x][y];
                bestX = x;
                bestY = y;
            }
        }
    }

    // Outline the winning block in green.
    Core.rectangle(srcDB, new Point(bestX, bestY), new Point(bestX + m, bestY + n),
            new Scalar(0, 255, 0));

    System.out.println("Result: " + scores[bestX][bestY]);
    imOut.Mat_CVtoImage3C(srcDB);

    return imOut;
}

From source file:Reconhecimento.Circulo.java

/**
 * Segments the circle so its movement can be tracked across the video.
 * Spawns a worker thread that runs a Hough circle transform frame by frame,
 * draws the circle that stays consistent with the previous frame, dumps each
 * annotated frame to disk and shows it in a Swing preview window.
 *
 * @param minRaio          minimum circle radius accepted by HoughCircles
 * @param maxRaio          maximum circle radius accepted by HoughCircles
 * @param minThreshold     lower threshold of the binarization step
 * @param maxThreshold     upper threshold of the binarization step
 * @param medianBlurKernel kernel size of the median-blur noise filter
 */
public static void segmentarCirculo(int minRaio, int maxRaio, int minThreshold, int maxThreshold,
        int medianBlurKernel) {

    class threadSegmentar extends Thread {

        // BUG FIX: 'closed' is written on the Swing event thread (windowClosing)
        // and read in this worker thread's loop; it must be volatile so the
        // worker is guaranteed to observe the update and terminate.
        public volatile boolean closed = false;
        public double CentroX;
        public double CentroY;

        @Override
        public void run() {
            int contador = 0;

            // Recreate/empty the directory the annotated frames are dumped into.
            File folder = new File("imagens/frames");
            if (!folder.exists()) {
                folder.mkdir();
            }
            for (String file : folder.list()) {
                new File(folder, file).delete();
            }

            // Per-run suffix so frame file names do not collide between runs.
            ind = (char) ((int) ind + 1);

            // Preview window for the annotated frames.
            JFrame frame = new JFrame();
            JLabel label = new JLabel();
            frame.add(label);
            frame.setBounds(10, 10, 640, 480);
            label.setSize(640, 480);
            frame.setLocation(250, 250);
            frame.setVisible(true);
            closed = false;

            frame.addWindowListener(new WindowAdapter() {
                @Override
                public void windowClosing(WindowEvent e) {
                    closed = true;
                }
            });

            Mat img = new Mat();
            Mat circles = new Mat();
            Mat grayImg = new Mat();
            Mat gravar = new Mat();

            VideoCapture cap = new VideoCapture(Video.videoAtual);

            // Read the first frame of the video and locate the initial circle.
            cap.read(img);

            Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_BGR2GRAY);

            Imgproc.medianBlur(grayImg, grayImg, 5);

            Imgproc.HoughCircles(grayImg, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 100, 220, 10, minRaio,
                    maxRaio);

            // NOTE(review): if no circle is found in the first frame this is
            // null and the next line throws NullPointerException — confirm the
            // input videos always start with a visible circle.
            double Circle[] = circles.get(0, 0);

            Point center = new Point(Math.round(Circle[0]), Math.round(Circle[1]));

            int radius = (int) Math.round(Circle[2]);

            CentroX = center.x;
            CentroY = center.y;

            cap.read(img);

            boolean continuar = true;

            while (continuar) {

                // Grayscale conversion.
                Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_BGR2GRAY);

                // Binarization.
                Imgproc.threshold(grayImg, grayImg, minThreshold, maxThreshold, THRESH_BINARY_INV);

                Core.bitwise_not(grayImg, grayImg);

                // Median filter to remove noise.
                Imgproc.medianBlur(grayImg, grayImg, medianBlurKernel);

                // Edge detection.
                Imgproc.Canny(grayImg, grayImg, 100, 255);

                // Circular Hough transform.
                Imgproc.HoughCircles(grayImg, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 100, 220, 9, minRaio,
                        maxRaio);

                try {
                    for (int x = 0; x < circles.cols(); x++) {
                        double vCircle[] = circles.get(0, x);

                        center = new Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
                        radius = (int) Math.round(vCircle[2]);

                        // Accept the circle only when it is close enough to the
                        // one tracked in the previous frame.
                        if (((center.x <= CentroX) || (center.x - CentroX <= 5))
                                && (Math.sqrt(CentroX * CentroX + CentroY * CentroY)
                                        - Math.sqrt(center.x * center.x + center.y * center.y) <= 70.0)
                                && (Math.sqrt(CentroX * CentroX + CentroY * CentroY)
                                        - Math.sqrt(center.x * center.x + center.y * center.y) >= -70.0)) {

                            Core.circle(img, center, radius, new Scalar(0, 0, 255), 3, 8, 0);

                            CentroX = center.x;
                            CentroY = center.y;
                        }
                    }
                } catch (Exception ignored) {
                    // Best-effort: a frame without usable circle data (e.g. a
                    // null row from circles.get) is simply skipped.
                }

                Imgproc.resize(img, gravar, new Size(640, 480));
                Highgui.imwrite("imagens/frames/houghcircles" + contador + ind + ".jpg", gravar);

                label.setIcon(new ImageIcon("imagens/frames/houghcircles" + contador + ind + ".jpg"));

                contador++;

                continuar = cap.read(img) && !closed;
            }
        }
    }

    // Ask for a video first when none is selected yet.
    if (Video.videoAtual == null) {
        JOptionPane.showMessageDialog(null, "Selecione um arquivo de video!", "Nenhum vdeo selecionado",
                JOptionPane.WARNING_MESSAGE);
        Video.abrirVideo();
    }

    threadSegmentar t = new threadSegmentar();
    t.start();
}

From source file:Reconhecimento.Regua.java

/**
 * Segments the ruler inside the rectangle the user selected and derives the
 * centimeters-per-pixel scale of the image from the ruler's pixel extent
 * (the ruler is assumed to be 30 cm long), then shows the result dialog.
 */
public static void segmentarRegua() {

    long tempoInicio = System.currentTimeMillis();

    // Corners of the user's selection rectangle.
    int x0 = TelaSegmentarRegua.localizarReguaPanel1.x0;
    int y0 = TelaSegmentarRegua.localizarReguaPanel1.y0;
    int x = TelaSegmentarRegua.localizarReguaPanel1.xf;
    int y = TelaSegmentarRegua.localizarReguaPanel1.yf;

    // Normalize so (x0, y0) is the top-left corner.
    if (x0 > x) {
        int aux = x0;
        x0 = x;
        x = aux;
    }

    if (y0 > y) {
        int aux = y0;
        y0 = y;
        y = aux;
    }

    Mat bigImage = Highgui.imread(TelaSegmentarRegua.localizarReguaPanel1.imagem);
    // Crop the image to the selection.
    Mat img = new Mat(bigImage, new Rect(x0, y0, x - x0, y - y0));

    Mat grayImg = new Mat();
    // Grayscale + inverted binarization + bitwise-not leaves the ruler white.
    Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.threshold(grayImg, grayImg, 190, 255, THRESH_BINARY_INV);
    Core.bitwise_not(grayImg, grayImg);

    List<Point> pontos = new ArrayList<Point>();

    // Collect every ruler (white) pixel and mark it on the color crop.
    for (int i = 0; i < grayImg.rows(); i++) {
        for (int j = 0; j < grayImg.cols(); j++) {
            // FIX (idiom/perf): compare the pixel value directly instead of the
            // original Arrays.toString(...).equals("[255.0]") string round-trip;
            // the image is single-channel after the threshold above.
            if (grayImg.get(i, j)[0] == 255.0) {
                pontos.add(new Point(j, i));
                Core.line(img, new Point(j, i), new Point(j, i), new Scalar(255, 0, 0));
            }
        }
    }

    String filename = "imagens/regua_segmentada" + Math.random() * 1000 + ".jpg";

    // Save a 3x-scaled visualization of the segmented ruler.
    Mat img2 = new Mat();
    Imgproc.resize(img, img2, new Size(img.size().width * 3.0, img.size().height * 3.0));
    Highgui.imwrite(filename, img2);

    int xMin = 5000, yMin = 5000;
    int xMax = 0, yMax = 0;

    // Bounding box of all ruler pixels.
    for (Point ponto : pontos) {
        if (ponto.x > xMax) {
            xMax = (int) ponto.x;
        }
        if (ponto.x < xMin) {
            xMin = (int) ponto.x;
        }
        if (ponto.y > yMax) {
            yMax = (int) ponto.y;
        }
        if (ponto.y < yMin) {
            yMin = (int) ponto.y;
        }
    }

    // Ruler lying horizontally.
    if (xMax - xMin > yMax - yMin) {
        /*
        the image used for processing is scaled down, so the measurement is
        multiplied by 2 to restore the original proportions
        */
        larguraPixels = (xMax - xMin) * 2;
    }
    // Ruler standing vertically.
    else {
        larguraPixels = (yMax - yMin) * 2;
    }

    long tempoFim = System.currentTimeMillis() - tempoInicio;

    // The physical ruler is 30 cm long.
    centimetrosPorPixel = 30.0 / larguraPixels;

    // Show the result dialog with the visualization and the computed scale.
    TelaSegmentarRegua2 telaResposta = new TelaSegmentarRegua2();
    telaResposta.jLabel1.setIcon(new ImageIcon(filename));
    telaResposta.jLabel4.setText(larguraPixels + " pixels");
    telaResposta.jLabel5.setText(String.valueOf(centimetrosPorPixel).substring(0, 5));
    telaResposta.jLabel7.setText(tempoFim + " ms");
    telaResposta.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
    telaResposta.setLocation(200, 200);
    telaResposta.setVisible(true);

}