Example usage for org.opencv.core Mat copyTo

List of usage examples for org.opencv.core Mat copyTo

Introduction

In this page you can find the example usage for org.opencv.core Mat copyTo.

Prototype

public void copyTo(Mat m) 

Source Link

Usage

From source file:LicenseDetection.java

public void run() {

    // License-plate detection + OCR pipeline. Loads a bundled test image, isolates
    // plate-shaped regions via edge detection and morphology, de-rotates the best
    // candidate, runs Tesseract OCR on it, and looks the plate text up in a database.
    // NOTE(review): absolute, machine-specific output paths are used throughout; this
    // only runs on the original author's machine as written.

    // ------------------ Set up Tesseract for OCR (used at the very end) ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------ Load source image and save a reference copy ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ Convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ Blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ****************** Create image that will be cropped at end of program before OCR ***************************

    // ------------------ Binary threshold for OCR (used later) ------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ Erosion operation ------------------
    // NOTE(review): the constant passed is MORPH_DILATE although this section is labelled
    // "erosion". On this thresholded image (dark glyphs on a light background) dilating
    // the light background visually thins the glyphs, which is presumably why the author
    // observed an erosion-like effect — confirm intent before "fixing" the constant.
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    //------------------ Dilation operation ------------------
    // NOTE(review): same inversion as above — MORPH_ERODE here visually thickens the glyphs.
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);

    // *************************************************************************************************************

    // ------------------ Sobel vertical edge detection ------------------
    // dx=1, dy=0: responds to vertical edges, which dominate in plate characters.
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ Binary threshold ------------------
    // Large 99-pixel neighborhood with a negative offset keeps only strong edge responses.
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    // The wide 22x8 kernel merges the character edges into one solid plate-shaped blob.
    // NOTE(review): the magic number 1 is the morphology op code for closing — prefer the
    // named Imgproc constant.
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ Find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();

    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // **************************** DEBUG CODE **************************
    // Draw every contour filled white so the candidate blobs can be inspected on disk.
    Mat contourImg = new Mat(imgClose.size(), imgClose.type());
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);

    // ******************************************************************

    // --------------  Convert contours --------------------

    // Convert to MatOfPoint2f so that minAreaRect can be called
    List<MatOfPoint2f> newContours = new ArrayList<>();

    for (MatOfPoint mat : contours) {

        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);

    }

    // Get the minimum-area rotated rectangle of each contour
    List<RotatedRect> minAreaRects = new ArrayList<>();

    for (MatOfPoint2f mat : newContours) {

        RotatedRect rect = Imgproc.minAreaRect(mat);

        /*
         --------------- BUG WORK AROUND ------------

         minAreaRect sometimes reports width/height swapped, with the angle offset by
         -90 degrees from what it would be were they correct. Drawing the rectangles was
         unaffected, but calculations on rect.angle / rect.width / rect.height gave wrong
         results, so normalize here: force width >= height and compensate the angle.
         */
        if (rect.size.width < rect.size.height) {
            double temp;

            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;

        }

        // Keep only plate-like candidates: aspect ratio in (1, 5), area in
        // (10000, 50000) px, and nearly horizontal (|angle| < 20 degrees).
        // NOTE(review): thresholds are tuned to this specific test image.
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000 && rect.size.width * rect.size.height < 50000
                && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }

    }

    // **************************** DEBUG CODE **************************
    /*
    Draw the surviving rotated rects on top of the original image for debugging.
     */
    Point[] vertices = new Point[4];

    // NOTE(review): this aliases img — the red boxes below are drawn into the original mat.
    Mat imageWithBoxes = img;

    for (RotatedRect rect : minAreaRects) {

        rect.points(vertices);

        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }

    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);

    // ******************************************************************

    /*
    In order to rotate the image without cropping it:

    1. Create a new square image whose side equals the diagonal of the initial image.
    2. Draw the initial image into the center of the new image
       (insert it at a ROI — Region of Interest — in the new image).
    3. Rotate the new image.
     */

    // Find diagonal/hypotenuse
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));

    // New square Mat with the hypotenuse as height and width
    // (type 0 — presumably CV_8UC1, single channel; confirm against Mat constructor docs)
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, 0);

    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; // x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; // y start of ROI

    // designate region of interest
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());

    // Insert the OCR-prepared binary image into the region of interest
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); // Mat to hold temporarily rotated mat
    Mat rectMat = new Mat(); // Mat to hold rect contents (needed for looping through pixels)
    Point[] rectVertices = new Point[4]; // Used to build rect to make ROI
    Rect rec = new Rect();

    List<RotatedRect> edgeDensityRects = new ArrayList<>(); // rects that pass the edge-density filter

    int count = 0;

    // Loop through the rotated rects: de-rotate each candidate and extract its pixels
    for (RotatedRect rect : minAreaRects) {

        count++;

        // Shift the rect center into rotateSpace coordinates
        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);

        // rotate image to match orientation of rotated rect
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);

        // remove rect rotation
        rect.angle = 0;

        // get vertices from rotatedRect
        rect.points(rectVertices);

        // build rect to use as ROI (vertices 1 and 3 are opposite corners once angle == 0)
        rec = new Rect(rectVertices[1], rectVertices[3]);

        rectMat = rotatedTemp.submat(rec);

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);

        // NOTE(review): an edge-density check was planned here but never implemented
        // (not needed for this test image), so every candidate is accepted as-is.
        edgeDensityRects.add(rect);
    }

    // **************************** DEBUG CODE **************************

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);

    // *****************************************************************

    // if there is only one rectangle left, it's the license plate
    if (edgeDensityRects.size() == 1) {

        String result = ""; // Holds the text returned by OCR
        BufferedImage bimg;
        Mat cropped;

        // Crop fixed margins off the extracted plate to drop its border before OCR.
        // NOTE(review): hard-coded margins assume a minimum plate size; a small
        // rectMat would make this submat call throw.
        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);

        bimg = matToBufferedImage(cropped);

        BufferedImage image = bimg;

        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }

        // NOTE(review): dead loop with an empty body — should be removed.
        for (int i = 0; i < 10; ++i) {

        }

        // OCR output may contain newlines; strip them to get a single plate string.
        result = result.replace("\n", "");

        System.out.println(result);

        // Look the recognized plate text up in the database and print the car profile.
        // NOTE(review): credentials are hard-coded; getCarProf may return null for an
        // unknown plate, in which case c.toString() would NPE — verify against the DB impl.
        CarProfDBImpl db = new CarProfDBImpl();

        db.connect("localhost:3306/computer_vision", "root", "*******");

        CarProf c = db.getCarProf(result);

        System.out.print(c.toString());

        db.close();

    }

}

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License:Open Source License

/**
 * Finds ball-sized blobs in a binary threshold image and draws them on the camera feed.
 *
 * <p>Contours are extracted from a copy of {@code threshold} (findContours modifies its
 * input). Each contour with area above {@code MIN_OBJECT_AREA} becomes a {@code Ball}
 * positioned at the contour's centroid (m10/m00, m01/m00). If the contour count reaches
 * {@code MAX_NUM_OBJECTS}, the filter is considered too noisy and a warning is drawn
 * instead.
 *
 * @param theBall    template whose type/colour are copied onto detections; may be null
 * @param threshold  binary (filtered) image to search; not modified
 * @param cameraFeed frame the detections (or the noise warning) are drawn onto
 */
private void trackFilteredObject(Ball theBall, Mat threshold, Mat cameraFeed) {
    List<Ball> balls = new ArrayList<Ball>();

    // Work on a copy so findContours (which modifies its input) leaves the caller's mat intact.
    Mat temp = new Mat();
    threshold.copyTo(temp);

    // The two variables below are the return of "findContours" processing.
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);

    boolean objectFound = false;

    if (contours.size() > 0) {
        int numObjects = contours.size();

        // If the number of objects reaches MAX_NUM_OBJECTS we have a noisy filter.
        if (numObjects < MAX_NUM_OBJECTS) {

            for (int i = 0; i < contours.size(); i++) {
                Moments moment = Imgproc.moments(contours.get(i));
                double area = moment.get_m00();

                // Contours smaller than MIN_OBJECT_AREA are treated as noise and skipped.
                if (area > MIN_OBJECT_AREA) {
                    Ball ball = new Ball();
                    ball.setXPos((int) (moment.get_m10() / area));
                    ball.setYPos((int) (moment.get_m01() / area));

                    if (theBall != null) {
                        ball.setType(theBall.getType());
                        ball.setColour(theBall.getColour());
                    }

                    balls.add(ball);

                    // BUG FIX: latch the flag instead of overwriting it each iteration.
                    // Previously a small (noise) contour processed AFTER a valid one reset
                    // objectFound to false, discarding real detections.
                    objectFound = true;
                }
            }

            // Let the user know an object was found by drawing its location on screen.
            if (objectFound) {
                drawObject(balls, cameraFeed);
            }

        } else {
            Core.putText(cameraFeed, "TOO MUCH NOISE! ADJUST FILTER", new Point(0, 50), 1, 2,
                    new Scalar(0, 0, 255), 2);
        }

    }

}

From source file:by.zuyeu.deyestracker.core.detection.detector.BaseDetector.java

/**
 * Runs a cascade classifier over a camera frame and returns the detected object rects.
 *
 * <p>The frame is converted to grayscale and histogram-equalized (the standard
 * pre-processing for Haar/LBP cascades) before detection. The input frame is not modified.
 *
 * @param inputframe BGR camera frame to scan; left untouched
 * @param classifier loaded cascade used for detection
 * @return bounding rectangles of all detections (possibly empty, never null)
 */
protected Rect[] detectWithClassifier(final Mat inputframe, final CascadeClassifier classifier) {
    LOG.debug("detectWithClassifier - start;");

    final Mat mGrey = new Mat();
    final MatOfRect detectedObjects = new MatOfRect();
    // FIX: convert straight from the input frame. The previous code copied the frame
    // into two temporaries per call: the grayscale copy was immediately overwritten by
    // cvtColor, and the RGBA copy was only read by cvtColor (which does not modify its
    // source), so both full-frame copies were pure overhead.
    Imgproc.cvtColor(inputframe, mGrey, Imgproc.COLOR_BGR2GRAY);
    Imgproc.equalizeHist(mGrey, mGrey);
    classifier.detectMultiScale(mGrey, detectedObjects);

    LOG.debug("detectWithClassifier - end;");
    return detectedObjects.toArray();
}

From source file:ch.zhaw.facerecognition.Activities.AddPersonPreviewActivity.java

License:Open Source License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Per-frame handler for the "add person" capture screen: detects a single face in
    // the frame, saves the cropped face image to the training/test folder, and draws
    // feedback rectangles on the preview. Returns the (possibly annotated) preview frame.
    Mat imgRgba = inputFrame.rgba();
    // Work on a copy: face cropping must run on the unflipped image while the preview
    // (imgRgba) may be mirrored below.
    Mat imgCopy = new Mat();
    imgRgba.copyTo(imgCopy);
    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }

    long time = new Date().getTime();
    // NOTE(review): && binds tighter than ||, so the timer condition only gates TIME
    // mode and MANUALLY always enters — this appears intended (manual capture needs no
    // timer), but explicit parentheses would make it unambiguous.
    if ((method == MANUALLY) || (method == TIME) && (lastTime + timerDiff < time)) {
        lastTime = time;

        // Check that only 1 face is found. Skip if any or more than 1 are found.
        Mat img = ppF.getCroppedImage(imgCopy);
        if (img != null) {
            Rect[] faces = ppF.getFacesForRecognition();
            // Only proceed if 1 face has been detected, ignore if 0 or more than 1 face have been detected
            if ((faces != null) && (faces.length == 1)) {
                faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                // Capture when the user pressed the button (MANUALLY) or on every timer tick (TIME).
                if (((method == MANUALLY) && capturePressed) || (method == TIME)) {
                    // File name pattern: <person name>_<running total>
                    MatName m = new MatName(name + "_" + total, img);
                    if (folder.equals("Test")) {
                        String wholeFolderPath = fh.TEST_PATH + name + "/" + subfolder;
                        new File(wholeFolderPath).mkdirs();
                        fh.saveMatToImage(m, wholeFolderPath + "/");
                    } else {
                        String wholeFolderPath = fh.TRAINING_PATH + name;
                        new File(wholeFolderPath).mkdirs();
                        fh.saveMatToImage(m, wholeFolderPath + "/");
                    }

                    // Label the detected face with the current capture count on the preview.
                    for (int i = 0; i < faces.length; i++) {
                        MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], String.valueOf(total),
                                front_camera);
                    }

                    total++;

                    // Stop after numberOfPictures (settings option)
                    if (total >= numberOfPictures) {
                        Intent intent = new Intent(getApplicationContext(), AddPersonActivity.class);
                        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                        startActivity(intent);
                    }
                    capturePressed = false;
                } else {
                    // Not capturing this frame: just show where the face is.
                    for (int i = 0; i < faces.length; i++) {
                        MatOperation.drawRectangleOnPreview(imgRgba, faces[i], front_camera);
                    }
                }
            }
        }
    }

    return imgRgba;
}

From source file:ch.zhaw.facerecognition.Activities.RecognitionActivity.java

License:Open Source License

/**
 * Per-frame handler for the recognition screen: runs face detection/recognition on the
 * frame and draws a labelled rectangle for every recognized face on the preview.
 * Returns the (possibly annotated) preview frame.
 */
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat preview = inputFrame.rgba();

    // Recognition runs on an unflipped copy; only the preview gets mirrored below.
    Mat working = new Mat();
    preview.copyTo(working);
    List<Mat> processed = ppF.getProcessedImage(working);
    Rect[] detected = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(preview, preview, 1);
    }

    // Skip the frame unless detection produced a non-empty, consistent result
    // (one processed image per detected face).
    boolean usable = processed != null && processed.size() > 0 && detected != null
            && detected.length > 0 && processed.size() == detected.length;
    if (!usable) {
        return preview;
    }

    detected = MatOperation.rotateFaces(preview, detected, ppF.getAngleForRecognition());
    for (int idx = 0; idx < detected.length; idx++) {
        MatOperation.drawRectangleAndLabelOnPreview(preview, detected[idx],
                rec.recognize(processed.get(idx), ""), front_camera);
    }
    return preview;
}

From source file:ch.zhaw.facerecognition.Activities.TrainingActivity.java

License:Open Source License

@Override
public void onResume() {
    super.onResume();

    // Background training pass: walk every person folder under the training directory,
    // preprocess each image, feed it to the selected recognition algorithm, then train
    // and return to MainActivity with a success/failure message. UI updates are posted
    // back to the main looper.
    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if (!Thread.currentThread().isInterrupted()) {
                PreProcessorFactory ppF = new PreProcessorFactory();
                String algorithm = PreferencesHelper.getClassificationMethod();

                FileHelper fileHelper = new FileHelper();
                fileHelper.createDataFolderIfNotExsiting();
                final File[] persons = fileHelper.getTrainingList();
                if (persons.length > 0) {
                    Recognition rec = RecognitionFactory.getRecognitionAlgorithm(Recognition.TRAINING,
                            algorithm);
                    // One directory per person; each image inside is one training sample.
                    for (File person : persons) {
                        if (person.isDirectory()) {
                            File[] files = person.listFiles();
                            int counter = 1;
                            for (File file : files) {
                                if (FileHelper.isFileAnImage(file)) {
                                    Mat imgRgb = Imgcodecs.imread(file.getAbsolutePath());
                                    Imgproc.cvtColor(imgRgb, imgRgb, Imgproc.COLOR_BGRA2RGBA);
                                    Mat processedImage = new Mat();
                                    imgRgb.copyTo(processedImage);
                                    List<Mat> images = ppF.getProcessedImage(processedImage);
                                    if (images == null || images.size() > 1) {
                                        // More than 1 face detected --> cannot use this file for training
                                        continue;
                                    } else {
                                        processedImage = images.get(0);
                                    }
                                    if (processedImage.empty()) {
                                        continue;
                                    }
                                    // The last token is the name --> Folder name = Person name
                                    String[] tokens = file.getParent().split("/");
                                    final String name = tokens[tokens.length - 1];

                                    // Persist the preprocessed face for inspection/debugging.
                                    MatName m = new MatName("processedImage", processedImage);
                                    fileHelper.saveMatToImage(m, FileHelper.DATA_PATH);

                                    rec.addImage(processedImage, name, false);

                                    // Update screen to show the progress
                                    final int counterPost = counter;
                                    final int filesLength = files.length;
                                    progress.post(new Runnable() {
                                        @Override
                                        public void run() {
                                            progress.append("Image " + counterPost + " of " + filesLength
                                                    + " from " + name + " imported.\n");
                                        }
                                    });

                                    counter++;
                                }
                            }
                        }
                    }
                    // All images imported: train, then hand the result back to MainActivity.
                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    if (rec.train()) {
                        intent.putExtra("training", "Training successful");
                    } else {
                        intent.putExtra("training", "Training failed");
                    }
                    // startActivity must run on the main thread.
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                } else {
                    // Nothing to train on — stop quietly.
                    Thread.currentThread().interrupt();
                }
            }
        }
    });
    thread.start();
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.FileHelper.java

License:Open Source License

/**
 * Saves a cropped (face) version of {@code img} next to the original file, under a
 * "cropped" subfolder, as {@code <name>_<number>}. Does nothing if that file already
 * exists. The input mat is not modified (cropping runs on a copy).
 */
public void saveCroppedImage(Mat img, PreProcessorFactory ppF, File file, String name, int number) {
    String parentPath = file.getParentFile().getAbsolutePath();

    // Skip work entirely if the cropped image was already produced earlier.
    File croppedFile = new File(parentPath + "/cropped/" + name + "_" + number);
    if (croppedFile.exists()) {
        return;
    }

    // Make sure the target folder exists (mkdir is a no-op if it already does).
    new File(parentPath + "/cropped").mkdir();

    Mat working = new Mat();
    img.copyTo(working);
    working = ppF.getCroppedImage(working);
    MatName mat = new MatName(name + "_" + number, working);
    saveMatToImage(mat, parentPath + "/cropped/");
}

From source file:classes.BlobsFinder.java

public void findBlobContours() {

    // Blob-extraction pipeline: sharpen -> adaptive threshold -> close -> find contours
    // -> redraw filled -> erode -> re-find contours -> keep those above MIN_AREA.
    // Populates the instance collections validContours, contourPaths, topLeftCorners and
    // contoursAreas, and saves every intermediate image for debugging.

    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    ImageUtils.saveImage(grayImage, outImageName + "_grayImage.png", request);

    // Unsharp mask: subtract a Gaussian-blurred copy (weight -1) from a 1.5x-weighted
    // original to sharpen edges before thresholding.
    Mat gaussianImage = new Mat();
    Imgproc.GaussianBlur(grayImage, gaussianImage, new Size(0, 0), 3);
    Core.addWeighted(grayImage, 1.5, gaussianImage, -1, 0, gaussianImage);
    ImageUtils.saveImage(gaussianImage, outImageName + "_gaussianGrayImage.png", request);

    // Inverted adaptive threshold: blobs become white on black.
    Mat binaryImage = new Mat();
    Imgproc.adaptiveThreshold(gaussianImage, binaryImage, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY_INV, 15, 4);
    ImageUtils.saveImage(binaryImage, outImageName + "_binaryImage.png", request);

    // NOTE(review): despite the name, this mat receives a CLOSE (not erode) below.
    Mat erodedImage = new Mat();

    binaryImage.copyTo(erodedImage);

    Mat structuringElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Point anchor = new Point(-1, -1); // (-1,-1) = kernel center

    // Close small gaps so each blob becomes a single connected component.
    Imgproc.morphologyEx(erodedImage, erodedImage, Imgproc.MORPH_CLOSE, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedImage, outImageName + "_erodedImage.png", request);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.findContours(erodedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    Mat originalContoursImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Scalar contourColor = new Scalar(255);
    int thickness = -1; // Thickness below zero makes drawContours fill the contours
    Imgproc.drawContours(originalContoursImage, contours, -1, contourColor, thickness); // Drawing all the contours found
    ImageUtils.saveImage(originalContoursImage, outImageName + "_originalContoursImage.png", request);

    // Erode the filled mask to separate blobs that the close step may have merged.
    Mat erodedContoursImage = new Mat();
    Imgproc.erode(originalContoursImage, erodedContoursImage, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedContoursImage, outImageName + "_erodedContoursImage.png", request);

    // Second contour pass over the eroded mask yields the final candidate blobs.
    ArrayList<MatOfPoint> finalContours = new ArrayList<MatOfPoint>();
    Mat finalContourImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Imgproc.findContours(erodedContoursImage, finalContours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Keep only contours above the area threshold; record their path, corner and area.
    for (int i = 0; i < finalContours.size(); i++) {
        MatOfPoint currentContour = finalContours.get(i);
        double area = Imgproc.contourArea(currentContour);
        if (area > MIN_AREA) {

            validContours.add(currentContour);

            String fabricPath = generateFabricPathString(currentContour);
            contourPaths.add(fabricPath);

            Rect boundingRect = Imgproc.boundingRect(currentContour);
            topLeftCorners.add(boundingRect.tl());

            contoursAreas.add(area);
        }
    }

    // Drawing ALL the valid contours
    Imgproc.drawContours(finalContourImage, validContours, -1, contourColor, thickness);
    ImageUtils.saveImage(finalContourImage, outImageName + "_finalContourImage.png", request);

}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

@Override
public Mat applyCannyEdgeDetection(Integer minThreshold, Integer maxThreshold, Boolean useImage) {
    // Run the Canny filter over the working image and keep the edge map as the new
    // current image.
    CoreOperations.cannyFilter(currentImage, minThreshold, maxThreshold).copyTo(currentImage);

    // When requested, overlay the detected edges onto a copy of the source image
    // (edge pixels painted as (255, 0, 0)) and make that the current image instead.
    if (Boolean.TRUE.equals(useImage)) {
        Mat overlay = sourceImage.clone();

        for (int y = 0; y < overlay.rows(); y++) {
            for (int x = 0; x < overlay.cols(); x++) {
                boolean isEdge = currentImage.get(y, x)[0] != 0;
                if (isEdge) {
                    overlay.put(y, x, 255.0, 0.0, 0.0);
                }
            }
        }

        overlay.copyTo(currentImage);
    }

    return currentImage;
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

@Override
public Mat applyMathMorphology(Integer radius) {
    // Morphological closing with an elliptical structuring element whose diameter is
    // derived from the requested radius; the result replaces the current image.
    int diameter = radius * 2 + 1;
    Mat element = getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(diameter, diameter),
            new Point(radius, radius));

    Mat closed = new Mat();
    Imgproc.morphologyEx(currentImage, closed, MORPH_CLOSE, element, new Point(-1, -1), 1);

    closed.copyTo(currentImage);
    closed.release(); // free the native buffer of the temporary right away
    return currentImage;
}