Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usage for org.opencv.core Scalar Scalar.

Prototype

public Scalar(double v0, double v1, double v2) 
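
A minimal sketch of how this three-argument constructor is typically used (the class name, image size and the call to Imgproc.line below are illustrative, assuming OpenCV 3.x-style Java bindings; for 8-bit 3-channel images the draw functions read the three values in BGR order):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class ScalarExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A Scalar built from three doubles; for a CV_8UC3 image this is read as (blue, green, red).
        Scalar red = new Scalar(0, 0, 255);

        // Create a white 100x100 image and draw a red diagonal line across it.
        Mat image = new Mat(100, 100, CvType.CV_8UC3, new Scalar(255, 255, 255));
        Imgproc.line(image, new Point(0, 0), new Point(99, 99), red, 2);
    }
}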

Source Link

Usage

From source file:ImageReade.java

public static void detectLetter(Mat img) {
    ArrayList<Rect> boundRect = new ArrayList<>();
    Mat img_gray, img_sobel, img_threshold, element;
    img_gray = new Mat();
    img_sobel = new Mat();
    img_threshold = new Mat();
    element = new Mat();
    Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY);
    imshow("Rec img_gray", img_gray);
    Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8U, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT);
    imshow("Rec img_sobel", img_sobel);
    Imgproc.threshold(img_sobel, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
    imshow("Rec img_threshold", img_threshold);

    element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(16, 6));

    Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element);
    imshow("Rec img_threshold second", img_threshold);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE);
    Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1); // 0 = Imgproc.RETR_EXTERNAL, 1 = Imgproc.CHAIN_APPROX_NONE

    for (int i = 0; i < contours.size(); i++) {
        System.out.println(Imgproc.contourArea(contours.get(i)));
        //            if (Imgproc.contourArea(contours.get(i)) > 100) {
        //                //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true );
        //                Rect rect = Imgproc.boundingRect(contours.get(i));
        //                System.out.println(rect.height);
        //                if (rect.width > rect.height) {
        //                    //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
        //                    Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255));
        //                }
        //                    
        //                    
        //            }
        if (Imgproc.contourArea(contours.get(i)) > 100) {
            MatOfPoint2f mMOP2f1 = new MatOfPoint2f();
            MatOfPoint2f mMOP2f2 = new MatOfPoint2f();
            contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2);
            Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 3, true);
            mMOP2f2.convertTo(contours.get(i), CvType.CV_32S);
            Rect rect = Imgproc.boundingRect(contours.get(i));
            if (rect.width > rect.height) {
                Core.rectangle(img, new Point(rect.x, rect.y),
                        new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
            }
        }
    }
    imshow("Rec Detected", img);
}
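
Note that CV_THRESH_OTSU, CV_THRESH_BINARY and CV_MOP_CLOSE appear to come from constants defined or imported elsewhere in ImageReade.java rather than from the standard Java Imgproc class. Using only the constants exposed by org.opencv.imgproc.Imgproc, the equivalent calls would presumably be:

// Equivalent calls using the standard Imgproc constants:
Imgproc.threshold(img_sobel, img_threshold, 0, 255, Imgproc.THRESH_BINARY + Imgproc.THRESH_OTSU);
Imgproc.morphologyEx(img_threshold, img_threshold, Imgproc.MORPH_CLOSE, element);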

From source file:Fiji_OpenCV.java

License:Creative Commons License

public void process(int[] pixels) {
    int channels = 3;
    byte[] buf = new byte[pixels.length * channels];
    for (int i = 0; i < pixels.length; i++) {
        buf[i * channels] = (byte) (0x000000ff & (pixels[i]));
        buf[i * channels + 1] = (byte) (0x000000ff & (pixels[i] >>> 8));
        buf[i * channels + 2] = (byte) (0x000000ff & (pixels[i] >>> 16));
    }

    Mat image = new Mat(height, width, CvType.CV_8UC3); // Mat(rows, cols, type): rows = height, cols = width
    image.put(0, 0, buf);

    // Create a face detector from the cascade file in the resources
    // directory.
    CascadeClassifier faceDetector = new CascadeClassifier(
            getClass().getResource("/opencv/data/haarcascades/haarcascade_frontalface_alt2.xml").getPath());

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    image.get(0, 0, buf);
    for (int i = 0; i < pixels.length; i++) {
        pixels[i] = 0x80000000 + ((int) (buf[i * channels + 2]) << 16) + ((int) (buf[i * channels + 1]) << 8)
                + ((int) (buf[i * channels + 0]));
    }
    this.ip = new ColorProcessor(width, height, pixels);
}

From source file:ThirdTry.java

public static void detectLetter(Mat img, Mat m2) {
    ArrayList<Rect> boundRect = new ArrayList<>();
    Mat img_gray, img_sobel, img_threshold, element;
    img_gray = new Mat();
    img_sobel = new Mat();
    img_threshold = new Mat();
    element = new Mat();
    Imgproc.cvtColor(img, img_gray, Imgproc.COLOR_BGRA2GRAY);
    //imshow("Rec img_gray", img_gray);
    Imgproc.Sobel(img_gray, img_sobel, CvType.CV_8UC1, 1, 0, 3, 1, 0, Imgproc.BORDER_DEFAULT);
    //imshow("Rec img_sobel", img_sobel);
    Imgproc.threshold(m2, img_threshold, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
    //imshow("Rec img_threshold", img_threshold);

    element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 2));

    Imgproc.morphologyEx(m2, img_threshold, CV_MOP_CLOSE, element);
    imshow("Rec img_threshold second", img_threshold);

    element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(12, 12));
    Imgproc.morphologyEx(img_threshold, img_threshold, CV_MOP_CLOSE, element);
    //imshow("Rec img_threshold second", img_threshold);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    //Imgproc.findContours(img_threshold, contours, new Mat(), Imgproc.RETR_LIST,Imgproc.CHAIN_APPROX_SIMPLE);
    Imgproc.findContours(img_threshold, contours, new Mat(), 0, 1); // 0 = Imgproc.RETR_EXTERNAL, 1 = Imgproc.CHAIN_APPROX_NONE

    for (int i = 0; i < contours.size(); i++) {
        System.out.println(Imgproc.contourArea(contours.get(i)));
        //            if (Imgproc.contourArea(contours.get(i)) > 100) {
        //                //Imgproc.approxPolyDP( contours.get(i), contours_poly[i], 3, true );
        //                Rect rect = Imgproc.boundingRect(contours.get(i));
        //                System.out.println(rect.height);
        //                if (rect.width > rect.height) {
        //                    //System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
        //                    Core.rectangle(img, new Point(rect.x,rect.y), new Point(rect.x+rect.width,rect.y+rect.height),new Scalar(0,0,255));
        //                }
        //                    
        //                    
        //            }
        if (Imgproc.contourArea(contours.get(i)) > 100) {
            MatOfPoint2f mMOP2f1 = new MatOfPoint2f();
            MatOfPoint2f mMOP2f2 = new MatOfPoint2f();
            contours.get(i).convertTo(mMOP2f1, CvType.CV_32FC2);
            Imgproc.approxPolyDP(mMOP2f1, mMOP2f2, 3, true);
            mMOP2f2.convertTo(contours.get(i), CvType.CV_32S);
            Rect rect = Imgproc.boundingRect(contours.get(i));
            if (rect.width > rect.height) {
                Core.rectangle(img, new Point(rect.x, rect.y),
                        new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
            }
        }
    }
    //imshow("Rec Detected", img);
}

From source file:LicenseDetection.java

public void run() {

    // ------------------ set up tesseract for later use ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------  Save image first ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ Convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ Blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ****************** Create image that will be cropped at end of program before OCR ***************************

    // ------------------ Binary threshold for OCR (used later)------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ Erosion operation------------------
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern); // MORPH_DILATE grows the white background here, which effectively erodes the dark features
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    //------------------ Dilation operation  ------------------
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);

    // *************************************************************************************************************

    //        // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    //        Mat k = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    //        Mat imgCloseOCR = new Mat();
    //        Imgproc.morphologyEx(imgThresholdOCR,imgCloseOCR,1,k);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgCloseOCR.png", imgCloseOCR);

    // ------------------ Sobel vertical edge detection ------------------
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ Binary threshold ------------------
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    //        // ------------------ Open operation (erosion followed by dilation) ------------------
    //        Mat ker = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 2));
    //        Mat imgOpen = new Mat();
    //        Imgproc.morphologyEx(imgThreshold,imgOpen,0,ker);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgOpen.png", imgOpen);

    // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel); // note: op code 1 corresponds to Imgproc.MORPH_DILATE (MORPH_CLOSE is 3)
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ Find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();

    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // **************************** DEBUG CODE **************************

    Mat contourImg = new Mat(imgClose.size(), imgClose.type());
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);

    // ******************************************************************

    // --------------  Convert contours --------------------

    //Convert to MatOfPoint2f so that minAreaRect can be called
    List<MatOfPoint2f> newContours = new ArrayList<>();

    for (MatOfPoint mat : contours) {

        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);

    }

    //Get minAreaRects
    List<RotatedRect> minAreaRects = new ArrayList<>();

    for (MatOfPoint2f mat : newContours) {

        RotatedRect rect = Imgproc.minAreaRect(mat);

        /*
         --------------- BUG WORKAROUND ------------

        Possible bug:
        When converting from MatOfPoint2f to RotatedRect, the width and height were reversed and the
        angle was -90 degrees from what it would be if the width and height were correct.

        When painting the rectangle on the image, the correct boxes were produced, but performing
        calculations on rect.angle, rect.width, or rect.height yielded unwanted results.

        The following workaround is buggy but works for my purpose.
         */

        if (rect.size.width < rect.size.height) {
            double temp;

            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;

        }

        //check aspect ratio and area and angle
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000 && rect.size.width * rect.size.height < 50000
                && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }

        //minAreaRects.add(rect);
    }

    // **************************** DEBUG CODE **************************
    /*
    The following code is used to draw the rectangles on top of the original image for debugging purposes
     */
    //Draw Rotated Rects
    Point[] vertices = new Point[4];

    Mat imageWithBoxes = img;

    // Draw color rectangles on top of binary contours
    //        Mat imageWithBoxes = new Mat();
    //        Mat temp = imgDilateOCR;
    //        Imgproc.cvtColor(temp, imageWithBoxes, Imgproc.COLOR_GRAY2RGB);

    for (RotatedRect rect : minAreaRects) {

        rect.points(vertices);

        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }

    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);

    // ******************************************************************

    // **************************** DEBUG CODE **************************
    //        for(RotatedRect rect : minAreaRects) {
    //            System.out.println(rect.toString());
    //        }
    // ******************************************************************

    /*
    In order to rotate the image without cropping it:

    1. Create a new square image with dimension = diagonal of the initial image.
    2. Draw the initial image into the center of the new image
       (insert the initial image at an ROI (Region of Interest) in the new image).
    3. Rotate the new image.
     */

    //Find diagonal/hypotenuse
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));

    //New Mat with hypotenuse as height and width
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, 0); // type 0 == CvType.CV_8UC1

    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; //x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; //y start of ROI

    //designate region of interest
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());

    //Insert image into region of interest
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); //Mat to hold temporarily rotated mat
    Mat rectMat = new Mat();//Mat to hold rect contents (needed for looping through pixels)
    Point[] rectVertices = new Point[4];//Used to build rect to make ROI
    Rect rec = new Rect();

    List<RotatedRect> edgeDensityRects = new ArrayList<>(); //populate new arraylist with rects that satisfy edge density

    int count = 0;

    //Loop through Rotated Rects and find edge density
    for (RotatedRect rect : minAreaRects) {

        count++;

        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);

        //rotate image to match orientation of rotated rect
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);

        //remove rect rotation
        rect.angle = 0;

        //get vertices from rotatedRect
        rect.points(rectVertices);

        // **************************** DEBUG CODE **************************
        //
        //            for (int k = 0; k < 4; k++) {
        //                System.out.println(rectVertices[k]);
        //                Imgproc.line(rotatedTemp, rectVertices[k], rectVertices[(k + 1) % 4], new Scalar(0, 0, 255), 2);
        //            }
        //
        //            Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotated" + count + ".png", rotatedTemp);

        // *****************************************************************

        //build rect to use as ROI
        rec = new Rect(rectVertices[1], rectVertices[3]);

        rectMat = rotatedTemp.submat(rec);

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);

        //find edge density

        //            // ------------------------ edge density check NOT IMPLEMENTED --------------------
        //            /*
        //            Checking for edge density was not necessary for this image so it was not implemented due to lack of time
        //             */
        //            for(int i = 0; i < rectMat.rows(); ++i){
        //                for(int j = 0; j < rectMat.cols(); ++j){
        //
        //                  //add up white pixels
        //                }
        //            }
        //
        //            //check number of white pixels against total pixels
        //            //only add rects to new arraylist that satisfy threshold

        edgeDensityRects.add(rect);
    }

    // **************************** DEBUG CODE **************************

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);
    //Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpaceROTATED.png", rotatedTemp);

    //System.out.println(imgGray.type());

    // *****************************************************************

    // if there is only one rectangle left, it's the license plate
    if (edgeDensityRects.size() == 1) {

        String result = ""; //Hold result from OCR
        BufferedImage bimg;
        Mat cropped;

        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);

        bimg = matToBufferedImage(cropped);

        BufferedImage image = bimg;

        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }

        for (int i = 0; i < 10; ++i) {

        }

        result = result.replace("\n", "");

        System.out.println(result);

        CarProfDBImpl db = new CarProfDBImpl();

        db.connect("localhost:3306/computer_vision", "root", "*******");

        CarProf c = db.getCarProf(result);

        System.out.print(c.toString());

        db.close();

    }

}
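
The run() method above relies on a rotate(Mat, Mat, Point, double) helper that is not part of this excerpt. A minimal sketch of such a helper, assuming it rotates the source around the given center by an angle in degrees while keeping the original size, might look like this:

// Hypothetical helper, not taken from LicenseDetection.java.
private static void rotate(Mat src, Mat dst, Point center, double angle) {
    // 2x3 affine matrix for a rotation of 'angle' degrees about 'center' (scale 1.0).
    Mat rotationMatrix = Imgproc.getRotationMatrix2D(center, angle, 1.0);
    // Apply the rotation; the destination keeps the source dimensions.
    Imgproc.warpAffine(src, dst, rotationMatrix, src.size());
}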

From source file:KoImgProc.java

License:Open Source License

/**
 * Draws a straight line from the top (or rightmost) coordinate to the bottom (or leftmost)
 * coordinate along the line that corresponds to the average origin of the trendline.
 * Also draws a filled circle at the top and bottom of the trendline to indicate its length.
 * For example, for a trendline oriented in the y direction, the x-coordinate of the line
 * corresponds to the average x-coordinate of the constituents of the trendline.
 * @param trendline
 *             Trendline to draw.
 * @param image
 *             Mat image to draw the trendline onto.
 * @param circleConstituents
 *             If true, also draw a circle around each of the constituents of the trendline.
 */
public static void drawTrendline(KoTrendline trendline, Mat image, boolean circleConstituents) {
    Point top = trendline.getTopPoint();
    Point bottom = trendline.getBottomPoint();
    Scalar blue = new Scalar(255, 0, 0);
    // Draw a line between the highest (right-most) stone and the lowest (left-most) stone
    Core.line(image, bottom, top, blue, TRENDLINE_THICKNESS);
    // Draw a filled circle at the top and bottom of the trendline.
    Core.circle(image, bottom, TRENDLINE_DELINEATOR_RADIUS, blue, CIRCLE_FILL_THICKNESS, CIRCLE_LINE_TYPE, 0);
    Core.circle(image, top, TRENDLINE_DELINEATOR_RADIUS, blue, CIRCLE_FILL_THICKNESS, CIRCLE_LINE_TYPE, 0);
    if (circleConstituents) {
        for (KoCircle stone : trendline.getConstituents()) {
            outlineCircle(image, stone, blue, 20, false);
        }
    }
}

From source file:abc.RomanCharacterPicture.java

public int evaluatePicture() {
    try {
        ITesseract instance = new Tesseract();

        MatToBufImg webcamImageBuff = new MatToBufImg();

        webcamImageBuff.setMatrix(webcam_image, ".jpg");
        double heightRatio = (double) webcamImageBuff.getBufferedImage().getHeight()
                / (double) webcam_image.height();
        double widthRatio = (double) webcamImageBuff.getBufferedImage().getWidth()
                / (double) webcam_image.width();
        int x1 = this.leftRectangle.getxPos();
        int y1 = this.leftRectangle.getyPos();
        int x2 = this.rightRectangle.getxPos();
        int y2 = this.rightRectangle.getyPos();
        Rect rect = new Rect(leftRectangle.getxPos(), leftRectangle.getyPos(),
                (rightRectangle.getxPos() - leftRectangle.getxPos()),
                (rightRectangle.getyPos() - leftRectangle.getyPos()));
        //Rect rect = new Rect(new Point(leftRectangle.getxPos(), leftRectangle.getyPos()), new Point(leftRectangle.getxPos(), rightRectangle.getyPos()), , (rightRectangle.getxPos()-leftRectangle.getxPos()));
        Mat subImageMat = webcam_image.submat(rect);

        BufferedImage romanCharacter = webcamImageBuff.getBufferedImage().getSubimage((int) (x1 * widthRatio),
                (int) (y1 * heightRatio), (int) (widthRatio * (x2 - x1)), (int) (heightRatio * (y2 - y1)));

        //int[] pixels = ((DataBufferInt) romanCharacter.getRaster().getDataBuffer()).getData();
        //Mat subImageMat = new Mat(romanCharacter.getHeight(), romanCharacter.getWidth(), CvType.CV_8UC3);
        //subImageMat.put(0, 0, pixels);

        Mat hsv_image = new Mat();
        Imgproc.cvtColor(subImageMat, hsv_image, Imgproc.COLOR_BGR2HSV);

        Mat lower_black_hue_range = new Mat();
        Mat upper_black_hue_range = new Mat();

        Core.inRange(hsv_image, new Scalar(0, 0, 0), new Scalar(180, 255, 30), lower_black_hue_range);
        Core.inRange(hsv_image, new Scalar(0, 0, 20), new Scalar(180, 255, 40), upper_black_hue_range);

        Mat black_hue_image = new Mat();
        Core.addWeighted(lower_black_hue_range, 1.0, upper_black_hue_range, 1.0, 0.0, black_hue_image);

        Imgproc.GaussianBlur(black_hue_image, black_hue_image, new Size(9, 9), 2, 2);

        MatToBufImg blackImageBuff = new MatToBufImg();

        blackImageBuff.setMatrix(black_hue_image, ".jpg");
        BufferedImage test = blackImageBuff.getBufferedImage();

        //ImageIO.write(test, "PNG", new FileOutputStream((Math.round(Math.random()*1000))+"dst.png"));
        String result = instance.doOCR(test);
        int counterI = 0;
        for (int i = 0; i < result.length(); i++) {
            if (result.charAt(i) == 'I' || result.charAt(i) == 'l' || result.charAt(i) == '1'
                    || result.charAt(i) == 'i' || result.charAt(i) == 'L' || result.charAt(i) == 'j'
                    || result.charAt(i) == 'J') {
                counterI++;
            }
        }

        int counterV = 0;
        for (int i = 0; i < result.length(); i++) {
            if (result.charAt(i) == 'V' || result.charAt(i) == 'v' || result.charAt(i) == 'W'
                    || result.charAt(i) == 'w' || result.contains("\\//")) {
                counterV++;
            }
        }
        //System.out.println("Result: "+result+ " calc:" + (counterI + (counterV * 5)));
        return (counterI + (counterV * 5));
    } catch (Exception ex) {
        //System.out.println(ex.getMessage());
        ex.printStackTrace();
        return 0;
    }

}
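
MatToBufImg is not an OpenCV class, so the implementation used here is unknown. One plausible sketch, assuming it simply encodes the Mat to an in-memory image and decodes it with ImageIO (imports from org.opencv.core, org.opencv.imgcodecs, javax.imageio and java.io are assumed):

// Hypothetical sketch; the real MatToBufImg used by RomanCharacterPicture is not shown.
class MatToBufImg {
    private Mat matrix;
    private String fileExtension;

    public void setMatrix(Mat matrix, String fileExtension) {
        this.matrix = matrix;
        this.fileExtension = fileExtension;
    }

    public BufferedImage getBufferedImage() throws IOException {
        // Encode the Mat into an in-memory image file, then decode it with ImageIO.
        MatOfByte buffer = new MatOfByte();
        Imgcodecs.imencode(fileExtension, matrix, buffer);
        return ImageIO.read(new ByteArrayInputStream(buffer.toArray()));
    }
}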

From source file:ac.robinson.ticqr.TickBoxImageParserTask.java

License:Apache License

@Override
protected ArrayList<PointF> doInBackground(Void... unused) {
    Log.d(TAG, "Searching for tick boxes of " + mBoxSize + " size");

    // we look for *un-ticked* boxes, rather than ticked, as they are uniform in appearance (and hence easier to
    // detect) - they show up as a box within a box
    ArrayList<PointF> centrePoints = new ArrayList<>();
    int minimumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize, 2));
    int maximumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize * 1.35f, 2));
    int minimumInnerBoxArea = (int) Math.round(Math.pow(mBoxSize * 0.5f, 2));

    // image adjustment - blurSize, blurSTDev and adaptiveThresholdSize must not be even numbers
    int blurSize = 9;
    int blurSTDev = 3;
    int adaptiveThresholdSize = Math.round(mBoxSize * 3); // (oddness ensured below)
    int adaptiveThresholdC = 4; // value to add to the mean (can be negative or zero)
    adaptiveThresholdSize = adaptiveThresholdSize % 2 == 0 ? adaptiveThresholdSize + 1 : adaptiveThresholdSize;

    // how similar the recognised polygon must be to its actual contour - lower is more similar
    float outerPolygonSimilarity = 0.045f;
    float innerPolygonSimilarity = 0.075f; // don't require as much accuracy for the inner part of the tick box

    // how large the maximum internal angle can be (e.g., for checking square shape)
    float maxOuterAngleCos = 0.3f;
    float maxInnerAngleCos = 0.4f;

    // use OpenCV to recognise boxes that have a box inside them - i.e. an un-ticked tick box
    // see: http://stackoverflow.com/a/11427501
    // Bitmap newBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true); // not needed
    Mat bitMat = new Mat();
    Utils.bitmapToMat(mBitmap, bitMat);

    // blur and convert to grey
    // alternative (less flexible): Imgproc.medianBlur(bitMat, bitMat, blurSize);
    Imgproc.GaussianBlur(bitMat, bitMat, new Size(blurSize, blurSize), blurSTDev, blurSTDev);
    Imgproc.cvtColor(bitMat, bitMat, Imgproc.COLOR_RGB2GRAY); // need 8uC1 (1 channel, unsigned char) image type

    // perform adaptive thresholding to detect edges
    // alternative (slower): Imgproc.Canny(bitMat, bitMat, 10, 20, 3, false);
    Imgproc.adaptiveThreshold(bitMat, bitMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY,
            adaptiveThresholdSize, adaptiveThresholdC);

    // get the contours in the image, and their hierarchy
    Mat hierarchyMat = new Mat();
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(bitMat, contours, hierarchyMat, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    if (DEBUG) {
        Imgproc.drawContours(bitMat, contours, -1, new Scalar(30, 255, 255), 1);
    }

    // parse the contours and look for a box containing another box, with similar enough sizes
    int numContours = contours.size();
    ArrayList<Integer> searchedContours = new ArrayList<>();
    Log.d(TAG, "Found " + numContours + " possible tick box areas");
    if (numContours > 0 && !hierarchyMat.empty()) {
        for (int i = 0; i < numContours; i++) {

            // the original detected contour
            MatOfPoint boxPoints = contours.get(i);

            // hierarchy key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
            int childBox = (int) hierarchyMat.get(0, i)[2]; // usually the largest child (as we're doing RETR_TREE)
            if (childBox == -1) { // we only want elements that have children
                continue;
            } else {
                if (searchedContours.contains(childBox)) {
                    if (DEBUG) {
                        Log.d(TAG, "Ignoring duplicate box at first stage: " + childBox);
                    }
                    continue;
                } else {
                    searchedContours.add(childBox);
                }
            }

            // discard smaller (i.e. noise) outer box areas as soon as possible for speed
            // used to do Imgproc.isContourConvex(outerPoints) later, but the angle check covers this, so no need
            double originalArea = Math.abs(Imgproc.contourArea(boxPoints));
            if (originalArea < minimumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too small");
                // }
                continue;
            }
            if (originalArea > maximumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too big");
                // }
                continue;
            }

            // simplify the contours of the outer box - we want to detect four-sided shapes only
            MatOfPoint2f boxPoints2f = new MatOfPoint2f(boxPoints.toArray()); // Point2f for approxPolyDP
            Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                    outerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true); // simplify the contour
            if (boxPoints2f.height() != 4) { // height is number of points
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer box not 4 points");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, angle-wise
            org.opencv.core.Point[] boxPointsArray = boxPoints2f.toArray();
            double maxCosine = 0;
            for (int j = 0; j < 4; j++) {
                org.opencv.core.Point pL = boxPointsArray[j];
                org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                getLineAngle(pL, pIntersect, pR);
                maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
            }
            if (maxCosine > maxOuterAngleCos) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer angles not square enough");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, line length-wise
            double minLine = Double.MAX_VALUE;
            double maxLine = 0;
            for (int p = 1; p < 4; p++) {
                org.opencv.core.Point p1 = boxPointsArray[p - 1];
                org.opencv.core.Point p2 = boxPointsArray[p];
                double xd = p1.x - p2.x;
                double yd = p1.y - p2.y;
                double lineLength = Math.sqrt((xd * xd) + (yd * yd));
                minLine = Math.min(minLine, lineLength);
                maxLine = Math.max(maxLine, lineLength);
            }
            if (maxLine - minLine > minLine) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer lines not square enough");
                }
                continue;
            }

            // draw the outer box if debugging
            if (DEBUG) {
                MatOfPoint debugBoxPoints = new MatOfPoint(boxPointsArray);
                Log.d(TAG,
                        "Potential tick box: " + boxPoints2f.size() + ", " + "area: "
                                + Math.abs(Imgproc.contourArea(debugBoxPoints)) + " (min:" + minimumOuterBoxArea
                                + ", max:" + maximumOuterBoxArea + ")");
                drawPoints(bitMat, debugBoxPoints, new Scalar(50, 255, 255), 2);
            }

            // loop through the children - they should be in descending size order, but sometimes this is wrong
            boolean wrongBox = false;
            while (true) {
                if (DEBUG) {
                    Log.d(TAG, "Looping with box: " + childBox);
                }

                // we've previously tried a child - try the next one
                // key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
                if (wrongBox) {
                    childBox = (int) hierarchyMat.get(0, childBox)[0];
                    if (childBox == -1) {
                        break;
                    }
                    if (searchedContours.contains(childBox)) {
                        if (DEBUG) {
                            Log.d(TAG, "Ignoring duplicate box at loop stage: " + childBox);
                        }
                        break;
                    } else {
                        searchedContours.add(childBox);
                    }
                    //noinspection UnusedAssignment
                    wrongBox = false;
                }

                // perhaps this is the outer box - check its child has no children itself
                // (removed so tiny children (i.e. noise) don't mean we mis-detect an un-ticked box as ticked)
                // if (hierarchyMat.get(0, childBox)[2] != -1) {
                // continue;
                // }

                // check the size of the child box is large enough
                boxPoints = contours.get(childBox);
                originalArea = Math.abs(Imgproc.contourArea(boxPoints));
                if (originalArea < minimumInnerBoxArea) {
                    if (DEBUG) {
                        // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                        Log.d(TAG, "Inner box too small");
                    }
                    wrongBox = true;
                    continue;
                }

                // simplify the contours of the inner box - again, we want four-sided shapes only
                boxPoints2f = new MatOfPoint2f(boxPoints.toArray());
                Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                        innerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true);
                if (boxPoints2f.height() != 4) { // height is number of points
                    // if (DEBUG) {
                    // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                    // }
                    Log.d(TAG, "Inner box fewer than 4 points"); // TODO: allow > 4 for low quality images?
                    wrongBox = true;
                    continue;
                }

                // check that the simplified inner box is approximately a square, angle-wise
                // higher tolerance because noise means if we get several inners, the box may not be quite square
                boxPointsArray = boxPoints2f.toArray();
                maxCosine = 0;
                for (int j = 0; j < 4; j++) {
                    org.opencv.core.Point pL = boxPointsArray[j];
                    org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                    org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                    getLineAngle(pL, pIntersect, pR);
                    maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
                }
                if (maxCosine > maxInnerAngleCos) {
                    Log.d(TAG, "Inner angles not square enough");
                    wrongBox = true;
                    continue;
                }

                // this is probably an inner box - log if debugging
                if (DEBUG) {
                    Log.d(TAG,
                            "Un-ticked inner box: " + boxPoints2f.size() + ", " + "area: "
                                    + Math.abs(Imgproc.contourArea(new MatOfPoint2f(boxPointsArray)))
                                    + " (min: " + minimumInnerBoxArea + ")");
                }

                // find the inner box centre
                double centreX = (boxPointsArray[0].x + boxPointsArray[1].x + boxPointsArray[2].x
                        + boxPointsArray[3].x) / 4f;
                double centreY = (boxPointsArray[0].y + boxPointsArray[1].y + boxPointsArray[2].y
                        + boxPointsArray[3].y) / 4f;

                // draw the inner box if debugging
                if (DEBUG) {
                    drawPoints(bitMat, new MatOfPoint(boxPointsArray), new Scalar(255, 255, 255), 1);
                    Core.circle(bitMat, new org.opencv.core.Point(centreX, centreY), 3,
                            new Scalar(255, 255, 255));
                }

                // add to the list of boxes to check
                centrePoints.add(new PointF((float) centreX, (float) centreY));
                break;
            }
        }
    }

    Log.d(TAG, "Found " + centrePoints.size() + " un-ticked boxes");
    return centrePoints;
}
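
The getLineAngle(...) helper used above is not shown in this excerpt. Judging from how its result is compared against maxOuterAngleCos and maxInnerAngleCos (values near 0 meaning angles near 90 degrees), a sketch along the lines of the angle test in OpenCV's classic squares sample would be:

// Hypothetical sketch, not taken from TickBoxImageParserTask.java: returns the absolute
// cosine of the angle formed at pIntersect by the segments towards pL and pR.
private static double getLineAngle(org.opencv.core.Point pL, org.opencv.core.Point pIntersect,
        org.opencv.core.Point pR) {
    double dx1 = pL.x - pIntersect.x;
    double dy1 = pL.y - pIntersect.y;
    double dx2 = pR.x - pIntersect.x;
    double dy2 = pR.y - pIntersect.y;
    return Math.abs((dx1 * dx2 + dy1 * dy2)
            / Math.sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10));
}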

From source file:app.AppMain.java

public static Mat crossImage(Mat image, ArrayList<OcrChar[]> foundWords) {
    for (OcrChar[] points : foundWords) {
        System.out.println(points[0].getCenterX() + " " + points[0].getCenterY() + " " + points[1].getCenterX()
                + " " + points[1].getCenterY());
        Imgproc.line(image, new Point(points[0].getCenterX(), points[0].getCenterY()),
                new Point(points[1].getCenterX(), points[1].getCenterY()), new Scalar(0, 0, 0), 1);
    }
    return image;
}

From source file:app.AppMain.java

/**
 * Badly designed main application; use with app.GUI.java for fast results.
 *
 * @param image
 * @param lookUpList
 * @throws IOException
 */
public static void main(String image, String lookUpList) throws IOException {

    //        String imagePath = "resource\\find_small.jpg";
    String imagePath = image;
    File imageFile = new File(imagePath);
    //        File imageFile1 = new File("C:\\Users\\Zygis\\Desktop\\tess_test_data\\Find_1lookup.jpg");

    ArrayList<String> lookupWords = new ArrayList<>();
    ArrayList<OcrChar[]> foundWords = new ArrayList<>();
    ArrayList<ArrayList<OcrChar>> matrix = new ArrayList<>();

    Mat ImageMatrix = Imgcodecs.imread(imagePath);

    Scanner inFile1 = new Scanner(new File(lookUpList));
    while (inFile1.hasNext()) {
        lookupWords.add(inFile1.nextLine().toUpperCase());
    }
    System.out.println(lookupWords);

    Tesseract1 instance = new Tesseract1(); //
    try {

        instance.setHocr(true);
        instance.setTessVariable("tessedit_char_blacklist", "|0123456789"); // blaklistas
        //          instance.setTessVariable("tessedit_create_boxfile", "true");
        //          instance.setTessVariable("save_blob_choices", "T"); // kazka padeda char atpazinimui

        String result = instance.doOCR(imageFile);
        //            System.out.println(result);
        matrix = ParseHOCR.parse(result);
        System.out.println(matrix);
        foundWords = FindWord.findWords(matrix, lookupWords);

        try (
                //            instance.setHocr(false);
                //            result = instance.doOCR(imageFile1);
                //            System.out.println(result);
                //            -----------
                //            writing info to html file
                PrintWriter writer = new PrintWriter("the-file-name.html", "UTF-8")) {
            writer.println(result);
        }
    } catch (TesseractException e) {
        System.err.println(e.getMessage());
    }

    /*-----
    cross out the found letters on the image
     */
    for (OcrChar[] points : foundWords) {
        //           System.out.println(points[0].getCenterX()+" "+points[0].getCenterY()+" "+points[1].getCenterX()+" "+points[1].getCenterY());
        Imgproc.line(ImageMatrix, new Point(points[0].getCenterX(), points[0].getCenterY()),
                new Point(points[1].getCenterX(), points[1].getCenterY()), new Scalar(0, 0, 0), 1);
    }

    //        for (OcrChar[] points : foundWords) {
    ////           System.out.println(points[0].getCenterX()+" "+points[0].getCenterY()+" "+points[1].getCenterX()+" "+points[1].getCenterY());
    //            Imgproc.line(ImageMatrix, new Point(points[0].getCenterX(), points[0].getCenterY()),
    //                    new Point(points[1].getCenterX(), points[1].getCenterY()),
    //                    new Scalar(0, 0, 0), 3);
    //        }
    //        ////-------
    //        // DEBUG 
    //        //put recognised chars' POSITION on the image (to check recognised char matches)
    //        for (ArrayList<OcrChar> line_array : matrix) {
    //            for (OcrChar Char : line_array) {
    //                Imgproc.circle(ImageMatrix, new Point(Char.getCenterX(), Char.getCenterY()),
    //                        1, new Scalar(0, 0, 255), 4);
    //            }
    //        }
    //        ////-------
    //        // DEBUG 
    //        // put recognised chars back on the image (to check recognised char matches)
    //        for (ArrayList<OcrChar> line_array : matrix){
    //            for (OcrChar Char : line_array){
    //                Imgproc.putText(ImageMatrix, Char.toString(), 
    //                        new Point(Char.x2, Char.y2),
    //                                1, 2,  new Scalar(0, 0, 255));
    //            }
    //        }
    Imgcodecs.imwrite("resource\\output.jpeg", ImageMatrix);
}

From source file:app.GUI2.java

/**
 * this function preprocesses the image to draw helper figures on top of it and
 * then calls updateView()
 */
private void updateDrawings(Mat unprocesedImage) {
    try {
        image = unprocesedImage.clone();
        // draw Points as circles
        if (pointA1 != null) {
            Imgproc.circle(image, pointA1, 10, new Scalar(0, 0, 128), 2);
        }
        if (pointA2 != null) {
            Imgproc.circle(image, pointA2, 10, new Scalar(0, 0, 255), 2);
        }
        if (pointB1 != null) {
            Imgproc.circle(image, pointB1, 10, new Scalar(128, 0, 0), 2);
        }
        if (pointB2 != null) {
            Imgproc.circle(image, pointB2, 10, new Scalar(255, 0, 0), 2);
        }
        // draw rectangles of selected circles
        if (pointA1 != null && pointA2 != null) {
            Imgproc.rectangle(image, pointA1, pointA2, new Scalar(0, 0, 255), 2);
        }
        if (pointB1 != null && pointB2 != null) {
            Imgproc.rectangle(image, pointB1, pointB2, new Scalar(255, 0, 0), 2);
        }

        updateView(image);
    } catch (Exception e) {
        System.err.println("Tryied to Draw without loading image" + e);
    }

}