Example usage for org.opencv.core Mat submat

Introduction

This page collects usage examples for org.opencv.core Mat.submat, which extracts a rectangular region of interest (ROI) from a Mat as a view on the original image data.

Prototype

public Mat submat(Rect roi) 
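
submat does not copy pixel data: the returned Mat is a new header over the same buffer as the source, restricted to the given Rect, so writes through the submat show up in the parent image (several examples below rely on this, e.g. copyTo into a submat). A minimal sketch of that behavior, with illustrative names:

    Mat image = Mat.zeros(100, 100, CvType.CV_8UC1);  // blank 8-bit grayscale image
    Mat roi = image.submat(new Rect(10, 10, 30, 30)); // view into image, no copy
    roi.setTo(new Scalar(255));                       // paints that 30x30 region of image white
    Mat copy = roi.clone();                           // clone() when an independent copy is needed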

Usage

From source file:LicenseDetection.java

public void run() {

    // ------------------ set up tesseract for later use ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------  Save image first ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ Convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ Blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ****************** Create image that will be cropped at end of program before OCR ***************************

    // ------------------ Binary threshold for OCR (used later) ------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ Erosion operation------------------
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern); // on this binary image (dark text on a white background) MORPH_DILATE grows the white background, which erodes the dark foreground
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    //------------------ Dilation operation  ------------------
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall); // conversely, MORPH_ERODE shrinks the white background, dilating the dark foreground
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);

    // *************************************************************************************************************

    //        // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    //        Mat k = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    //        Mat imgCloseOCR = new Mat();
    //        Imgproc.morphologyEx(imgThresholdOCR,imgCloseOCR,1,k);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgCloseOCR.png", imgCloseOCR);

    // ------------------ Sobel vertical edge detection ------------------
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ Binary threshold ------------------
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    //        // ------------------ Open operation (erosion followed by dilation) ------------------
    //        Mat ker = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 2));
    //        Mat imgOpen = new Mat();
    //        Imgproc.morphologyEx(imgThreshold,imgOpen,0,ker);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgOpen.png", imgOpen);

    // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ Find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();

    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // **************************** DEBUG CODE **************************

    Mat contourImg = Mat.zeros(imgClose.size(), imgClose.type()); // zero-initialized so stale memory doesn't show up in the output
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);

    // ******************************************************************

    // --------------  Convert contours --------------------

    //Convert to MatOfPoint2f so that minAreaRect can be called
    List<MatOfPoint2f> newContours = new ArrayList<>();

    for (MatOfPoint mat : contours) {

        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);

    }

    //Get minAreaRects
    List<RotatedRect> minAreaRects = new ArrayList<>();

    for (MatOfPoint2f mat : newContours) {

        RotatedRect rect = Imgproc.minAreaRect(mat);

        /*
         --------------- BUG WORKAROUND ---------------

        Possible bug:
        When converting from MatOfPoint2f to RotatedRect, the width and height were reversed and the
        angle was -90 degrees from what it would be if the width and height were correct.

        When painting the rectangle in the image, the correct boxes were produced, but performing
        calculations on rect.angle, rect.width, or rect.height yielded unwanted results.

        The following workaround is buggy but works for my purpose.
         */

        if (rect.size.width < rect.size.height) {
            double temp;

            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;

        }

        //check aspect ratio and area and angle
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000 && rect.size.width * rect.size.height < 50000
                && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }

        //minAreaRects.add(rect);
    }

    // **************************** DEBUG CODE **************************
    /*
    The following code is used to draw the rectangles on top of the original image for debugging purposes
     */
    //Draw Rotated Rects
    Point[] vertices = new Point[4];

    Mat imageWithBoxes = img;

    // Draw color rectangles on top of binary contours
    //        Mat imageWithBoxes = new Mat();
    //        Mat temp = imgDilateOCR;
    //        Imgproc.cvtColor(temp, imageWithBoxes, Imgproc.COLOR_GRAY2RGB);

    for (RotatedRect rect : minAreaRects) {

        rect.points(vertices);

        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }

    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);

    // ******************************************************************

    // **************************** DEBUG CODE **************************
    //        for(RotatedRect rect : minAreaRects) {
    //            System.out.println(rect.toString());
    //        }
    // ******************************************************************

    /*
    In order to rotate the image without cropping it:

    1. Create a new square image with dimension = diagonal of the initial image.
    2. Draw the initial image into the center of the new image
       (insert the initial image at an ROI (Region of Interest) in the new image).
    3. Rotate the new image.
     */

    //Find diagonal/hypotenuse
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));

    //New Mat with hypotenuse as height and width
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, CvType.CV_8UC1); // type 0 == CV_8UC1

    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; //x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; //y start of ROI

    //designate region of interest
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());

    //Insert image into region of interest
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); //Mat to hold temporarily rotated mat
    Mat rectMat = new Mat();//Mat to hold rect contents (needed for looping through pixels)
    Point[] rectVertices = new Point[4];//Used to build rect to make ROI
    Rect rec = new Rect();

    List<RotatedRect> edgeDensityRects = new ArrayList<>(); //populate new arraylist with rects that satisfy edge density

    int count = 0;

    //Loop through Rotated Rects and find edge density
    for (RotatedRect rect : minAreaRects) {

        count++;

        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);

        //rotate image to match the orientation of the rotated rect
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);

        //remove rect rotation
        rect.angle = 0;

        //get vertices from rotatedRect
        rect.points(rectVertices);

        // **************************** DEBUG CODE **************************
        //
        //            for (int k = 0; k < 4; k++) {
        //                System.out.println(rectVertices[k]);
        //                Imgproc.line(rotatedTemp, rectVertices[k], rectVertices[(k + 1) % 4], new Scalar(0, 0, 255), 2);
        //            }
        //
        //            Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotated" + count + ".png", rotatedTemp);

        // *****************************************************************

        //build rect to use as ROI
        rec = new Rect(rectVertices[1], rectVertices[3]);

        rectMat = rotatedTemp.submat(rec);

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);

        //find edge density

        //            // ------------------------ edge density check NOT IMPLEMENTED --------------------
        //            /*
        //            Checking for edge density was not necessary for this image so it was not implemented due to lack of time
        //             */
        //            for(int i = 0; i < rectMat.rows(); ++i){
        //                for(int j = 0; j < rectMat.cols(); ++j){
        //
        //                  //add up white pixels
        //                }
        //            }
        //
        //            //check number of white pixels against total pixels
        //            //only add rects to new arraylist that satisfy threshold

        edgeDensityRects.add(rect);
    }

    // **************************** DEBUG CODE **************************

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);
    //Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpaceROTATED.png", rotatedTemp);

    //System.out.println(imgGray.type());

    // *****************************************************************

    // if there is only one rectangle left, it's the license plate
    if (edgeDensityRects.size() == 1) {

        String result = ""; //Hold result from OCR
        BufferedImage bimg;
        Mat cropped;

        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);

        bimg = matToBufferedImage(cropped);

        BufferedImage image = bimg;

        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }

        result = result.replace("\n", "");

        System.out.println(result);

        CarProfDBImpl db = new CarProfDBImpl();

        db.connect("localhost:3306/computer_vision", "root", "*******");

        CarProf c = db.getCarProf(result);

        System.out.print(c.toString());

        db.close();

    }

}

From source file:bikecalibration.fxml.controller.MainWindowController.java

private boolean createAndDrawNode(MouseEvent event) {
    try {
        // create the node
        Mat original_mat = OpenCvUtils.getImageFromVideo(currentVideoFrameNumberProperty.get(), cap);
        double original_width = original_mat.cols();
        double resized_width = imageViewCanvas.getWidth();

        double[] orig_coords = Utils.calculateOriginalCoordinates(original_width, resized_width,
                new double[] { event.getX(), event.getY() });
        double[] orig_width = Utils.calculateOriginalCoordinates(original_width, resized_width,
                new double[] { NODE_WIDTH });
        Rect roi = new Rect((int) orig_coords[0], (int) orig_coords[1], (int) orig_width[0],
                (int) orig_width[0]);
        Mat roi_mat = original_mat.submat(roi);
        Node n = new Node((int) orig_coords[0], (int) orig_coords[1], cpickerNode.getValue().toString(),
                currentVideoFrameNumberProperty.getName(), null);
        n.setRoi(roi_mat);
        n.setId(nodeData.size());
        nodeData.add(n);

        // add the node to the nodes array
        Node[] currentFrameNodes = nodes[currentVideoFrameNumberProperty.get()];
        if (currentFrameNodes == null) {
            currentFrameNodes = new Node[1];
            currentFrameNodes[0] = n;
            nodes[currentVideoFrameNumberProperty.get()] = currentFrameNodes;
        } else {
            Node[] tmp = new Node[currentFrameNodes.length + 1];
            int i = 0;
            for (Node currentFrameNode : currentFrameNodes) {
                tmp[i] = currentFrameNode;
                ++i;
            }
            tmp[i] = n;
            nodes[currentVideoFrameNumberProperty.get()] = tmp;
        }
        // start editing current image
        drawImage(Utils.matToImage(original_mat));
        return true;
    } catch (Exception ex) {
        return false;
    }
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.FaceDetection.java

License:Open Source License

public Eyes getEyes(Mat img) {
    double halfWidth = img.cols() / 2;
    double height = img.rows();
    double[] values = new double[4];
    values[0] = 0;
    values[1] = 0;
    values[2] = halfWidth;
    values[3] = height;
    Rect rightHalf = new Rect(values);
    values[0] = halfWidth;
    Rect leftHalf = new Rect(values);
    MatOfRect rightEyes = new MatOfRect();
    MatOfRect leftEyes = new MatOfRect();

    Mat rightHalfImg = img.submat(rightHalf);
    rightEyeDetector.detectMultiScale(rightHalfImg, rightEyes);
    Mat leftHalfImg = img.submat(leftHalf);
    leftEyeDetector.detectMultiScale(leftHalfImg, leftEyes);

    if (rightEyes.empty() || leftEyes.empty() || rightEyes.toArray().length > 1
            || leftEyes.toArray().length > 1) {
        return null;
    }

    Rect rightEye = rightEyes.toArray()[0];
    Rect leftEye = leftEyes.toArray()[0];

    MatOfFloat rightPoint = new MatOfFloat(rightEye.x + rightEye.width / 2, rightEye.y + rightEye.height / 2);
    MatOfFloat leftPoint = new MatOfFloat(img.cols() / 2 + leftEye.x + leftEye.width / 2,
            leftEye.y + leftEye.height / 2);

    MatOfFloat diff = new MatOfFloat();
    Core.subtract(leftPoint, rightPoint, diff);
    double angle = Core.fastAtan2(diff.toArray()[1], diff.toArray()[0]);
    double dist = Core.norm(leftPoint, rightPoint, Core.NORM_L2);
    Eyes eyes = new Eyes(dist, rightPoint, leftPoint, angle);
    return eyes;
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.StandardPreprocessing.Crop.java

License:Open Source License

public PreProcessor preprocessImage(PreProcessor preProcessor) {
    Mat img = preProcessor.getImages().get(0);
    List<Mat> processed = new ArrayList<Mat>();
    if (preProcessor.getFaces().length == 0) {
        return null;
    } else {
        for (Rect rect : preProcessor.getFaces()) {
            Mat subImg = img.submat(rect);
            processed.add(subImg);
        }
    }
    preProcessor.setImages(processed);
    return preProcessor;
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.StandardPreprocessing.Crop.java

License:Open Source License

public Mat preprocessImage(Mat img, Rect rect) {
    return img.submat(rect);
}
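
Note that each crop returned this way is a view: it shares pixels with the source image, so later writes to the source alter the crop (and vice versa). When an independent copy is needed, clone the submat. A minimal sketch with illustrative names:

    Mat faceView = img.submat(rect);  // shares pixels with img
    Mat faceCopy = faceView.clone();  // deep copy with its own buffer
    img.setTo(new Scalar(0));         // blanks img and faceView; faceCopy is unaffected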

From source file:com.joravasal.keyface.FindFacesView.java

License:Open Source License

private void addFaceToDB(Mat mRGBA, Rect r, int numFaces) {
    //Code to put the face in a bitmap and save it in the phone memory
    Mat aux = mRGBA.submat(r);
    int saveSize = Integer.parseInt(KeyFaceActivity.prefs.getString("savedFaceSize", "200"));
    //Mat aux2 = new Mat(new Size(saveSize, saveSize), aux.type());
    Imgproc.resize(aux, aux, new Size(saveSize, saveSize));
    final Bitmap bm = Bitmap.createBitmap(saveSize, saveSize, Bitmap.Config.ARGB_8888);
    Imgproc.cvtColor(aux, aux, Imgproc.COLOR_GRAY2RGBA, 4);
    Utils.matToBitmap(aux, bm);

    try {
        //Check if our folder exists (where all the photos are)
        File directory = new File(Environment.getExternalStorageDirectory(),
                KeyFaceActivity.globalappcontext.getString(R.string.app_dir));
        if (!directory.exists() && !directory.mkdirs()) {
            throw new IOException("Path to app directory could not be opened or created.");
        }
        //save image
        String lfile = Environment.getExternalStorageDirectory().getPath() + "/"
                + KeyFaceActivity.globalappcontext.getString(R.string.app_dir) + "/Face" + numFaces + ".png";
        OutputStream out = new FileOutputStream(lfile);
        bm.compress(Bitmap.CompressFormat.PNG, 100, out);
        out.flush();
        out.close();
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
}

From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java

License:Apache License

/**
 * Matches a concrete point of the eye by template matching with TM_SQDIFF_NORMED
 */
private static void matchEye(Rect area, Mat builtTemplate, Mat matrixGray, Mat matrixRGBA) {
    Point matchLoc;
    try {
        // when there is no builtTemplate, skip it
        if (builtTemplate.cols() == 0 || builtTemplate.rows() == 0) {
            return;
        }
        Mat submatGray = matrixGray.submat(area);
        int cols = submatGray.cols() - builtTemplate.cols() + 1;
        int rows = submatGray.rows() - builtTemplate.rows() + 1;
        Mat outputTemplateMat = new Mat(rows, cols, CvType.CV_32F); // matchTemplate result is a (rows x cols) single-channel float map

        Imgproc.matchTemplate(submatGray, builtTemplate, outputTemplateMat, Imgproc.TM_SQDIFF_NORMED);
        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(outputTemplateMat);
        // for TM_SQDIFF_NORMED the best match is the minimum value (other methods take the maximum)
        matchLoc = minMaxLocResult.minLoc;
        Point matchLocTx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
        Point matchLocTy = new Point(matchLoc.x + builtTemplate.cols() + area.x,
                matchLoc.y + builtTemplate.rows() + area.y);

        FaceDrawerOpenCV.drawMatchedEye(matchLocTx, matchLocTy, matrixRGBA);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java

License:Apache License

/**
 * <p>Builds a template from a specific eye area previously extracted:
 * uses detectMultiScale on this area, then uses minMaxLoc to
 * detect the iris within the detected eye</p>
 *
 * @param area preformatted area
 * @param size minimum iris size
 * @param grayMat image in gray
 * @param rgbaMat image in color
 * @param detectorEye Haar Cascade classifier
 * @return built template
 */
@NonNull
private static Mat buildTemplate(Rect area, final int size, @NonNull Mat grayMat, @NonNull Mat rgbaMat,
        CascadeClassifier detectorEye) {
    Mat template = new Mat();
    Mat graySubMatEye = grayMat.submat(area);
    MatOfRect eyes = new MatOfRect();
    Rect eyeTemplate;
    detectorEye.detectMultiScale(graySubMatEye, eyes, 1.15, 2,
            Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
            new Size(EYE_MIN_SIZE, EYE_MIN_SIZE), new Size());

    Rect[] eyesArray = eyes.toArray();
    if (eyesArray.length > 0) {
        Rect e = eyesArray[0];
        e.x = area.x + e.x;
        e.y = area.y + e.y;
        Rect eyeRectangle = getEyeArea((int) e.tl().x, (int) (e.tl().y + e.height * 0.4), e.width,
                (int) (e.height * 0.6));
        graySubMatEye = grayMat.submat(eyeRectangle);
        Mat rgbaMatEye = rgbaMat.submat(eyeRectangle);

        Core.MinMaxLocResult minMaxLoc = Core.minMaxLoc(graySubMatEye);

        FaceDrawerOpenCV.drawIrisCircle(rgbaMatEye, minMaxLoc);
        Point iris = new Point();
        iris.x = minMaxLoc.minLoc.x + eyeRectangle.x;
        iris.y = minMaxLoc.minLoc.y + eyeRectangle.y;
        eyeTemplate = getEyeArea((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
        FaceDrawerOpenCV.drawEyeRectangle(eyeTemplate, rgbaMat);
        template = (grayMat.submat(eyeTemplate)).clone();
    }
    return template;
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

public void init(Mat frame1, Rect trackedBox) {
    // get Bounding boxes
    if (Math.min(trackedBox.width, trackedBox.height) < _params.min_win) {
        throw new IllegalArgumentException(
                "Provided trackedBox: " + trackedBox + " is too small (min " + _params.min_win + ")");
    }
    _grid = new Grid(frame1, trackedBox, _params.min_win);
    Log.i(Util.TAG, "Init Created " + _grid.getSize() + " bounding boxes.");
    _grid.updateGoodBadBoxes(trackedBox, _params.num_closest_init);

    _iiRows = frame1.rows();
    _iiCols = frame1.cols();
    _iisum.create(_iiRows, _iiCols, CvType.CV_32F);
    _iisqsum.create(_iiRows, _iiCols, CvType.CV_64F);

    // correct bounding box
    _lastbox = _grid.getBestBox();

    _classifierFern.init(_grid.getTrackedBoxScales(), _rng);

    // generate DATA
    // generate POSITIVE DATA
    generatePositiveData(frame1, _params.num_warps_init, _grid);

    // Set variance threshold
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(frame1.submat(_grid.getBestBox()), new MatOfDouble(), stddev);
    updateIntegralImgs(frame1);
    // this is directly half of the variance of the initial box, which will be used in the 1st stage of the classifier
    _var = (float) Math.pow(stddev.toArray()[0], 2d) * 0.5f;
    // check variance
    final double checkVar = Util.getVar(_grid.getBestBox(), _iisumJava, _iisqsumJava, _iiCols) * 0.5;
    Log.i(Util.TAG, "Variance: " + _var + " / Check variance: " + checkVar);

    // generate NEGATIVE DATA
    final Pair<List<Pair<int[], Boolean>>, List<Mat>> negData = generateNegativeData(frame1);

    // Split Negative Ferns <features, labels=false> into Training and Testing sets (they are already shuffled)
    final int nFernsSize = negData.first.size();
    final List<Pair<int[], Boolean>> nFernsTest = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(0, nFernsSize / 2));
    final List<Pair<int[], Boolean>> nFerns = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(nFernsSize / 2, nFernsSize));

    // Split Negative NN Examples into Training and Testing sets
    final int nExSize = negData.second.size();
    final List<Mat> nExamplesTest = new ArrayList<Mat>(negData.second.subList(0, nExSize / 2));
    _nExamples = new ArrayList<Mat>(negData.second.subList(nExSize / 2, nExSize));

    //MERGE Negative Data with Positive Data and shuffle it
    final List<Pair<int[], Boolean>> fernsData = new ArrayList<Pair<int[], Boolean>>(_pFerns);
    fernsData.addAll(nFerns);
    Collections.shuffle(fernsData);

    // TRAINING
    Log.i(Util.TAG, "Init Start Training with " + fernsData.size() + " ferns, " + _nExamples.size()
            + " nExamples, " + nFernsTest.size() + " nFernsTest, " + nExamplesTest.size() + " nExamplesTest");
    _classifierFern.trainF(fernsData, 10);
    _classifierNN.trainNN(_pExample, _nExamples);
    // Threshold evaluation on testing sets
    _classifierFern.evaluateThreshold(nFernsTest);
    _classifierNN.evaluateThreshold(nExamplesTest);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

private TrackingStruct track(final Mat lastImg, final Mat currentImg, final BoundingBox lastBox) {
    Log.i(Util.TAG, "[TRACK]");

    // Generate points
    final Point[] lastPoints = lastBox.points();
    if (lastPoints.length == 0) {
        Log.e(Util.TAG, "Points not generated from lastBox: " + lastBox);
        return null;
    }

    // Frame-to-frame tracking with forward-backward error checking
    final Pair<Point[], Point[]> trackedPoints = _tracker.track(lastImg, currentImg, lastPoints);
    if (trackedPoints == null) {
        Log.e(Util.TAG, "No points could be tracked.");
        return null;
    }
    if (_tracker.getMedianErrFB() > _params.tracker_stability_FBerrMax) {
        Log.w(Util.TAG, "TRACKER too unstable. FB Median error: " + _tracker.getMedianErrFB() + " > "
                + _params.tracker_stability_FBerrMax);
        // return null;  // we hope the detection will find the pattern again
    }

    // bounding box prediction
    final BoundingBox predictedBB = lastBox.predict(trackedPoints.first, trackedPoints.second);
    if (predictedBB.x > currentImg.cols() || predictedBB.y > currentImg.rows() || predictedBB.br().x < 1
            || predictedBB.br().y < 1) {
        Log.e(Util.TAG, "TRACKER Predicted bounding box out of range !");
        return null;
    }

    // estimate Confidence
    Mat pattern = new Mat();
    try {
        resizeZeroMeanStdev(currentImg.submat(predictedBB.intersect(currentImg)), pattern, _params.patch_size);
    } catch (Throwable t) {
        Log.e(Util.TAG, "PredBB when failed: " + predictedBB);
    }
    //Log.i(Util.TAG, "Confidence " + pattern.dump());      

    //Conservative Similarity
    final NNConfStruct nnConf = _classifierNN.nnConf(pattern);
    Log.i(Util.TAG, "Tracking confidence: " + nnConf.conservativeSimilarity);

    Log.i(Util.TAG, "[TRACK END]");
    return new TrackingStruct(nnConf.conservativeSimilarity, predictedBB, trackedPoints.first,
            trackedPoints.second);
}