Example usage for org.opencv.core MatOfDouble MatOfDouble

List of usage examples for org.opencv.core MatOfDouble MatOfDouble

Introduction

On this page you can find example usage for org.opencv.core MatOfDouble MatOfDouble.

Prototype

public MatOfDouble() 

Source Link

Usage

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

/**
 * Analyze video frames using a computer vision approach: detect the asymmetric
 * circle-grid calibration pattern in each frame, solve a PnP pose for it, and
 * append an AttitudeRec for every frame whose re-projection error is acceptable.
 *
 * @param recs  output ArrayList of AttitudeRec; entries are appended in frame order
 * @return index of the last decoded frame, or -1 if the decoded frame size does
 *         not match the recorded metadata.
 *         NOTE(review): the counter starts at -1 and increments once per frame,
 *         so the return value is frame count - 1, not "total number of frames"
 *         as previously documented — confirm which the callers expect.
 */
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    // Capture-time metadata: fps, frame dimensions, field of view.
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;
    boolean use_timestamp = true;

    // roughly determine if decimation is necessary to hit the target analysis fps
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(new File(mPath, "video.mp4"), decimation);

    Mat frame;
    Mat gray = new Mat();
    // Frame index; starts at -1 so the first decoded frame becomes index 0.
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    // Sanity check: decoded size must agree with the recorded metadata.
    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    // Dimensions of the asymmetric circle grid being detected (4 x 11 blobs).
    Size patternSize = new Size(4, 11);

    // Estimated focal length in pixels derived from the horizontal field of view.
    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy: empty distortion coefficients

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();
    long[] ts = new long[1];

    while ((frame = videoDecoder.getFrame(ts)) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        // Fall back to index-based timing if the decoder reports no timestamp.
        if (use_timestamp && ts[0] == -1) {
            use_timestamp = false;
        }

        // has to be in front, as there are cases where execution
        // will skip the later part of this while
        i++;

        // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern = Calib3d.findCirclesGrid(gray, patternSize, centers,
                Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters using real ground truth 3D points and the pixel
        // position of blobs found in findCircleGrid, an estimated camera matrix and
        // no-distortion are assumed.
        boolean foundSolution = Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject points to for evaluation of result accuracy of solvePnP
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // if error is reasonable, add it into the results. use ratio to frame height to avoid
        // discriminating higher definition videos
        if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
            double[] rv = new double[3];
            double timestamp;

            rvec.get(0, 0, rv);
            if (use_timestamp) {
                // ts[0] is in microseconds; convert to seconds — TODO confirm units
                timestamp = (double) ts[0] / 1e6;
            } else {
                timestamp = (double) i / meta.fps;
            }
            if (LOCAL_LOGV)
                Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
            recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath() + "/RVCVRecData/DebugCV/img"
                    + i + ".png", frame);
        }
    }

    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}

From source file:com.shootoff.camera.Camera.java

License:Open Source License

/**
 * Transfer the per-channel color statistics of {@code source} onto
 * {@code target}: each Lab channel of the target is shifted to the source
 * mean and rescaled by the ratio of the standard deviations, then the
 * result is converted back to BGR.
 *
 * @param source image whose color statistics are applied
 * @param target image to be recolored (not modified)
 * @return a new BGR Mat containing the recolored target
 */
public static Mat colorTransfer(Mat source, Mat target) {
    // Work in the Lab color space; statistics are computed per channel.
    Mat labSource = new Mat();
    Mat labTarget = new Mat();
    Imgproc.cvtColor(source, labSource, Imgproc.COLOR_BGR2Lab);
    Imgproc.cvtColor(target, labTarget, Imgproc.COLOR_BGR2Lab);

    ArrayList<Mat> sourceChannels = new ArrayList<Mat>();
    ArrayList<Mat> targetChannels = new ArrayList<Mat>();
    Core.split(labSource, sourceChannels);
    Core.split(labTarget, targetChannels);

    for (int c = 0; c < 3; c++) {
        Mat chan = targetChannels.get(c);

        // Per-channel mean and standard deviation of both images.
        MatOfDouble srcMean = new MatOfDouble(), srcStd = new MatOfDouble();
        MatOfDouble dstMean = new MatOfDouble(), dstStd = new MatOfDouble();
        Core.meanStdDev(sourceChannels.get(c), srcMean, srcStd);
        Core.meanStdDev(chan, dstMean, dstStd);

        // In doubles: chan = (chan - dstMean) * (dstStd / srcStd) + srcMean,
        // then quantize back to 8-bit.
        chan.convertTo(chan, CvType.CV_64FC1);
        Core.subtract(chan, dstMean, chan);
        Core.divide(dstStd, srcStd, dstStd);
        Core.multiply(chan, dstStd, chan);
        Core.add(chan, srcMean, chan);
        chan.convertTo(chan, CvType.CV_8UC1);
    }

    // Reassemble the adjusted channels and return to BGR.
    Core.merge(targetChannels, labTarget);
    Imgproc.cvtColor(labTarget, labTarget, Imgproc.COLOR_Lab2BGR);

    return labTarget;
}

From source file:com.sikulix.api.Image.java

License:Open Source License

/**
 * Inspect the probe content and set the image attributes: the resize factor
 * (clamped to at least 1.0), and the plainColor/blackColor flags derived
 * from the summed per-channel standard deviation and mean.
 */
private void checkProbe() {
    // Down-sampling factor based on the smaller dimension, never below 1.0.
    double byWidth = ((double) content.width()) / resizeMinDownSample;
    double byHeight = ((double) content.height()) / resizeMinDownSample;
    resizeFactor = Math.max(1.0, Math.min(byWidth, byHeight));

    // Channel-wise mean and standard deviation of the content.
    MatOfDouble meanValues = new MatOfDouble();
    MatOfDouble stdDevValues = new MatOfDouble();
    Core.meanStdDev(content, meanValues, stdDevValues);

    final double epsilon = 1.0E-5;

    // Near-zero std-dev across all channels means a uniformly colored image.
    double stdSum = 0.0;
    for (double v : stdDevValues.toArray()) {
        stdSum += v;
    }
    plainColor = stdSum < epsilon;

    // A plain image whose mean is also near zero is effectively all black.
    double meanSum = 0.0;
    for (double v : meanValues.toArray()) {
        meanSum += v;
    }
    if (meanSum < epsilon && plainColor) {
        blackColor = true;
    }
}

From source file:com.sikulix.api.Picture.java

License:Open Source License

/**
 * Derive the picture's cached attributes from its pixel content: the resize
 * factor (clamped to at least 1.0), the plainColor/blackColor flags based on
 * summed per-channel statistics, the integer per-channel mean color, and the
 * whiteColor flag. Does nothing if there is no content.
 */
private void setAttributes() {
    if (!hasContent()) {
        return;
    }
    plainColor = false;
    blackColor = false;

    // Down-sampling factor based on the smaller dimension, never below 1.0.
    double byWidth = ((double) getContent().width()) / resizeMinDownSample;
    double byHeight = ((double) getContent().height()) / resizeMinDownSample;
    resizeFactor = Math.max(1.0, Math.min(byWidth, byHeight));

    // Channel-wise mean and standard deviation of the content.
    MatOfDouble meanValues = new MatOfDouble();
    MatOfDouble stdDevValues = new MatOfDouble();
    Core.meanStdDev(getContent(), meanValues, stdDevValues);

    // Near-zero total std-dev means a uniformly colored image.
    double stdSum = 0.0;
    for (double v : stdDevValues.toArray()) {
        stdSum += v;
    }
    if (stdSum < minThreshhold) {
        plainColor = true;
    }

    // Record the integer mean per channel while summing; a plain image
    // whose mean is also near zero is all black.
    double[] means = meanValues.toArray();
    meanColor = new int[means.length];
    double meanSum = 0.0;
    for (int c = 0; c < means.length; c++) {
        meanColor[c] = (int) means[c];
        meanSum += means[c];
    }
    if (meanSum < minThreshhold && plainColor) {
        blackColor = true;
    }

    whiteColor = isMeanColorEqual(Color.WHITE);
}

From source file:com.trandi.opentld.tld.PatchGenerator.java

License:Apache License

/**
 * Generate a warped patch from {@code image} around point {@code pt} by
 * building a random transform centered on the patch and delegating to the
 * transform-based overload of generate().
 */
void generate(final Mat image, Point pt, Mat patch, Size patchSize, final RNG rng) {
    final Mat transform = new MatOfDouble();
    final Point patchCenter = new Point((patchSize.width - 1) * 0.5, (patchSize.height - 1) * 0.5);

    // TODO why is inverse not specified in the original C++ code
    generateRandomTransform(pt, patchCenter, transform, false);

    generate(image, transform, patch, patchSize, rng);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * Initialize tracking/learning from the first frame and the user-supplied
 * bounding box: build the scanning grid, train the fern classifier on
 * positive (warped) data, estimate the variance threshold, generate and
 * split negative data into training/testing sets, then train both
 * classifiers and evaluate their thresholds on the held-out test sets.
 *
 * @param frame1     first frame of the sequence
 * @param trackedBox initial bounding box of the tracked object
 * @throws IllegalArgumentException if the box's smaller side is below _params.min_win
 */
public void init(Mat frame1, Rect trackedBox) {
    // get Bounding boxes
    if (Math.min(trackedBox.width, trackedBox.height) < _params.min_win) {
        throw new IllegalArgumentException(
                "Provided trackedBox: " + trackedBox + " is too small (min " + _params.min_win + ")");
    }
    _grid = new Grid(frame1, trackedBox, _params.min_win);
    Log.i(Util.TAG, "Init Created " + _grid.getSize() + " bounding boxes.");
    _grid.updateGoodBadBoxes(trackedBox, _params.num_closest_init);

    // Allocate integral-image buffers sized to the frame (sum and squared sum).
    _iiRows = frame1.rows();
    _iiCols = frame1.cols();
    _iisum.create(_iiRows, _iiCols, CvType.CV_32F);
    _iisqsum.create(_iiRows, _iiCols, CvType.CV_64F);

    // correct bounding box
    _lastbox = _grid.getBestBox();

    _classifierFern.init(_grid.getTrackedBoxScales(), _rng);

    // generate DATA
    // generate POSITIVE DATA
    generatePositiveData(frame1, _params.num_warps_init, _grid);

    // Set variance threshold (mean output is discarded; only stddev is needed)
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(frame1.submat(_grid.getBestBox()), new MatOfDouble(), stddev);
    updateIntegralImgs(frame1);
    // this is directly half of the variance of the initial box, which will be used in the 1st stage of the classifier
    _var = (float) Math.pow(stddev.toArray()[0], 2d) * 0.5f;
    // check variance: recompute from the integral images as a cross-check
    final double checkVar = Util.getVar(_grid.getBestBox(), _iisumJava, _iisqsumJava, _iiCols) * 0.5;
    Log.i(Util.TAG, "Variance: " + _var + " / Check variance: " + checkVar);

    // generate NEGATIVE DATA
    final Pair<List<Pair<int[], Boolean>>, List<Mat>> negData = generateNegativeData(frame1);

    // Split Negative Ferns <features, labels=false> into Training and Testing sets (they are already shuffled)
    final int nFernsSize = negData.first.size();
    final List<Pair<int[], Boolean>> nFernsTest = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(0, nFernsSize / 2));
    final List<Pair<int[], Boolean>> nFerns = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(nFernsSize / 2, nFernsSize));

    // Split Negative NN Examples into Training and Testing sets
    final int nExSize = negData.second.size();
    final List<Mat> nExamplesTest = new ArrayList<Mat>(negData.second.subList(0, nExSize / 2));
    _nExamples = new ArrayList<Mat>(negData.second.subList(nExSize / 2, nExSize));

    //MERGE Negative Data with Positive Data and shuffle it
    final List<Pair<int[], Boolean>> fernsData = new ArrayList<Pair<int[], Boolean>>(_pFerns);
    fernsData.addAll(nFerns);
    Collections.shuffle(fernsData);

    // TRAINING
    Log.i(Util.TAG, "Init Start Training with " + fernsData.size() + " ferns, " + _nExamples.size()
            + " nExamples, " + nFernsTest.size() + " nFernsTest, " + nExamplesTest.size() + " nExamplesTest");
    _classifierFern.trainF(fernsData, 10);
    _classifierNN.trainNN(_pExample, _nExamples);
    // Threshold evaluation on testing sets
    _classifierFern.evaluateThreshold(nFernsTest);
    _classifierNN.evaluateThreshold(nExamplesTest);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * Resize {@code inImg} to a square {@code patternSize} x {@code patternSize}
 * pattern and subtract its mean so the output has zero mean.
 *
 * @param inImg       input image (unchanged)
 * @param outPattern  receives the resized, zero-mean CV_32F pattern
 * @param patternSize side length of the square output
 * @return standard deviation of the resized pattern, or -1 when either Mat is null
 */
private static double resizeZeroMeanStdev(final Mat inImg, Mat outPattern, int patternSize) {
    if (inImg == null || outPattern == null) {
        return -1;
    }

    Imgproc.resize(inImg, outPattern, new Size(patternSize, patternSize));

    final MatOfDouble patternMean = new MatOfDouble();
    final MatOfDouble patternStdev = new MatOfDouble();
    Core.meanStdDev(outPattern, patternMean, patternStdev);

    // Work in floating point, then shift by the mean so the pattern is zero-mean.
    outPattern.convertTo(outPattern, CvType.CV_32F);
    Core.subtract(outPattern, new Scalar(patternMean.toArray()[0]), outPattern);

    return patternStdev.toArray()[0];
}

From source file:org.ar.rubik.CubeReconstructor.java

License:Open Source License

/**
 * Pose Estimation
 *
 * Deduce the cube's real-world translation and rotation from the observed
 * Rubik face via OpenCV solvePnP, then convert the result into OpenGL
 * conventions (stored in the fields x, y, z and rotationMatrix).
 * Returns early without effect if the face is missing, unsolved, lacks an
 * LMS result, or has too few rhombi.
 *
 * @param rubikFace  recognized face providing tile/rhombus screen positions
 * @param image      camera frame (only referenced by commented-out diagnostics)
 * @param stateModel supplies the calibrated OpenCV camera matrix
 */
public void poseEstimation(RubikFace rubikFace, Mat image, StateModel stateModel) {

    if (rubikFace == null)
        return;

    if (rubikFace.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        return;

    LeastMeansSquare lmsResult = rubikFace.lmsResult;

    if (lmsResult == null)
        return;

    // OpenCV Pose Estimate requires at least four points.
    // NOTE(review): this condition actually requires MORE than four rhombi
    // (size >= 5); confirm whether "<= 4" or "< 4" was intended.
    if (rubikFace.rhombusList.size() <= 4)
        return;

    // List of real world point and screen points that correspond.
    List<Point3> objectPointsList = new ArrayList<Point3>(9);
    List<Point> imagePointsList = new ArrayList<Point>(9);

    // Create list of image (in 2D) and object (in 3D) points.
    // Loop over Rubik Face Tiles/Rhombi
    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {

            Rhombus rhombus = rubikFace.faceRhombusArray[n][m];

            // Only use if Rhombus was non null.
            if (rhombus != null) {

                // Obtain center of Rhombus in screen image coordinates
                // Convention:
                //  o X is zero on the left, and increases to the right.
                //  o Y is zero on the top and increases downward.
                Point imagePoint = new Point(rhombus.center.x, rhombus.center.y);
                imagePointsList.add(imagePoint);

                // N and M are actual not conceptual (as in design doc).
                int mm = 2 - n;
                int nn = 2 - m;
                // above now matches design doc.
                // that is:
                //  o the nn vector is to the right and upwards.
                //  o the mm vector is to the left and upwards.

                // Calculate center of Tile in OpenCV World Space Coordinates
                // Convention:
                //  o X is zero in the center, and increases to the left.
                //  o Y is zero in the center and increases downward.
                //  o Z is zero (at the world coordinate origin) and increase away for the camera.
                float x = (1 - mm) * 0.66666f;
                float y = -1.0f;
                float z = -1.0f * (1 - nn) * 0.666666f;
                Point3 objectPoint = new Point3(x, y, z);
                objectPointsList.add(objectPoint);
            }
        }
    }

    // Cast image point list into OpenCV Matrix.
    MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(imagePointsList);

    // Cast object point list into OpenCV Matrix.
    MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(objectPointsList);

    Mat cameraMatrix = stateModel.cameraParameters.getOpenCVCameraMatrix();
    // Empty distortion coefficients: lens distortion is assumed negligible here.
    MatOfDouble distCoeffs = new MatOfDouble();
    Mat rvec = new Mat();
    Mat tvec = new Mat();

    //      Log.e(Constants.TAG, "Image Points: " + imagePoints.dump());
    //      Log.e(Constants.TAG, "Object Points: " + objectPoints.dump());

    //      boolean result = 
    Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);

    Log.v(Constants.TAG, String.format("Open CV Rotation Vector x=%4.2f y=%4.2f z=%4.2f", rvec.get(0, 0)[0],
            rvec.get(1, 0)[0], rvec.get(2, 0)[0]));

    // Convert from OpenCV to OpenGL World Coordinates.
    // Note: x, y, z here are fields of this class, not locals.
    x = +1.0f * (float) tvec.get(0, 0)[0];
    y = -1.0f * (float) tvec.get(1, 0)[0];
    z = -1.0f * (float) tvec.get(2, 0)[0];

    // =+= Add manual offset correction to translation  
    x += MenuAndParams.xTranslationOffsetParam.value;
    y += MenuAndParams.yTranslationOffsetParam.value;
    z += MenuAndParams.zTranslationOffsetParam.value;

    // Convert Rotation Vector from OpenCL polarity axes definition to OpenGL definition
    rvec.put(1, 0, -1.0f * rvec.get(1, 0)[0]);
    rvec.put(2, 0, -1.0f * rvec.get(2, 0)[0]);

    // =+= Add manual offset correction to Rotation
    rvec.put(0, 0, rvec.get(0, 0)[0] + MenuAndParams.xRotationOffsetParam.value * Math.PI / 180.0); // X rotation
    rvec.put(1, 0, rvec.get(1, 0)[0] + MenuAndParams.yRotationOffsetParam.value * Math.PI / 180.0); // Y rotation
    rvec.put(2, 0, rvec.get(2, 0)[0] + MenuAndParams.zRotationOffsetParam.value * Math.PI / 180.0); // Z rotation

    // Create an OpenCV Rotation Matrix from a Rotation Vector
    // NOTE(review): CV_32FC2 (two-channel float) looks unusual for a rotation
    // matrix; Rodrigues presumably reallocates the output — confirm.
    Mat rMatrix = new Mat(4, 4, CvType.CV_32FC2);
    Calib3d.Rodrigues(rvec, rMatrix);
    Log.v(Constants.TAG, "Rodrigues Matrix: " + rMatrix.dump());

    /*
     * Create an OpenGL Rotation Matrix
     * Notes:
     *   o  OpenGL is in column-row order (correct?).
     *   o  OpenCV Rodrigues Rotation Matrix is 3x3 where OpenGL Rotation Matrix is 4x4.
     */

    // Initialize all Rotational Matrix elements to zero.
    for (int i = 0; i < 16; i++)
        rotationMatrix[i] = 0.0f; // Initialize to zero

    // Initialize element [3,3] to 1.0: i.e., "w" component in homogenous coordinates
    rotationMatrix[3 * 4 + 3] = 1.0f;

    // Copy OpenCV matrix to OpenGL matrix element by element.
    for (int r = 0; r < 3; r++)
        for (int c = 0; c < 3; c++)
            rotationMatrix[r + c * 4] = (float) (rMatrix.get(r, c)[0]);

    // Diagnostics
    for (int r = 0; r < 4; r++)
        Log.v(Constants.TAG, String.format("Rotation Matrix  r=%d  [%5.2f  %5.2f  %5.2f  %5.2f]", r,
                rotationMatrix[r + 0], rotationMatrix[r + 4], rotationMatrix[r + 8], rotationMatrix[r + 12]));

    //      Log.e(Constants.TAG, "Result: " + result);
    //      Log.e(Constants.TAG, "Camera: " + cameraMatrix.dump());
    //      Log.e(Constants.TAG, "Rotation: " + rvec.dump());
    //      Log.e(Constants.TAG, "Translation: " + tvec.dump());

    //      // Reporting in OpenGL World Coordinates
    //      Core.rectangle(image, new Point(0, 50), new Point(1270, 150), Constants.ColorBlack, -1);
    //      Core.putText(image, String.format("Translation  x=%4.2f y=%4.2f z=%4.2f", x, y, z), new Point(50, 100), Constants.FontFace, 3, Constants.ColorWhite, 3);
    //      Core.putText(image, String.format("Rotation     x=%4.0f y=%4.0f z=%4.0f", cubeXrotation, cubeYrotation, cubeZrotation), new Point(50, 150), Constants.FontFace, 3, Constants.ColorWhite, 3);
}

From source file:org.sikuli.script.ImageFind.java

License:MIT License

/**
 * Inspect the probe image and set the find attributes: the
 * isPlainColor/isBlack flags derived from the summed per-channel standard
 * deviation and mean, and the resize factor clamped to at least 1.0.
 */
private void checkProbe() {
    // Channel-wise mean and standard deviation of the probe image.
    MatOfDouble meanValues = new MatOfDouble();
    MatOfDouble stdDevValues = new MatOfDouble();
    Core.meanStdDev(probe, meanValues, stdDevValues);

    final double epsilon = 0.00001;

    // Near-zero std-dev on every channel means a uniformly colored probe.
    double stdSum = 0.0;
    for (double v : stdDevValues.toArray()) {
        stdSum += v;
    }
    isPlainColor = stdSum < epsilon;

    // A plain probe whose mean is also near zero is effectively black.
    double meanSum = 0.0;
    for (double v : meanValues.toArray()) {
        meanSum += v;
    }
    if (meanSum < epsilon && isPlainColor) {
        isBlack = true;
    }

    // Down-sampling factor based on the smaller dimension, never below 1.0.
    resizeFactor = Math.max(1.0,
            Math.min(((double) probe.width()) / resizeMinDownSample,
                    ((double) probe.height()) / resizeMinDownSample));
}

From source file:qupath.opencv.tools.WandToolCV.java

License:Open Source License

/**
 * Build a wand-selection outline around image coordinate (x, y): paint the
 * currently viewed region into a small working image, blur it, flood-fill
 * from the center seed using a threshold derived from the local standard
 * deviation, then convert the mask contours back into image-space path
 * coordinates.
 *
 * @param x image-space x coordinate of the cursor
 * @param y image-space y coordinate of the cursor
 * @param useTiles unused — NOTE(review): this parameter is never read here
 * @return the selection outline as a Path2D; empty when the viewer is
 *         unavailable or the cursor moved less than 2 pixels since last call
 */
@Override
protected Shape createShape(double x, double y, boolean useTiles) {

    // Lazily allocate the working image and the flood-fill mask
    // (mask must be 2 pixels larger per OpenCV floodFill requirements).
    if (mat == null)
        mat = new Mat(w, w, CvType.CV_8UC3);
    if (matMask == null)
        matMask = new Mat(w + 2, w + 2, CvType.CV_8U);

    // Skip work if the cursor has barely moved since the last invocation.
    if (pLast != null && pLast.distanceSq(x, y) < 4)
        return new Path2D.Float();

    long startTime = System.currentTimeMillis();

    QuPathViewer viewer = getViewer();
    if (viewer == null)
        return new Path2D.Float();

    double downsample = viewer.getDownsampleFactor();

    DefaultImageRegionStore regionStore = viewer.getImageRegionStore();

    // Paint the image as it is currently being viewed
    Graphics2D g2d = imgTemp.createGraphics();
    g2d.setColor(Color.BLACK);
    g2d.fillRect(0, 0, w, w);
    bounds.setFrame(x - w * downsample * .5, y - w * downsample * .5, w * downsample, w * downsample);
    g2d.scale(1.0 / downsample, 1.0 / downsample);
    g2d.translate(-bounds.getX(), -bounds.getY());
    regionStore.paintRegionCompletely(viewer.getServer(), g2d, bounds, viewer.getZPosition(),
            viewer.getTPosition(), viewer.getDownsampleFactor(), null, viewer.getImageDisplay(), 250);
    g2d.dispose();

    // Put pixels into an OpenCV image
    byte[] buffer = ((DataBufferByte) imgTemp.getRaster().getDataBuffer()).getData();
    mat.put(0, 0, buffer);

    //      Imgproc.cvtColor(mat, mat, Imgproc.COLOR_BGR2Lab);
    //      blurSigma = 4;

    // Kernel size derived from sigma; always odd.
    double blurSigma = Math.max(0.5, getWandSigmaPixels());
    double size = Math.ceil(blurSigma * 2) * 2 + 1;
    blurSize.width = size;
    blurSize.height = size;

    // Smooth a little
    Imgproc.GaussianBlur(mat, mat, blurSize, blurSigma);

    //      Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2Lab);

    // Flood-fill tolerance: a fraction of the local standard deviation per channel.
    MatOfDouble mean = new MatOfDouble();
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(mat, mean, stddev);
    //      logger.trace(stddev.dump());

    double[] stddev2 = stddev.toArray();
    double scale = .4;
    for (int i = 0; i < stddev2.length; i++)
        stddev2[i] = stddev2[i] * scale;
    threshold.set(stddev2);

    mean.release();
    stddev.release();

    // Restrict the fill to a circular region around the seed, then flood-fill
    // into the mask only (mask value 2 via the (2 << 8) flag bits).
    matMask.setTo(zero);
    Imgproc.circle(matMask, seed, w / 2, one);
    Imgproc.floodFill(mat, matMask, seed, one, null, threshold, threshold,
            4 | (2 << 8) | Imgproc.FLOODFILL_MASK_ONLY | Imgproc.FLOODFILL_FIXED_RANGE);
    Core.subtract(matMask, one, matMask);

    // Close small gaps in the mask before tracing contours.
    if (strel == null)
        strel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(matMask, matMask, Imgproc.MORPH_CLOSE, strel);
    ////      Imgproc.morphologyEx(matMask, matMask, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, size));
    //      
    ////      threshold = new Scalar(10, 10, 10);
    //      double[] stddev2 = stddev.toArray();
    //      double scale = .5;
    //      threshold = new Scalar(stddev2[0]*scale, stddev2[1]*scale, stddev2[2]*scale);

    contours.clear();
    if (contourHierarchy == null)
        contourHierarchy = new Mat();
    Imgproc.findContours(matMask, contours, contourHierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);
    //      logger.trace("Contours: " + contours.size());

    // Convert each contour from mask coordinates back into image space.
    Path2D path = new Path2D.Float();
    boolean isOpen = false;
    for (MatOfPoint contour : contours) {

        // Discard single pixels / lines
        if (contour.size().height <= 2)
            continue;

        // Create a polygon ROI
        boolean firstPoint = true;
        for (Point p : contour.toArray()) {
            double xx = (p.x - w / 2 - 1) * downsample + x;
            double yy = (p.y - w / 2 - 1) * downsample + y;
            if (firstPoint) {
                path.moveTo(xx, yy);
                firstPoint = false;
                isOpen = true;
            } else
                path.lineTo(xx, yy);
        }
    }
    if (isOpen)
        path.closePath();

    long endTime = System.currentTimeMillis();
    logger.trace(getClass().getSimpleName() + " time: " + (endTime - startTime));

    // Remember the cursor position for the small-movement early-out above.
    if (pLast == null)
        pLast = new Point2D.Double(x, y);
    else
        pLast.setLocation(x, y);

    return path;

}