Example usage for org.opencv.core MatOfFloat MatOfFloat

List of usage examples for org.opencv.core MatOfFloat MatOfFloat

Introduction

On this page you can find the example usage for org.opencv.core MatOfFloat MatOfFloat.

Prototype

public MatOfFloat() 

Source Link

Usage

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

/**
 * Analyze video frames using computer vision approach and generate a ArrayList<AttitudeRec>
 *
 * @param recs  output ArrayList of AttitudeRec
 * @return total number of frame of the video
 */// ww  w.j  a  v  a 2 s  .  c  om
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;
    boolean use_timestamp = true;

    // roughly determine if decimation is necessary
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(new File(mPath, "video.mp4"), decimation);

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    Size patternSize = new Size(4, 11);

    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();
    long[] ts = new long[1];

    while ((frame = videoDecoder.getFrame(ts)) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        if (use_timestamp && ts[0] == -1) {
            use_timestamp = false;
        }

        // has to be in front, as there are cases where execution
        // will skip the later part of this while
        i++;

        // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern = Calib3d.findCirclesGrid(gray, patternSize, centers,
                Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters using real ground truth 3D points and the pixel
        // position of blobs found in findCircleGrid, an estimated camera matrix and
        // no-distortion are assumed.
        boolean foundSolution = Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject points to for evaluation of result accuracy of solvePnP
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // if error is reasonable, add it into the results. use ratio to frame height to avoid
        // discriminating higher definition videos
        if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
            double[] rv = new double[3];
            double timestamp;

            rvec.get(0, 0, rv);
            if (use_timestamp) {
                timestamp = (double) ts[0] / 1e6;
            } else {
                timestamp = (double) i / meta.fps;
            }
            if (LOCAL_LOGV)
                Log.v(TAG, String.format("Added frame %d  ts = %f", i, timestamp));
            recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath() + "/RVCVRecData/DebugCV/img"
                    + i + ".png", frame);
        }
    }

    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}

From source file:com.trandi.opentld.tld.LKTracker.java

License:Apache License

/**
 * @return Pair of new, FILTERED, last and current POINTS, or null if it hasn't managed to track anything.
 *///from ww  w  .j a  v a  2  s . c  o m
Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints) {
    final int size = lastPoints.length;
    final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
    final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
    final MatOfByte statusMat = new MatOfByte();
    final MatOfFloat errSimilarityMat = new MatOfFloat();
    final MatOfByte statusFBMat = new MatOfByte();
    final MatOfFloat errSimilarityFBMat = new MatOfFloat();

    //Forward-Backward tracking
    Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat, statusMat,
            errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
    Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat, statusFBMat,
            errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);

    final byte[] status = statusMat.toArray();
    float[] errSimilarity = new float[lastPoints.length];
    //final byte[] statusFB = statusFBMat.toArray();
    final float[] errSimilarityFB = errSimilarityFBMat.toArray();

    // compute the real FB error (relative to LAST points not the current ones...
    final Point[] pointsFB = pointsFBMat.toArray();
    for (int i = 0; i < size; i++) {
        errSimilarityFB[i] = Util.norm(pointsFB[i], lastPoints[i]);
    }

    final Point[] currPoints = currentPointsMat.toArray();
    // compute real similarity error
    errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);

    //TODO  errSimilarityFB has problem != from C++
    // filter out points with fwd-back error > the median AND points with similarity error > median
    return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}

From source file:se.hb.jcp.bindings.opencv.DenseDoubleMatrix1D.java

License:Open Source License

/**
 * Constructs a matrix with a given number of columns.
 * All entries are initially <tt>0</tt>.
 * @param columns the number of columns the matrix shall have.
 * @throws IllegalArgumentException if//w  ww .ja v a  2 s .c  o  m
       <tt>columns<0 || columns > Integer.MAX_VALUE</tt>.
*/
public DenseDoubleMatrix1D(int columns) {
    _mat = new MatOfFloat();
    Mat.zeros(1, columns, CvType.CV_32F).assignTo(_mat);
    setUp(columns);
}

From source file:se.hb.jcp.bindings.opencv.DenseDoubleMatrix2D.java

License:Open Source License

/**
 * Constructs a matrix with a given number of rows and columns.
 * All entries are initially <tt>0</tt>.
 * @param rows the number of rows the matrix shall have.
 * @param columns the number of columns the matrix shall have.
 * @throws IllegalArgumentException if//from  ww  w .ja va  2  s .c  o m
       <tt>rows<0 || columns<0 || (double)columns*rows >
       Integer.MAX_VALUE</tt>.
*/
public DenseDoubleMatrix2D(int rows, int columns) {
    _mat = new MatOfFloat();
    Mat.zeros(rows, columns, CvType.CV_32F).assignTo(_mat);
    setUp(rows, columns);
}

From source file:se.hb.jcp.bindings.opencv.DenseDoubleMatrix2D.java

License:Open Source License

/**
   Constructs and returns a new <i>slice view</i> representing the
   columns of the given row.// w  w w  .jav a 2  s . c om
   The returned view is backed by this matrix, so changes in the
   returned view are reflected in this matrix, and vice-versa.
   To obtain a slice view on subranges, construct a sub-ranging
   view (<tt>viewPart( ...)</tt>), then apply this method to the
   sub-range view.
        
   <p> <b>Example:</b> <table border="0"> <tr nowrap>
   <td valign="top">2 x 3 matrix: <br> 1, 2, 3<br> 4, 5, 6</td>
   <td>viewRow(0) ==&gt;</td>
   <td valign="top">Matrix1D of size 3:<br> 1, 2, 3</td> </tr> </table>
        
   @param row the row to fix.
   @return a new slice view.
   @throws IndexOutOfBoundsException if <tt>row < 0 || row >= rows()</tt>.
   @see #viewColumn(int)
*/
public DoubleMatrix1D viewRow(int row) {
    checkRow(row);
    MatOfFloat rowMat = new MatOfFloat();
    _mat.row(row).assignTo(rowMat);
    return new DenseDoubleMatrix1D(columns, rowMat);
}

From source file:syncleus.dann.data.video.LKTracker.java

License:Apache License

/**
 * @return Pair of new, FILTERED, last and current POINTS, or null if it hasn't managed to track anything.
 *///from w w  w.j a  v a 2s.c o m
public Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints) {
    final int size = lastPoints.length;
    final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
    final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
    final MatOfByte statusMat = new MatOfByte();
    final MatOfFloat errSimilarityMat = new MatOfFloat();
    final MatOfByte statusFBMat = new MatOfByte();
    final MatOfFloat errSimilarityFBMat = new MatOfFloat();

    //Forward-Backward tracking
    Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat, statusMat,
            errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
    Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat, statusFBMat,
            errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);

    final byte[] status = statusMat.toArray();
    float[] errSimilarity = new float[lastPoints.length];
    //final byte[] statusFB = statusFBMat.toArray();
    final float[] errSimilarityFB = errSimilarityFBMat.toArray();

    // compute the real FB error (relative to LAST points not the current ones...
    final Point[] pointsFB = pointsFBMat.toArray();
    for (int i = 0; i < size; i++) {
        errSimilarityFB[i] = TLDUtil.norm(pointsFB[i], lastPoints[i]);
    }

    final Point[] currPoints = currentPointsMat.toArray();
    // compute real similarity error
    errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);

    //TODO  errSimilarityFB has problem != from C++
    // filter out points with fwd-back error > the median AND points with similarity error > median
    return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}