Example usage for org.opencv.imgproc Imgproc getPerspectiveTransform

List of usage examples for org.opencv.imgproc Imgproc getPerspectiveTransform

Introduction

On this page you can find example usages for org.opencv.imgproc Imgproc getPerspectiveTransform.

Prototype

public static Mat getPerspectiveTransform(Mat src, Mat dst) 

Source Link

Usage

From source file:OCV_GetPerspectiveTransform.java

License:Open Source License

@Override
@Override
public void run(ImageProcessor ip) {
    // Convert the stored source/destination point lists into OpenCV matrices.
    MatOfPoint2f srcPoints = new MatOfPoint2f();
    MatOfPoint2f dstPoints = new MatOfPoint2f();
    srcPoints.fromList(lstPt_src);
    dstPoints.fromList(lstPt_dst);

    // Solve for the 3x3 perspective transform mapping src points onto dst points.
    Mat mat = Imgproc.getPerspectiveTransform(srcPoints, dstPoints);

    if (mat == null || mat.rows() <= 0 || mat.cols() <= 0) {
        IJ.showMessage("Output is null or error");
        return;
    }

    // Dump the 3x3 matrix into an ImageJ results table, one table row per matrix row.
    ResultsTable rt = OCV__LoadLibrary.GetResultsTable(true);

    for (int row = 0; row < 3; row++) {
        rt.incrementCounter();
        rt.addValue("Column01", String.valueOf(mat.get(row, 0)[0]));
        rt.addValue("Column02", String.valueOf(mat.get(row, 1)[0]));
        rt.addValue("Column03", String.valueOf(mat.get(row, 2)[0]));
    }

    rt.show("Results");
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

void setPerspectiveCorrection(List<Point> refpoints) {
    // Scale the normalized reference points up to raw-image pixel
    // coordinates, flipping y (refpoints use a bottom-left origin).
    List<Point> sourcePoints = new ArrayList<>();
    for (Point p : refpoints) {
        sourcePoints.add(new Point(p.x * raw_width, (1 - p.y) * raw_height));
    }
    Mat srcMat = Converters.vector_Point2f_to_Mat(sourcePoints);

    // Corners of the rectified working image, listed in the same order
    // as the corresponding source points above.
    List<Point> destPoints = new ArrayList<>();
    destPoints.add(new Point(0, work_height));
    destPoints.add(new Point(0, 0));
    destPoints.add(new Point(work_width, work_height));
    destPoints.add(new Point(work_width, 0));
    Mat dstMat = Converters.vector_Point2f_to_Mat(destPoints);

    // Cache the homography for later warping.
    perspective_transform = Imgproc.getPerspectiveTransform(srcMat, dstMat);
}

From source file:com.jonwohl.Attention.java

License:Open Source License

private Mat getPerspectiveTransformation(ArrayList<PVector> inputPoints, int w, int h) {
    // Target rectangle corners in order: top-left, top-right,
    // bottom-right, bottom-left.
    Point[] canonicalPoints = {
            new Point(0, 0),
            new Point(w, 0),
            new Point(w, h),
            new Point(0, h)
    };
    MatOfPoint2f canonicalMarker = new MatOfPoint2f();
    canonicalMarker.fromArray(canonicalPoints);

    // Copy the first four input PVectors into OpenCV points (same order).
    Point[] observed = new Point[4];
    for (int i = 0; i < 4; i++) {
        PVector v = inputPoints.get(i);
        observed[i] = new Point(v.x, v.y);
    }
    MatOfPoint2f marker = new MatOfPoint2f(observed);

    // Homography mapping the observed quad onto the canonical rectangle.
    return Imgproc.getPerspectiveTransform(marker, canonicalMarker);
}

From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License:Open Source License

/**
 * Computes the perspective-correction matrix that maps the detected
 * calibration corners onto the axis-aligned bounds rectangle, records an
 * even-sized bounding box for video recording, and marks the warp as
 * initialized. Optionally writes an annotated debug frame to disk at
 * trace level.
 *
 * @param frame         current camera frame (only cloned for trace output)
 * @param sourceCorners four detected corner points, ordered to match the
 *                      destination corner layout below
 */
private void initializeWarpPerspective(final Mat frame, final MatOfPoint2f sourceCorners) {
    final MatOfPoint2f destCorners = new MatOfPoint2f();
    destCorners.alloc(4);

    // Hoisted: boundsRect.boundingRect() constructs a new Rect on every
    // call and was invoked a dozen times in the original.
    final Rect bounds = boundsRect.boundingRect();

    // Destination corner layout:
    // 1st-------2nd
    // | |
    // | |
    // | |
    // 3rd-------4th
    destCorners.put(0, 0, new double[] { bounds.x, bounds.y });
    destCorners.put(1, 0, new double[] { bounds.x + bounds.width, bounds.y });
    destCorners.put(2, 0, new double[] { bounds.x, bounds.y + bounds.height });
    destCorners.put(3, 0, new double[] { bounds.x + bounds.width, bounds.y + bounds.height });

    if (logger.isDebugEnabled()) {
        logger.debug("initializeWarpPerspective {} {} {} {}", sourceCorners.get(0, 0), sourceCorners.get(1, 0),
                sourceCorners.get(2, 0), sourceCorners.get(3, 0));
        logger.debug("initializeWarpPerspective {} {} {} {}", destCorners.get(0, 0), destCorners.get(1, 0),
                destCorners.get(2, 0), destCorners.get(3, 0));
    }

    // 3x3 homography: detected corners -> axis-aligned bounds rectangle.
    perspMat = Imgproc.getPerspectiveTransform(sourceCorners, destCorners);

    int width = bounds.width;
    int height = bounds.height;

    // Make them divisible by two for video recording purposes
    if ((width & 1) == 1)
        width++;
    if ((height & 1) == 1)
        height++;

    boundingBox = new BoundingBox(bounds.x, bounds.y, width, height);

    warpInitialized = true;

    if (logger.isTraceEnabled()) {
        Mat debugFrame = frame.clone();

        // Source corners in magenta, destination corners in blue.
        for (int i = 0; i < 4; i++) {
            Core.circle(debugFrame, new Point(sourceCorners.get(i, 0)[0], sourceCorners.get(i, 0)[1]), 1,
                    new Scalar(255, 0, 255), -1);
            Core.circle(debugFrame, new Point(destCorners.get(i, 0)[0], destCorners.get(i, 0)[1]), 1,
                    new Scalar(255, 0, 0), -1);
        }

        // Bounding box edges in green.
        Core.line(debugFrame, new Point(boundingBox.getMinX(), boundingBox.getMinY()),
                new Point(boundingBox.getMaxX(), boundingBox.getMinY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMinX(), boundingBox.getMinY()),
                new Point(boundingBox.getMinX(), boundingBox.getMaxY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMaxX(), boundingBox.getMaxY()),
                new Point(boundingBox.getMaxX(), boundingBox.getMinY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMaxX(), boundingBox.getMaxY()),
                new Point(boundingBox.getMinX(), boundingBox.getMaxY()), new Scalar(0, 255, 0));

        // Original used String.format(...) with no format arguments plus a
        // pointless new File(...).toString() round-trip; a literal is equivalent.
        Highgui.imwrite("calibrate-transformation.png", debugFrame);
    }
}

From source file:com.superbool.easylpr.model.Transformation.java

/**
 * Returns the 3x3 perspective transform mapping the four corners in
 * {@code cornersMat} onto the corresponding corners in
 * {@code outputCornersMat}.
 */
public Mat getTransformationMatrix(Mat cornersMat, Mat outputCornersMat) {
    // Delegate directly to OpenCV; no intermediate state is needed.
    return Imgproc.getPerspectiveTransform(cornersMat, outputCornersMat);
}

From source file:karthik.Barcode.CandidateMatrixBarcode.java

License:Open Source License

CandidateResult NormalizeCandidateRegion(double angle) {
    /* candidateRegion is the RotatedRect which contains a candidate region for the barcode
     // angle is the rotation angle or USE_ROTATED_RECT_ANGLE for this function to
     // estimate rotation angle from the rect parameter
     // returns Mat containing cropped area(region of interest) with just the barcode
     // The barcode region is from the *original* image, not the scaled image
     // the cropped area is also rotated as necessary to be horizontal or vertical rather than skewed
     // Some parts of this function are from http://felix.abecassis.me/2011/10/opencv-rotation-deskewing/
     // and http://stackoverflow.com/questions/22041699/rotate-an-image-without-cropping-in-opencv-in-c
     //
     // NOTE(review): this method mutates candidateRegion.size and reuses
     // img_details scratch members (scaledCorners, coord, newCornerCoord,
     // newCornerPoints, transformedPoints, rotation_matrix) — it is not
     // safe to call concurrently, and statement order matters throughout.
     */

    double rotation_angle;
    CandidateResult result = new CandidateResult();

    // scale candidate region back up to original size to return cropped part from *original* image 
    // need the 1.0 there to force floating-point arithmetic from int values
    double scale_factor = img_details.src_original.rows() / (1.0 * img_details.src_grayscale.rows());

    // expand the region found - this helps capture the entire code including the border zone
    // (side effect: permanently enlarges candidateRegion for the caller)
    candidateRegion.size.width += 2 * params.RECT_WIDTH;
    candidateRegion.size.height += 2 * params.RECT_HEIGHT;

    // calculate location of rectangle in original image and its corner points
    RotatedRect scaledRegion = new RotatedRect(candidateRegion.center, candidateRegion.size,
            candidateRegion.angle);
    scaledRegion.center.x = scaledRegion.center.x * scale_factor;
    scaledRegion.center.y = scaledRegion.center.y * scale_factor;
    scaledRegion.size.height *= scale_factor;
    scaledRegion.size.width *= scale_factor;

    scaledRegion.points(img_details.scaledCorners);
    // lets get the coordinates of the ROI in the original image and save it

    result.ROI_coords = Arrays.copyOf(img_details.scaledCorners, 4);

    // get the bounding rectangle of the ROI by sorting its corner points
    // we do it manually because RotatedRect can generate corner points outside the Mat area
    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_x_comparator());
    int leftCol = (int) img_details.scaledCorners[0].x;
    int rightCol = (int) img_details.scaledCorners[3].x;
    // clamp columns to the valid image range
    leftCol = (leftCol < 0) ? 0 : leftCol;
    rightCol = (rightCol > img_details.src_original.cols() - 1) ? img_details.src_original.cols() - 1
            : rightCol;

    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_y_comparator());
    int topRow = (int) img_details.scaledCorners[0].y;
    int bottomRow = (int) img_details.scaledCorners[3].y;
    // clamp rows to the valid image range
    topRow = (topRow < 0) ? 0 : topRow;
    bottomRow = (bottomRow > img_details.src_original.rows() - 1) ? img_details.src_original.rows() - 1
            : bottomRow;

    // view (not copy) of the clamped bounding rectangle in the original image
    Mat ROI_region = img_details.src_original.submat(topRow, bottomRow, leftCol, rightCol);

    // create a container that is a square with side = diagonal of ROI.
    // this is large enough to accommodate the ROI region with rotation without cropping it

    int orig_rows = bottomRow - topRow;
    int orig_cols = rightCol - leftCol;
    int diagonal = (int) Math.sqrt(orig_rows * orig_rows + orig_cols * orig_cols);

    int newWidth = diagonal + 1;
    int newHeight = diagonal + 1;

    // offsets that centre the ROI inside the square container
    int offsetX = (newWidth - orig_cols) / 2;
    int offsetY = (newHeight - orig_rows) / 2;

    // NOTE(review): Mat(rows, cols) — width/height are passed in that order
    // here; harmless only because newWidth == newHeight.
    Mat enlarged_ROI_container = new Mat(newWidth, newHeight, img_details.src_original.type());
    enlarged_ROI_container.setTo(ZERO_SCALAR);

    // copy ROI to centre of container and rotate it
    ROI_region.copyTo(enlarged_ROI_container.rowRange(offsetY, offsetY + orig_rows).colRange(offsetX,
            offsetX + orig_cols));
    Point enlarged_ROI_container_centre = new Point(enlarged_ROI_container.rows() / 2.0,
            enlarged_ROI_container.cols() / 2.0);
    Mat rotated = Mat.zeros(enlarged_ROI_container.size(), enlarged_ROI_container.type());

    if (angle == Barcode.USE_ROTATED_RECT_ANGLE)
        rotation_angle = estimate_barcode_orientation();
    else
        rotation_angle = angle;

    // perform the affine transformation
    img_details.rotation_matrix = Imgproc.getRotationMatrix2D(enlarged_ROI_container_centre, rotation_angle,
            1.0);
    img_details.rotation_matrix.convertTo(img_details.rotation_matrix, CvType.CV_32F); // convert type so matrix multip. works properly

    img_details.newCornerCoord.setTo(ZERO_SCALAR);

    // convert scaledCorners to contain locations of corners in enlarged_ROI_container Mat
    img_details.scaledCorners[0] = new Point(offsetX, offsetY);
    img_details.scaledCorners[1] = new Point(offsetX, offsetY + orig_rows);
    img_details.scaledCorners[2] = new Point(offsetX + orig_cols, offsetY);
    img_details.scaledCorners[3] = new Point(offsetX + orig_cols, offsetY + orig_rows);
    // calculate the new location for each corner point of the rectangle ROI after rotation
    // (2x1 coord vector multiplied by the 2x3 rotation matrix via gemm)
    for (int r = 0; r < 4; r++) {
        img_details.coord.put(0, 0, img_details.scaledCorners[r].x);
        img_details.coord.put(1, 0, img_details.scaledCorners[r].y);
        Core.gemm(img_details.rotation_matrix, img_details.coord, 1, img_details.delta, 0,
                img_details.newCornerCoord);
        updatePoint(img_details.newCornerPoints.get(r), img_details.newCornerCoord.get(0, 0)[0],
                img_details.newCornerCoord.get(1, 0)[0]);
    }
    rotated.setTo(ZERO_SCALAR);
    Imgproc.warpAffine(enlarged_ROI_container, rotated, img_details.rotation_matrix,
            enlarged_ROI_container.size(), Imgproc.INTER_CUBIC);
    // sort rectangles points in order by first sorting all 4 points based on x
    // we then sort the first two based on y and then the next two based on y
    // this leaves the array in order top-left, bottom-left, top-right, bottom-right
    Collections.sort(img_details.newCornerPoints, CandidateBarcode.get_x_comparator());
    Collections.sort(img_details.newCornerPoints.subList(0, 2), CandidateBarcode.get_y_comparator());
    Collections.sort(img_details.newCornerPoints.subList(2, 4), CandidateBarcode.get_y_comparator());

    // calc height and width of rectangular region

    double height = length(img_details.newCornerPoints.get(1), img_details.newCornerPoints.get(0));
    double width = length(img_details.newCornerPoints.get(2), img_details.newCornerPoints.get(0));

    // create destination points for warpPerspective to map to
    updatePoint(img_details.transformedPoints.get(0), 0, 0);
    updatePoint(img_details.transformedPoints.get(1), 0, height);
    updatePoint(img_details.transformedPoints.get(2), width, 0);
    updatePoint(img_details.transformedPoints.get(3), width, height);

    // final deskew: map the rotated corner quad onto an axis-aligned rectangle
    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(
            Converters.vector_Point2f_to_Mat(img_details.newCornerPoints),
            Converters.vector_Point2f_to_Mat(img_details.transformedPoints));
    Mat perspectiveOut = Mat.zeros((int) height + 2, (int) width + 2, CvType.CV_32F);
    Imgproc.warpPerspective(rotated, perspectiveOut, perspectiveTransform, perspectiveOut.size(),
            Imgproc.INTER_CUBIC);

    result.ROI = perspectiveOut;
    return result;
}

From source file:karthiknr.TextID.ProcessAsyncActivity.java

License:Apache License

/**
 * Warps {@code inputMat} into a fixed 1000x1000 output using the
 * perspective transform from {@code startM} (four source corners) to the
 * output rectangle corners.
 *
 * @param inputMat image to warp
 * @param startM   Mat of four source corner points (as produced by
 *                 Converters.vector_Point2f_to_Mat), ordered top-left,
 *                 bottom-left, bottom-right, top-right
 * @return the warped 1000x1000 image
 */
public Mat warpImage(Mat inputMat, Mat startM) {
    int resultWidth = 1000;
    int resultHeight = 1000;

    // Mat(rows, cols): rows == height, cols == width. The original passed
    // (width, height) — masked only because both are 1000 here.
    Mat outputMat = new Mat(resultHeight, resultWidth, CvType.CV_8UC4);

    // Destination corners, same ordering as the source corners in startM.
    Point ocvPOut1 = new Point(0, 0);
    Point ocvPOut2 = new Point(0, resultHeight);
    Point ocvPOut3 = new Point(resultWidth, resultHeight);
    Point ocvPOut4 = new Point(resultWidth, 0);
    List<Point> dest = new ArrayList<Point>();
    dest.add(ocvPOut1);
    dest.add(ocvPOut2);
    dest.add(ocvPOut3);
    dest.add(ocvPOut4);
    Mat endM = Converters.vector_Point2f_to_Mat(dest);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);

    Imgproc.warpPerspective(inputMat, outputMat, perspectiveTransform, new Size(resultWidth, resultHeight),
            Imgproc.INTER_CUBIC);

    return outputMat;
}

From source file:mineshcvit.opendocscanner.CropImage.java

License:Apache License

/**
 * Perspective-corrects the cropped trapezoid region of {@code mBitmap}
 * into an upright rectangle, writes the result to {@code mImagePath},
 * and finishes the activity with RESULT_OK.
 *
 * @throws Exception propagated from image conversion/writing
 */
private void onSaveClicked() throws Exception {
    // TODO this code needs to change to use the decode/crop/encode single
    // step api so that we don't require that the whole (possibly large)
    // bitmap doesn't have to be read into memory
    if (mSaving)
        return;

    if (mCrop == null) {
        return;
    }

    mSaving = true;

    // Trapezoid corners as a flat array [x0,y0, x1,y1, x2,y2, x3,y3].
    final float[] trapezoid = mCrop.getTrapezoid();
    Log.w("myApp", "onsaveclicekd, trap[0] is " + trapezoid[0]);
    Log.w("myApp", "onsaveclicekd, trap[1] is " + trapezoid[1]);
    Log.w("myApp", "onsaveclicekd, trap[2] is " + trapezoid[2]);
    Log.w("myApp", "onsaveclicekd, trap[3] is " + trapezoid[3]);
    Log.w("myApp", "onsaveclicekd, trap[4] is " + trapezoid[4]);
    Log.w("myApp", "onsaveclicekd, trap[5] is " + trapezoid[5]);
    Log.w("myApp", "onsaveclicekd, trap[6] is " + trapezoid[6]);
    Log.w("myApp", "onsaveclicekd, trap[7] is " + trapezoid[7]);

    // Bounding rectangle of the quadrilateral; the perspective-corrected
    // output image is created at this size.
    final RectF perspectiveCorrectedBoundingRect = new RectF(mCrop.getPerspectiveCorrectedBoundingRect());

    int result_width = (int) perspectiveCorrectedBoundingRect.width();
    int result_height = (int) perspectiveCorrectedBoundingRect.height();

    Log.w("myApp", "bounding rect width is " + result_width);
    Log.w("myApp", "bounding rect height " + result_height);

    // BUG FIX: original passed getHeight() for both dimensions; Mat takes
    // (rows, cols) == (height, width).
    Mat inputMat = new Mat(mBitmap.getHeight(), mBitmap.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(mBitmap, inputMat);
    // BUG FIX: (rows, cols) order — original had (width, height) swapped.
    final Mat outputMat = new Mat(result_height, result_width, CvType.CV_8UC4);

    // The 4 corner points of the quad in the source image.
    Point ocvPIn1 = new Point((int) trapezoid[0], (int) trapezoid[1]); // left top
    Point ocvPIn2 = new Point((int) trapezoid[6], (int) trapezoid[7]); // left bottom
    Point ocvPIn3 = new Point((int) trapezoid[4], (int) trapezoid[5]); // bottom right
    Point ocvPIn4 = new Point((int) trapezoid[2], (int) trapezoid[3]); // right top

    List<Point> source = new ArrayList<Point>();
    source.add(ocvPIn1);
    source.add(ocvPIn2);
    source.add(ocvPIn3);
    source.add(ocvPIn4);

    Mat startM = Converters.vector_Point2f_to_Mat(source);

    // Corresponding corner points in the destination image (same order).
    Point ocvPOut1 = new Point(0, 0); // left top
    Point ocvPOut2 = new Point(0, result_height); // left bottom
    Point ocvPOut3 = new Point(result_width, result_height); // bottom right
    Point ocvPOut4 = new Point(result_width, 0); // right top

    List<Point> dest = new ArrayList<Point>();
    dest.add(ocvPOut1);
    dest.add(ocvPOut2);
    dest.add(ocvPOut3);
    dest.add(ocvPOut4);
    Mat endM = Converters.vector_Point2f_to_Mat(dest);

    // Warp the source quad onto the upright destination rectangle.
    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);
    Imgproc.warpPerspective(inputMat, outputMat, perspectiveTransform, new Size(result_width, result_height),
            Imgproc.INTER_CUBIC);

    Imgcodecs.imwrite(mImagePath, outputMat);

    Intent intent = new Intent();
    setResult(RESULT_OK, intent);
    finish();
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.OpenCVUtil.java

License:Open Source License

/**
 * Builds a perspective transform from four source corner coordinates to
 * four destination corner coordinates. Each point is a double[] of
 * {x, y}; source and destination points correspond by position.
 *
 * @return the 3x3 perspective transform matrix
 */
private static Mat transformMatrix(double[] p1Src, double[] p2Src, double[] p3Src, double[] p4Src,
        double[] p1Dst, double[] p2Dst, double[] p3Dst, double[] p4Dst) {

    // Assemble the source quad directly into the MatOfPoint2f that
    // getPerspectiveTransform expects.
    MatOfPoint2f srcMat2f = new MatOfPoint2f(
            new Point(p1Src[0], p1Src[1]),
            new Point(p2Src[0], p2Src[1]),
            new Point(p3Src[0], p3Src[1]),
            new Point(p4Src[0], p4Src[1]));

    // Destination quad, corner-for-corner with the source quad.
    MatOfPoint2f dstMat2f = new MatOfPoint2f(
            new Point(p1Dst[0], p1Dst[1]),
            new Point(p2Dst[0], p2Dst[1]),
            new Point(p3Dst[0], p3Dst[1]),
            new Point(p4Dst[0], p4Dst[1]));

    return Imgproc.getPerspectiveTransform(srcMat2f, dstMat2f);
}