List of usage examples for org.opencv.imgproc Imgproc warpPerspective
public static void warpPerspective(Mat src, Mat dst, Mat M, Size dsize, int flags)
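The method warps the source image src with the 3x3 perspective transform M and writes the result, sized to dsize, into dst; flags is an interpolation constant such as Imgproc.INTER_LINEAR or Imgproc.INTER_CUBIC, as in the examples below. Before the project examples, here is a minimal self-contained sketch of the common pattern: derive M with Imgproc.getPerspectiveTransform from four point correspondences, then call warpPerspective. The file names, corner coordinates, output size, and class name in this sketch are placeholder assumptions, not taken from any of the projects listed below.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class WarpPerspectiveSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Placeholder input image containing a known quadrilateral region.
        Mat src = Imgcodecs.imread("quad.png");

        int outWidth = 400;
        int outHeight = 300;

        // Four corners of the region in the source image (top-left, top-right,
        // bottom-right, bottom-left) -- example values only.
        MatOfPoint2f srcCorners = new MatOfPoint2f(
                new Point(50, 40), new Point(460, 70),
                new Point(430, 350), new Point(30, 320));

        // Where those corners should land in the output image.
        MatOfPoint2f dstCorners = new MatOfPoint2f(
                new Point(0, 0), new Point(outWidth, 0),
                new Point(outWidth, outHeight), new Point(0, outHeight));

        // 3x3 perspective transform (homography) mapping srcCorners to dstCorners.
        Mat M = Imgproc.getPerspectiveTransform(srcCorners, dstCorners);

        // Apply the transform; dst is allocated to dsize by warpPerspective.
        Mat dst = new Mat();
        Imgproc.warpPerspective(src, dst, M, new Size(outWidth, outHeight),
                Imgproc.INTER_LINEAR);

        Imgcodecs.imwrite("quad_warped.png", dst);
    }
}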
From source file: OCV_WarpPerspective.java
License: Open Source License

@Override
public void run(ImageProcessor ip) {
    int imw = ip.getWidth();
    int imh = ip.getHeight();
    Size size = new Size((double) imw, (double) imh);

    Mat mat = new Mat(3, 3, CvType.CV_64FC1);

    for (int i = 0; i < 3; i++) {
        mat.put(i, 0, new double[] { Double.valueOf(rt.getStringValue(0, i).replaceAll("\"|'", "")) });
        mat.put(i, 1, new double[] { Double.valueOf(rt.getStringValue(1, i).replaceAll("\"|'", "")) });
        mat.put(i, 2, new double[] { Double.valueOf(rt.getStringValue(2, i).replaceAll("\"|'", "")) });
    }

    if (ip.getBitDepth() == 8) {
        byte[] srcdst_ar = (byte[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_8UC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC1);

        src_mat.put(0, 0, srcdst_ar);
        Imgproc.warpPerspective(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        dst_mat.get(0, 0, srcdst_ar);
    } else if (ip.getBitDepth() == 16) {
        short[] srcdst_ar = (short[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_16UC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_16UC1);

        src_mat.put(0, 0, srcdst_ar);
        Imgproc.warpPerspective(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        dst_mat.get(0, 0, srcdst_ar);
    } else if (ip.getBitDepth() == 24) {
        int[] srcdst_ar = (int[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_8UC3);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC3);

        OCV__LoadLibrary.intarray2mat(srcdst_ar, src_mat, imw, imh);
        Imgproc.warpPerspective(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        OCV__LoadLibrary.mat2intarray(dst_mat, srcdst_ar, imw, imh);
    } else if (ip.getBitDepth() == 32) {
        float[] srcdst_ar = (float[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_32FC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_32FC1);

        src_mat.put(0, 0, srcdst_ar);
        Imgproc.warpPerspective(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        dst_mat.get(0, 0, srcdst_ar);
    } else {
        IJ.error("Wrong image format");
    }
}
From source file: com.github.mbillingr.correlationcheck.ImageProcessor.java
License: Open Source License

private Mat load_transformed() {
    Mat image = Imgcodecs.imread(mImageFile.getAbsolutePath());
    Imgproc.warpPerspective(image, image, perspective_transform, new Size(work_width, work_height),
            Imgproc.INTER_CUBIC);
    return image;
}
From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License: Open Source License

private Mat warpPerspective(final Mat frame) {
    if (warpInitialized) {
        final Mat mat = new Mat();
        Imgproc.warpPerspective(frame, mat, perspMat, frame.size(), Imgproc.INTER_LINEAR);
        return mat;
    } else {
        logger.warn("warpPerspective called when warpInitialized is false - {} {} - {}", perspMat,
                boundingBox, isCalibrated);
        return frame;
    }
}
From source file: com.superbool.easylpr.model.Transformation.java

public Mat crop(Size outputImageSize, Mat transformationMatrix) {
    Mat deskewed = new Mat(outputImageSize, this.bigImage.type());

    // Apply perspective transformation to the image
    Imgproc.warpPerspective(this.bigImage, deskewed, transformationMatrix, deskewed.size(),
            Imgproc.INTER_CUBIC);

    return deskewed;
}
From source file: karthik.Barcode.CandidateMatrixBarcode.java
License: Open Source License

CandidateResult NormalizeCandidateRegion(double angle) {
    /* candidateRegion is the RotatedRect which contains a candidate region for the barcode
    // angle is the rotation angle or USE_ROTATED_RECT_ANGLE for this function to
    // estimate rotation angle from the rect parameter
    // returns Mat containing cropped area (region of interest) with just the barcode
    // The barcode region is from the *original* image, not the scaled image
    // the cropped area is also rotated as necessary to be horizontal or vertical rather than skewed
    // Some parts of this function are from http://felix.abecassis.me/2011/10/opencv-rotation-deskewing/
    // and http://stackoverflow.com/questions/22041699/rotate-an-image-without-cropping-in-opencv-in-c
    */
    double rotation_angle;
    CandidateResult result = new CandidateResult();

    // scale candidate region back up to original size to return cropped part from *original* image
    // need the 1.0 there to force floating-point arithmetic from int values
    double scale_factor = img_details.src_original.rows() / (1.0 * img_details.src_grayscale.rows());

    // expand the region found - this helps capture the entire code including the border zone
    candidateRegion.size.width += 2 * params.RECT_WIDTH;
    candidateRegion.size.height += 2 * params.RECT_HEIGHT;

    // calculate location of rectangle in original image and its corner points
    RotatedRect scaledRegion = new RotatedRect(candidateRegion.center, candidateRegion.size,
            candidateRegion.angle);
    scaledRegion.center.x = scaledRegion.center.x * scale_factor;
    scaledRegion.center.y = scaledRegion.center.y * scale_factor;
    scaledRegion.size.height *= scale_factor;
    scaledRegion.size.width *= scale_factor;

    scaledRegion.points(img_details.scaledCorners);
    // lets get the coordinates of the ROI in the original image and save it
    result.ROI_coords = Arrays.copyOf(img_details.scaledCorners, 4);

    // get the bounding rectangle of the ROI by sorting its corner points
    // we do it manually because RotatedRect can generate corner points outside the Mat area
    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_x_comparator());
    int leftCol = (int) img_details.scaledCorners[0].x;
    int rightCol = (int) img_details.scaledCorners[3].x;
    leftCol = (leftCol < 0) ? 0 : leftCol;
    rightCol = (rightCol > img_details.src_original.cols() - 1) ? img_details.src_original.cols() - 1
            : rightCol;

    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_y_comparator());
    int topRow = (int) img_details.scaledCorners[0].y;
    int bottomRow = (int) img_details.scaledCorners[3].y;
    topRow = (topRow < 0) ? 0 : topRow;
    bottomRow = (bottomRow > img_details.src_original.rows() - 1) ? img_details.src_original.rows() - 1
            : bottomRow;

    Mat ROI_region = img_details.src_original.submat(topRow, bottomRow, leftCol, rightCol);

    // create a container that is a square with side = diagonal of ROI.
    // this is large enough to accommodate the ROI region with rotation without cropping it
    int orig_rows = bottomRow - topRow;
    int orig_cols = rightCol - leftCol;
    int diagonal = (int) Math.sqrt(orig_rows * orig_rows + orig_cols * orig_cols);

    int newWidth = diagonal + 1;
    int newHeight = diagonal + 1;

    int offsetX = (newWidth - orig_cols) / 2;
    int offsetY = (newHeight - orig_rows) / 2;

    Mat enlarged_ROI_container = new Mat(newWidth, newHeight, img_details.src_original.type());
    enlarged_ROI_container.setTo(ZERO_SCALAR);

    // copy ROI to centre of container and rotate it
    ROI_region.copyTo(enlarged_ROI_container.rowRange(offsetY, offsetY + orig_rows).colRange(offsetX,
            offsetX + orig_cols));
    Point enlarged_ROI_container_centre = new Point(enlarged_ROI_container.rows() / 2.0,
            enlarged_ROI_container.cols() / 2.0);
    Mat rotated = Mat.zeros(enlarged_ROI_container.size(), enlarged_ROI_container.type());

    if (angle == Barcode.USE_ROTATED_RECT_ANGLE)
        rotation_angle = estimate_barcode_orientation();
    else
        rotation_angle = angle;

    // perform the affine transformation
    img_details.rotation_matrix = Imgproc.getRotationMatrix2D(enlarged_ROI_container_centre, rotation_angle,
            1.0);
    // convert type so matrix multiplication works properly
    img_details.rotation_matrix.convertTo(img_details.rotation_matrix, CvType.CV_32F);

    img_details.newCornerCoord.setTo(ZERO_SCALAR);

    // convert scaledCorners to contain locations of corners in the enlarged_ROI_container Mat
    img_details.scaledCorners[0] = new Point(offsetX, offsetY);
    img_details.scaledCorners[1] = new Point(offsetX, offsetY + orig_rows);
    img_details.scaledCorners[2] = new Point(offsetX + orig_cols, offsetY);
    img_details.scaledCorners[3] = new Point(offsetX + orig_cols, offsetY + orig_rows);

    // calculate the new location for each corner point of the rectangle ROI after rotation
    for (int r = 0; r < 4; r++) {
        img_details.coord.put(0, 0, img_details.scaledCorners[r].x);
        img_details.coord.put(1, 0, img_details.scaledCorners[r].y);
        Core.gemm(img_details.rotation_matrix, img_details.coord, 1, img_details.delta, 0,
                img_details.newCornerCoord);
        updatePoint(img_details.newCornerPoints.get(r), img_details.newCornerCoord.get(0, 0)[0],
                img_details.newCornerCoord.get(1, 0)[0]);
    }

    rotated.setTo(ZERO_SCALAR);
    Imgproc.warpAffine(enlarged_ROI_container, rotated, img_details.rotation_matrix,
            enlarged_ROI_container.size(), Imgproc.INTER_CUBIC);

    // sort rectangle points in order by first sorting all 4 points based on x
    // we then sort the first two based on y and then the next two based on y
    // this leaves the array in order top-left, bottom-left, top-right, bottom-right
    Collections.sort(img_details.newCornerPoints, CandidateBarcode.get_x_comparator());
    Collections.sort(img_details.newCornerPoints.subList(0, 2), CandidateBarcode.get_y_comparator());
    Collections.sort(img_details.newCornerPoints.subList(2, 4), CandidateBarcode.get_y_comparator());

    // calc height and width of rectangular region
    double height = length(img_details.newCornerPoints.get(1), img_details.newCornerPoints.get(0));
    double width = length(img_details.newCornerPoints.get(2), img_details.newCornerPoints.get(0));

    // create destination points for warpPerspective to map to
    updatePoint(img_details.transformedPoints.get(0), 0, 0);
    updatePoint(img_details.transformedPoints.get(1), 0, height);
    updatePoint(img_details.transformedPoints.get(2), width, 0);
    updatePoint(img_details.transformedPoints.get(3), width, height);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(
            Converters.vector_Point2f_to_Mat(img_details.newCornerPoints),
            Converters.vector_Point2f_to_Mat(img_details.transformedPoints));
    Mat perspectiveOut = Mat.zeros((int) height + 2, (int) width + 2, CvType.CV_32F);
    Imgproc.warpPerspective(rotated, perspectiveOut, perspectiveTransform, perspectiveOut.size(),
            Imgproc.INTER_CUBIC);

    result.ROI = perspectiveOut;
    return result;
}
From source file: karthiknr.TextID.ProcessAsyncActivity.java
License: Apache License

public Mat warpImage(Mat inputMat, Mat startM) {
    int resultWidth = 1000;
    int resultHeight = 1000;

    Mat outputMat = new Mat(resultWidth, resultHeight, CvType.CV_8UC4);

    Point ocvPOut1 = new Point(0, 0);
    Point ocvPOut2 = new Point(0, resultHeight);
    Point ocvPOut3 = new Point(resultWidth, resultHeight);
    Point ocvPOut4 = new Point(resultWidth, 0);

    List<Point> dest = new ArrayList<Point>();
    dest.add(ocvPOut1);
    dest.add(ocvPOut2);
    dest.add(ocvPOut3);
    dest.add(ocvPOut4);
    Mat endM = Converters.vector_Point2f_to_Mat(dest);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);

    Imgproc.warpPerspective(inputMat, outputMat, perspectiveTransform,
            new Size(resultWidth, resultHeight), Imgproc.INTER_CUBIC);

    return outputMat;
}
From source file: mineshcvit.opendocscanner.CropImage.java
License: Apache License

private void onSaveClicked() throws Exception {
    // TODO this code needs to change to use the decode/crop/encode single
    // step api so that the whole (possibly large) bitmap doesn't have to be
    // read into memory
    if (mSaving)
        return;

    if (mCrop == null) {
        return;
    }

    mSaving = true;

    // Rect r = mCrop.getCropRect();
    final float[] trapezoid = mCrop.getTrapezoid();

    Log.w("myApp", "onsaveclicekd, trap[0] is " + trapezoid[0]);
    Log.w("myApp", "onsaveclicekd, trap[1] is " + trapezoid[1]);
    Log.w("myApp", "onsaveclicekd, trap[2] is " + trapezoid[2]);
    Log.w("myApp", "onsaveclicekd, trap[3] is " + trapezoid[3]);
    Log.w("myApp", "onsaveclicekd, trap[4] is " + trapezoid[4]);
    Log.w("myApp", "onsaveclicekd, trap[5] is " + trapezoid[5]);
    Log.w("myApp", "onsaveclicekd, trap[6] is " + trapezoid[6]);
    Log.w("myApp", "onsaveclicekd, trap[7] is " + trapezoid[7]);

    // refer this for perspective correction
    // minesh:
    // find the bounding rectangle of the quadrilateral;
    // the new image, to which the perspective-corrected result is written, is created in this size
    final RectF perspectiveCorrectedBoundingRect = new RectF(mCrop.getPerspectiveCorrectedBoundingRect());

    // dimensions of the new image
    int result_width = (int) perspectiveCorrectedBoundingRect.width();
    int result_height = (int) perspectiveCorrectedBoundingRect.height();

    Log.w("myApp", "bounding rect width is " + result_width);
    Log.w("myApp", "bounding rect height " + result_height);

    // rows = height, cols = width
    Mat inputMat = new Mat(mBitmap.getHeight(), mBitmap.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(mBitmap, inputMat);

    final Mat outputMat = new Mat(result_width, result_height, CvType.CV_8UC4);

    // the 4 points of the quad
    Point ocvPIn1 = new Point((int) trapezoid[0], (int) trapezoid[1]); // left top
    Point ocvPIn2 = new Point((int) trapezoid[6], (int) trapezoid[7]); // left bottom
    Point ocvPIn3 = new Point((int) trapezoid[4], (int) trapezoid[5]); // bottom right
    Point ocvPIn4 = new Point((int) trapezoid[2], (int) trapezoid[3]); // right top

    List<Point> source = new ArrayList<Point>();
    source.add(ocvPIn1);
    source.add(ocvPIn2);
    source.add(ocvPIn3);
    source.add(ocvPIn4);
    Mat startM = Converters.vector_Point2f_to_Mat(source);

    // points in the destination image
    Point ocvPOut1 = new Point(0, 0); // left top
    Point ocvPOut2 = new Point(0, result_height); // left bottom
    Point ocvPOut3 = new Point(result_width, result_height); // bottom right
    Point ocvPOut4 = new Point(result_width, 0); // right top

    List<Point> dest = new ArrayList<Point>();
    dest.add(ocvPOut1);
    dest.add(ocvPOut2);
    dest.add(ocvPOut3);
    dest.add(ocvPOut4);
    Mat endM = Converters.vector_Point2f_to_Mat(dest);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);
    Imgproc.warpPerspective(inputMat, outputMat, perspectiveTransform,
            new Size(result_width, result_height), Imgproc.INTER_CUBIC);

    // Imgcodecs.imwrite(mImagePath, outputMat);

    Intent intent = new Intent();
    setResult(RESULT_OK, intent);
    finish();
}