Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

In this page you can find the example usage for org.opencv.core Point Point.

Prototype

public Point(double x, double y) 

Source Link

Usage

From source file:mineshcvit.opendocscanner.CropImage.java

License:Apache License

/**
 * Applies a perspective correction to the quadrilateral selected by the crop
 * view, writes the corrected image back to {@code mImagePath}, and finishes
 * the activity with {@code RESULT_OK}.
 *
 * <p>Bug fixes over the previous revision:
 * <ul>
 *   <li>{@code inputMat} was constructed as height&times;height — the second
 *       argument of {@code Mat(rows, cols, type)} must be the width.</li>
 *   <li>{@code outputMat} had rows/cols swapped (width passed as rows).</li>
 *   <li>Native {@code Mat} buffers are now released after the write.</li>
 * </ul>
 *
 * @throws Exception propagated from the image pipeline (unchanged contract)
 */
private void onSaveClicked() throws Exception {
    // TODO switch to a decode/crop/encode single-step API so the whole
    // (possibly large) bitmap does not have to be read into memory.
    if (mSaving)
        return;

    if (mCrop == null) {
        return;
    }

    mSaving = true;

    // Flat corner array: [x0,y0, x1,y1, x2,y2, x3,y3] in the order
    // top-left, top-right, bottom-right, bottom-left (see indices below).
    final float[] trapezoid = mCrop.getTrapezoid();
    for (int i = 0; i < 8; i++) {
        Log.w("myApp", "onsaveclicekd, trap[" + i + "] is " + trapezoid[i]);
    }

    // Bounding rectangle of the quadrilateral; the perspective-corrected
    // image is rendered at this size.
    final RectF perspectiveCorrectedBoundingRect = new RectF(mCrop.getPerspectiveCorrectedBoundingRect());

    int result_width = (int) perspectiveCorrectedBoundingRect.width();
    int result_height = (int) perspectiveCorrectedBoundingRect.height();

    Log.w("myApp", "bounding rect width is " + result_width);
    Log.w("myApp", "bounding rect height " + result_height);

    // BUGFIX: was Mat(getHeight(), getHeight(), ...) — cols must be the width.
    Mat inputMat = new Mat(mBitmap.getHeight(), mBitmap.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(mBitmap, inputMat);
    // BUGFIX: Mat(rows, cols, type) — rows are the height, cols the width.
    final Mat outputMat = new Mat(result_height, result_width, CvType.CV_8UC4);

    // Source quad corners; order must match the destination list below.
    Point ocvPIn1 = new Point((int) trapezoid[0], (int) trapezoid[1]); // top left
    Point ocvPIn2 = new Point((int) trapezoid[6], (int) trapezoid[7]); // bottom left
    Point ocvPIn3 = new Point((int) trapezoid[4], (int) trapezoid[5]); // bottom right
    Point ocvPIn4 = new Point((int) trapezoid[2], (int) trapezoid[3]); // top right

    List<Point> source = new ArrayList<Point>();
    source.add(ocvPIn1);
    source.add(ocvPIn2);
    source.add(ocvPIn3);
    source.add(ocvPIn4);

    Mat startM = Converters.vector_Point2f_to_Mat(source);

    // Corresponding corners in the destination image.
    Point ocvPOut1 = new Point(0, 0); // top left
    Point ocvPOut2 = new Point(0, result_height); // bottom left
    Point ocvPOut3 = new Point(result_width, result_height); // bottom right
    Point ocvPOut4 = new Point(result_width, 0); // top right

    List<Point> dest = new ArrayList<Point>();
    dest.add(ocvPOut1);
    dest.add(ocvPOut2);
    dest.add(ocvPOut3);
    dest.add(ocvPOut4);
    Mat endM = Converters.vector_Point2f_to_Mat(dest);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);
    Imgproc.warpPerspective(inputMat, outputMat, perspectiveTransform, new Size(result_width, result_height),
            Imgproc.INTER_CUBIC);

    Imgcodecs.imwrite(mImagePath, outputMat);

    // Mats are backed by native buffers; release them promptly rather than
    // waiting for finalization.
    inputMat.release();
    outputMat.release();
    startM.release();
    endM.release();
    perspectiveTransform.release();

    Intent intent = new Intent();
    setResult(RESULT_OK, intent);
    finish();
}

From source file:mineshcvit.opendocscanner.CropImage.java

License:Apache License

/**
 * Detects the largest quadrilateral contour in {@code mBitmap} (Otsu
 * threshold -&gt; Canny -&gt; blur -&gt; contour scan) and installs a
 * {@link HighlightView} whose corners are that quad, sorted as
 * top-left, bottom-left, bottom-right, top-right.
 *
 * <p>Bug fixes over the previous revision:
 * <ul>
 *   <li>{@code contours.get(0)} threw {@code IndexOutOfBoundsException} on an
 *       image with no contours, and {@code approxCurve.get(i, 0)} NPE'd when
 *       no four-point contour was found; both cases now fall back to the
 *       full-image rectangle.</li>
 *   <li>Removed a stray empty statement after {@code setFocus(true)}.</li>
 * </ul>
 */
private void makeDefault() {

    // minesh: finding the largest rect in the given image
    Mat imgSource = new Mat();
    Utils.bitmapToMat(mBitmap, imgSource);
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    Log.w("myApp", "image path from isnde makedefault() is " + mImagePath);

    int matwidth = imgSource.width();
    int matheight = imgSource.height();

    Log.w("myApp", "mat image width, from makedefault() is " + matwidth);
    Log.w("myApp", "mat image height from, makedefault() is " + matheight);

    Mat imageBin = new Mat();

    // Otsu picks the global binarization threshold; reuse it for Canny with
    // high = otsu and low = otsu / 2.
    double threshold = Imgproc.threshold(imgSource, imageBin, 0, 255, Imgproc.THRESH_OTSU);
    Log.w("myApp", "otsu threshold is " + threshold);

    Imgproc.Canny(imgSource.clone(), imgSource, threshold * 0.5, threshold);
    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(3, 3), 3);

    // Find all contours in the edge image.
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // Scan for the largest-area contour whose polygonal approximation has
    // exactly four vertices.
    double maxArea = -1;
    MatOfPoint2f approxCurve = new MatOfPoint2f();

    for (int idx = 0; idx < contours.size(); idx++) {
        MatOfPoint temp_contour = contours.get(idx);
        double contourarea = Imgproc.contourArea(temp_contour);
        // Only bother approximating contours larger than the best so far.
        if (contourarea > maxArea) {
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int) temp_contour.total();
            MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
            // Tolerance of 5% of the contour perimeter point count, as before.
            Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
            if (approxCurve_temp.total() == 4) {
                maxArea = contourarea;
                approxCurve = approxCurve_temp;
            }
        }
    }

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Log.w("myApp", "bitmap width is " + width);
    Log.w("myApp", "bitmap height is " + height);

    ArrayList<Point> sortedPoints = new ArrayList<Point>();

    if (approxCurve.total() == 4) {
        // Collect the four detected corners.
        ArrayList<Point> source = new ArrayList<Point>();
        for (int i = 0; i < 4; i++) {
            double[] coords = approxCurve.get(i, 0);
            source.add(new Point(coords[0], coords[1]));
        }

        // Sort by y: the first two points are the top edge, the last two the
        // bottom edge.
        Collections.sort(source, new Comparator<Point>() {
            public int compare(Point o1, Point o2) {
                return Double.compare(o1.y, o2.y);
            }
        });

        // Shared x-comparator (was duplicated twice in the old code).
        Comparator<Point> byX = new Comparator<Point>() {
            public int compare(Point o1, Point o2) {
                return Double.compare(o1.x, o2.x);
            }
        };

        ArrayList<Point> topPoints = new ArrayList<Point>(source.subList(0, 2));
        ArrayList<Point> bottomPoints = new ArrayList<Point>(source.subList(2, 4));
        Collections.sort(topPoints, byX);
        Collections.sort(bottomPoints, byX);

        sortedPoints.add(topPoints.get(0)); // top left
        sortedPoints.add(bottomPoints.get(0)); // bottom left
        sortedPoints.add(bottomPoints.get(1)); // bottom right
        sortedPoints.add(topPoints.get(1)); // top right
    } else {
        // BUGFIX: no quadrilateral detected (or no contours at all) used to
        // crash; default the crop to the whole image in the same corner order.
        sortedPoints.add(new Point(0, 0)); // top left
        sortedPoints.add(new Point(0, height)); // bottom left
        sortedPoints.add(new Point(width, height)); // bottom right
        sortedPoints.add(new Point(width, 0)); // top right
    }

    Rect imageRect = new Rect(0, 0, width, height);

    Log.w("myApp",
            "from inside makedeafult inside cropimage calss, default crop rect values are set and now highlight view will be initiated ");

    HighlightView hv = new HighlightView(mImageView, imageRect, sortedPoints);

    Log.w("myApp", "higlight view initiated; done");

    mImageView.add(hv);
    Log.w("myApp", "add hv is done; done");

    mImageView.invalidate();
    mCrop = hv;

    Log.w("myApp", "mcrop=hv donee");
    mCrop.setFocus(true);
}

From source file:mineshcvit.opendocscanner.CroppingTrapezoid.java

License:Apache License

/** Returns the top-left trapezoid corner, truncated to integer pixel coordinates. */
public Point getTopLeft() {
    final int x = (int) mPoints[0];
    final int y = (int) mPoints[1];
    return new Point(x, y);
}

From source file:mineshcvit.opendocscanner.CroppingTrapezoid.java

License:Apache License

/** Returns the top-right trapezoid corner, truncated to integer pixel coordinates. */
public Point getTopRight() {
    final int x = (int) mPoints[2];
    final int y = (int) mPoints[3];
    return new Point(x, y);
}

From source file:mineshcvit.opendocscanner.CroppingTrapezoid.java

License:Apache License

/** Returns the bottom-right trapezoid corner, truncated to integer pixel coordinates. */
public Point getBottomRight() {
    final int x = (int) mPoints[4];
    final int y = (int) mPoints[5];
    return new Point(x, y);
}

From source file:mineshcvit.opendocscanner.CroppingTrapezoid.java

License:Apache License

/** Returns the bottom-left trapezoid corner, truncated to integer pixel coordinates. */
public Point getBottomLeft() {
    final int x = (int) mPoints[6];
    final int y = (int) mPoints[7];
    return new Point(x, y);
}

From source file:mvc_util.Util_Recorte.java

/**
 * Records the clicked location as a selection point. Irregular mode accepts
 * any number of points; regular mode accepts at most two.
 */
private void formMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_formMouseClicked
    final java.awt.Point clicked = evt.getPoint();
    if (isIrregular()) {
        pontos.add(new Point(clicked.x, clicked.y));
    }
    if (isRegular() && pontos.size() < 2) {
        pontos.add(new Point(clicked.x, clicked.y));
    }
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * @param imgPath/*from   w  w  w  .j  a v  a2 s  .c o  m*/
 * @param box
 * @return
 */
/**
 * Extracts the sub-image covered by a Pascal VOC bounding box from the image
 * at {@code imgPath}.
 *
 * @param imgPath path of the source image
 * @param box     bounding box whose xmax/ymax are exclusive (hence the -1)
 * @return the cropped submat, or {@code null} if OpenCV rejects the region
 */
public static Mat getImageMat(String imgPath, PascalBndBox box) {

    Mat result = null;
    Mat org = getImageMat(imgPath);

    final int left = box.getXmin();
    final int top = box.getYmin();
    final int right = box.getXmax() - 1;
    final int bottom = box.getYmax() - 1;

    Point[] corners = new Point[] { new Point(left, top), new Point(left, bottom),
            new Point(right, top), new Point(right, bottom) };

    try {
        result = org.submat(Imgproc.boundingRect(new MatOfPoint(corners)));
    } catch (CvException ex) {
        logger.error("", ex);
    }

    return result;
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * Draws the outline of {@code rect} onto {@code mat} with 2px lines,
 * defaulting to green when {@code scalar} is {@code null}.
 */
public static void drawRect(Rect rect, Mat mat, Scalar scalar) {

    if (scalar == null) {
        scalar = new Scalar(0, 255, 0);
    }

    final int left = rect.x;
    final int top = rect.y;
    final int right = rect.x + rect.width;
    final int bottom = rect.y + rect.height;

    // Corners in clockwise order starting at top-left.
    Point[] corners = new Point[] { new Point(left, top), new Point(right, top),
            new Point(right, bottom), new Point(left, bottom) };

    // Connect each corner to the next, wrapping around to close the rectangle.
    for (int i = 0; i < corners.length; i++) {
        Core.line(mat, corners[i], corners[(i + 1) % corners.length], scalar, 2);
    }
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * Renders candidate components (color-coded by object type) and the pivot
 * component onto a copy of the image, optionally stamps the fuzzy score in
 * the corner, and writes the result into the CBOD temp directory.
 *
 * <p>Color code: wheels red, tail lights green, license plates blue, the
 * pivot yellow.
 */
private static void drawComponentsToImage(List<CandidateComponent> candidateComponents,
        CandidateComponent pivot, ImageModel imageModel, String outputSuffix, double fuzzyResult,
        boolean writeFuzzyResult) {

    Mat copy = OpenCV.copyImage(imageModel.getMat());

    Scalar blue = new Scalar(255, 0, 0);
    Scalar green = new Scalar(0, 255, 0);
    Scalar red = new Scalar(0, 0, 255);
    Scalar yellow = new Scalar(0, 255, 255);

    if (candidateComponents != null) {
        for (CandidateComponent cc : candidateComponents) {
            Rect rect = cc.getRect();
            EObjectType type = cc.getObjectType();
            if (type.equals(EObjectType.WHEEL)) {
                OpenCV.drawRect(rect, copy, red);
            } else if (type.equals(EObjectType.TAIL_LIGHT)) {
                OpenCV.drawRect(rect, copy, green);
            } else if (type.equals(EObjectType.LICENSE_PLATE)) {
                OpenCV.drawRect(rect, copy, blue);
            }
        }
    }

    if (pivot != null) {
        OpenCV.drawRect(pivot.getRect(), copy, yellow);
    }

    if (writeFuzzyResult) {
        DecimalFormat dFormat = new DecimalFormat("#.####");
        drawText(copy, new Point(5, 20), dFormat.format(fuzzyResult));
    }

    String outputImagePath = CBODUtil.getCbodTempDirectory().concat("/")
            .concat(imageModel.getRawImageName() + outputSuffix + "." + imageModel.getExtension());
    OpenCV.writeImage(copy, outputImagePath);

}