Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find usage examples for the org.opencv.core Point constructor.

Prototype

public Point(double x, double y) 
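
A minimal, self-contained sketch (not taken from the sources listed below) showing how this constructor is typically combined with OpenCV drawing calls; it assumes the OpenCV 3.x+ Java bindings (Imgproc/Imgcodecs), and the class name PointExample and the output file name are illustrative.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class PointExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // White 400x400 BGR canvas
        Mat canvas = new Mat(400, 400, CvType.CV_8UC3, new Scalar(255, 255, 255));

        // A Point is built from its (x, y) coordinates given as doubles
        Point topLeft = new Point(50, 50);
        Point bottomRight = new Point(350, 300);

        // Points position the drawing primitives
        Imgproc.rectangle(canvas, topLeft, bottomRight, new Scalar(0, 0, 255), 2);
        Imgproc.circle(canvas, new Point(200, 175), 40, new Scalar(0, 255, 0), 2);

        Imgcodecs.imwrite("point_example.png", canvas);
    }
}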

Usage

From source file:hu.unideb.fksz.view.TrafficCounterController.java

License:Open Source License

/**
 * Initializes some elements of the user interface.
 */
private void init() {
    try {
        this.imageView
                .setImage(new Image(Main.class.getClass().getResource("/image/load_video.jpg").toString()));
        TrafficCounterLogger.traceMessage("Initial picture loaded successfully!");
    } catch (Exception e) {
        TrafficCounterLogger.errorMessage("Initial picture failed to load!");
    }

    this.listViewForFileNames.getSelectionModel().setSelectionMode(SelectionMode.SINGLE);

    this.saveImageButton.disableProperty().set(true);
    this.startButton.disableProperty().set(true);

    EventHandler<InputEvent> eventHandler = event -> {

        if (event.getEventType().equals(KeyEvent.KEY_PRESSED)) {
            KeyEvent keyevent = (KeyEvent) event.clone();
            if (keyevent.getCode().equals(KeyCode.ESCAPE)) {
                timer.cancel();
                videoProcessor.getVideoCap().release();
                ((Node) (event.getSource())).getScene().getWindow().hide();
                System.exit(0);
            }
        }
    };

    root.setOnKeyPressed(eventHandler);

    imageView.setOnMousePressed(event -> {

        mousePosition.x = event.getX();
        mousePosition.y = event.getY();

        if (mousePosition.inside(videoProcessor.getImageArea())) {
            videoProcessor.setPreviousControlPointsHeight((int) videoProcessor.getHeightOfAControlPoint());
        }

    });

    imageView.setOnMouseDragged(event -> {

        if (mousePosition.inside(videoProcessor.getImageArea())) {
            Point relativeMousePosition = new Point(mousePosition.x - event.getX(),
                    mousePosition.y - event.getY());
            videoProcessor.setHeightOfTheControlPoints(
                    videoProcessor.getPreviousControlPointsHeight() - relativeMousePosition.y);
        }

        if (videoProcessor.getHeightOfAControlPoint() < 100) {
            videoProcessor.setHeightOfTheControlPoints(100);
        }

        if (videoProcessor.getHeightOfAControlPoint() > videoProcessor.getImageArea().height - 100) {
            videoProcessor.setHeightOfTheControlPoints(videoProcessor.getImageArea().height - 100);
        }

    });
    setTrafficCount(0);
    TrafficCounterLogger.infoMessage("TrafficCounterController initialized!");
    hideControls();
}

From source file:image.utils.Tagger.java

public static void main(String[] args) throws IOException, FileNotFoundException {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    File result;
    File image_dir = new File(args[1]);
    File result_dir = new File(args[0]);
    File img_output_dir = new File(args[2]);
    for (String s : result_dir.list()) {
        result = new File(result_dir.getAbsolutePath() + "/" + s);
        BufferedReader br = null;
        String line;
        br = new BufferedReader(new FileReader(result));

        while ((line = br.readLine()) != null) {
            // Each line holds an image path and a JSON array of detection boxes, separated by a tab
            String[] parts = line.split("\t");
            JSONArray a = (JSONArray) JSONValue.parse(parts[1]);
            Mat m = Imgcodecs.imread(parts[0]);
            for (int i = 0; i < a.size(); i++) {
                long x, y, w, h;
                JSONObject o = (JSONObject) a.get(i);
                x = (long) o.get("x");
                y = (long) o.get("y");
                w = (long) o.get("width");
                h = (long) o.get("height");
                System.out.println(String.format("%s %d %d %d %d", parts[0], x, y, w, h));
                Rect r = new Rect((int) x, (int) y, (int) w, (int) h);
                Imgproc.rectangle(m, new Point(r.x, r.y), new Point(r.x + r.width, r.y + r.height),
                        new Scalar(255, 0, 0), 2);
                Imgcodecs.imwrite(img_output_dir + "/"
                        + new String(Base64.getEncoder().encode(parts[0].getBytes())) + ".jpg", m);
            }
        }
    }
}

From source file:imageprocess.FaceDetector.java

public void run() {
    System.out.println("\nRunning DetectFaceDemo");

    // Create a face detector from the cascade file in the resources
    // directory.
    CascadeClassifier faceDetector = new CascadeClassifier("D:\\backup\\lbpcascade_frontalface.xml");
    Mat image = Highgui.imread("D:\\backup\\scarlett-800x600.jpg");

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    // Save the visualized detection.
    String filename = "D:\\backup\\faceDetection.png";
    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);
}

From source file:imageprocess.HistogramProcessor.java

public static Mat getHistogramImage(Mat image) {

    // Compute histogram first
    Mat hist = getGrayHistogram(image);
    // Get min and max bin values

    MinMaxLocResult locPeak = Core.minMaxLoc(hist);
    double maxVal = locPeak.maxVal;
    double minVal = locPeak.minVal;

    // Image on which to display histogram
    Mat histImg = new Mat(image.rows(), image.rows(), CV_8U, new Scalar(255));

    // set highest point at 90% of nbins
    int hpt = (int) (0.9 * 256);

    // Draw vertical line for each bin 
    for (int h = 0; h < 256; h++) {

        double[] f = hist.get(h, 0);
        float binVal = (float) f[0];
        int intensity = (int) (binVal * hpt / maxVal);
        Core.line(histImg, new Point(h, 256.0d), new Point(h, 256.0d - intensity), Scalar.all(0));
    }
    return histImg;
}

From source file:imageprocess.ObjectFinder.java

public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread("D:\\backup\\opencv\\baboon1.jpg");
    // Define ROI
    Rect rect = new Rect(110, 260, 35, 40);
    Mat imageROI = new Mat(image, rect);
    Core.rectangle(image, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));

    Imshow origIm = new Imshow("Origin");
    origIm.showImage(image);

    ObjectFinder finder = new ObjectFinder(false, 0.2f);

    // Get the Hue histogram
    int minSat = 65;
    Mat hist = finder.getHueHistogram(imageROI, minSat);
    Mat norm = new Mat();
    Core.normalize(hist, norm, 1, 0, NORM_L2);

    finder.setROIHistogram(norm);

    // Convert to HSV space
    Mat hsv = new Mat();
    Imgproc.cvtColor(image, hsv, CV_BGR2HSV);
    // Split the image
    List<Mat> v = new ArrayList<>();
    Core.split(hsv, v);

    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm = new Imshow("Saturation");
    satIm.showImage(v.get(1));
    // Get back-projection of hue histogram
    Mat result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));

    Imshow resultHueIm = new Imshow("Result Hue");
    resultHueIm.showImage(result);

    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm = new Imshow("Result Hue and raw");
    resultHueAndIm.showImage(result);

    // Second image
    Mat image2 = Highgui.imread("D:\\backup\\opencv\\baboon3.jpg");

    // Display image
    Imshow img2Im = new Imshow("Image2");
    img2Im.showImage(image2);

    // Convert to HSV space
    Imgproc.cvtColor(image2, hsv, CV_BGR2HSV);

    // Split the image
    Core.split(hsv, v);

    // Eliminate pixels with low saturation
    Imgproc.threshold(v.get(1), v.get(1), minSat, 255, THRESH_BINARY);
    Imshow satIm2 = new Imshow("Saturation2");
    satIm2.showImage(v.get(1));

    // Get back-projection of hue histogram
    finder.setThreshold(-1.0f);
    result = finder.find(hsv, new MatOfInt(0), new MatOfFloat(0.0f, 180.0f));

    Imshow resultHueIm2 = new Imshow("Result Hue2");
    resultHueIm2.showImage(result);

    Core.bitwise_and(result, v.get(1), result);
    Imshow resultHueAndIm2 = new Imshow("Result Hue and raw2");
    resultHueAndIm2.showImage(result);

    Rect rect2 = new Rect(110, 260, 35, 40);
    Core.rectangle(image2, new Point(110, 260), new Point(145, 300), new Scalar(0, 0, 255));

    TermCriteria criteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 100, 0.01);
    int steps = Video.meanShift(result, rect2, criteria);

    Core.rectangle(image2, new Point(rect2.x, rect2.y),
            new Point(rect2.x + rect2.width, rect2.y + rect2.height), new Scalar(0, 255, 0));

    Imshow meanshiftIm = new Imshow("Meanshift result");
    meanshiftIm.showImage(image2);

}

From source file:info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License:Open Source License

private Mat compute_roi(Mat original) {
    Mat roi = new Mat();
    Imgproc.cvtColor(original, roi, Imgproc.COLOR_BGR2GRAY, 0);
    roi.setTo(new Scalar(0, 0, 0));
    int x = original.width();
    int y = original.height();
    int cx = x / 2;
    int cy = y / 2;
    int r = Math.min(cx, cy) * 2 / 3;
    // Fill a white circle centered in the otherwise black mask to mark the region of interest
    Core.circle(roi, new Point(cx, cy), r, new Scalar(255, 255, 255), -1, 8, 0);
    return roi;
}

From source file:io.github.jakejmattson.facialrecognition.FacialRecognition.java

License:Open Source License

private static Mat detectFaces(Mat image, CascadeClassifier faceDetector, ImageFrame frame) {
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    Rect[] faces = faceDetections.toArray();
    boolean shouldSave = frame.shouldSave();
    String name = frame.getFileName();
    Scalar color = frame.getTextColor();

    for (Rect face : faces) {
        Mat croppedImage = new Mat(image, face);

        if (shouldSave)
            saveImage(croppedImage, name);

        Imgproc.putText(image, "ID: " + identifyFace(croppedImage), face.tl(), Font.BOLD, 1.5, color);
        Imgproc.rectangle(image, face.tl(), face.br(), color);
    }

    int faceCount = faces.length;
    String message = faceCount + (faceCount == 1 ? " face" : " faces") + " detected!";
    Imgproc.putText(image, message, new Point(3, 25), Font.BOLD, 2, color);

    return image;
}

From source file:it.baywaylabs.jumpersumo.FrameDisplayCV.java

License:Open Source License

/**
 * This method find a qr-code in the view cam and execute some control.
 *
 * @throws ChecksumException
 * @throws FormatException
 */
private void zxing() throws ChecksumException, FormatException {

    int[] intArray = new int[bitmapOriginal.getWidth() * bitmapOriginal.getHeight()];
    //copy pixel data from the Bitmap into the 'intArray' array
    bitmapOriginal.getPixels(intArray, 0, bitmapOriginal.getWidth(), 0, 0, bitmapOriginal.getWidth(),
            bitmapOriginal.getHeight());

    LuminanceSource source = new RGBLuminanceSource(bitmapOriginal.getWidth(), bitmapOriginal.getHeight(),
            intArray);

    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Reader reader = new QRCodeMultiReader();

    String sResult = "";
    Double AREA_RIFERIMENTO = 11500.0;

    try {

        Result result = reader.decode(bitmap);
        sResult = result.getText();
        if (result.getBarcodeFormat().compareTo(BarcodeFormat.QR_CODE) == 0) {

            Log.d(TAG, "SI! E' Un QRCode");
            if ("jump".equalsIgnoreCase(sResult) && this.deviceController != null && this.flag_execute_qrcode) {
                deviceController.getFeatureJumpingSumo().sendAnimationsJump(
                        ARCOMMANDS_JUMPINGSUMO_ANIMATIONS_JUMP_TYPE_ENUM.ARCOMMANDS_JUMPINGSUMO_ANIMATIONS_JUMP_TYPE_HIGH);
            }
        }

        ResultPoint[] points = result.getResultPoints();
        Log.d(TAG, "PUNTI: " + points.toString());

        Point a = new Point(points[0].getX(), points[0].getY());
        Point b = new Point(points[2].getX(), points[2].getY());
        Rect rect = new Rect(a, b);

        Log.d(TAG, "Area del rettangolo: " + rect.area());
        if (rect.area() < AREA_RIFERIMENTO)
            Log.w(TAG, "Mi devo avvicinare!");
        else
            Log.w(TAG, "Mi devo allontanare!");

        Imgproc.rectangle(this.imgMAT, new Point(points[0].getX(), points[0].getY()),
                new Point(points[2].getX(), points[2].getY()), new Scalar(0, 255, 0), 3);
        Log.d(TAG, sResult);
        Point center = new Point(0, 0);

        Imgproc.circle(this.imgMAT, center, 10, new Scalar(0, 0, 255), 2);
    } catch (Resources.NotFoundException e) {
        Log.e(TAG, "Code Not Found");
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    }

}

From source file:it.baywaylabs.jumpersumo.MainActivity.java

License:Open Source License

public void zxing(Mat mRgba) throws ChecksumException, FormatException {

    Bitmap bMap = Bitmap.createBitmap(mRgba.width(), mRgba.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mRgba, bMap);
    int[] intArray = new int[bMap.getWidth() * bMap.getHeight()];
    //copy pixel data from the Bitmap into the 'intArray' array
    bMap.getPixels(intArray, 0, bMap.getWidth(), 0, 0, bMap.getWidth(), bMap.getHeight());

    LuminanceSource source = new RGBLuminanceSource(bMap.getWidth(), bMap.getHeight(), intArray);

    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Reader reader = new QRCodeMultiReader();

    String sResult = "";
    Double AREA_RIFERIMENTO = 11500.0;

    try {

        Result result = reader.decode(bitmap);
        sResult = result.getText();
        if (result.getBarcodeFormat().compareTo(BarcodeFormat.QR_CODE) == 0)
            Log.d(TAG, "SI! E' Un QRCode");
        ResultPoint[] points = result.getResultPoints();
        Log.d(TAG, "PUNTI: " + points.toString());
        //for (ResultPoint point : result.getResultPoints()) {
        Point a = new Point(points[0].getX(), points[0].getY());
        Point b = new Point(points[2].getX(), points[2].getY());
        Rect rect = new Rect(a, b);
        Log.d(TAG, "Area del rettangolo: " + rect.area());
        if (rect.area() < AREA_RIFERIMENTO)
            Log.w(TAG, "Mi devo avvicinare!");
        else
            Log.w(TAG, "Mi devo allontanare!");
        Imgproc.rectangle(this.mRgba, new Point(points[0].getX(), points[0].getY()),
                new Point(points[2].getX(), points[2].getY()), new Scalar(0, 255, 0), 3);
        Log.d(TAG, sResult);
        Point center = new Point(0, 0);

        Imgproc.circle(this.mRgba, center, 10, new Scalar(0, 0, 255), 2);
        //if (!"".equals(sResult))
        //Toast.makeText(MainActivity.this, "QRCode Scanned: " + sResult, Toast.LENGTH_LONG).show();
    } catch (Resources.NotFoundException e) {
        Log.e(TAG, "Code Not Found");
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    }

}

From source file:javaapplication1.Ocv.java

/**
 * Find faces in an image.
 *
 * @param filter Path to the xml face finding filter to use
 * @param input Path to the input image file
 * @param output Path to the output image file
 */
public void findFaces(String filter, String input, String output) {
    // load the filter and create a classifier with it
    File f = new File(filter);

    /*
    final CascadeClassifier faceDetector
        = new CascadeClassifier(f.getAbsolutePath());
    */
    CascadeClassifier faceDetector = new CascadeClassifier(this.filter);
    System.out.println("This.filter " + this.filter);

    // load the image and read it into a matrix
    File f2 = new File(input);
    //final Mat image = Highgui.imread(f2.getAbsolutePath());

    /*
    final Mat image = Highgui.imread(getClass().getResource(
        "./AverageMaleFace.jpg").getPath());
    */

    Mat image = Highgui.imread(this.input);
    System.out.println("This.input " + this.input);

    // run a face detector on the image
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    // inform about faces detected, and then outline each of them
    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));
    // Draw rectangles on the image wherever we found faces
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    // save the image
    String filename = this.output;
    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);

}