Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usage for the org.opencv.core Point constructor.

Prototype

public Point(double x, double y) 
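A minimal sketch of the constructor in use (the class name PointExample is illustrative): the constructor simply stores its arguments in the public x and y fields.

import org.opencv.core.Point;

public class PointExample {
    public static void main(String[] args) {
        Point origin = new Point(0, 0);
        Point p = new Point(3.5, 7.25); // coordinates are doubles

        // Point exposes its coordinates as public fields
        double dx = p.x - origin.x;
        double dy = p.y - origin.y;
        System.out.println("Distance from origin: " + Math.hypot(dx, dy));
    }
}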

Usage

From source file:com.imgprocessor.processor.ImageProcessorImpl.java

@Override
public void process() throws ValidatingException, TruncatingException, ProcessingException {

    // Here, the load order matters:
    // load the OpenCV 2.4.9 library for features2d,
    System.loadLibrary("opencv_java249");
    // then the OpenCV 3.1.0 library for fillConvexPoly and all the others.
    System.loadLibrary("opencv_java310");

    imageRepresentation = new Representation();

    // run template detection && line detection in another thread
    new Thread() {

        public void run() {

            // object detection
            if (!DETECT_ONLY_WALLS) {
                DetectObject objectDetector = new DetectObject(ImageFile.getAbsolutePath(), thisReff);
                objectDetector.detectAllObject();
            }

            // line detection
            HoughLineDetection houghLineDetection = new HoughLineDetection(DetectObject.TEMPLATE_OUTPUT_PATH,
                    thisReff);
            List<Line> detectedWalls = houghLineDetection.detectLines();
            imageRepresentation.populateWalls(detectedWalls);

            thisReff.appendDetail("Walls detected: " + detectedWalls.size());
            int k = 1;
            for (Line line : detectedWalls) {
                thisReff.appendDetail(k + ". (" + (int) line.x1 + ", " + (int) line.y1 + ") --> " + "("
                        + (int) line.x2 + ", " + (int) line.y2 + ")");
                k++;
            }

            // Up to here the walls have been detected and united with one another.

            // Now convert the doors & windows to lines as well.
            List<Line> theDoors = new ArrayList<>();
            for (Door door : imageRepresentation.getDoors()) {
                Coordinates start = door.getStart();
                Coordinates end = door.getEnd();
                Point s = new Point(start.getX(), start.getY());
                Point e = new Point(end.getX(), end.getY());
                Line lDoor = new Line(s, e);
                lDoor.type = line_type.DOOR;
                theDoors.add(lDoor);
            }

            Mat blackMatrix = Imgcodecs.imread(LineProcessor.drawID + ". lineDetection.png");
            theDoors = LineProcessor.uniteObjectsWithWalls(blackMatrix, detectedWalls, theDoors, 50, true);
            LineProcessor.drawLines(theDoors, blackMatrix, new Scalar(255, 0, 255), 2, false, true, thisReff);

            // try uniting them

            List<Line> theWindows = new ArrayList<>();
            for (Window window : imageRepresentation.getWindows()) {

                Coordinates start = window.getStart();
                Coordinates end = window.getEnd();
                Point s = new Point(start.getX(), start.getY());
                Point e = new Point(end.getX(), end.getY());
                Line lWindow = new Line(s, e);
                lWindow.type = line_type.WINDOW;
                theWindows.add(lWindow);
            }

            blackMatrix = Imgcodecs.imread(LineProcessor.drawID + ". lineDetection.png");
            theWindows = LineProcessor.uniteObjectsWithWalls(blackMatrix, detectedWalls, theWindows, 50, true);
            LineProcessor.drawLines(theWindows, blackMatrix, new Scalar(255, 0, 0), 2, false, true, thisReff);

            // All good; now convert everything back into the representation's types.
            // 1. The DOORS
            imageRepresentation.clearDoors();
            thisReff.appendDetail("The new DOORS coordinates (fixed to the walls): ");
            k = 1;
            for (Line door : theDoors) {

                Coordinates start = new Coordinates((float) door.getStartingPoint().x,
                        (float) door.getStartingPoint().y);
                Coordinates end = new Coordinates((float) door.getEndingPoint().x,
                        (float) door.getEndingPoint().y);
                Door theDoor = new Door(start, end);
                imageRepresentation.addDoor(theDoor);
                thisReff.appendDetail(k + ". (" + (int) start.getX() + ", " + (int) start.getY() + ") -> ("
                        + (int) end.getX() + ", " + (int) end.getY() + ")");
                k++;
            }

            // 2. The WINDOWS
            imageRepresentation.clearWindows();
            thisReff.appendDetail("The new WINDOWS coordinates (fixed to the walls): ");
            k = 1;
            for (Line window : theWindows) {

                Coordinates start = new Coordinates((float) window.getStartingPoint().x,
                        (float) window.getStartingPoint().y);
                Coordinates end = new Coordinates((float) window.getEndingPoint().x,
                        (float) window.getEndingPoint().y);
                Window theWindow = new Window(start, end);
                imageRepresentation.addWindow(theWindow);
                thisReff.appendDetail(k + ". (" + (int) start.getX() + ", " + (int) start.getY() + ") -> ("
                        + (int) end.getX() + ", " + (int) end.getY() + ")");
                k++;
            }

            //xml encode
            thisReff.setProgress(0);
            thisReff.appendDetail("Serializing the representation into 'Representation.xml'...");
            try {

                SAVED_XML_PATH = "Representation.xml";
                XMLEncoder myEncoder = new XMLEncoder(new FileOutputStream(SAVED_XML_PATH));
                myEncoder.writeObject(imageRepresentation);
                myEncoder.flush();
                myEncoder.close();

                thisReff.setProgress(100);
                thisReff.appendDetail("Finished serialization.");

                // RUN THE Graph Module Algorithm
                runGraphALgorithm();

            } catch (FileNotFoundException e) {
                thisReff.appendDetail("FAILED!");
                e.printStackTrace();
            }
        }

    }.start();

}
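As a usage note, the Representation.xml written above could be read back with java.beans.XMLDecoder, assuming Representation follows the JavaBeans conventions (public no-arg constructor plus getters/setters) that XMLEncoder requires. A sketch, not part of the original source:

import java.beans.XMLDecoder;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class RepresentationReader {
    public static void main(String[] args) throws IOException {
        try (XMLDecoder decoder = new XMLDecoder(
                new BufferedInputStream(new FileInputStream("Representation.xml")))) {
            // readObject() rebuilds the bean graph written by XMLEncoder above
            Representation representation = (Representation) decoder.readObject();
            System.out.println("Doors: " + representation.getDoors().size());
            System.out.println("Windows: " + representation.getWindows().size());
        }
    }
}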

From source file:com.jeremydyer.nifi.ObjectDetectionProcessor.java

License:Apache License

final public Mat detectObjects(final ProcessSession session, FlowFile original, final JSONObject dd,
        final Mat image) {

    CascadeClassifier objectDetector = new CascadeClassifier(dd.getString("opencv_xml_cascade_path"));
    MatOfRect objectDetections = new MatOfRect();
    objectDetector.detectMultiScale(image, objectDetections);
    //getLogger().error("Detected " + objectDetections.toArray().length + " " + dd.getString("name") + " objects in the input flowfile");

    final AtomicReference<Mat> croppedImageReference = new AtomicReference<>();

    int counter = 0;
    for (int i = 0; i < objectDetections.toArray().length; i++) {
        final Rect rect = objectDetections.toArray()[i];
        FlowFile detection = session.write(session.create(original), new OutputStreamCallback() {
            @Override
            public void process(OutputStream outputStream) throws IOException {

                Mat croppedImage = null;

                //Should the image be cropped? If so, there is no need to draw bounds, since the crop covers the same region
                if (dd.getBoolean("crop")) {
                    Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
                    croppedImage = new Mat(image, rectCrop);
                    MatOfByte updatedImage = new MatOfByte();
                    Imgcodecs.imencode(".jpg", croppedImage, updatedImage);
                    croppedImageReference.set(croppedImage);
                    outputStream.write(updatedImage.toArray());
                } else {
                    //Should the image have a border drawn around it?
                    if (dd.getBoolean("drawBounds")) {
                        Mat imageWithBorder = image.clone();
                        Imgproc.rectangle(imageWithBorder, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height),
                                new Scalar(255, 255, 255));
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", imageWithBorder, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    } else {
                        MatOfByte updatedImage = new MatOfByte();
                        Imgcodecs.imencode(".jpg", image, updatedImage);
                        outputStream.write(updatedImage.toArray());
                    }
                }

            }
        });

        Map<String, String> atts = new HashMap<>();
        atts.put("object.detection.name", dd.getString("name"));
        atts.put("object.detection.id", new Long(System.currentTimeMillis() + counter).toString());

        counter++;

        detection = session.putAllAttributes(detection, atts);
        session.transfer(detection, REL_OBJECT_DETECTED);
    }

    Mat childResponse = null;

    if (croppedImageReference.get() != null) {
        childResponse = croppedImageReference.get();
    } else {
        childResponse = image;
    }

    if (dd.has("children")) {
        JSONArray children = dd.getJSONArray("children");
        if (children != null) {

            for (int i = 0; i < children.length(); i++) {
                JSONObject ddd = children.getJSONObject(i);
                childResponse = detectObjects(session, original, ddd, childResponse);
            }
        }
    }

    return childResponse;
}
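The recursion above is driven by a JSON detection descriptor. From the fields the method reads (name, opencv_xml_cascade_path, crop, drawBounds, children), its expected shape can be sketched as follows; the cascade paths and names here are illustrative placeholders, not values from the original project:

import org.json.JSONArray;
import org.json.JSONObject;

public class DescriptorExample {
    // Builds a sample descriptor: detect faces, crop each one, then run an
    // eye cascade against the cropped result via the "children" array.
    public static JSONObject sampleDescriptor() {
        JSONObject face = new JSONObject();
        face.put("name", "face");
        face.put("opencv_xml_cascade_path", "/opt/opencv/haarcascade_frontalface_alt.xml");
        face.put("crop", true);        // crop each detection to its bounding box
        face.put("drawBounds", false); // only consulted when crop is false

        JSONObject eyes = new JSONObject();
        eyes.put("name", "eye");
        eyes.put("opencv_xml_cascade_path", "/opt/opencv/haarcascade_eye.xml");
        eyes.put("crop", false);
        eyes.put("drawBounds", true);  // draw a white rectangle instead of cropping

        // children are applied recursively to the (possibly cropped) parent result
        face.put("children", new JSONArray().put(eyes));
        return face;
    }
}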

From source file:com.jonwohl.Attention.java

License:Open Source License

private Mat getPerspectiveTransformation(ArrayList<PVector> inputPoints, int w, int h) {
    Point[] canonicalPoints = new Point[4];
    canonicalPoints[0] = new Point(0, 0);
    canonicalPoints[1] = new Point(w, 0);
    canonicalPoints[2] = new Point(w, h);
    canonicalPoints[3] = new Point(0, h);

    MatOfPoint2f canonicalMarker = new MatOfPoint2f();
    canonicalMarker.fromArray(canonicalPoints);

    Point[] points = new Point[4];
    for (int i = 0; i < 4; i++) {
        points[i] = new Point(inputPoints.get(i).x, inputPoints.get(i).y);
    }
    MatOfPoint2f marker = new MatOfPoint2f(points);
    return Imgproc.getPerspectiveTransform(marker, canonicalMarker);
}
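A hedged follow-up (not in the original source): the 3x3 matrix returned above would typically be passed to Imgproc.warpPerspective to rectify the marker quad into a w x h image.

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class RectifyExample {
    // Applies the matrix from getPerspectiveTransformation() to an image;
    // the transform maps the marker quad onto the w x h canonical rectangle.
    public static Mat rectify(Mat input, Mat transform, int w, int h) {
        Mat rectified = new Mat();
        Imgproc.warpPerspective(input, rectified, transform, new Size(w, h));
        return rectified;
    }
}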

From source file:com.joravasal.keyface.CameraAccessView.java

License:Open Source License

public Mat correctCameraImage(Mat image) {
    //Log.i(tag, "Correcting image rotation");
    //Check rotation of device
    int rotation = ((KeyFaceActivity) this.getContext()).getWindowManager().getDefaultDisplay().getRotation();
    switch (rotation) {
    case Surface.ROTATION_0:
        int degrees = 90;
        //Rotate in any case; mirror (y axis) if using the front camera
        Mat imageResult = new Mat();
        //For some reason, to rotate the image properly we have to set the center like this
        Point center = new Point(image.width() / 2, image.width() / 2);
        Mat transform = Imgproc.getRotationMatrix2D(center, degrees, 1.0);
        try {
            Imgproc.warpAffine(image, imageResult, transform, new Size(image.height(), image.width()));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(imageResult, imageResult, -1);
        else
            Core.flip(imageResult, imageResult, 1);
        return imageResult;
    case Surface.ROTATION_90:
        //Mirror on y axis if front camera
        if (!KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, 1);
        break;
    case Surface.ROTATION_180:
        //Never gets here but just in case:
        break;
    case Surface.ROTATION_270:
        //Mirror on the x axis if rear camera, both axis if front camera
        if (KeyFaceActivity.cameraRearActive)
            Core.flip(image, image, -1);
        else
            Core.flip(image, image, 0);
        break;
    default:
        break;
    }

    return image;
}

From source file:com.joravasal.keyface.FindFacesView.java

License:Open Source License

@Override
protected Bitmap processFrame(VideoCapture camera) {
    //Log.i(tag,"Processing frame for our delight");

    Mat mRgbaAux = new Mat();
    Mat mGrayAux = new Mat();
    camera.retrieve(mRgbaAux, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    camera.retrieve(mGrayAux, Highgui.CV_CAP_ANDROID_GREY_FRAME);
    //Correct the direction of the image
    mRgba = correctCameraImage(mRgbaAux);
    mGray = correctCameraImage(mGrayAux);

    AlgorithmReturnValue resExample = null;
    //We look for faces in the captured images
    if (cascade != null) {
        int faceSize = Math.round(mGray.rows() * KeyFaceActivity.minFaceSize);
        List<Rect> faces = new LinkedList<Rect>();
        try {
            cascade.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(faceSize, faceSize));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        for (Rect r : faces) { //For each face

            //The commented-out rectangle below is the area that will be used to check the face,
            //but an ellipse is drawn instead; I think it looks better.
            //Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0,0,255,100), 3);

            String nombre = null;

            // We try to recognize it
            AlgorithmReturnValue res = KeyFaceActivity.recogAlgorithm.recognizeFace(mGray.submat(r));
            resExample = res;
            if (res.getResult() != -1) {
                //if it worked, we find the name
                nombre = findName(res.getResult());
            }
            Point center = new Point(r.x + (r.width / 2), r.y + (r.height / 2));
            //If nombre is null we have no name, so the face is unrecognized: draw a red circle together with the text "Unknown"
            if (nombre == null) {
                Core.ellipse(mRgba, center, new Size(r.width / 2 - 5, r.height / 2 + 20), 0, 0, 360,
                        new Scalar(255, 0, 0, 30), 3);
                Core.rectangle(mRgba, new Point(r.x + 45, r.y + r.height + 20),
                        new Point(r.x + 200, r.y + r.height + 60), new Scalar(70, 50, 50, 255), Core.FILLED);
                Core.putText(mRgba, "Unknown", new Point(r.x + 50, r.y + r.height + 50),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(200, 200, 200, 100));

                //Check if the user is trying to save a new face
                if (KeyFaceActivity.addingFaces && faces.size() == 1) {
                    //All is in order: we save a new image and update our count of faces. We update the recognizer data as well.
                    addFaceToDB(mGray, r, savedFaces);

                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            KeyFaceActivity.prefs.edit()
                                    .putInt("savedFaces", KeyFaceActivity.prefs.getInt("savedFaces", 0) + 1)
                                    .apply();
                        }
                    });

                    /*KeyFaceActivity.lock.lock();
                    try {
                       KeyFaceActivity.faceAdded = true;
                       KeyFaceActivity.addingFaces = false;
                       KeyFaceActivity.condition.signalAll();
                    }
                    finally {
                       KeyFaceActivity.lock.unlock();
                    }
                    */

                    if (!KeyFaceActivity.recogAlgorithm.updateData(false)) {
                        System.err.println("Couldn't update the recognition algorithm with the new picture.");
                    }
                    KeyFaceActivity.addingFaces = false;

                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext, "Face saved successfully!",
                                    Toast.LENGTH_SHORT).show();
                        }
                    });
                }
                //The user tried to save a face when there was more than one; it fails and sends a message to the user.
                else if (KeyFaceActivity.addingFaces && faces.size() > 1) {
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext,
                                    "Make sure there is only one face!", Toast.LENGTH_SHORT).show();
                        }
                    });
                    KeyFaceActivity.addingFaces = false;
                }
            }

            else { //We know this face!
                Core.ellipse(mRgba, center, new Size(r.width / 2 - 5, r.height / 2 + 20), 0, 0, 360,
                        new Scalar(0, 255, 0, 100), 3);
                Core.rectangle(mRgba, new Point(r.x + 45, r.y + r.height + 20),
                        new Point(r.x + 200, r.y + r.height + 60), new Scalar(50, 70, 50, 255), Core.FILLED);
                Core.putText(mRgba, nombre, new Point(r.x + 50, r.y + r.height + 50), Core.FONT_HERSHEY_PLAIN,
                        2, new Scalar(0, 255, 0, 100));
                if (KeyFaceActivity.addingFaces && faces.size() == 1) {
                    //If the user tries to save a face that is already known, we don't let them.
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext, "This face is already known!",
                                    Toast.LENGTH_SHORT).show();
                        }
                    });
                    KeyFaceActivity.addingFaces = false;
                }
            }
        }
        //If there is no face, we tell the user something went wrong
        if (KeyFaceActivity.addingFaces && faces.size() <= 0) {
            KeyFaceActivity.toastHandler.post(new Runnable() {
                public void run() {
                    Toast.makeText(KeyFaceActivity.globalappcontext, "No face found!", Toast.LENGTH_SHORT)
                            .show();
                }
            });
            KeyFaceActivity.addingFaces = false;
        }
    }

    savedFaces = KeyFaceActivity.prefs.getInt("savedFaces", savedFaces);

    if (KeyFaceActivity.prefs.getBoolean("showData", false)) {
        try {
            if (resExample != null) {
                //background rectangle for extra info on PCA
                Core.rectangle(mRgba, new Point(0, mRgba.height() - 100),
                        new Point(mRgba.width(), mRgba.height()), new Scalar(50, 50, 50, 50), Core.FILLED);
                //Data for closest image 
                Core.putText(mRgba, "1st", new Point(5, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getClosestImage()),
                        new Point(5, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Double.toString(resExample.getDistClosestImage() / 100000).substring(0, 6),
                        new Point(5, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Data for second closest image
                Core.putText(mRgba, "2nd", new Point(180, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getSecondClosestImage()),
                        new Point(180, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba,
                        Double.toString(resExample.getDistSecondClosestImage() / 100000).substring(0, 6),
                        new Point(180, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Data for farthest image
                Core.putText(mRgba, "Last", new Point(355, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getFarthestImage()),
                        new Point(355, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Double.toString(resExample.getDistFarthestImage() / 100000).substring(0, 6),
                        new Point(355, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Num images and threshold
                Core.putText(mRgba, "Images:" + savedFaces, new Point(15, mRgba.height() - 5),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(250, 250, 250, 200));
                Core.putText(mRgba,
                        "Th:" + Double.toString(resExample.getThreshold() / 100000).substring(0,
                                Math.min(6, Double.toString(resExample.getThreshold() / 100000).length())),
                        new Point(240, mRgba.height() - 5), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
            } else {
                Core.rectangle(mRgba, new Point(0, mRgba.height() - 30), new Point(200, mRgba.height()),
                        new Scalar(50, 50, 50, 50), Core.FILLED);
                Core.putText(mRgba, "Images:" + savedFaces, new Point(15, mRgba.height() - 5),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(250, 250, 250, 200));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    if (Utils.matToBitmap(mRgba, bmp))
        return bmp;

    bmp.recycle();
    return null;
}

From source file:com.kinect.FaceCapture.java

public void saveFace() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    System.out.println("\nRunning FaceDetector");

    CascadeClassifier faceDetector = new CascadeClassifier(
            FaceCapture.class.getResource("haarcascade_frontalface_alt.xml").getPath().substring(1));
    Mat image = Highgui.imread("screancapture.jpg");
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));
    Rect rectCrop = null;
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
        rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
    }
    // Guard against no detections: rectCrop is still null if no face was found
    if (rectCrop != null) {
        Mat image_roi = new Mat(image, rectCrop);
        Highgui.imwrite("screancapture.jpg", image_roi);
    }

    System.out.println("save face...");
}

From source file:com.lasarobotics.tests.camera.CameraTestVisionOpMode.java

License:Open Source License

@Override
public Mat frame(Mat rgba, Mat gray) {
    /**
     * Set analysis boundary.
     * Leave this commented out to use the entire screen; uncomment it only if
     * you want faster analysis at the cost of not using the entire frame.
     * This is also particularly useful if you know approximately where the beacon is,
     * as this will eliminate parts of the frame which may cause problems.
     * This will not work with some methods, such as COMPLEX.
     *
     * We set the analysis boundary in the frame loop just in case we couldn't get it
     * during init(). This happens when another app is using OpenCV simultaneously.
     * Doing so should only be necessary in testing apps.
     **/
    //beacon.setAnalysisBounds(new Rectangle(new Point(width / 2, height/2), width - 200, 200));

    //Run all extensions, then get matrices
    rgba = super.frame(rgba, gray);
    gray = Color.rapidConvertRGBAToGRAY(rgba);

    //Display a Grid-system every 50 pixels
    /*final int dist = 50;
    for (int x = width/2 + 50; x<width; x+=dist)
    Drawing.drawLine(rgba, new Point(x, 0), new Point(x, height), new ColorRGBA("#88888822"), 1);
    for (int x = width/2 - 50; x>=0; x-=dist)
    Drawing.drawLine(rgba, new Point(x, 0), new Point(x, height), new ColorRGBA("#88888822"), 1);
    Drawing.drawLine(rgba, new Point(width/2, 0), new Point(width/2, height), new ColorRGBA("#ffffff44"), 1);
    for (int y = height/2 + 50; y<height; y+=dist)
    Drawing.drawLine(rgba, new Point(0, y), new Point(width, y), new ColorRGBA("#88888822"), 1);
    for (int y = height/2 - 50; y>=0; y-=dist)
    Drawing.drawLine(rgba, new Point(0, y), new Point(width, y), new ColorRGBA("#88888822"), 1);
    Drawing.drawLine(rgba, new Point(0, height/2), new Point(width, height/2), new ColorRGBA("#ffffff44"), 1);*/

    //Get beacon analysis
    Beacon.BeaconAnalysis beaconAnalysis = beacon.getAnalysis();

    //Display confidence
    Drawing.drawText(rgba, "Confidence: " + beaconAnalysis.getConfidenceString(), new Point(0, 50), 1.0f,
            new ColorGRAY(255));

    //Display beacon color
    Drawing.drawText(rgba, beaconAnalysis.getColorString(), new Point(0, 8), 1.0f, new ColorGRAY(255),
            Drawing.Anchor.BOTTOMLEFT);

    //Display FPS
    Drawing.drawText(rgba, "FPS: " + fps.getFPSString(), new Point(0, 24), 1.0f, new ColorRGBA("#ffffff"));

    //Display Beacon Center
    Drawing.drawText(rgba, "Center: " + beacon.getAnalysis().getCenter().toString(), new Point(0, 78), 1.0f,
            new ColorRGBA("#ffffff"));

    //Display analysis method
    Drawing.drawText(rgba, beacon.getAnalysisMethod().toString() + " Analysis", new Point(width - 300, 40),
            1.0f, new ColorRGBA("#FFC107"));

    //Display rotation sensor compensation
    Drawing.drawText(rgba,
            "Rot: " + rotation.getRotationCompensationAngle() + " (" + sensors.getScreenOrientation() + ")",
            new Point(0, 50), 1.0f, new ColorRGBA("#ffffff"), Drawing.Anchor.BOTTOMLEFT); //"#2196F3"

    return rgba;
}

From source file:com.minio.io.alice.XPly.java

License:Open Source License

public void setP1(int x1, int y1) {
    this.p1 = new Point(x1, y1);
}

From source file:com.minio.io.alice.XPly.java

License:Open Source License

public void setP2(int x2, int y2) {
    this.p2 = new Point(x2, y2);
}

From source file:com.mycompany.analyzer.DiagonalDominanceAnalyzer.java

private double minDistToDiagonal(Line line) {
    Point p1 = new Point(line.getX1(), line.getY1());
    Point p2 = new Point(line.getX2(), line.getY2());

    Line diag1 = new Line(frameX, frameY, frameX + frameWidth - 1, frameY + frameHeight - 1);
    // Anti-diagonal from top-right to bottom-left, offset by the frame origin to match diag1
    Line diag2 = new Line(frameX + frameWidth - 1, frameY, frameX, frameY + frameHeight - 1);
    double dist1 = 0;
    Point closestPoint = getClosestPointOnSegment(diag1, p1);
    dist1 += distanceBtwPoints(closestPoint, p1);

    closestPoint = getClosestPointOnSegment(diag1, p2);
    dist1 += distanceBtwPoints(closestPoint, p2);
    dist1 = dist1 / 2;

    double dist2 = 0;
    closestPoint = getClosestPointOnSegment(diag2, p1);
    dist2 += distanceBtwPoints(closestPoint, p1);
    closestPoint = getClosestPointOnSegment(diag2, p2);
    dist2 += distanceBtwPoints(closestPoint, p2);
    dist2 = dist2 / 2;

    return Math.min(dist1, dist2);
}
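The helpers getClosestPointOnSegment and distanceBtwPoints are not shown in this listing. The former is presumably the standard clamped projection of a point onto a segment, sketched below against the same Line accessors used above; this is an assumption, not the original implementation:

// Hypothetical sketch of the missing helper: project p onto the segment
// (x1,y1)-(x2,y2) and clamp the projection to the endpoints.
private Point getClosestPointOnSegment(Line line, Point p) {
    double dx = line.getX2() - line.getX1();
    double dy = line.getY2() - line.getY1();
    double lenSq = dx * dx + dy * dy;
    if (lenSq == 0) { // degenerate segment: both endpoints coincide
        return new Point(line.getX1(), line.getY1());
    }
    double t = ((p.x - line.getX1()) * dx + (p.y - line.getY1()) * dy) / lenSq;
    t = Math.max(0, Math.min(1, t)); // clamp to the segment
    return new Point(line.getX1() + t * dx, line.getY1() + t * dy);
}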