Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usage of the org.opencv.core Point constructor, Point(double x, double y).

Prototype

public Point(double x, double y) 

Source Link

Usage

From source file:org.ar.rubik.RubikFace.java

License:Open Source License

/**
 * Find And Move A Rhombus To A Better Location
 *
 * Returns true if a tile was moved or swapped, otherwise returns false.
 *
 * Find Tile-Rhombus (i.e. {n,m}) with largest error assuming findOptimumFaceFit() has been called.
 * Determine which direction the Rhombus would like to move and swap it with that location.
 */
private boolean findAndMoveArhombusToAbetterLocation() {

    double errorArray[][] = new double[3][3];
    Point errorVectorArray[][] = new Point[3][3];

    // Identify Tile-Rhombus with largest error
    Rhombus largestErrorRhombus = null;
    double largestError = Double.NEGATIVE_INFINITY;
    int tile_n = 0; // Record current location of Rhombus we wish to move.
    int tile_m = 0;
    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {
            Rhombus rhombus = faceRhombusArray[n][m];
            if (rhombus != null) {

                // X and Y location of the center of a tile {n,m}
                Point tileCenter = getTileCenterInPixels(n, m);

                // Error from center of tile to reported center of Rhombus
                double dx = rhombus.center.x - tileCenter.x;
                double dy = rhombus.center.y - tileCenter.y;
                double error = Math.sqrt(dx * dx + dy * dy);
                errorArray[n][m] = error;
                errorVectorArray[n][m] = new Point(dx, dy);

                // Record largest error found
                if (error > largestError) {
                    largestErrorRhombus = rhombus;
                    tile_n = n;
                    tile_m = m;
                    largestError = error;
                }
            }
        }
    }

    // Guard: no Rhombus present at all, so there is nothing to move. (Without
    // this check the code below would dereference a null largestErrorRhombus.)
    if (largestErrorRhombus == null) {
        Log.i(Constants.TAG, "No Rhombus present; nothing to move.");
        return false;
    }

    // For each tile location print: center of current Rhombus, center of tile, error vector, error magnitude.
    Log.d(Constants.TAG, String.format(" m:n|-----0-----|------1----|----2------|"));
    for (int m = 0; m < 3; m++) {
        Log.d(Constants.TAG, String.format(" %d  |%s|%s|%s|", m, Util.dumpLoc(faceRhombusArray[0][m]),
                Util.dumpLoc(faceRhombusArray[1][m]), Util.dumpLoc(faceRhombusArray[2][m])));
        Log.d(Constants.TAG, String.format(" %d  |%s|%s|%s|", m, Util.dumpPoint(getTileCenterInPixels(0, m)),
                Util.dumpPoint(getTileCenterInPixels(1, m)), Util.dumpPoint(getTileCenterInPixels(2, m))));
        Log.d(Constants.TAG, String.format(" %d  |%s|%s|%s|", m, Util.dumpPoint(errorVectorArray[0][m]),
                Util.dumpPoint(errorVectorArray[1][m]), Util.dumpPoint(errorVectorArray[2][m])));
        Log.d(Constants.TAG, String.format(" %d  |%11.0f|%11.0f|%11.0f|", m, errorArray[0][m], errorArray[1][m],
                errorArray[2][m]));
    }
    Log.d(Constants.TAG, String.format("    |-----------|-----------|-----------|"));

    // Calculate vector error (from Tile to Rhombus) components along X and Y axis
    Point worstTileCenter = getTileCenterInPixels(tile_n, tile_m);
    double error_x = largestErrorRhombus.center.x - worstTileCenter.x;
    double error_y = largestErrorRhombus.center.y - worstTileCenter.y;
    Log.d(Constants.TAG, String.format("Tile at [%d][%d] has x error = %4.0f y error = %4.0f", tile_n, tile_m,
            error_x, error_y));

    // Project vector error (from Tile to Rhombus) components along alpha and beta directions.
    double alphaError = error_x * Math.cos(alphaAngle) + error_y * Math.sin(alphaAngle);
    double betaError = error_x * Math.cos(betaAngle) + error_y * Math.sin(betaAngle);
    Log.d(Constants.TAG, String.format("Tile at [%d][%d] has alpha error = %4.0f beta error = %4.0f", tile_n,
            tile_m, alphaError, betaError));

    // Calculate index vector correction: i.e., preferred direction to move this tile.
    int delta_n = (int) Math.round(alphaError / alphaLatticLength);
    int delta_m = (int) Math.round(betaError / betaLatticLength);
    Log.d(Constants.TAG, String.format("Correction Index Vector: [%d][%d]", delta_n, delta_m));

    // Calculate new location of tile, clamped to the 3x3 dimensions of the face.
    int new_n = Math.max(0, Math.min(2, tile_n + delta_n));
    int new_m = Math.max(0, Math.min(2, tile_m + delta_m));

    // Cannot move: the preferred move is to the original location.
    if (new_n == tile_n && new_m == tile_m) {
        Log.i(Constants.TAG, String.format("Tile at [%d][%d] location NOT moved", tile_n, tile_m));
        return false;
    }

    // Move Tile or swap with tile in that location.
    else {
        Log.i(Constants.TAG,
                String.format("Swapping Rhombi [%d][%d] with  [%d][%d]", tile_n, tile_m, new_n, new_m));
        Rhombus tmp = faceRhombusArray[new_n][new_m];
        faceRhombusArray[new_n][new_m] = faceRhombusArray[tile_n][tile_m];
        faceRhombusArray[tile_n][tile_m] = tmp;
        return true;
    }

}

From source file:org.ar.rubik.RubikFace.java

License:Open Source License

/**
 * Compute the expected center, in pixel coordinates, of the tile at lattice
 * location {n,m} by stepping from the LMS-fit origin along the alpha and
 * beta lattice directions.
 *
 * @param n index along the alpha lattice direction
 * @param m index along the beta lattice direction
 * @return pixel-space center of tile {n,m}
 */
public Point getTileCenterInPixels(int n, int m) {
    double alphaStepX = n * alphaLatticLength * Math.cos(alphaAngle);
    double alphaStepY = n * alphaLatticLength * Math.sin(alphaAngle);
    double betaStepX = m * betaLatticLength * Math.cos(betaAngle);
    double betaStepY = m * betaLatticLength * Math.sin(betaAngle);
    return new Point(lmsResult.origin.x + alphaStepX + betaStepX, lmsResult.origin.y + alphaStepY + betaStepY);
}

From source file:org.designosaurs.opmode.DesignosaursAuto.java

/**
 * Autonomous entry point. Configures robot hardware, Vuforia, and OpenCV,
 * lets the drivers pick a team color before start, then runs the beacon
 * state machine (initial positioning -> searching -> aligning with beacon ->
 * waiting for the button placer) until the OpMode ends.
 */
@Override
public void runOpMode() {
    appContext = hardwareMap.appContext;
    // NOTE(review): startTime is assigned below but never read in this method.
    long startTime;

    // Test mode disables the physical hardware and jumps straight to the
    // beacon-search state; debugging is done through the web interface.
    if (TEST_MODE) {
        DesignosaursHardware.hardwareEnabled = false;

        Log.i(TAG, "*** TEST MODE ENABLED ***");
        Log.i(TAG, "Hardware is disabled, skipping to beacon search state.");
        Log.i(TAG, "Web debugging interface can be found at http://"
                + DesignosaursUtils.getIpAddress(appContext) + ":9001/");

        autonomousState = STATE_SEARCHING;
    }

    setInitState("Configuring hardware...");
    robot.init(hardwareMap);

    setInitState("Initializing vuforia...");
    VuforiaLocalizer.Parameters params = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    params.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    params.vuforiaLicenseKey = VUFORIA_LICENCE_KEY;

    vuforia = new VuforiaLocalizerImplSubclass(params);

    // Vuforia tracks the images, we'll call them beacons
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("wheels");
    beacons.get(1).setName("tools");
    beacons.get(2).setName("legos");
    beacons.get(3).setName("gears");

    setInitState("Initializing OpenCV...");
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, appContext, mLoaderCallback);

    setInitState("Select team color using the gamepad.");

    // Poll the gamepad for team-color selection until the match starts.
    while (!isStarted() && !isStopRequested()) {
        updateTeamColor();

        robot.waitForTick(25);
    }

    // Home the button pusher and shooter mechanisms before any driving.
    if (DesignosaursHardware.hardwareEnabled) {
        buttonPusherManager.start();
        buttonPusherManager.setStatus(ButtonPusherManager.STATE_HOMING);
        shooterManager.start();
        shooterManager.setStatus(ShooterManager.STATE_AT_BASE);
    }

    if (HOTEL_MODE)
        setState(STATE_SEARCHING);

    beacons.activate();
    startTime = System.currentTimeMillis();
    robot.startTimer();

    long ticksSeeingImage = 0;
    // Main control loop: one pass per robot tick while the OpMode is active.
    while (opModeIsActive()) {
        String beaconName = "";

        // Detect and process images
        for (VuforiaTrackable beac : beacons) {
            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

            if (pose != null) {
                beaconName = beac.getName();

                if (beac.getName().equals(lastScoredBeaconName)) // fixes seeing the first beacon and wanting to go back to it
                    continue;

                // Keep the first 12 values of the transposed pose (3x4 matrix).
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);
                lastPose = rawPose;

                recalculateCriticalPoints();
                boundPoints();
                centeredPos = center.y; // drive routines align based on this
            }
        }

        // Stream a half-size camera frame (throttled to every 50 ms) plus the
        // tracked point coordinates to the web debugging interface.
        if (vuforia.rgb != null && ENABLE_CAMERA_STREAMING
                && System.currentTimeMillis() > (lastFrameSentAt + 50)) {
            lastFrameSentAt = System.currentTimeMillis();

            Bitmap bm = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            Bitmap resizedbitmap = DesignosaursUtils.resize(bm, bm.getWidth() / 2, bm.getHeight() / 2);
            FtcRobotControllerActivity.webServer.streamCameraFrame(resizedbitmap);

            if (center != null) {
                ArrayList<String> coords = new ArrayList<>(4);
                coords.add(start.toString());
                coords.add(end.toString());
                coords.add(center.toString());

                FtcRobotControllerActivity.webServer.streamPoints(coords);
            }
        }

        switch (autonomousState) {
        case STATE_INITIAL_POSITIONING:
            robot.startOrientationTracking();

            // Blue alliance mirrors the drive direction.
            if (teamColor == TEAM_BLUE) {
                robot.rightMotor.setDirection(DcMotor.Direction.REVERSE);
                robot.leftMotor.setDirection(DcMotor.Direction.FORWARD);
            }

            // Scripted opening drive: shoot, then drive/turn toward the first
            // beacon; the two branches mirror each other per alliance color.
            if (teamColor == TEAM_RED) {
                robot.accel(0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.waitForTick(1400);
                robot.accel(0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.2, 0.8);

                updateRunningState("Initial turn...");
                robot.turn(-34, 0.3);

                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(2.8, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);

                updateRunningState("Secondary turn...");
                robot.turn(38, 0.2);
            } else {
                robot.goStraight(-0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.accel(-0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.8, 0.4);
                robot.setDrivePower(0);

                robot.turn(179, 0.45);

                updateRunningState("Initial turn...");
                robot.turn(22, 0.3);

                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(4.1, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);

                updateRunningState("Secondary turn...");
                robot.turn(-35, 0.3);
            }

            robot.setDrivePower(0);
            // Allow the camera time to focus:
            robot.waitForTick(1000);
            setState(STATE_SEARCHING);
            break;
        case STATE_SEARCHING:
            // Close enough to the beacon image: stop and analyze it.
            if (Math.abs(getRelativePosition()) < BEACON_ALIGNMENT_TOLERANCE) {
                stateMessage = "Analysing beacon data...";
                robot.setDrivePower(0);
                ticksSeeingImage++;
                vuforia.disableFlashlight();

                // Wait a few ticks so the camera image stabilizes first.
                if (ticksSeeingImage < 5)
                    continue;

                byte pass = 0;
                boolean successful = false;
                BeaconPositionResult lastBeaconPosition = null;
                int[] range = { 0, 500 };
                Mat image = null;

                // Retry detection, nudging the robot between passes, until the
                // beacon finder reports a conclusive position.
                while (!successful) {
                    image = getRegionAboveBeacon();
                    if (image == null || vuforia.rgb == null) {
                        Log.w(TAG, "No frame! _");
                        robot.setDrivePower(MIN_DRIVE_POWER);

                        continue;
                    }

                    lastBeaconPosition = beaconFinder.process(System.currentTimeMillis(), image, SAVE_IMAGES)
                            .getResult();
                    range = lastBeaconPosition.getRangePixels();

                    // Clamp the reported pixel range to the image bounds.
                    if (range[0] < 0)
                        range[0] = 0;

                    if (range[1] > image.width())
                        range[1] = image.width();

                    Log.i(TAG, "Beacon finder results: " + lastBeaconPosition.toString());

                    if (lastBeaconPosition.isConclusive())
                        successful = true;
                    else {
                        pass++;

                        Log.i(TAG, "Searching for beacon presence, pass #" + beaconsFound + ":" + pass + ".");

                        // We can't see both buttons, so move back and forth and run detection algorithm again
                        if (pass <= 3)
                            robot.goCounts(teamColor == TEAM_RED ? -400 : 400, 0.2);

                        if (pass <= 4 && pass >= 3)
                            robot.goCounts(teamColor == TEAM_RED ? 1000 : -1000, 0.2);

                        // Give up after 4 passes: make one large move and proceed.
                        if (pass > 4) {
                            robot.goCounts(teamColor == TEAM_RED ? -1600 : 1600, 0.4);
                            successful = true;
                        }

                        // Allow camera time to autofocus:
                        robot.setDrivePower(0);
                        robot.waitForTick(100);
                    }
                }

                if (autonomousState != STATE_SEARCHING)
                    continue;

                // Change the values in the following line for how much off the larger image we crop (y-wise,
                // the x axis is controlled by where the robot thinks the beacon is, see BeaconFinder).
                // TODO: Tune this based on actual field
                Log.i(TAG, "Source image is " + image.height() + "px by " + image.width() + "px");

                int width = range[1] - range[0];
                Log.i(TAG, "X: " + range[0] + " Y: 0, WIDTH: " + width + ", HEIGHT: "
                        + (image.height() > 50 ? image.height() - 50 : image.height()));

                if (range[0] < 0)
                    range[0] = 0;

                if (width < 0)
                    width = image.width() - range[0];

                // Crop to the beacon region, then downscale by half for processing.
                Mat croppedImageRaw = new Mat(image,
                        new Rect(range[0], 0, width,
                                image.height() > 50 ? image.height() - 50 : image.height())),
                        croppedImage = new Mat();

                Imgproc.resize(croppedImageRaw, croppedImage, new Size(), 0.5, 0.5, Imgproc.INTER_LINEAR);

                // Optionally blank the center strip so it doesn't skew color detection.
                if (OBFUSCATE_MIDDLE)
                    Imgproc.rectangle(croppedImage, new Point((croppedImage.width() / 2) - 35, 0),
                            new Point((croppedImage.width() / 2) + 55, croppedImage.height()),
                            new Scalar(255, 255, 255, 255), -1);

                BeaconColorResult lastBeaconColor = beaconProcessor
                        .process(System.currentTimeMillis(), croppedImage, SAVE_IMAGES).getResult();
                BeaconColorResult.BeaconColor targetColor = (teamColor == TEAM_RED
                        ? BeaconColorResult.BeaconColor.RED
                        : BeaconColorResult.BeaconColor.BLUE);

                Log.i(TAG, "*** BEACON FOUND ***");
                Log.i(TAG, "Target color: "
                        + (targetColor == BeaconColorResult.BeaconColor.BLUE ? "Blue" : "Red"));
                Log.i(TAG, "Beacon colors: " + lastBeaconColor.toString());

                // Beacon is already the right color:
                if (lastBeaconColor.getLeftColor() == targetColor
                        && lastBeaconColor.getRightColor() == targetColor) {
                    robot.accel(0.5, 1);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.2 : 0.7, 1);
                    robot.decel(0.5, DRIVE_POWER);
                    advanceToSecondBeacon(beaconName);

                    continue;
                }

                // Both sides of the beacon are the wrong color, so just score the left side:
                if (lastBeaconColor.getLeftColor() != targetColor
                        && lastBeaconColor.getRightColor() != targetColor) {
                    targetSide = SIDE_LEFT;
                    robot.setDrivePower(-DRIVE_POWER * 0.75);
                    setState(STATE_ALIGNING_WITH_BEACON);

                    continue;
                }

                // TODO: Replace goStraight call with proper combined distance from beacon offset + target side offset
                robot.goStraight(lastBeaconPosition.getOffsetFeet(), DRIVE_POWER);
                robot.setDrivePower(0);

                robot.resetDriveEncoders();
                targetSide = lastBeaconColor.getLeftColor() == targetColor ? SIDE_LEFT : SIDE_RIGHT;
                robot.setDrivePower(-DRIVE_POWER * 0.75);

                setState(STATE_ALIGNING_WITH_BEACON);
            } else if (Math.abs(getRelativePosition()) < SLOW_DOWN_AT) {
                // Beacon visible but not centered yet: creep toward center.
                stateMessage = "Beacon seen, centering (" + String.valueOf(getRelativePosition()) + ")...";
                robot.resetDriveEncoders();

                if (getRelativePosition() > 0 && getRelativePosition() != Integer.MAX_VALUE)
                    robot.setDrivePower(-MIN_DRIVE_POWER);
                else
                    robot.setDrivePower(MIN_DRIVE_POWER);
            }
            break;
        case STATE_ALIGNING_WITH_BEACON:
            stateMessage = "Positioning to deploy placer...";

            // Safety stop if alignment takes too many ticks.
            if (ticksInState > 450)
                robot.emergencyStop();

            double targetCounts = (targetSide == SIDE_LEFT) ? 1150 : 250;

            // Once either drive encoder has traveled far enough, deploy the pusher.
            if (Math.max(Math.abs(robot.getAdjustedEncoderPosition(robot.leftMotor)),
                    Math.abs(robot.getAdjustedEncoderPosition(robot.rightMotor))) >= targetCounts) {
                Log.i(TAG, "//// DEPLOYING ////");

                robot.setDrivePower(0);
                robot.startOrientationTracking(true);
                buttonPusherManager.setStatus(ButtonPusherManager.STATE_SCORING);

                setState(STATE_WAITING_FOR_PLACER);
            }
            break;
        case STATE_WAITING_FOR_PLACER:
            stateMessage = "Waiting for placer to deploy.";

            if ((buttonPusherManager.getStatus() != ButtonPusherManager.STATE_SCORING
                    && buttonPusherManager.getTicksInState() >= 10)
                    || buttonPusherManager.getStatus() == ButtonPusherManager.STATE_AT_BASE) {
                advanceToSecondBeacon(beaconName);

                // Either finish, or reposition and go search for the second beacon.
                if (beaconsFound == 2 || HOTEL_MODE)
                    setState(STATE_FINISHED);
                else {
                    robot.turn(teamColor == TEAM_RED ? -3 : 3, 0.2);
                    robot.accel(0.5, 1);
                    robot.turn(teamColor == TEAM_RED ? 2 : -2, 0.2);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.5 : 1, 1);
                    robot.decel(0.5, DRIVE_POWER);

                    setState(STATE_SEARCHING);
                }
            }
        }

        ticksInState++;
        updateRunningState();
        robot.waitForTick(20);
    }
}

From source file:org.firstinspires.ftc.teamcode.libraries.VuforiaBallLib.java

/**
 * Determine which side of the jewel holder carries which ball color.
 *
 * Grabs a camera frame, projects the predefined ball points through Vuforia's
 * pose of the relic template into image space, samples the average color of a
 * square around each ball via drawSquare(), and compares the first two
 * channels of each average to classify the pair.
 *
 * @return LeftBlue or LeftRed when the two sides disagree in the expected way,
 *         Indeterminate when they do not, Undefined when no pose/frame is
 *         available or any exception occurs.
 */
protected BallColor getBallColor() {
    //get frame from vuforia
    try {
        Mat out = getFrame();

        //get vuforia's real position matrix
        Matrix34F goodCodeWritten = ((VuforiaDefaultListenerShim) relicTemplate.getListener()).getRealPose();

        if (goodCodeWritten == null)
            return BallColor.Undefined;

        this.tempMark = RelicRecoveryVuMark.from(relicTemplate);

        // assumes point.length == 12, with indices 8..11 being the four ball
        // corner points — TODO confirm against the declaration of `point`
        final float[][] ballPoints = new float[4][2];
        //the actual color determination
        for (int i = 8; i < point.length; i++)
            ballPoints[i - 8] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();

        Point imagePoints[] = new Point[ballPoints.length];
        //convert points to opencv language
        for (int i = 0; i < ballPoints.length; i++)
            imagePoints[i] = new Point((int) ballPoints[i][0], (int) ballPoints[i][1]);

        //calculate points from projection
        //find the midpoint between the two points
        int leftXPoint = (int) ((imagePoints[0].x + imagePoints[1].x) / 2.0);
        int leftYPoint = (int) ((imagePoints[0].y + imagePoints[1].y) / 2.0);
        //find the y distance between the two
        int leftDist = (int) (Math.abs(imagePoints[0].y - imagePoints[1].y) / 2.0);
        int[] leftBall = new int[] { leftXPoint - (leftDist / 2), leftYPoint - (leftDist / 2) };

        //find the midpoint between the two points
        int rightXPoint = (int) ((imagePoints[2].x + imagePoints[3].x) / 2.0);
        int rightYPoint = (int) ((imagePoints[2].y + imagePoints[3].y) / 2.0);
        //find the y distance between the two
        int rightDist = (int) (Math.abs(imagePoints[2].y - imagePoints[3].y) / 2.0);
        int[] rightBall = new int[] { rightXPoint - (rightDist / 2), rightYPoint - (rightDist / 2) };
        //operation: subsquare
        //take a square mat we are 100% sure will have a ball in it
        //sum it up and find the average color

        Scalar leftColor = drawSquare(out, leftBall, leftDist);
        Scalar rightColor = drawSquare(out, rightBall, rightDist);

        // Optionally draw the projected wireframe and markers for debugging.
        if (displayData) {
            float[][] squarePoints = new float[8][2];
            for (int i = 0; i < 8; i++)
                squarePoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
            Point[] squarePointRay = new Point[squarePoints.length];
            for (int i = 0; i < squarePoints.length; i++)
                squarePointRay[i] = new Point((int) squarePoints[i][0], (int) squarePoints[i][1]);

            Scalar green = new Scalar(0, 255, 0);
            // Draw the two projected rectangles (points 0-3 and 4-7).
            for (int i = 0; i < 2; i++)
                for (int o = 0; o < 4; o++)
                    Imgproc.line(out, squarePointRay[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                            squarePointRay[i * 4 + o], green);

            //connect the rectangles
            for (int i = 0; i < 4; i++)
                Imgproc.line(out, squarePointRay[i], squarePointRay[i + 4], green);

            for (int i = 0; i < imagePoints.length; i++)
                Imgproc.drawMarker(out, imagePoints[i], green);

            //flip it for display
            Core.flip(out, out, -1);

            drawFrame(out);
        }

        // Compare channel 0 vs channel 1 of each averaged color. Which physical
        // colors these channels are depends on the frame's pixel format —
        // NOTE(review): confirm against getFrame()'s format.
        if (leftColor != null && rightColor != null) {
            if (leftColor.val[0] < leftColor.val[1] && rightColor.val[0] > rightColor.val[1])
                return BallColor.LeftBlue;
            else if (leftColor.val[0] > leftColor.val[1] && rightColor.val[0] < rightColor.val[1])
                return BallColor.LeftRed;
            else
                return BallColor.Indeterminate;
        } else
            return BallColor.Undefined;
    } catch (Exception e) {
        Log.e("OPENCV", e.getMessage());
        return BallColor.Undefined;
    }
}

From source file:org.firstinspires.ftc.teamcode.libraries.VuforiaBallLib.java

/**
 * Average the pixel colors inside the square of side {@code ballDist} whose
 * top-left corner is {@code ballPoint}, paint that square onto {@code src} in
 * the averaged color, and return the average.
 *
 * @param src image to sample from and draw on
 * @param ballPoint {x, y} of the square's top-left corner
 * @param ballDist side length of the square in pixels
 * @return the average color of the square, or null when the square does not
 *         fit entirely inside {@code src}
 */
protected static Scalar drawSquare(Mat src, int[] ballPoint, int ballDist) {
    boolean fitsInImage = ballPoint[0] >= 0 && ballPoint[1] >= 0 && ballPoint[0] + ballDist < src.cols()
            && ballPoint[1] + ballDist < src.rows();
    if (!fitsInImage)
        return null;

    // Accumulate each channel over every pixel of the square.
    double sumCh0 = 0, sumCh1 = 0, sumCh2 = 0;
    for (int col = 0; col < ballDist; col++)
        for (int row = 0; row < ballDist; row++) {
            double[] px = src.get(ballPoint[1] + row, ballPoint[0] + col);
            sumCh0 += px[0];
            sumCh1 += px[1];
            sumCh2 += px[2];
        }

    // Average the sums over the square's area.
    int area = ballDist * ballDist;
    Scalar average = new Scalar(sumCh0 / area, sumCh1 / area, sumCh2 / area);

    // Fill the sampled square with its average color as visual feedback.
    Imgproc.rectangle(src, new Point(ballPoint[0], ballPoint[1]),
            new Point(ballPoint[0] + ballDist, ballPoint[1] + ballDist), average, -1);

    return average;
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkHiJackVideo.java

/**
 * Init-phase loop. When the relic VuMark template is visible, reads its pose,
 * projects the predefined box/ball points into the camera frame, draws the
 * wireframe and markers on the frame, samples the average color of the two
 * ball squares via drawSquare(), and posts the annotated frame for display.
 */
@Override
public void init_loop() {
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
    if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

        /* Found an instance of the template. In the actual game, you will probably
         * loop until this condition occurs, then move on to act accordingly depending
         * on which VuMark was visible. */
        telemetry.addData("VuMark", "%s visible", vuMark);

        /* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
         * it is perhaps unlikely that you will actually need to act on this pose information, but
         * we illustrate it nevertheless, for completeness. */
        //qualcomm is stupid and didn't let me see the real vuforia matrix, so I shimmed it
        //heres where is gets good
        VuforiaDefaultListenerShim writeGoodCode = (VuforiaDefaultListenerShim) relicTemplate.getListener();

        OpenGLMatrix pose = writeGoodCode.getRawPose();
        telemetry.addData("Pose", format(pose));

        /* We further illustrate how to decompose the pose into useful rotational and
         * translational components */
        if (pose != null) {
            //alternate projection!
            //get vuforia's real position matrix
            Matrix34F goodCodeWritten = writeGoodCode.getRealPose();

            //reset imagePoints
            final float[][] vufPoints = new float[point.length][2];

            //use vuforias projectPoints method to project all those box points
            for (int i = 0; i < point.length; i++) {
                //project
                vufPoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
                //convert to opencv language

                //telemetry.addData("point", "num: %d, x: %f.2, y: %f.2", i, vufPoints[i][0], vufPoints[i][1]);
            }

            //telemetry.addData("Camera Size", "w: %f.2, h: %f.2", camCal.getSize().getData()[0], camCal.getSize().getData()[1]);

            //get frame from vuforia
            try {
                VuforiaLocalizer.CloseableFrame frame = ray.take();

                // Find the first image in the frame that is in RGB888 format.
                int img = 0;
                for (; img < frame.getNumImages(); img++) {
                    //telemetry.addData("Image format " + img, frame.getImage(img).getFormat());
                    if (frame.getImage(img).getFormat() == PIXEL_FORMAT.RGB888)
                        break;
                }

                Image mImage = frame.getImage(img);

                //stick it in a Mat for "display purposes"
                Mat out = new Mat(mImage.getHeight(), mImage.getWidth(), CV_8UC3);

                // NOTE(review): this local byte[] `ray` shadows the frame-queue
                // field `ray` used by ray.take() above — consider renaming.
                java.nio.ByteBuffer color = mImage.getPixels();
                byte[] ray = new byte[color.limit()];
                color.rewind();
                color.get(ray);
                out.put(0, 0, ray);

                frame.close();

                //convert points, halfing distances b/c vuforia does that internally so we gotta fix it
                for (int i = 0; i < vufPoints.length; i++)
                    imagePoints[i] = new Point((int) vufPoints[i][0], (int) vufPoints[i][1]);

                Scalar green = new Scalar(0, 255, 0);
                // Draw the two projected rectangles (points 0-3 and 4-7).
                for (int i = 0; i < 2; i++)
                    for (int o = 0; o < 4; o++)
                        Imgproc.line(out, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                                imagePoints[i * 4 + o], green);

                //connect the rectangles
                for (int i = 0; i < 4; i++)
                    Imgproc.line(out, imagePoints[i], imagePoints[i + 4], green);

                // Mark the ball points (indices 8 and up).
                for (int i = 8; i < imagePoints.length; i++)
                    Imgproc.drawMarker(out, imagePoints[i], green);

                //calculate points from projection
                //find the midpoint between the two points
                int leftXPoint = (int) ((imagePoints[8].x + imagePoints[9].x) / 2.0);
                int leftYPoint = (int) ((imagePoints[8].y + imagePoints[9].y) / 2.0);
                //find the y distance between the two
                leftDist = (int) (Math.abs(imagePoints[8].y - imagePoints[9].y) / 2.0);
                leftBall = new int[] { leftXPoint - (leftDist / 2), leftYPoint - (leftDist / 2) };

                //find the midpoint between the two points
                int rightXPoint = (int) ((imagePoints[10].x + imagePoints[11].x) / 2.0);
                int rightYPoint = (int) ((imagePoints[10].y + imagePoints[11].y) / 2.0);
                //find the y distance between the two
                rightDist = (int) (Math.abs(imagePoints[10].y - imagePoints[11].y) / 2.0);
                rightBall = new int[] { rightXPoint - (rightDist / 2), rightYPoint - (rightDist / 2) };

                //operation: subsquare
                //take a square mat we are 100% sure will have a ball in it
                //sum it up and find the average color

                leftColor = drawSquare(out, leftBall, leftDist);
                rightColor = drawSquare(out, rightBall, rightDist);

                //flip it for display
                Core.flip(out, out, -1);

                matQueue.add(out);

                //display!
                mView.getHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Mat frame = matQueue.take();
                            //convert to bitmap
                            Utils.matToBitmap(frame, bm);
                            frame.release();
                            mView.invalidate();
                        } catch (InterruptedException e) {
                            //huh
                        }
                    }
                });
            } catch (Exception e) {
                Log.e("OPENCV", e.getLocalizedMessage());
            }
        }
    } else {
        telemetry.addData("VuMark", "not visible");
    }
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkHiJackVideo.java

/**
 * Averages the pixel colors inside a square region of {@code src} and then paints
 * that square with the resulting color.
 *
 * @param src       image to sample from and draw on (modified in place)
 * @param ballPoint top-left corner of the square as {x, y}
 * @param ballDist  side length of the square in pixels
 * @return the average color of the square, or {@code null} if the square does not
 *         fit entirely inside {@code src}
 */
private static Scalar drawSquare(Mat src, int[] ballPoint, int ballDist) {
    final int left = ballPoint[0];
    final int top = ballPoint[1];

    // Bail out unless the whole square lies within the image bounds.
    if (left < 0 || top < 0 || left + ballDist >= src.cols() || top + ballDist >= src.rows())
        return null;

    // Accumulate each channel over every pixel in the square.
    double sum0 = 0, sum1 = 0, sum2 = 0;
    for (int row = 0; row < ballDist; row++) {
        for (int col = 0; col < ballDist; col++) {
            double[] px = src.get(top + row, left + col);
            sum0 += px[0];
            sum1 += px[1];
            sum2 += px[2];
        }
    }

    // Mean color over the ballDist x ballDist patch.
    double area = ballDist * ballDist;
    Scalar mean = new Scalar(sum0 / area, sum1 / area, sum2 / area);

    // Fill the sampled square with its own average color (thickness -1 = filled).
    Imgproc.rectangle(src, new Point(left, top), new Point(left + ballDist, top + ballDist), mean, -1);

    return mean;
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkOpenCV.java

/**
 * Per-init-loop vision pass: if the Relic Recovery VuMark is visible, projects a set of
 * 3D reference points through the (shimmed) Vuforia pose into image coordinates, grabs a
 * camera frame, draws the projected wireframe and markers onto it, and posts the
 * annotated (flipped) frame to the preview view.
 *
 * Fixes relative to the original:
 *  - the Vuforia CloseableFrame is now closed in a finally block, so it is not leaked
 *    when frame extraction throws;
 *  - the catch block no longer passes a possibly-null getLocalizedMessage() to Log.e
 *    (which would itself throw NPE), logging the throwable instead.
 */
@Override
public void init_loop() {
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
    if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

        /* Found an instance of the template. In the actual game, you will probably
         * loop until this condition occurs, then move on to act accordingly depending
         * on which VuMark was visible. */
        telemetry.addData("VuMark", "%s visible", vuMark);

        /* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
         * it is perhaps unlikely that you will actually need to act on this pose information, but
         * we illustrate it nevertheless, for completeness. */
        // Vuforia's default listener hides the raw matrix, so a shim exposes it.
        VuforiaDefaultListenerShim writeGoodCode = (VuforiaDefaultListenerShim) relicTemplate.getListener();

        OpenGLMatrix pose = writeGoodCode.getRawPose();
        telemetry.addData("Pose", format(pose));

        if (pose != null) {
            // Use Vuforia's own pose matrix for the projection instead of decomposing it.
            Matrix34F goodCodeWritten = writeGoodCode.getRealPose();

            // Reprojected 2D coordinates, one {x, y} pair per reference point.
            final float[][] vufPoints = new float[point.length][2];

            // Project every 3D reference point into camera-image space.
            for (int i = 0; i < point.length; i++) {
                vufPoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
            }

            try {
                VuforiaLocalizer.CloseableFrame frame = ray.take();

                // Extract the image whose width matches the expected size, converting
                // it to an OpenCV Mat; always close the frame, even on failure.
                final Mat out;
                try {
                    int img = 0;
                    for (; img < frame.getNumImages(); img++) {
                        if (frame.getImage(img).getWidth() == size[0])
                            break;
                    }

                    if (img >= frame.getNumImages())
                        throw new IllegalArgumentException("No frame with matching width!");

                    out = getMatFromImage(frame.getImage(img));
                } finally {
                    frame.close();
                }

                // Convert points, halving distances b/c Vuforia does that internally
                // so we gotta fix it.
                for (int i = 0; i < vufPoints.length; i++)
                    imagePoints[i] = new Point((int) vufPoints[i][0] / 2, (int) vufPoints[i][1] / 2);

                Scalar color = new Scalar(0, 255, 0);

                // Draw the two projected quadrilaterals (points 0-3 and 4-7),
                // wrapping back to each quad's first corner.
                for (int i = 0; i < 2; i++)
                    for (int o = 0; o < 4; o++)
                        Imgproc.line(out, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                                imagePoints[i * 4 + o], color);

                // Connect the two quads corner-to-corner to form a box.
                for (int i = 0; i < 4; i++)
                    Imgproc.line(out, imagePoints[i], imagePoints[i + 4], color);

                // Any remaining points are drawn as standalone markers.
                for (int i = 8; i < imagePoints.length; i++)
                    Imgproc.drawMarker(out, imagePoints[i], color);

                // Flip for display orientation.
                Core.flip(out, out, -1);

                matQueue.add(out);

                // Hand the annotated frame to the UI thread for display.
                mView.getHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Mat frame = matQueue.take();
                            // Convert to bitmap and trigger a redraw.
                            Utils.matToBitmap(frame, bm);
                            frame.release();
                            mView.invalidate();
                        } catch (InterruptedException e) {
                            // Re-assert the interrupt so the UI thread can observe it.
                            Thread.currentThread().interrupt();
                        }
                    }
                });
            } catch (Exception e) {
                // getLocalizedMessage() may be null; log the throwable itself instead.
                Log.e("OPENCV", "Error processing Vuforia frame", e);
            }
        }
    } else {
        telemetry.addData("VuMark", "not visible");
    }
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkOpenCV.java

/**
 * Computes the average color of a square patch of {@code src} and fills that patch
 * with the computed color. Does nothing when the square does not fit inside the image.
 *
 * @param src       image sampled and painted in place
 * @param ballPoint {x, y} of the square's top-left corner
 * @param ballDist  side length of the square in pixels
 */
private static void drawSquare(Mat src, int[] ballPoint, int ballDist) {
    int x0 = ballPoint[0];
    int y0 = ballPoint[1];

    // Skip entirely if any part of the square would fall outside the image.
    if (x0 < 0 || y0 < 0 || x0 + ballDist >= src.cols() || y0 + ballDist >= src.rows())
        return;

    // Per-channel running totals across the patch.
    double[] sums = new double[3];
    for (int dy = 0; dy < ballDist; dy++) {
        for (int dx = 0; dx < ballDist; dx++) {
            double[] px = src.get(y0 + dy, x0 + dx);
            for (int c = 0; c < 3; c++)
                sums[c] += px[c];
        }
    }

    // Mean color over the patch.
    int area = ballDist * ballDist;
    Scalar avg = new Scalar(sums[0] / area, sums[1] / area, sums[2] / area);

    // Paint the patch solid with its average color (negative thickness = filled).
    Imgproc.rectangle(src, new Point(x0, y0), new Point(x0 + ballDist, y0 + ballDist), avg, -1);
}

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

/**
 * Grabs a camera frame, thresholds it for blue, and returns the pixel width of the
 * bounding box of the largest blue contour (the center vortex).
 *
 * Fixes relative to the original:
 *  - removed unused locals {@code vortexLowerThresh}/{@code vortexUpperThresh}
 *    (the actual threshold uses BLUE_LOWER_THRESH/BLUE_UPPER_THRESH);
 *  - the debug log said "mat null" exactly when the mat was NOT null — corrected;
 *  - intermediate Mats are now released to avoid leaking native memory.
 *
 * @return bounding-box width in pixels, or -1 if no frame was available or no
 *         suitable contour was found
 */
public double getCenterVortexWidth() {
    Mat matIn = getCameraMat();
    if (matIn != null) {
        Log.d(TAG, "mat not null"); // frame acquired; proceed with detection
        Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_RGB2HSV);

        // Binary mask of the pixels that fall inside the blue HSV range.
        Mat matMasked = new Mat(matIn.rows(), matIn.cols(), CvType.CV_8UC1);
        Core.inRange(matIn, BLUE_LOWER_THRESH, BLUE_UPPER_THRESH, matMasked);

        // Find the largest contour (the part of the beacon we are interested in).
        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Mat contourMat = matMasked.clone(); // findContours mutates its input, so work on a copy
        Imgproc.findContours(contourMat, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);

        // Release native intermediates we no longer need.
        hierarchy.release();
        contourMat.release();
        matMasked.release();

        // NOTE(review): this requires at least TWO contours before reporting a result;
        // if a single detected contour should count, this should be contours.size() > 0
        // — confirm the intent before changing.
        if (contours.size() > 1) {
            int largestContourIndex = 0;
            double lastContourArea = 0;
            for (int i = 0; i < contours.size(); i++) {
                double contourArea = Imgproc.contourArea(contours.get(i));
                if (contourArea > lastContourArea) {
                    largestContourIndex = i;
                    lastContourArea = contourArea;
                }
            }
            // Bounding rect of the largest contour; draw it for the debug snapshot.
            Rect boundingRect = Imgproc
                    .boundingRect(new MatOfPoint(contours.get(largestContourIndex).toArray()));
            Core.rectangle(matIn, new Point(boundingRect.x, boundingRect.y),
                    new Point(boundingRect.x + boundingRect.width, boundingRect.y + boundingRect.height),
                    OPEN_CV_GREEN);

            saveMatToDisk(matIn);//debug only

            return boundingRect.width;
        }
    }
    return -1;
}