Example usage for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usage for org.opencv.core Scalar Scalar.

Prototype

public Scalar(double v0, double v1, double v2) 
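
The three-argument form fills the first three components of the Scalar and leaves the fourth at zero; OpenCV treats the components as per-channel values, most often a BGR drawing color or an HSV threshold bound. A minimal, self-contained sketch (the class name is illustrative, not taken from the sources below):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ScalarExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        //green in BGR order: blue = 0, green = 255, red = 0
        Scalar green = new Scalar(0, 255, 0);

        //fill a 2x2 three-channel matrix with that color
        Mat mat = new Mat(2, 2, CvType.CV_8UC3, green);

        //prints [0.0, 255.0, 0.0]
        System.out.println(java.util.Arrays.toString(mat.get(0, 0)));
    }
}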

Usage

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkHiJackVideo.java

@Override
public void init_loop() {
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
    if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

        /* Found an instance of the template. In the actual game, you will probably
         * loop until this condition occurs, then move on to act accordingly depending
         * on which VuMark was visible. */
        telemetry.addData("VuMark", "%s visible", vuMark);

        /* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
         * it is perhaps unlikely that you will actually need to act on this pose information, but
         * we illustrate it nevertheless, for completeness. */
        //Qualcomm's API doesn't expose the real Vuforia matrix, so I shimmed it
        //here's where it gets good
        VuforiaDefaultListenerShim writeGoodCode = (VuforiaDefaultListenerShim) relicTemplate.getListener();

        OpenGLMatrix pose = writeGoodCode.getRawPose();
        telemetry.addData("Pose", format(pose));

        /* We further illustrate how to decompose the pose into useful rotational and
         * translational components */
        if (pose != null) {
            //alternate projection!
            //get vuforia's real position matrix
            Matrix34F goodCodeWritten = writeGoodCode.getRealPose();

            //reset imagePoints
            final float[][] vufPoints = new float[point.length][2];

            //use Vuforia's projectPoint method to project all those box points
            for (int i = 0; i < point.length; i++) {
                //project
                vufPoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
                //convert to opencv language

                //telemetry.addData("point", "num: %d, x: %f.2, y: %f.2", i, vufPoints[i][0], vufPoints[i][1]);
            }

            //telemetry.addData("Camera Size", "w: %f.2, h: %f.2", camCal.getSize().getData()[0], camCal.getSize().getData()[1]);

            //get frame from vuforia
            try {
                VuforiaLocalizer.CloseableFrame frame = ray.take();

                int img = 0;
                for (; img < frame.getNumImages(); img++) {
                    //telemetry.addData("Image format " + img, frame.getImage(img).getFormat());
                    if (frame.getImage(img).getFormat() == PIXEL_FORMAT.RGB888)
                        break;
                }

                Image mImage = frame.getImage(img);

                //stick it in a Mat for "display purposes"
                Mat out = new Mat(mImage.getHeight(), mImage.getWidth(), CV_8UC3);

                java.nio.ByteBuffer color = mImage.getPixels();
                byte[] ray = new byte[color.limit()];
                color.rewind();
                color.get(ray);
                out.put(0, 0, ray);

                frame.close();

                //convert points, halving distances because Vuforia does that internally, so we correct for it
                for (int i = 0; i < vufPoints.length; i++)
                    imagePoints[i] = new Point((int) vufPoints[i][0], (int) vufPoints[i][1]);

                Scalar green = new Scalar(0, 255, 0);
                for (int i = 0; i < 2; i++)
                    for (int o = 0; o < 4; o++)
                        Imgproc.line(out, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                                imagePoints[i * 4 + o], green);

                //connect the rectangles
                for (int i = 0; i < 4; i++)
                    Imgproc.line(out, imagePoints[i], imagePoints[i + 4], green);

                for (int i = 8; i < imagePoints.length; i++)
                    Imgproc.drawMarker(out, imagePoints[i], green);

                //calculate points from projection
                //find the midpoint between the two points
                int leftXPoint = (int) ((imagePoints[8].x + imagePoints[9].x) / 2.0);
                int leftYPoint = (int) ((imagePoints[8].y + imagePoints[9].y) / 2.0);
                //find the y distance between the two
                leftDist = (int) (Math.abs(imagePoints[8].y - imagePoints[9].y) / 2.0);
                leftBall = new int[] { leftXPoint - (leftDist / 2), leftYPoint - (leftDist / 2) };

                //find the midpoint between the two points
                int rightXPoint = (int) ((imagePoints[10].x + imagePoints[11].x) / 2.0);
                int rightYPoint = (int) ((imagePoints[10].y + imagePoints[11].y) / 2.0);
                //find the y distance between the two
                rightDist = (int) (Math.abs(imagePoints[10].y - imagePoints[11].y) / 2.0);
                rightBall = new int[] { rightXPoint - (rightDist / 2), rightYPoint - (rightDist / 2) };

                //operation: subsquare
                //take a square mat we are 100% sure will have a ball in it
                //sum it up and find the average color

                leftColor = drawSquare(out, leftBall, leftDist);
                rightColor = drawSquare(out, rightBall, rightDist);

                //flip it for display
                Core.flip(out, out, -1);

                matQueue.add(out);

                //display!
                mView.getHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Mat frame = matQueue.take();
                            //convert to bitmap
                            Utils.matToBitmap(frame, bm);
                            frame.release();
                            mView.invalidate();
                        } catch (InterruptedException e) {
                            //interrupted while waiting for a frame; nothing to do
                        }
                    }
                });
            } catch (Exception e) {
                Log.e("OPENCV", e.getLocalizedMessage());
            }
        }
    } else {
        telemetry.addData("VuMark", "not visible");
    }
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkHiJackVideo.java

private static Scalar drawSquare(Mat src, int[] ballPoint, int ballDist) {
    //find average left and right ball square
    //find the average color for all the pixels in that square
    if (ballPoint[0] >= 0 && ballPoint[1] >= 0 && ballPoint[0] + ballDist < src.cols()
            && ballPoint[1] + ballDist < src.rows()) {
        double total[] = new double[3];
        for (int x = 0; x < ballDist; x++)
            for (int y = 0; y < ballDist; y++) {
                double[] pixel = src.get(y + ballPoint[1], x + ballPoint[0]);
                total[0] += pixel[0];
                total[1] += pixel[1];
                total[2] += pixel[2];
            }

        //make average color
        Scalar color = new Scalar(total[0] / (ballDist * ballDist), total[1] / (ballDist * ballDist),
                total[2] / (ballDist * ballDist));

        Imgproc.rectangle(src, new Point(ballPoint[0], ballPoint[1]),
                new Point(ballPoint[0] + ballDist, ballPoint[1] + ballDist), color, -1);

        return color;
    } else
        return null;
}
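
The summation loop above computes a per-channel average by hand; OpenCV can do the same with Core.mean, which returns the average directly as a Scalar. A sketch of that alternative (assuming org.opencv.core.Rect is imported and the same bounds check has already passed):

private static Scalar averageColor(Mat src, int[] ballPoint, int ballDist) {
    //crop the ball square out of the source frame
    Mat sub = src.submat(new Rect(ballPoint[0], ballPoint[1], ballDist, ballDist));

    //Core.mean averages every channel over the region and returns a Scalar
    Scalar color = Core.mean(sub);
    sub.release();
    return color;
}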

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkOpenCV.java

@Override
public void init_loop() {
    RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
    if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

        /* Found an instance of the template. In the actual game, you will probably
         * loop until this condition occurs, then move on to act accordingly depending
         * on which VuMark was visible. */
        telemetry.addData("VuMark", "%s visible", vuMark);

        /* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
         * it is perhaps unlikely that you will actually need to act on this pose information, but
         * we illustrate it nevertheless, for completeness. */
        //Qualcomm's API doesn't expose the real Vuforia matrix, so I shimmed it
        //here's where it gets good
        VuforiaDefaultListenerShim writeGoodCode = (VuforiaDefaultListenerShim) relicTemplate.getListener();

        OpenGLMatrix pose = writeGoodCode.getRawPose();
        telemetry.addData("Pose", format(pose));

        /* We further illustrate how to decompose the pose into useful rotational and
         * translational components */
        if (pose != null) {
            //alternate projection!
            //get vuforia's real position matrix
            Matrix34F goodCodeWritten = writeGoodCode.getRealPose();

            //reset imagePoints
            final float[][] vufPoints = new float[point.length][2];

            //use Vuforia's projectPoint method to project all those box points
            for (int i = 0; i < point.length; i++) {
                //project
                vufPoints[i] = Tool.projectPoint(camCal, goodCodeWritten, point[i]).getData();
                //convert to opencv language

                //telemetry.addData("point", "num: %d, x: %f.2, y: %f.2", i, vufPoints[i][0], vufPoints[i][1]);
            }

            //telemetry.addData("Camera Size", "w: %f.2, h: %f.2", camCal.getSize().getData()[0], camCal.getSize().getData()[1]);

            //get frame from vuforia
            try {
                VuforiaLocalizer.CloseableFrame frame = ray.take();

                int img = 0;
                for (; img < frame.getNumImages(); img++) {
                    //Log.i("OPENCV", "Image " + img + " " + frame.getImage(img).getWidth());
                    if (frame.getImage(img).getWidth() == size[0])
                        break;
                }

                final Mat out;
                if (img < frame.getNumImages())
                    out = getMatFromImage(frame.getImage(img));
                else
                    throw new IllegalArgumentException("No frame with matching width!");

                frame.close();

                //convert points, halving distances because Vuforia does that internally, so we correct for it
                for (int i = 0; i < vufPoints.length; i++)
                    imagePoints[i] = new Point((int) vufPoints[i][0] / 2, (int) vufPoints[i][1] / 2);

                Scalar color = new Scalar(0, 255, 0);
                for (int i = 0; i < 2; i++)
                    for (int o = 0; o < 4; o++)
                        Imgproc.line(out, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1],
                                imagePoints[i * 4 + o], color);

                //connect the rectangles
                for (int i = 0; i < 4; i++)
                    Imgproc.line(out, imagePoints[i], imagePoints[i + 4], color);

                for (int i = 8; i < imagePoints.length; i++)
                    Imgproc.drawMarker(out, imagePoints[i], color);

                //flip it for display
                Core.flip(out, out, -1);

                matQueue.add(out);

                //display!
                mView.getHandler().post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Mat frame = matQueue.take();
                            //convert to bitmap
                            Utils.matToBitmap(frame, bm);
                            frame.release();
                            mView.invalidate();
                        } catch (InterruptedException e) {
                            //interrupted while waiting for a frame; nothing to do
                        }
                    }
                });
            } catch (Exception e) {
                Log.e("OPENCV", e.getLocalizedMessage());
            }
        }
    } else {
        telemetry.addData("VuMark", "not visible");
    }
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkOpenCV.java

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame frame) {
    Mat currentFrame = frame.rgba();

    //if we haven't figured out how to scale the size, do that
    if (xOffset == 0 || yOffset == 0) {
        //find the offset to the square of pixels Vuforia looks at
        xOffset = (int) ((currentFrame.cols() - size[0]) / 2.0);
        yOffset = (int) ((currentFrame.rows() - size[1]) / 2.0);

        //add the offset to all points calculated
        for (Point point : imagePoints) {
            point.x += xOffset;
            point.y += yOffset;
        }
        leftBall[0] += xOffset;
        leftBall[1] += yOffset;
        rightBall[0] += xOffset;
        rightBall[1] += yOffset;
    }

    //operation: subsquare
    //take a square mat we are 100% sure will have a ball in it
    //sum it up and find the average color

    drawSquare(currentFrame, leftBall, leftDist);
    drawSquare(currentFrame, rightBall, rightDist);

    Scalar color = new Scalar(0, 255, 0);

    for (int i = 0; i < 2; i++)
        for (int o = 0; o < 4; o++)
            Imgproc.line(currentFrame, imagePoints[o == 0 ? 3 + i * 4 : i * 4 + o - 1], imagePoints[i * 4 + o],
                    color);

    //connect the rectangles
    for (int i = 0; i < 4; i++)
        Imgproc.line(currentFrame, imagePoints[i], imagePoints[i + 4], color);

    //flip it for display
    Core.flip(currentFrame, currentFrame, -1);

    return currentFrame;
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VumarkOpenCV.java

private static void drawSquare(Mat src, int[] ballPoint, int ballDist) {
    //find average left and right ball square
    //find the average color for all the pixels in that square
    if (ballPoint[0] >= 0 && ballPoint[1] >= 0 && ballPoint[0] + ballDist < src.cols()
            && ballPoint[1] + ballDist < src.rows()) {
        double total[] = new double[3];
        for (int x = 0; x < ballDist; x++)
            for (int y = 0; y < ballDist; y++) {
                double[] pixel = src.get(y + ballPoint[1], x + ballPoint[0]);
                total[0] += pixel[0];
                total[1] += pixel[1];
                total[2] += pixel[2];
            }

        //make average color
        Scalar color = new Scalar(total[0] / (ballDist * ballDist), total[1] / (ballDist * ballDist),
                total[2] / (ballDist * ballDist));

        Imgproc.rectangle(src, new Point(ballPoint[0], ballPoint[1]),
                new Point(ballPoint[0] + ballDist, ballPoint[1] + ballDist), color, -1);
    }
}

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

public double getCenterVortexWidth() {
    Mat matIn = getCameraMat();
    if (matIn != null) {
        Log.d(TAG, "mat not null");
        Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_RGB2HSV);

        Mat matMasked = new Mat(matIn.rows(), matIn.cols(), CvType.CV_8UC1);
        Scalar vortexLowerThresh = new Scalar(37, 46, 34);
        Scalar vortexUpperThresh = new Scalar(163, 255, 255);

        //note: the vortex thresholds above are declared but unused; the mask is built from the blue presets
        Core.inRange(matIn, BLUE_LOWER_THRESH, BLUE_UPPER_THRESH, matMasked);

        //find the largest contour (the part of the image we are interested in)
        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Mat contourMat = matMasked.clone();
        Imgproc.findContours(contourMat, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);

        if (contours.size() > 0) {
            int largestContourIndex = 0;
            double lastContourArea = 0;
            for (int i = 0; i < contours.size(); i++) {
                double contourArea = Imgproc.contourArea(contours.get(i));
                if (contourArea > lastContourArea) {
                    largestContourIndex = i;
                    lastContourArea = contourArea;
                }
            }
            //get bounding rect
            Rect boundingRect = Imgproc
                    .boundingRect(new MatOfPoint(contours.get(largestContourIndex).toArray()));
            Core.rectangle(matIn, new Point(boundingRect.x, boundingRect.y),
                    new Point(boundingRect.x + boundingRect.width, boundingRect.y + boundingRect.height),
                    OPEN_CV_GREEN);

            saveMatToDisk(matIn);//debug only

            return boundingRect.width;
        }
    }
    return -1;
}
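
Scalar pairs such as vortexLowerThresh and vortexUpperThresh above exist to bound an HSV range for Core.inRange. A minimal sketch of that pattern, reusing the vortex values from the method (hsvIn is an assumed Mat already converted with COLOR_RGB2HSV):

Scalar lower = new Scalar(37, 46, 34);
Scalar upper = new Scalar(163, 255, 255);

//every pixel whose H, S and V all fall inside [lower, upper] becomes 255 in the mask
Mat mask = new Mat(hsvIn.rows(), hsvIn.cols(), CvType.CV_8UC1);
Core.inRange(hsvIn, lower, upper, mask);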

From source file:org.firstinspires.ftc.teamcode.VuforiaColor.java

public void runOpMode() throws InterruptedException {
    //        frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    //        backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    //        frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    //        backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    //        rollerMotor = hardwareMap.dcMotor.get("rollerMotor");
    //        backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    //        backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.FRONT;
    this.vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    piController = new PIController(.0016, 0.00013, 0.00023, 0.000012);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 1);
    VuforiaTrackables visionTargets = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheelsTarget = visionTargets.get(0);
    wheelsTarget.setName("Wheels"); // Wheels

    VuforiaTrackable toolsTarget = visionTargets.get(1);
    toolsTarget.setName("Tools"); // Tools

    VuforiaTrackable legosTarget = visionTargets.get(2);
    legosTarget.setName("Legos"); // Legos

    VuforiaTrackable gearsTarget = visionTargets.get(3);
    gearsTarget.setName("Gears"); // Gears

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(visionTargets);

    /**
     * We use units of mm here because that's the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotLength = 16 * mmPerInch;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;
    float mmPhoneZOffset = 5.5f * mmPerInch;

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gearsTarget.setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    toolsTarget.setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    /*
     * To place the Wheels and Legos Targets on the Blue Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Finally, we translate it along the Y axis towards the blue audience wall.
     */
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheelsTarget.setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legosTarget.setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of BACK camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, mmPhoneZOffset)
            .multiplied(Orientation.getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES,
                    0, 180, 0));
    RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) visionTargets.get(0).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(1).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(2).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(3).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    visionTargets.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    telemetry.addData("Loop", "Out");
    telemetry.update();
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take();
        telemetry.update();
        if (frame != null) {
            Image rgb = null;
            long numImages = frame.getNumImages();

            for (int i = 0; i < numImages; i++) {
                if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                    rgb = frame.getImage(i);
                    break;
                } //if
            } //for

            if (rgb != null) {
                Bitmap bmp = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
                bmp.copyPixelsFromBuffer(rgb.getPixels());

                img = new Mat();
                Utils.bitmapToMat(bmp, img);
                telemetry.addData("Img", "Converted");
                telemetry.update();
            }
        }

        for (VuforiaTrackable beacon : allTrackables) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (upperLeft.getData()[0] - lowerLeft.getData()[0]);
                    int width = (int) (upperRight.getData()[1] - upperLeft.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 1
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 1 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);
                    telemetry.addData(img.rows() + "", img.cols());
                    telemetry.update();

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting it
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", piController.processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}
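
One caveat in the centroid loops above: Moments.get_m00() is zero for degenerate contours, so the divisions yield NaN or infinity and the int cast produces garbage coordinates. A guarded sketch of that centroid step (not from the original source):

Point center = null;
for (MatOfPoint contour : colorDetector.getContours()) {
    Moments p = Imgproc.moments(contour, false);

    //skip zero-area contours to avoid dividing by zero
    if (p.get_m00() != 0) {
        center = new Point((int) (p.get_m10() / p.get_m00()),
                (int) (p.get_m01() / p.get_m00()));
    }
}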

From source file:org.firstinspires.ftc.teamcode.VuforiaMovement.java

public void runOpMode() throws InterruptedException {
    frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    rollerMotor = hardwareMap.dcMotor.get("rollerMotor");

    backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    this.vuforia = new VuforiaLocalizerImplSubclass(parameters);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("Wheels");
    beacons.get(1).setName("Tools");
    beacons.get(2).setName("Legos");
    beacons.get(3).setName("Gears");

    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;

    // Initialize the location of the targets and phone on the field
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(0).setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(1).setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(2).setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(3).setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    RobotLog.ii(TAG, "Phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) beacons.get(0).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(1).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(2).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(3).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    beacons.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    movingToFirstBeacon = false;
    liningUpWithFirstBeacon = false;
    movingToSecondBeacon = false;
    liningUpWithSecondBeacon = false;
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        if (movingToFirstBeacon) {
            // TODO Estimate distance to the beacon from a point TBD
            // TODO Estimate distance to move forward and turn to face the beacon until second movement set
            // Move this outside the loop?

            // Move forward until you see the beacon
            while (movingToFirstBeacon) {
                // Move in increments to minimize the times you check the trackables
                for (int i = 0; i < 50; i++) {
                    frontRightMotor.setPower(1);
                    backRightMotor.setPower(1);
                    frontLeftMotor.setPower(1);
                    backLeftMotor.setPower(1);
                }

                for (VuforiaTrackable beacon : beacons) {
                    // Add beacon to telemetry if visible
                    if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                        visibleTarget = beacon.getName();
                        telemetry.addData(visibleTarget, "Visible");
                    }

                    OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon
                            .getListener()).getUpdatedRobotLocation();
                    if (robotLocationTransform != null) {
                        lastLocation = robotLocationTransform;
                    }
                }

                // Move to the beacon until the beacon is in sight
                if (lastLocation != null) {
                    movingToFirstBeacon = false; // Only execute this once
                }
            }
        }

        while (liningUpWithFirstBeacon) {
            for (VuforiaTrackable beacon : beacons) {
                // Add beacon to telemetry if visible
                if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                    visibleTarget = beacon.getName();
                    telemetry.addData(visibleTarget, "Visible");
                }

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                        .getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }

            RobotMovement movement = processLocation(lastLocation, visibleTarget);
            if (movement.isNoMovement()) {
                liningUpWithFirstBeacon = false;
            }

            processMovement(movement);
        }

        if (movingToSecondBeacon) {
            // TODO Estimate the movements/distance from the first beacon to the second
            movingToSecondBeacon = false; // Only execute this once
        }

        if (vuforia.rgb != null && !isButtonHit) {
            Bitmap bmp = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bmp.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            img = new Mat();
            Utils.bitmapToMat(bmp, img);
        }

        for (VuforiaTrackable beacon : beacons) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */
                // DOES NOT WORK???
                degreesToTurn = Math.toDegrees(Math.atan2(translation.get(1), translation.get(2)));
                telemetry.addData("Degrees-", degreesToTurn);
                // TODO Check degree turning threshold
                if (Math.abs(degreesToTurn) > 15) {
                    // Turn after doing calculating transformations
                    needToTurn = true;
                }

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);
                    telemetry.addData(beacon.getName() + "-Degrees", degreesToTurn);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (lowerLeft.getData()[0] - upperLeft.getData()[0]);
                    int width = (int) (upperLeft.getData()[1] - upperRight.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 0
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 0 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit && !needToTurn) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting it
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}

From source file:org.lasarobotics.vision.detection.ColorBlobDetector.java

License:Open Source License

/**
 * Get the color center of the detector (average of min and max values)
 *
 * @return Color center as ColorHSV
 */
public ColorHSV getColorCenter() {
    //if (isRadiusSet)
    //    return (ColorHSV)color.convertColor(ColorSpace.HSV);
    double[] l = lowerBound.getScalar().val;
    double[] u = upperBound.getScalar().val;
    Scalar mean = new Scalar((l[0] + u[0]) / 2, (l[1] + u[1]) / 2, (l[2] + u[2]) / 2);
    return new ColorHSV(mean);
}
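
For example, with a lowerBound of (100, 150, 50) and an upperBound of (130, 255, 255), the center works out to new Scalar(115, 202.5, 152.5) before conversion to ColorHSV (values chosen here only to illustrate the arithmetic).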

From source file:org.lasarobotics.vision.detection.ColorBlobDetector.java

License:Open Source License

/**
 * Get color radius from center color
 *
 * @return Color radius as Scalar (in HSV form)
 */
public Scalar getColorRadius() {
    double[] l = lowerBound.getScalar().val;
    double[] u = upperBound.getScalar().val;
    double[] mean = getColorCenter().getScalar().val;
    return new Scalar(Math.abs(mean[0] - l[0]), Math.abs(mean[1] - l[1]), Math.abs(mean[2] - l[2]));
}
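
Taken together, getColorCenter and getColorRadius invert the bound representation: center minus radius reproduces the lower bound, and center plus radius the upper bound. A sketch of that round trip (detector is an assumed ColorBlobDetector instance):

Scalar center = detector.getColorCenter().getScalar();
Scalar radius = detector.getColorRadius();

//rebuild the bounds component-wise from center and radius
Scalar lower = new Scalar(center.val[0] - radius.val[0], center.val[1] - radius.val[1],
        center.val[2] - radius.val[2]);
Scalar upper = new Scalar(center.val[0] + radius.val[0], center.val[1] + radius.val[1],
        center.val[2] + radius.val[2]);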