Example usage for org.opencv.core Point Point

List of usage examples for org.opencv.core Point Point

Introduction

On this page you can find example usages of the org.opencv.core Point constructor.

Prototype

public Point(double x, double y) 

Source Link

Usage

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Create a Mat that marks where the measured result falls within the
 * calibrated color range, drawn as a filled arrow plus a circle.
 *
 * @param colors        the calibrated range of colors (JSON array of steps)
 * @param result        the measured result value
 * @param colorDetected the color extracted from the patch
 * @param width         the width of the mat to be returned
 * @return the Mat with the marker drawn (blank if result exceeds the range)
 */
@NonNull
public static Mat createValueMeasuredMatSingle(@NonNull JSONArray colors, double result,
        @NonNull ColorDetected colorDetected, int width) {

    Mat mat = new Mat(MEASURE_LINE_HEIGHT, width, CvType.CV_8UC3, LAB_WHITE);
    double stepWidth = (double) width / (double) colors.length();

    try {
        // Walk the calibration steps until the result fits below the next step's value.
        for (int i = 0; i < colors.length(); i++) {

            int nextIndex = Math.min(i + 1, colors.length() - 1);
            double nextValue = colors.getJSONObject(nextIndex).getDouble(SensorConstants.VALUE);

            if (result > nextValue) {
                continue;
            }

            double value = colors.getJSONObject(i).getDouble(SensorConstants.VALUE);
            Scalar resultColor = colorDetected.getLab();

            // Horizontal shift (pixels) for the fraction of this step the result covers.
            double shiftX = stepWidth * ((result - value) / (nextValue - value));
            double left = stepWidth * i;
            double right = left + stepWidth - X_MARGIN;

            double centerX = Math.max(10d, left + (right - left) / 2 + shiftX);
            Point circleCenter = new Point(centerX, SINGLE_MEASURE_LINE_TOP_MARGIN);

            // Downward-pointing triangle directly beneath the circle.
            double baseY = circleCenter.y + ARROW_TRIANGLE_LENGTH - 2;
            double tipY = (circleCenter.y + ARROW_TRIANGLE_LENGTH * 2) - 2;
            MatOfPoint arrow = new MatOfPoint(
                    new Point(circleCenter.x - ARROW_TRIANGLE_LENGTH, baseY),
                    new Point(circleCenter.x + ARROW_TRIANGLE_LENGTH, baseY),
                    new Point(circleCenter.x, tipY),
                    new Point(circleCenter.x - ARROW_TRIANGLE_LENGTH, baseY));

            Imgproc.fillConvexPoly(mat, arrow, resultColor);
            Imgproc.circle(mat, circleCenter, CIRCLE_RADIUS, resultColor, -1, Imgproc.LINE_AA, 0);

            break;
        }
    } catch (JSONException e) {
        Timber.e(e);
    }

    return mat;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Create a Mat to show the point at which the matched color occurs for a
 * group patch test: one stacked color rectangle per detected patch, with a
 * downward arrow below the stack.
 *
 * @param colors         the calibrated range of colors (JSON array of steps)
 * @param result         the measured result value
 * @param colorsDetected the colors extracted from the patches, one per patch in the group
 * @param width          the width of the mat to be returned
 * @return the Mat with the marker drawn (blank if result is outside the range)
 */
@NonNull
public static Mat createValueMeasuredMatGroup(@NonNull JSONArray colors, double result,
        @NonNull ColorDetected[] colorsDetected, int width) {
    // One indicator row per detected patch color.
    int height = COLOR_INDICATOR_SIZE * colorsDetected.length;
    Mat valueMeasuredMat = new Mat(height, width, CvType.CV_8UC3, LAB_WHITE);
    // Horizontal pixels spanned by one calibration step.
    double xTranslate = (double) width / (double) colors.length();

    try {

        // determine where the circle should be placed
        // NOTE(review): the single-patch variant (createValueMeasuredMatSingle) uses
        // 'result <= nextValue'; with strict '<' here, a result exactly equal to the
        // highest step value draws nothing — confirm which is intended.
        for (int d = 0; d < colors.length(); d++) {

            double nextValue = colors.getJSONObject(Math.min(d + 1, colors.length() - 1))
                    .getDouble(SensorConstants.VALUE);

            Scalar resultColor = null;
            if (result < nextValue) {

                double value = colors.getJSONObject(d).getDouble(SensorConstants.VALUE);

                //calculate number of pixels needed to translate in x direction
                // NOTE(review): if nextValue == value this divides by zero (NaN) —
                // assumes calibration steps are strictly increasing; confirm.
                double transX = xTranslate * ((result - value) / (nextValue - value));

                double left = xTranslate * d;
                double right = left + xTranslate - X_MARGIN;
                // Clamp to the left margin when the marker would fall off the left edge.
                Point point = (transX) + xTranslate * d < X_MARGIN
                        ? new Point(X_MARGIN, MEASURE_LINE_TOP_MARGIN)
                        : new Point(left + (right - left) / 2 + transX, MEASURE_LINE_TOP_MARGIN);

                double offset = 5;
                // Stack one filled rectangle per detected patch color below the marker point.
                for (ColorDetected aColorsDetected : colorsDetected) {
                    resultColor = aColorsDetected.getLab();

                    Imgproc.rectangle(valueMeasuredMat,
                            new Point(point.x - ARROW_TRIANGLE_LENGTH, point.y + offset),
                            new Point(point.x + ARROW_TRIANGLE_LENGTH,
                                    point.y + (ARROW_TRIANGLE_LENGTH * 2) + offset),
                            resultColor, -1, Imgproc.LINE_AA, 0);

                    offset += 2 * ARROW_TRIANGLE_LENGTH;
                }

                // Downward arrow drawn below the stacked rectangles, in the last patch color.
                // NOTE(review): if colorsDetected is empty, resultColor is still null here —
                // verify callers never pass an empty array.
                MatOfPoint matOfPoint = new MatOfPoint(
                        new Point((point.x - ARROW_TRIANGLE_LENGTH), point.y + offset),
                        new Point((point.x + ARROW_TRIANGLE_LENGTH), point.y + offset),
                        new Point(point.x, point.y + ARROW_TRIANGLE_LENGTH + offset),
                        new Point((point.x - ARROW_TRIANGLE_LENGTH), point.y + offset));

                Imgproc.fillConvexPoly(valueMeasuredMat, matOfPoint, resultColor);

                break;
            }
        }
    } catch (JSONException e) {
        Timber.e(e);
    }

    return valueMeasuredMat;
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw the unfolded Rubik Cube layout twice: the upper copy shows faces as
 * directly observed by the Face Recognizer, and the lower copy shows faces
 * rotationally transformed to the unfolded-layout convention expected by the
 * cube logic solver.
 *
 * @param image the annotation image to draw into
 */
private void drawFlatCubeLayoutRepresentations(Mat image) {

    Core.rectangle(image, new Point(0, 0), new Point(450, 720), ColorTileEnum.BLACK.cvColor, -1);

    final int tSize = 35; // Tile Size in pixels

    // Placement of each face in the unfolded cross layout, in tile units.
    final FaceNameEnum[] faceNames = { FaceNameEnum.UP, FaceNameEnum.LEFT, FaceNameEnum.FRONT,
            FaceNameEnum.RIGHT, FaceNameEnum.BACK, FaceNameEnum.DOWN };
    final int[] tileCols = { 3, 0, 3, 6, 9, 3 };
    final int[] tileRows = { 0, 3, 3, 3, 3, 6 };

    // Upper copy: faces orientated as per Face Observation (N, M axis).
    for (int i = 0; i < faceNames.length; i++) {
        drawFlatFaceRepresentation(image, stateModel.getFaceByName(faceNames[i]),
                tileCols[i] * tSize, tileRows[i] * tSize + 70, tSize, true);
    }

    // Lower copy: faces transformed (rotated) per the Unfolded Layout convention.
    for (int i = 0; i < faceNames.length; i++) {
        drawFlatFaceRepresentation(image, stateModel.getFaceByName(faceNames[i]),
                tileCols[i] * tSize, tileRows[i] * tSize + 70 + 350, tSize, false);
    }
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Logical Face Cube Layout Representations
 * /*w w w  .ja  v a2s . c o m*/
 * Draw the Rubik Face at the specified location.  
 * 
  * @param image
  * @param rubikFace
  * @param x
  * @param y
  * @param tSize
  * @param observed  If true, use observed tile array, otherwise use transformed tile array.
  */
private void drawFlatFaceRepresentation(Mat image, RubikFace rubikFace, int x, int y, int tSize,
        boolean observed) {

    if (rubikFace == null) {
        Core.rectangle(image, new Point(x, y), new Point(x + 3 * tSize, y + 3 * tSize),
                ColorTileEnum.GREY.cvColor, -1);
    }

    else if (rubikFace.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED) {
        Core.rectangle(image, new Point(x, y), new Point(x + 3 * tSize, y + 3 * tSize),
                ColorTileEnum.GREY.cvColor, -1);
    } else

        for (int n = 0; n < 3; n++) {
            for (int m = 0; m < 3; m++) {

                // Choose observed rotation or transformed rotation.
                ColorTileEnum colorTile = observed == true ? rubikFace.observedTileArray[n][m]
                        : rubikFace.transformedTileArray[n][m];

                // Draw tile
                if (colorTile != null)
                    Core.rectangle(image, new Point(x + tSize * n, y + tSize * m),
                            new Point(x + tSize * (n + 1), y + tSize * (m + 1)), colorTile.cvColor, -1);
                else
                    Core.rectangle(image, new Point(x + tSize * n, y + tSize * m),
                            new Point(x + tSize * (n + 1), y + tSize * (m + 1)), ColorTileEnum.GREY.cvColor,
                            -1);
            }
        }
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Face Overlay Annotation/* ww  w. j av a  2  s .c  o m*/
 * 
 * @param image
 */
private void drawFaceOverlayAnnotation(Mat img) {

    RubikFace face = stateModel.activeRubikFace;

    if (MenuAndParams.faceOverlayDisplay == false)
        return;

    if (face == null)
        return;

    Scalar color = ColorTileEnum.BLACK.cvColor;
    switch (face.faceRecognitionStatus) {
    case UNKNOWN:
    case INSUFFICIENT:
    case INVALID_MATH:
        color = ColorTileEnum.RED.cvColor;
        break;
    case BAD_METRICS:
    case INCOMPLETE:
    case INADEQUATE:
    case BLOCKED:
    case UNSTABLE:
        color = ColorTileEnum.ORANGE.cvColor;
        break;
    case SOLVED:
        if (stateModel.gestureRecogniztionState == GestureRecogniztionStateEnum.STABLE
                || stateModel.gestureRecogniztionState == GestureRecogniztionStateEnum.NEW_STABLE)
            color = ColorTileEnum.GREEN.cvColor;
        else
            color = ColorTileEnum.YELLOW.cvColor;
        break;
    }

    // Adjust drawing grid to start at edge of cube and not center of a tile.
    double x = face.lmsResult.origin.x - (face.alphaLatticLength * Math.cos(face.alphaAngle)
            + face.betaLatticLength * Math.cos(face.betaAngle)) / 2;
    double y = face.lmsResult.origin.y - (face.alphaLatticLength * Math.sin(face.alphaAngle)
            + face.betaLatticLength * Math.sin(face.betaAngle)) / 2;

    for (int n = 0; n < 4; n++) {
        Core.line(img,
                new Point(x + n * face.alphaLatticLength * Math.cos(face.alphaAngle),
                        y + n * face.alphaLatticLength * Math.sin(face.alphaAngle)),
                new Point(
                        x + (face.betaLatticLength * 3 * Math.cos(face.betaAngle))
                                + (n * face.alphaLatticLength * Math.cos(face.alphaAngle)),
                        y + (face.betaLatticLength * 3 * Math.sin(face.betaAngle))
                                + (n * face.alphaLatticLength * Math.sin(face.alphaAngle))),
                color, 3);
    }

    for (int m = 0; m < 4; m++) {
        Core.line(img,
                new Point(x + m * face.betaLatticLength * Math.cos(face.betaAngle),
                        y + m * face.betaLatticLength * Math.sin(face.betaAngle)),
                new Point(
                        x + (face.alphaLatticLength * 3 * Math.cos(face.alphaAngle))
                                + (m * face.betaLatticLength * Math.cos(face.betaAngle)),
                        y + (face.alphaLatticLength * 3 * Math.sin(face.alphaAngle))
                                + (m * face.betaLatticLength * Math.sin(face.betaAngle))),
                color, 3);
    }

    //      // Draw a circule at the Rhombus reported center of each tile.
    //      for(int n=0; n<3; n++) {
    //         for(int m=0; m<3; m++) {
    //            Rhombus rhombus = faceRhombusArray[n][m];
    //            if(rhombus != null)
    //               Core.circle(img, rhombus.center, 5, Constants.ColorBlue, 3);
    //         }
    //      }
    //      
    //      // Draw the error vector from center of tile to actual location of Rhombus.
    //      for(int n=0; n<3; n++) {
    //         for(int m=0; m<3; m++) {
    //            Rhombus rhombus = faceRhombusArray[n][m];
    //            if(rhombus != null) {
    //               
    //               Point tileCenter = getTileCenterInPixels(n, m);            
    //               Core.line(img, tileCenter, rhombus.center, Constants.ColorRed, 3);
    //               Core.circle(img, tileCenter, 5, Constants.ColorBlue, 1);
    //            }
    //         }
    //      }

    //      // Draw reported Logical Tile Color Characters in center of each tile.
    //      if(face.faceRecognitionStatus == FaceRecognitionStatusEnum.SOLVED)
    //         for(int n=0; n<3; n++) {
    //            for(int m=0; m<3; m++) {
    //
    //               // Draw tile character in UV plane
    //               Point tileCenterInPixels = face.getTileCenterInPixels(n, m);
    //               tileCenterInPixels.x -= 10.0;
    //               tileCenterInPixels.y += 10.0;
    //               String text = Character.toString(face.observedTileArray[n][m].symbol);
    //               Core.putText(img, text, tileCenterInPixels, Constants.FontFace, 3, ColorTileEnum.BLACK.cvColor, 3);
    //            }
    //         }

    // Also draw recognized Rhombi for clarity.
    if (face.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        for (Rhombus rhombus : face.rhombusList)
            rhombus.draw(img, ColorTileEnum.GREEN.cvColor);
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Rhombus Recognition Metrics/*w ww .  ja  v  a2  s .c o  m*/
 * 
 * @param image
 * @param rhombusList
 */
private void drawRhombusRecognitionMetrics(Mat image, List<Rhombus> rhombusList) {

    Core.rectangle(image, new Point(0, 0), new Point(450, 720), ColorTileEnum.BLACK.cvColor, -1);

    int totalNumber = 0;
    int totalNumberValid = 0;

    int totalNumberUnknow = 0;
    int totalNumberNot4Points = 0;
    int totalNumberNotConvex = 0;
    int totalNumberBadArea = 0;
    int totalNumberClockwise = 0;
    int totalNumberOutlier = 0;

    // Loop over Rhombus list and total status types.
    for (Rhombus rhombus : rhombusList) {

        switch (rhombus.status) {
        case NOT_PROCESSED:
            totalNumberUnknow++;
            break;
        case NOT_4_POINTS:
            totalNumberNot4Points++;
            break;
        case NOT_CONVEX:
            totalNumberNotConvex++;
            break;
        case AREA:
            totalNumberBadArea++;
            break;
        case CLOCKWISE:
            totalNumberClockwise++;
            break;
        case OUTLIER:
            totalNumberOutlier++;
            break;
        case VALID:
            totalNumberValid++;
            break;
        default:
            break;
        }
        totalNumber++;
    }

    Core.putText(image, "Num Unknown: " + totalNumberUnknow, new Point(50, 300), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Num Not 4 Points: " + totalNumberNot4Points, new Point(50, 350), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Num Not Convex: " + totalNumberNotConvex, new Point(50, 400), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Num Bad Area: " + totalNumberBadArea, new Point(50, 450), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Num Clockwise: " + totalNumberClockwise, new Point(50, 500), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Num Outlier: " + totalNumberOutlier, new Point(50, 550), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Num Valid: " + totalNumberValid, new Point(50, 600), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, "Total Num: " + totalNumber, new Point(50, 650), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Diagnostic Text Rendering of Rubik Face Metrics
 * /*from   w  ww . jav  a 2 s.  co m*/
 * @param image
 * @param activeRubikFace
 */
private void drawRubikFaceMetrics(Mat image, RubikFace activeRubikFace) {

    Core.rectangle(image, new Point(0, 0), new Point(450, 720), ColorTileEnum.BLACK.cvColor, -1);

    if (activeRubikFace == null)
        return;

    RubikFace face = activeRubikFace;
    drawFlatFaceRepresentation(image, face, 50, 50, 50, true);

    Core.putText(image, "Status = " + face.faceRecognitionStatus, new Point(50, 300), Constants.FontFace, 2,
            ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("AlphaA = %4.1f", face.alphaAngle * 180.0 / Math.PI), new Point(50, 350),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("BetaA  = %4.1f", face.betaAngle * 180.0 / Math.PI), new Point(50, 400),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("AlphaL = %4.0f", face.alphaLatticLength), new Point(50, 450),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("Beta L = %4.0f", face.betaLatticLength), new Point(50, 500),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("Gamma  = %4.2f", face.gammaRatio), new Point(50, 550),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("Sigma  = %5.0f", face.lmsResult.sigma), new Point(50, 600),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("Moves  = %d", face.numRhombusMoves), new Point(50, 650),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    Core.putText(image, String.format("#Rohmbi= %d", face.rhombusList.size()), new Point(50, 700),
            Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw face color metrics: the face's measured tile colors plotted in the UV
 * chrominance plane (centered at 256, 400) against the pre-defined rubik
 * calibration colors, plus a 1D luminosity scale on the right side showing
 * directly measured values.
 *
 * @param image image to draw into
 * @param face  face whose measured colors are plotted; ignored unless SOLVED
 */
private void drawFaceColorMetrics(Mat image, RubikFace face) {

    Core.rectangle(image, new Point(0, 0), new Point(570, 720), ColorTileEnum.BLACK.cvColor, -1);

    if (face == null || face.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        return;

    // Simple grid: UV plane bounding box plus the two axis lines.
    Core.rectangle(image, new Point(0, 144), new Point(512, 656), ColorTileEnum.WHITE.cvColor);
    Core.line(image, new Point(256, 144), new Point(256, 656), ColorTileEnum.WHITE.cvColor);
    Core.line(image, new Point(0, 400), new Point(512, 400), ColorTileEnum.WHITE.cvColor);

    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {

            double[] measuredRgb = face.measuredColorArray[n][m];
            double[] measuredYuv = Util.getYUVfromRGB(measuredRgb);

            double lumScaled = measuredYuv[0] * 2 - 256;
            double uScaled = measuredYuv[1] * 2;
            double vScaled = measuredYuv[2] * 2;

            String symbol = Character.toString(face.observedTileArray[n][m].symbol);

            // Tile character plotted by chrominance in the UV plane.
            Core.putText(image, symbol, new Point(uScaled + 256, vScaled + 400),
                    Constants.FontFace, 3, face.observedTileArray[n][m].cvColor, 3);

            // Tile character on the OUTSIDE right edge, positioned by measured luminosity.
            Core.putText(image, symbol, new Point(512 + 20, lumScaled + 400), Constants.FontFace, 3,
                    face.observedTileArray[n][m].cvColor, 3);
        }
    }

    // Calibration colors as solid dots in the UV plane (original drawing order kept).
    Scalar[] uvDotColors = { ColorTileEnum.RED.rubikColor, ColorTileEnum.ORANGE.rubikColor,
            ColorTileEnum.YELLOW.rubikColor, ColorTileEnum.GREEN.rubikColor,
            ColorTileEnum.BLUE.rubikColor, ColorTileEnum.WHITE.rubikColor };
    for (Scalar calColor : uvDotColors) {
        double[] yuv = Util.getYUVfromRGB(calColor.val);
        Core.circle(image, new Point(2 * yuv[1] + 256, 2 * yuv[2] + 400), 10, calColor, -1);
    }

    // Calibration colors as short ticks on the right-side luminosity axis
    // (original drawing order kept: green precedes yellow here).
    Scalar[] lumTickColors = { ColorTileEnum.RED.rubikColor, ColorTileEnum.ORANGE.rubikColor,
            ColorTileEnum.GREEN.rubikColor, ColorTileEnum.YELLOW.rubikColor,
            ColorTileEnum.BLUE.rubikColor, ColorTileEnum.WHITE.rubikColor };
    for (Scalar calColor : lumTickColors) {
        double tickY = -256 + 2 * Util.getYUVfromRGB(calColor.val)[0] + 400;
        Core.line(image, new Point(502, tickY), new Point(522, tickY), calColor, 3);
    }
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Cube Color Metrics/* ww  w  .  jav a2  s.c o m*/
 * 
 * Draw a 2D representation of observed tile colors vs.  pre-defined constant rubik tile colors. 
 * Also, right side 1D representation of measured and adjusted luminous.  See ...... for 
 * existing luminous correction.
 * 
 * @param image
 */
private void drawCubeColorMetrics(Mat image) {

    Core.rectangle(image, new Point(0, 0), new Point(570, 720), ColorTileEnum.BLACK.cvColor, -1);

    // Draw simple grid
    Core.rectangle(image, new Point(-256 + 256, -256 + 400), new Point(256 + 256, 256 + 400),
            ColorTileEnum.WHITE.cvColor);
    Core.line(image, new Point(0 + 256, -256 + 400), new Point(0 + 256, 256 + 400),
            ColorTileEnum.WHITE.cvColor);
    Core.line(image, new Point(-256 + 256, 0 + 400), new Point(256 + 256, 0 + 400),
            ColorTileEnum.WHITE.cvColor);

    // Draw measured tile color as solid small circles on both the UV plane and the Y axis.
    for (RubikFace face : stateModel.nameRubikFaceMap.values()) {
        for (int n = 0; n < 3; n++) {
            for (int m = 0; m < 3; m++) {

                double[] measuredTileColor = face.measuredColorArray[n][m];
                //              Log.e(Constants.TAG, "RGB: " + logicalTileArray[n][m].character + "=" + actualTileColor[0] + "," + actualTileColor[1] + "," + actualTileColor[2] + " x=" + x + " y=" + y );
                double[] measuredTileColorYUV = Util.getYUVfromRGB(measuredTileColor);
                //              Log.e(Constants.TAG, "Lum: " + logicalTileArray[n][m].character + "=" + acutalTileYUV[0]);

                double luminousScaled = measuredTileColorYUV[0] * 2 - 256;
                double uChromananceScaled = measuredTileColorYUV[1] * 2;
                double vChromananceScaled = measuredTileColorYUV[2] * 2;

                // Draw solid circle in UV plane
                Core.circle(image, new Point(uChromananceScaled + 256, vChromananceScaled + 400), 10,
                        new Scalar(face.observedTileArray[n][m].cvColor.val), -1);

                // Draw line on OUTSIDE right side for Y axis as directly measured.
                Core.line(image, new Point(522 + 20, luminousScaled + 400),
                        new Point(542 + 20, luminousScaled + 400), face.observedTileArray[n][m].cvColor, 3);
                // Log.e(Constants.TAG, "Lum: " + logicalTileArray[n][m].character + "=" + luminousScaled);
            }
        }
    }

    // Draw predicted tile colors (i.e. "rubikColor" from Constants) as a large circle in UV plane and short solid line in the Y plane.
    for (ColorTileEnum colorTile : ColorTileEnum.values()) {

        if (colorTile.isRubikColor == false)
            continue;

        // Target color we are expecting measurement to be.
        double[] targetColorYUV = Util.getYUVfromRGB(colorTile.rubikColor.val);

        // Draw Color Calibration in UV plane as rectangle
        double x = 2 * targetColorYUV[1] + 256;
        double y = 2 * targetColorYUV[2] + 400;

        // Open large circle in UV plane
        Core.circle(image, new Point(x, y), 15, colorTile.cvColor, +3);

        // Open large circle in Y plane
        Core.circle(image, new Point(512, -256 + 2 * targetColorYUV[0] + 400), 15, colorTile.cvColor, +3);
    }
}

From source file:org.ar.rubik.Annotation.java

License:Open Source License

/**
 * Draw Cube Diagnostic Metrics/* w  ww  .j  a  v  a 2  s .c o  m*/
 * 
 * Count and display how many colors of each tile were found over the entire cube.
 * Also output the total tile count of each color.
 * 
 * @param image
 */
public void drawCubeMetrics(Mat image) {

    Core.rectangle(image, new Point(0, 0), new Point(450, 720), ColorTileEnum.BLACK.cvColor, -1);

    // Draw Face Types and their center tile color
    int pos = 1;
    for (RubikFace rubikFace : stateModel.nameRubikFaceMap.values()) {
        Core.putText(image,
                String.format("%s:    %s", rubikFace.faceNameEnum, rubikFace.observedTileArray[1][1]),
                new Point(50, 100 + 50 * pos++), Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    }

    // Count how many tile colors entire cube has as a first check.
    int[] numColorTilesArray = new int[] { 0, 0, 0, 0, 0, 0 };
    for (RubikFace rubikFace : stateModel.nameRubikFaceMap.values()) {
        for (int n = 0; n < 3; n++) {
            for (int m = 0; m < 3; m++) {
                numColorTilesArray[rubikFace.observedTileArray[n][m].ordinal()]++;
            }
        }
    }

    // Draw total tile count of each tile color.
    for (ColorTileEnum colorTile : ColorTileEnum.values()) {
        if (colorTile.isRubikColor == true) {
            int count = numColorTilesArray[colorTile.ordinal()];
            Core.putText(image, String.format("%s:  %d", colorTile, count), new Point(50, 100 + 50 * pos++),
                    Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
        }
    }
}