Example usage for org.opencv.android Utils bitmapToMat

List of usage examples for org.opencv.android Utils bitmapToMat

Introduction

On this page you can find example usage for org.opencv.android Utils bitmapToMat.

Prototype

public static void bitmapToMat(Bitmap bmp, Mat mat) 

Document

Short form of bitmapToMat(bmp, mat, unPremultiplyAlpha=false): the Bitmap is converted without un-premultiplying its alpha channel.
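
Before the full examples below, here is a minimal sketch of the usual round trip (assuming the OpenCV native library is already loaded). bitmapToMat accepts ARGB_8888 or RGB_565 bitmaps and produces a 4-channel RGBA Mat (CV_8UC4); matToBitmap writes a CV_8UC1/3/4 Mat back into a Bitmap of the same size.

import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

// Bitmap -> Mat -> (processing) -> Bitmap, illustrated with a greyscale conversion.
static Bitmap toGreyscale(Bitmap input) {
    Mat mat = new Mat();
    Utils.bitmapToMat(input, mat);                       // mat becomes CV_8UC4 (RGBA)
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY); // now CV_8UC1
    Bitmap output = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mat, output);                      // sizes must match
    mat.release();
    return output;
}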

Usage

From source file: ac.robinson.ticqr.TickBoxImageParserTask.java

License: Apache License

@Override
protected ArrayList<PointF> doInBackground(Void... unused) {
    Log.d(TAG, "Searching for tick boxes of " + mBoxSize + " size");

    // we look for *un-ticked* boxes, rather than ticked, as they are uniform in appearance (and hence easier to
    // detect) - they show up as a box within a box
    ArrayList<PointF> centrePoints = new ArrayList<>();
    int minimumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize, 2));
    int maximumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize * 1.35f, 2));
    int minimumInnerBoxArea = (int) Math.round(Math.pow(mBoxSize * 0.5f, 2));

    // image adjustment - blurSize, blurSTDev and adaptiveThresholdSize must not be even numbers
    int blurSize = 9;
    int blurSTDev = 3;
    int adaptiveThresholdSize = Math.round(mBoxSize * 3); // (oddness ensured below)
    int adaptiveThresholdC = 4; // value to add to the mean (can be negative or zero)
    adaptiveThresholdSize = adaptiveThresholdSize % 2 == 0 ? adaptiveThresholdSize + 1 : adaptiveThresholdSize;

    // how similar the recognised polygon must be to its actual contour - lower is more similar
    float outerPolygonSimilarity = 0.045f;
    float innerPolygonSimilarity = 0.075f; // don't require as much accuracy for the inner part of the tick box

    // how large the maximum internal angle can be (e.g., for checking square shape)
    float maxOuterAngleCos = 0.3f;
    float maxInnerAngleCos = 0.4f;

    // use OpenCV to recognise boxes that have a box inside them - i.e. an un-ticked tick box
    // see: http://stackoverflow.com/a/11427501
    // Bitmap newBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true); // not needed
    Mat bitMat = new Mat();
    Utils.bitmapToMat(mBitmap, bitMat);
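    // bitMat now holds a 4-channel RGBA copy of the bitmap (CV_8UC4)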

    // blur and convert to grey
    // alternative (less flexible): Imgproc.medianBlur(bitMat, bitMat, blurSize);
    Imgproc.GaussianBlur(bitMat, bitMat, new Size(blurSize, blurSize), blurSTDev, blurSTDev);
    Imgproc.cvtColor(bitMat, bitMat, Imgproc.COLOR_RGB2GRAY); // need 8uC1 (1 channel, unsigned char) image type

    // perform adaptive thresholding to detect edges
    // alternative (slower): Imgproc.Canny(bitMat, bitMat, 10, 20, 3, false);
    Imgproc.adaptiveThreshold(bitMat, bitMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY,
            adaptiveThresholdSize, adaptiveThresholdC);

    // get the contours in the image, and their hierarchy
    Mat hierarchyMat = new Mat();
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(bitMat, contours, hierarchyMat, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    if (DEBUG) {
        Imgproc.drawContours(bitMat, contours, -1, new Scalar(30, 255, 255), 1);
    }

    // parse the contours and look for a box containing another box, with similar enough sizes
    int numContours = contours.size();
    ArrayList<Integer> searchedContours = new ArrayList<>();
    Log.d(TAG, "Found " + numContours + " possible tick box areas");
    if (numContours > 0 && !hierarchyMat.empty()) {
        for (int i = 0; i < numContours; i++) {

            // the original detected contour
            MatOfPoint boxPoints = contours.get(i);

            // hierarchy key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
            int childBox = (int) hierarchyMat.get(0, i)[2]; // usually the largest child (as we're doing RETR_TREE)
            if (childBox == -1) { // we only want elements that have children
                continue;
            } else {
                if (searchedContours.contains(childBox)) {
                    if (DEBUG) {
                        Log.d(TAG, "Ignoring duplicate box at first stage: " + childBox);
                    }
                    continue;
                } else {
                    searchedContours.add(childBox);
                }
            }

            // discard smaller (i.e. noise) outer box areas as soon as possible for speed
            // used to do Imgproc.isContourConvex(outerPoints) later, but the angle check covers this, so no need
            double originalArea = Math.abs(Imgproc.contourArea(boxPoints));
            if (originalArea < minimumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too small");
                // }
                continue;
            }
            if (originalArea > maximumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too big");
                // }
                continue;
            }

            // simplify the contours of the outer box - we want to detect four-sided shapes only
            MatOfPoint2f boxPoints2f = new MatOfPoint2f(boxPoints.toArray()); // Point2f for approxPolyDP
            Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                    outerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true); // simplify the contour
            if (boxPoints2f.height() != 4) { // height is number of points
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer box not 4 points");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, angle-wise
            org.opencv.core.Point[] boxPointsArray = boxPoints2f.toArray();
            double maxCosine = 0;
            for (int j = 0; j < 4; j++) {
                org.opencv.core.Point pL = boxPointsArray[j];
                org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
            }
            if (maxCosine > maxOuterAngleCos) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer angles not square enough");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, line length-wise
            double minLine = Double.MAX_VALUE;
            double maxLine = 0;
            for (int p = 1; p < 4; p++) {
                org.opencv.core.Point p1 = boxPointsArray[p - 1];
                org.opencv.core.Point p2 = boxPointsArray[p];
                double xd = p1.x - p2.x;
                double yd = p1.y - p2.y;
                double lineLength = Math.sqrt((xd * xd) + (yd * yd));
                minLine = Math.min(minLine, lineLength);
                maxLine = Math.max(maxLine, lineLength);
            }
            if (maxLine - minLine > minLine) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer lines not square enough");
                }
                continue;
            }

            // draw the outer box if debugging
            if (DEBUG) {
                MatOfPoint debugBoxPoints = new MatOfPoint(boxPointsArray);
                Log.d(TAG,
                        "Potential tick box: " + boxPoints2f.size() + ", " + "area: "
                                + Math.abs(Imgproc.contourArea(debugBoxPoints)) + " (min:" + minimumOuterBoxArea
                                + ", max:" + maximumOuterBoxArea + ")");
                drawPoints(bitMat, debugBoxPoints, new Scalar(50, 255, 255), 2);
            }

            // loop through the children - they should be in descending size order, but sometimes this is wrong
            boolean wrongBox = false;
            while (true) {
                if (DEBUG) {
                    Log.d(TAG, "Looping with box: " + childBox);
                }

                // we've previously tried a child - try the next one
                // key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
                if (wrongBox) {
                    childBox = (int) hierarchyMat.get(0, childBox)[0];
                    if (childBox == -1) {
                        break;
                    }
                    if (searchedContours.contains(childBox)) {
                        if (DEBUG) {
                            Log.d(TAG, "Ignoring duplicate box at loop stage: " + childBox);
                        }
                        break;
                    } else {
                        searchedContours.add(childBox);
                    }
                    //noinspection UnusedAssignment
                    wrongBox = false;
                }

                // perhaps this is the outer box - check its child has no children itself
                // (removed so tiny children (i.e. noise) don't mean we mis-detect an un-ticked box as ticked)
                // if (hierarchyMat.get(0, childBox)[2] != -1) {
                // continue;
                // }

                // check the size of the child box is large enough
                boxPoints = contours.get(childBox);
                originalArea = Math.abs(Imgproc.contourArea(boxPoints));
                if (originalArea < minimumInnerBoxArea) {
                    if (DEBUG) {
                        // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                        Log.d(TAG, "Inner box too small");
                    }
                    wrongBox = true;
                    continue;
                }

                // simplify the contours of the inner box - again, we want four-sided shapes only
                boxPoints2f = new MatOfPoint2f(boxPoints.toArray());
                Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                        innerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true);
                if (boxPoints2f.height() != 4) { // height is number of points
                    // if (DEBUG) {
                    // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                    // }
                    Log.d(TAG, "Inner box fewer than 4 points"); // TODO: allow > 4 for low quality images?
                    wrongBox = true;
                    continue;
                }

                // check that the simplified inner box is approximately a square, angle-wise
                // higher tolerance because noise means if we get several inners, the box may not be quite square
                boxPointsArray = boxPoints2f.toArray();
                maxCosine = 0;
                for (int j = 0; j < 4; j++) {
                    org.opencv.core.Point pL = boxPointsArray[j];
                    org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                    org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                    maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
                }
                if (maxCosine > maxInnerAngleCos) {
                    Log.d(TAG, "Inner angles not square enough");
                    wrongBox = true;
                    continue;
                }

                // this is probably an inner box - log if debugging
                if (DEBUG) {
                    Log.d(TAG,
                            "Un-ticked inner box: " + boxPoints2f.size() + ", " + "area: "
                                    + Math.abs(Imgproc.contourArea(new MatOfPoint2f(boxPointsArray)))
                                    + " (min: " + minimumInnerBoxArea + ")");
                }

                // find the inner box centre
                double centreX = (boxPointsArray[0].x + boxPointsArray[1].x + boxPointsArray[2].x
                        + boxPointsArray[3].x) / 4f;
                double centreY = (boxPointsArray[0].y + boxPointsArray[1].y + boxPointsArray[2].y
                        + boxPointsArray[3].y) / 4f;

                // draw the inner box if debugging
                if (DEBUG) {
                    drawPoints(bitMat, new MatOfPoint(boxPointsArray), new Scalar(255, 255, 255), 1);
                    Core.circle(bitMat, new org.opencv.core.Point(centreX, centreY), 3,
                            new Scalar(255, 255, 255));
                }

                // add to the list of boxes to check
                centrePoints.add(new PointF((float) centreX, (float) centreY));
                break;
            }
        }
    }

    Log.d(TAG, "Found " + centrePoints.size() + " un-ticked boxes");
    return centrePoints;
}
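
The hierarchy convention used above generalises. With Imgproc.RETR_TREE, each entry of hierarchyMat holds {next sibling, previous sibling, first child, parent}, so all direct children of a contour can be walked with a small loop. A minimal sketch (names are illustrative, not part of the example above):

import android.util.Log;
import org.opencv.core.Mat;

// Visit every direct child of contour i, using the hierarchy Mat produced by
// Imgproc.findContours(..., Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE).
static void visitChildren(Mat hierarchy, int i) {
    int child = (int) hierarchy.get(0, i)[2];     // index 2 = first child, or -1 if none
    while (child != -1) {
        Log.d("Contours", "contour " + i + " has child " + child);
        child = (int) hierarchy.get(0, child)[0]; // index 0 = next sibling, or -1 at the end
    }
}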

From source file: android.google.com.basiccamera.imageprocessing.CannyEdgeDetector.java

License: BSD License

protected void runTask() {
    Log.i(TAG, "Starting heavy image processing task");
    while (running) {
        Long begin, end;
        //begin = System.currentTimeMillis();

        // requests picture and blocks till it receives one
        mTaskManager.requestPreviewFrame();
        //end = System.currentTimeMillis();
        //Log.i(TAG, "Process took " + String.valueOf(end - begin) + " ms");
        byte[] image = getImage();
        if (image == null) {
            Log.w(TAG, "Received null as picture");
            continue; // skip this frame rather than crash decoding a null array below
        }

        // do Canny edge detection
        Mat img = new Mat();
        Bitmap bmp = BitmapFactory.decodeByteArray(image, 0, image.length);
        Utils.bitmapToMat(bmp, img);
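        // img is now CV_8UC4 (RGBA); cvtColor accepts 3- or 4-channel input here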
        Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY);
        Imgproc.blur(img, img, new Size(3, 3));
        Imgproc.Canny(img, img, 20, 100);
        Utils.matToBitmap(img, bmp);

        mTaskManager.drawResult(bmp);
    }
    return;
}
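
Note that Utils.bitmapToMat is a thin JNI wrapper, so calls like the ones above fail with an UnsatisfiedLinkError unless the OpenCV native library has been loaded first. A minimal sketch of the static-initialiser approach (the asynchronous OpenCVLoader.initAsync/BaseLoaderCallback route is the common alternative):

import org.opencv.android.OpenCVLoader;

// Run once before the first call into org.opencv.*; initDebug() loads the
// native library bundled with the application and returns false on failure.
static {
    if (!OpenCVLoader.initDebug()) {
        throw new IllegalStateException("Failed to load the OpenCV native library");
    }
}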

From source file: club.towr5291.Concepts.ConceptVuforiaOpGrabImage.java

@Override
public void runOpMode() throws InterruptedException {

    //load variables
    sharedPreferences = PreferenceManager.getDefaultSharedPreferences(hardwareMap.appContext);
    teamNumber = sharedPreferences.getString("club.towr5291.Autonomous.TeamNumber", "0000");
    allianceColor = sharedPreferences.getString("club.towr5291.Autonomous.Color", "Red");
    allianceStartPosition = sharedPreferences.getString("club.towr5291.Autonomous.StartPosition", "Left");
    allianceParkPosition = sharedPreferences.getString("club.towr5291.Autonomous.ParkPosition", "Vortex");
    delay = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Delay", "0"));
    numBeacons = sharedPreferences.getString("club.towr5291.Autonomous.Beacons", "One");
    robotConfig = sharedPreferences.getString("club.towr5291.Autonomous.RobotConfig", "TileRunner-2x40");
    debug = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Debug", null));

    debug = 3; // hard-coded override for this concept test

    //start the log
    startDate = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(new Date());
    fileLogger = new FileLogger(runtime);
    fileLogger.open();
    fileLogger.write("Time,SysMS,Thread,Event,Desc");
    fileLogger.writeEvent("init()", "Log Started");

    //BeaconAnalysisOCV beaconColour = new BeaconAnalysisOCV();
    //BeaconAnalysisOCV2 beaconColour = new BeaconAnalysisOCV2();
    //BeaconAnalysisOCVAnalyse beaconColour = new BeaconAnalysisOCVAnalyse();
    BeaconAnalysisOCVPlayground beaconColour = new BeaconAnalysisOCVPlayground();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    parameters.vuforiaLicenseKey = "AVATY7T/////AAAAGQJxfNYzLUgGjSx0aOEU0Q0rpcfZO2h2sY1MhUZUr+Bu6RgoUMUP/nERGmD87ybv1/lM2LBFDxcBGRHkXvxtkHel4XEUCsNHFTGWYcVkMIZqctQsIrTe13MnUvSOfQj8ig7xw3iULcwDpY+xAftW61dKTJ0IAOCxx2F0QjJWqRJBxrEUR/DfQi4LyrgnciNMXCiZ8KFyBdC63XMYkQj2joTN579+2u5f8aSCe8jkAFnBLcB1slyaU9lhnlTEMcFjwrLBcWoYIFAZluvFT0LpqZRlS1/XYf45QBSJztFKHIsj1rbCgotAE36novnAQBs74ewnWsJifokJGOYWdFJveWzn3GE9OEH23Y5l7kFDu4wc";
    //parameters.cameraMonitorFeedback = VuforiaLocalizer.Parameters.CameraMonitorFeedback.AXES;

    VuforiaLocalizer vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    //ConceptVuforiaGrabImage vuforia = new ConceptVuforiaGrabImage(parameters);
    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);

    VuforiaTrackables velocityVortex = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheels = velocityVortex.get(0);
    wheels.setName("wheels"); // wheels target

    VuforiaTrackable tools = velocityVortex.get(1);
    tools.setName("tools"); // tools target

    VuforiaTrackable legos = velocityVortex.get(2);
    legos.setName("legos"); // legos target

    VuforiaTrackable gears = velocityVortex.get(3);
    gears.setName("gears"); // gears target

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(velocityVortex);

    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);

    /**
     * We use units of mm here because that's the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels

    /**
     * In order for localization to work, we need to tell the system where each target we
     * wish to use for navigation resides on the field, and we need to specify where on the robot
     * the phone resides. These specifications are in the form of <em>transformation matrices.</em>
     * Transformation matrices are a central, important concept in the math here involved in localization.
     * See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
     * for detailed information. Commonly, you'll encounter transformation matrices as instances
     * of the {@link OpenGLMatrix} class.
     *
     * For the most part, you don't need to understand the details of the math of how transformation
     * matrices work inside (as fascinating as that is, truly). Just remember these key points:
     * <ol>
     *
     *     <li>You can put two transformations together to produce a third that combines the effect of
     *     both of them. If, for example, you have a rotation transform R and a translation transform T,
     *     then the combined transformation matrix RT which does the rotation first and then the translation
     *     is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
     *     <em>reverse</em> of the chronological order in which they applied.</li>
     *
     *     <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
     *     class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
     *     float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
     *     {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
     *     Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
     *     float, float, float, float)}, are syntactic shorthands for creating a new transform and
     *     then immediately multiplying the receiver by it, which can be convenient at times.</li>
     *
     *     <li>If you want to break open the black box of a transformation matrix to understand
     *     what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
     *     transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
     *     AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
     *     will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
     *
     * </ol>
     *
     * This example places the "stones" image on the perimeter wall to the Left
     *  of the Red Driver station wall.  Similar to the Red Beacon Location on the Res-Q
     *
     * This example places the "chips" image on the perimeter wall to the Right
     *  of the Blue Driver station.  Similar to the Blue Beacon Location on the Res-Q
     *
     * See the doc folder of this project for a description of the field Axis conventions.
     *
     * Initially the target is conceptually lying at the origin of the field's coordinate system
     * (the center of the field), facing up.
     *
     * In this configuration, the target's coordinate system aligns with that of the field.
     *
     * In a real situation we'd also account for the vertical (Z) offset of the target,
     * but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
     *
     * To place the Wheels Target on the Red Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Then we rotate it 90 around the field's Z axis to face it away from the audience.
     * - Finally, we translate it back along the X axis towards the red audience wall.
     *
     */

    // RED Targets
    // To Place GEARS Target
    // position is approximately - (-6feet, -1feet)

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -1 * 12 * mmPerInch, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gears.setLocation(gearsTargetLocationOnField);
    //RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    // To Place TOOLS Target
    // position is approximately - (-6feet, 3feet)
    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 2, 3 * 12 * mmPerInch, 0)
            //.translation(0, mmFTCFieldWidth/2, 0)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    tools.setLocation(toolsTargetLocationOnField);
    //RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    //Finish RED Targets

    // BLUE Targets
    // To Place LEGOS Target
    // position is approximately - (-3feet, 6feet)

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the BLUE WALL. Our translation here
            is a positive translation in Y.*/
            .translation(-3 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legos.setLocation(legosTargetLocationOnField);
    //RobotLog.ii(TAG, "Gears Target=%s", format(legosTargetLocationOnField));

    // To Place WHEELS Target
    // position is approximately - (1feet, 6feet)
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(1 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheels.setLocation(wheelsTargetLocationOnField);
    //RobotLog.ii(TAG, "Tools Target=%s", format(wheelsTargetLocationOnField));

    //Finish BLUE Targets

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of BACK camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (i.e. a rotation parallel to the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation((mmBotWidth / 2), 50, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    //RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    /**
     * Let the trackable listeners we care about know where the phone is. We know that each
     * listener is a {@link VuforiaTrackableDefaultListener} and can so safely cast because
     * we have not ourselves installed a listener of a different type.
     */
    ((VuforiaTrackableDefaultListener) gears.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) tools.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) legos.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) wheels.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /**
     * A brief tutorial: here's how all the math is going to work:
     *
     * C = phoneLocationOnRobot  maps   phone coords -> robot coords
     * P = tracker.getPose()     maps   image target coords -> phone coords
     * L = redTargetLocationOnField maps   image target coords -> field coords
     *
     * So
     *
     * C.inverted()              maps   robot coords -> phone coords
     * P.inverted()              maps   phone coords -> imageTarget coords
     *
     * Putting that all together,
     *
     * L x P.inverted() x C.inverted() maps robot coords to field coords.
     *
     * @see VuforiaTrackableDefaultListener#getRobotLocation()
     */

    waitForStart();

    //Mat tmp = new Mat();

    velocityVortex.activate();

    Image rgb = null;

    int loop = 0;
    Constants.BeaconColours Colour = Constants.BeaconColours.UNKNOWN;

    while (opModeIsActive()) {

        boolean gotBeacomDims = false;
        Point beaconBotRight = new Point(0, 0);
        Point beaconTopLeft = new Point(0, 0);
        Point beaconMiddle = new Point(0, 0);

        for (VuforiaTrackable beac : velocityVortex) {

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

            if (pose != null) {

                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));

                double dblMidPointTopx = (upperRight.getData()[0] + upperLeft.getData()[0]) / 2;
                double dblMidPointTopy = (upperRight.getData()[1] + upperLeft.getData()[1]) / 2;
                double dblMidPointBotx = (lowerRight.getData()[0] + lowerLeft.getData()[0]) / 2;
                double dblMidPointBoty = (lowerRight.getData()[1] + lowerLeft.getData()[1]) / 2;

                double width = Math.sqrt((Math.pow((upperRight.getData()[1] - upperLeft.getData()[1]), 2))
                        + (Math.pow((upperRight.getData()[0] - upperLeft.getData()[0]), 2)));
                double height = Math.sqrt((Math.pow((dblMidPointTopy - dblMidPointBoty), 2))
                        + (Math.pow((dblMidPointTopx - dblMidPointBotx), 2)));

                //the target is 254mm wide; the beacon is about 220mm wide and 150mm high
                //pixels per mm width, using a known size of the target
                double dblWidthPixelsPermm = width / TARGET_WIDTH;
                double dblHeightPixelsPermm = height / TARGET_HEIGHT;

                //beacon base is about 25mm above top of target
                beaconBotRight = new Point((dblMidPointTopx + (110 * dblWidthPixelsPermm)),
                        dblMidPointTopy - (30 * dblHeightPixelsPermm));
                beaconTopLeft = new Point((dblMidPointTopx - (110 * dblWidthPixelsPermm)),
                        dblMidPointTopy - (160 * dblHeightPixelsPermm));

                beaconMiddle.x = dblMidPointTopx;
                beaconMiddle.y = dblMidPointTopy + (105 * dblHeightPixelsPermm);

                gotBeacomDims = true;

                if (debug >= 1) {
                    fileLogger.writeEvent("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);
                    Log.d("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                    Log.d("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "upperRight 1 " + upperRight.getData()[1]);
                    Log.d("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                    Log.d("Vuforia", "upperRight 1 " + upperRight.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);
                    Log.d("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                    Log.d("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);
                    Log.d("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                    Log.d("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                    fileLogger.writeEvent("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                    fileLogger.writeEvent("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                    fileLogger.writeEvent("Vuforia", "dblMidPointBoty " + dblMidPointBoty);
                    Log.d("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                    Log.d("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                    Log.d("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                    Log.d("Vuforia", "dblMidPointBoty " + dblMidPointBoty);

                    fileLogger.writeEvent("Vuforia", "width in pixels " + width);
                    fileLogger.writeEvent("Vuforia", "height in pixels " + height);
                    Log.d("Vuforia", "width in pixels " + width);
                    Log.d("Vuforia", "height in pixels " + height);
                }
            }
        }

        if (gotBeacomDims) {

            VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take(); //takes the frame at the head of the queue

            long numImages = frame.getNumImages();

            for (int i = 0; i < numImages; i++) {
                if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                    rgb = frame.getImage(i);
                    break;
                }
            }

            /* rgb is now the Image object that we've used in the video */
            Log.d("OPENCV", "Height " + rgb.getHeight() + " Width " + rgb.getWidth());

            Bitmap bm = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(rgb.getPixels());
            Mat tmp = new Mat(rgb.getHeight(), rgb.getWidth(), CvType.CV_8UC4); // Mat takes (rows, cols); bitmapToMat will size it to the Bitmap anyway
            Utils.bitmapToMat(bm, tmp);

            if (beaconTopLeft.x < 0)
                beaconTopLeft.x = 0;
            if (beaconTopLeft.y < 0)
                beaconTopLeft.y = 0;
            if (beaconBotRight.x > rgb.getWidth())
                beaconBotRight.x = rgb.getWidth();
            if (beaconBotRight.y > rgb.getHeight())
                beaconBotRight.y = rgb.getHeight();

            frame.close();
            //Constants.BeaconColours Colour = beaconColour.beaconAnalysisOCV(tmp, loop);
            //Constants.BeaconColours Colour = beaconColour.beaconAnalysisOCV2(tmp, loop, debug);
            Colour = beaconColour.BeaconAnalysisOCVPlayground(debug, tmp, loop, beaconTopLeft, beaconBotRight,
                    beaconMiddle);
            loop++;
            Log.d("OPENCV", "Returned " + Colour);
        }

        for (VuforiaTrackable trackable : allTrackables) {
            /**
             * getUpdatedRobotLocation() will return null if no new information is available since
             * the last time that call was made, or if the trackable is not currently visible.
             * getRobotLocation() will return null if the trackable is not currently visible.
             */
            telemetry.addData(trackable.getName(),
                    ((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible() ? "Visible"
                            : "Not Visible"); //

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) trackable.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;

            }
        }
        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            // Then you can extract the positions and angles using the getTranslation and getOrientation methods.
            VectorF trans = lastLocation.getTranslation();
            Orientation rot = Orientation.getOrientation(lastLocation, AxesReference.EXTRINSIC, AxesOrder.XYZ,
                    AngleUnit.DEGREES);
            // Robot position is defined by the standard Matrix translation (x and y)
            robotX = trans.get(0);
            robotY = trans.get(1);

            // Robot bearing (in Cartesian system) position is defined by the standard Matrix z rotation
            robotBearing = rot.thirdAngle;
            if (robotBearing < 0) {
                robotBearing = 360 + robotBearing;
            }

            telemetry.addData("Pos X ", robotX);
            telemetry.addData("Pos Y ", robotY);
            telemetry.addData("Bear  ", robotBearing);
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos   ", format(lastLocation));

            telemetry.addData("Text ", "*** Vision Data***");
            //telemetry.addData("Red  ", "Red :  " + redpoint);
            //telemetry.addData("Blue ", "Blue:  " + bluepoint);
            //telemetry.addData("Dir  ", "Direction:  " + directionOfBeacon);
        } else {
            telemetry.addData("Pos   ", "Unknown");
        }

        switch (Colour) {
        case BEACON_BLUE_RED:
            telemetry.addData("Beacon ", "Blue Red");
            break;
        case BEACON_RED_BLUE:
            telemetry.addData("Beacon ", "Red Blue");
            break;
        case BEACON_BLUE_LEFT:
            telemetry.addData("Beacon ", "Blue Left");
            break;
        case BEACON_RED_LEFT:
            telemetry.addData("Beacon ", "Red Left");
            break;
        case BEACON_BLUE_RIGHT:
            telemetry.addData("Beacon ", "Blue Right");
            break;
        case BEACON_RED_RIGHT:
            telemetry.addData("Beacon ", "Red Right");
            break;
        case UNKNOWN:
            telemetry.addData("Beacon ", "Unknown");
            break;
        }

        telemetry.update();

    }

    //stop the log
    if (fileLogger != null) {
        fileLogger.writeEvent("stop()", "Stopped");
        fileLogger.close();
        fileLogger = null;
    }

}
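
Stripped of the navigation code, the part of this example that concerns bitmapToMat is the conversion of a Vuforia RGB565 camera frame into a Mat. A condensed sketch of that pattern (the method name is illustrative, and imports are omitted because the Vuforia package names vary between SDK versions):

// Convert the first RGB565 image in a Vuforia frame to an OpenCV Mat.
static Mat frameToMat(VuforiaLocalizer.CloseableFrame frame) {
    for (int i = 0; i < frame.getNumImages(); i++) {
        Image image = frame.getImage(i);
        if (image.getFormat() == PIXEL_FORMAT.RGB565) {
            Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(),
                    Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(image.getPixels()); // raw RGB565 pixels
            Mat mat = new Mat();                        // bitmapToMat allocates the Mat itself
            Utils.bitmapToMat(bm, mat);                 // result is CV_8UC4 (RGBA)
            return mat;
        }
    }
    return null; // no RGB565 image in this frame
}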

From source file: club.towr5291.opmodes.AutoDriveTeam5291.java

License: Open Source License

@Override
public void runOpMode() throws InterruptedException {
    final boolean LedOn = false;
    final boolean LedOff = true;

    //load menu settings and setup robot and debug level
    sharedPreferences = PreferenceManager.getDefaultSharedPreferences(hardwareMap.appContext);
    teamNumber = sharedPreferences.getString("club.towr5291.Autonomous.TeamNumber", "0000");
    allianceColor = sharedPreferences.getString("club.towr5291.Autonomous.Color", "Red");
    allianceStartPosition = sharedPreferences.getString("club.towr5291.Autonomous.StartPosition", "Left");
    allianceParkPosition = sharedPreferences.getString("club.towr5291.Autonomous.ParkPosition", "Vortex");
    delay = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Delay", "0"));
    numBeacons = sharedPreferences.getString("club.towr5291.Autonomous.Beacons", "One");
    robotConfig = sharedPreferences.getString("club.towr5291.Autonomous.RobotConfig", "TileRunner-2x40");
    debug = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Debug", "1"));

    telemetry.addData("Init1     ", "Starting!");
    telemetry.update();
    //
    // get a reference to a Modern Robotics DIM, and IO channels.
    dim = hardwareMap.get(DeviceInterfaceModule.class, "dim"); //  Use generic form of device mapping
    dim.setDigitalChannelMode(GREEN1_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(RED1_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(BLUE1_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(GREEN2_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(RED2_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(BLUE2_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    LedState(LedOn, LedOn, LedOn, LedOn, LedOn, LedOn);

    //load variables
    LibraryStateSegAuto processingSteps = new LibraryStateSegAuto(0, 0, "", false, false, 0, 0, 0, 0, 0, 0, 0);
    sixValues[] pathValues = new sixValues[1000];
    A0Star a0Star = new A0Star();
    String fieldOutput;
    HashMap<String, LibraryStateSegAuto> autonomousStepsAStar = new HashMap<>();

    if (debug >= 1) {
        fileLogger = new FileLogger(runtime);
        fileLogger.open();
        fileLogger.write("Time,SysMS,Thread,Event,Desc");
        fileLogger.writeEvent(TAG, "Log Started");
        Log.d(TAG, "Log Started");
        runtime.reset();
        telemetry.addData("FileLogger: ", runtime.toString());
        telemetry.addData("FileLogger Op Out File: ", fileLogger.getFilename());
    }

    //load all the vuforia stuff
    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;

    switch (teamNumber) {
    case "5291":
        parameters.vuforiaLicenseKey = "AVATY7T/////AAAAGQJxfNYzLUgGjSx0aOEU0Q0rpcfZO2h2sY1MhUZUr+Bu6RgoUMUP/nERGmD87ybv1/lM2LBFDxcBGRHkXvxtkHel4XEUCsNHFTGWYcVkMIZqctQsIrTe13MnUvSOfQj8ig7xw3iULcwDpY+xAftW61dKTJ0IAOCxx2F0QjJWqRJBxrEUR/DfQi4LyrgnciNMXCiZ8KFyBdC63XMYkQj2joTN579+2u5f8aSCe8jkAFnBLcB1slyaU9lhnlTEMcFjwrLBcWoYIFAZluvFT0LpqZRlS1/XYf45QBSJztFKHIsj1rbCgotAE36novnAQBs74ewnWsJifokJGOYWdFJveWzn3GE9OEH23Y5l7kFDu4wc";
        break;
    case "11230":
        parameters.vuforiaLicenseKey = "Not Provided";
        break;
    case "11231":
        parameters.vuforiaLicenseKey = "Aai2GEX/////AAAAGaIIK9GK/E5ZsiRZ/jrJzdg7wYZCIFQ7uzKqQrMx/0Hh212zumzIy4raGwDY6Mf6jABMShH2etZC/BcjIowIHeAG5ShG5lvZIZEplTO+1zK1nFSiGFTPV59iGVqH8KjLbQdgUbsCBqp4f3tI8BWYqAS27wYIPfTK697SuxdQnpEZAOhHpgz+S2VoShgGr+EElzYMBFEaj6kdA/Lq5OwQp31JPet7NWYph6nN+TNHJAxnQBkthYmQg687WlRZhYrvNJepnoEwsDO3NSyeGlFquwuQwgdoGjzq2qn527I9tvM/XVZt7KR1KyWCn3PIS/LFvADSuyoQ2lsiOFtM9C+KCuNWiqQmj7dPPlpvVeUycoDH";
        break;
    }

    VuforiaLocalizer vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);

    VuforiaTrackables velocityVortex = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheels = velocityVortex.get(0);
    wheels.setName("wheels"); // wheels target

    VuforiaTrackable tools = velocityVortex.get(1);
    tools.setName("tools"); // tools target

    VuforiaTrackable legos = velocityVortex.get(2);
    legos.setName("legos"); // legos target

    VuforiaTrackable gears = velocityVortex.get(3);
    gears.setName("gears"); // gears target

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(velocityVortex);

    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);

    /**
     * We use units of mm here because that's the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels

    /**
     * In order for localization to work, we need to tell the system where each target we
     * wish to use for navigation resides on the field, and we need to specify where on the robot
     * the phone resides. These specifications are in the form of <em>transformation matrices.</em>
     * Transformation matrices are a central, important concept in the math here involved in localization.
     * See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
     * for detailed information. Commonly, you'll encounter transformation matrices as instances
     * of the {@link OpenGLMatrix} class.
     *
     * For the most part, you don't need to understand the details of the math of how transformation
     * matrices work inside (as fascinating as that is, truly). Just remember these key points:
     * <ol>
     *
     *     <li>You can put two transformations together to produce a third that combines the effect of
     *     both of them. If, for example, you have a rotation transform R and a translation transform T,
     *     then the combined transformation matrix RT which does the rotation first and then the translation
     *     is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
     *     <em>reverse</em> of the chronological order in which they applied.</li>
     *
     *     <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
     *     class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
     *     float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
     *     {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
     *     Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
     *     float, float, float, float)}, are syntactic shorthands for creating a new transform and
     *     then immediately multiplying the receiver by it, which can be convenient at times.</li>
     *
     *     <li>If you want to break open the black box of a transformation matrix to understand
     *     what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
     *     transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
     *     AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
     *     will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
     *
     * </ol>
     *
     * This example places the "stones" image on the perimeter wall to the Left
     *  of the Red Driver station wall.  Similar to the Red Beacon Location on the Res-Q
     *
     * This example places the "chips" image on the perimeter wall to the Right
     *  of the Blue Driver station.  Similar to the Blue Beacon Location on the Res-Q
     *
     * See the doc folder of this project for a description of the field Axis conventions.
     *
     * Initially the target is conceptually lying at the origin of the field's coordinate system
     * (the center of the field), facing up.
     *
     * In this configuration, the target's coordinate system aligns with that of the field.
     *
     * In a real situation we'd also account for the vertical (Z) offset of the target,
     * but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
     *
     * To place the Wheels Target on the Red Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Then we rotate it 90 around the field's Z axis to face it away from the audience.
     * - Finally, we translate it back along the X axis towards the red audience wall.
     *
     */

    // RED Targets
    // To Place GEARS Target
    // position is approximately - (-6feet, -1feet)

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -1 * 12 * mmPerInch, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gears.setLocation(gearsTargetLocationOnField);

    // To Place TOOLS Target
    // position is approximately - (-6feet, 3feet)
    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 2, 3 * 12 * mmPerInch, 0)
            //.translation(0, mmFTCFieldWidth/2, 0)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    tools.setLocation(toolsTargetLocationOnField);

    //Finish RED Targets

    // BLUE Targets
    // To Place LEGOS Target
    // position is approximately - (-3feet, 6feet)

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the BLUE WALL. Our translation here
            is a positive translation in Y.*/
            .translation(-3 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legos.setLocation(legosTargetLocationOnField);

    // To Place WHEELS Target
    // position is approximately - (1feet, 6feet)
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(1 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheels.setLocation(wheelsTargetLocationOnField);

    //Finish BLUE Targets

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of BACK camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (i.e. a rotation parallel to the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation((mmBotWidth / 2), 0, 300)
            .multiplied(Orientation.getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES,
                    -90, 0, 0));
    //RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    /**
     * Let the trackable listeners we care about know where the phone is. We know that each
     * listener is a {@link VuforiaTrackableDefaultListener} and can so safely cast because
     * we have not ourselves installed a listener of a different type.
     */
    ((VuforiaTrackableDefaultListener) gears.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) tools.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) legos.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) wheels.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /**
     * A brief tutorial: here's how all the math is going to work:
     *
     * C = phoneLocationOnRobot  maps   phone coords -> robot coords
     * P = tracker.getPose()     maps   image target coords -> phone coords
     * L = redTargetLocationOnField maps   image target coords -> field coords
     *
     * So
     *
     * C.inverted()              maps   robot coords -> phone coords
     * P.inverted()              maps   phone coords -> imageTarget coords
     *
     * Putting that all together,
     *
     * L x P.inverted() x C.inverted() maps robot coords to field coords.
     *
     * @see VuforiaTrackableDefaultListener#getRobotLocation()
     */
    telemetry.addData("Init2     ", "Vuforia Options Loaded!");
    telemetry.update();

    //to add more config options edit strings.xml and AutonomousConfiguration.java
    switch (robotConfig) {
    case "TileRunner-2x40": //Velocity Vortex Competition Base
        REVERSE_DIRECTION = 1; // Reverse the direction without significant code changes (using motor FORWARD/REVERSE would affect the driver station, as we use the same robotconfig file)
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = 0.7; // This is < 1.0 if geared UP, Tilerunner is geared up
        WHEEL_DIAMETER_INCHES = 4.0; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415))
                * WHEEL_ACTUAL_FUDGE * REVERSE_DIRECTION;
        ROBOT_TRACK = 16.5; // distance between the centerlines of the rear wheels (the robot pivots on the rear wheels, omnis at the front); for reference, a 16.5in track turns a 103.67in full circle
        WHEEL_TURN_FUDGE = 1.0; // Fine tuning amount
        COUNTS_PER_DEGREE = (((2 * 3.1415 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360) * WHEEL_TURN_FUDGE;
        loadPowerTableTileRunner(); //load the power table
        break;
    case "5291 Tank Tread-2x40 Custom": //for tank tread base
        REVERSE_DIRECTION = 1;
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = 1.0; // Tank Tread is a 1:1 ratio
        WHEEL_DIAMETER_INCHES = 3.75; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415))
                * WHEEL_ACTUAL_FUDGE * REVERSE_DIRECTION;
        ROBOT_TRACK = 18; // distance between the centerlines of the rear wheels (the robot pivots on the rear wheels, omnis at the front); for reference, a 16.5in track turns a 103.67in full circle
        WHEEL_TURN_FUDGE = 1.12; // Fine tuning amount
        COUNTS_PER_DEGREE = (((2 * 3.1415 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360) * WHEEL_TURN_FUDGE;
        loadPowerTableTankTread(); //load the power table
        break;
    case "11231 2016 Custom": //2016 - 11231 Drivetrain
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = .667; // (.665) raising this increases the distance; this is < 1.0 if geared UP
        WHEEL_DIAMETER_INCHES = 4.0; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION)
                / (WHEEL_DIAMETER_INCHES * 3.1415926535)) * WHEEL_ACTUAL_FUDGE;
        ROBOT_TRACK = 18; // distance between the centerlines of the rear wheels (the robot pivots on the rear wheels, omnis at the front); for reference, a 16.5in track turns a 103.67in full circle
        COUNTS_PER_DEGREE = ((2 * 3.1415926535 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360;
        //loadPowerTableTileRunner();                                                         //load the power table
        break;
    default: //default for competition TileRunner-2x40
        REVERSE_DIRECTION = 1;
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = 1.28; // This is < 1.0 if geared UP
        WHEEL_DIAMETER_INCHES = 4.0; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415))
                * WHEEL_ACTUAL_FUDGE * REVERSE_DIRECTION;
        ROBOT_TRACK = 16.5; // distance between the centerlines of the rear wheels; the robot pivots on the rear wheels (omni wheel on the front), so a 16.5" track gives a 103.67" full pivot circle
        COUNTS_PER_DEGREE = ((2 * 3.1415 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360;
        loadPowerTableTileRunner(); //load the power table
        break;
    }
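
    // Worked example of the encoder math above (a sketch, using the TileRunner-2x40 values):
    //   COUNTS_PER_INCH   = (1120 * 0.7) / (4.0 * 3.1415)       ≈ 62.4 counts per inch
    //   COUNTS_PER_DEGREE = ((2 * 3.1415 * 16.5) * 62.4) / 360  ≈ 18.0 counts per degree
    // so a 24-inch drive targets roughly 1497 counts and a 90-degree pivot roughly 1617 counts.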

    if (debug >= 1) {
        fileLogger.writeEvent(TAG, "Team #             " + teamNumber);
        fileLogger.writeEvent(TAG, "Alliance Colour    " + allianceColor);
        fileLogger.writeEvent(TAG, "Alliance Start Pos " + allianceStartPosition);
        fileLogger.writeEvent(TAG, "Alliance Park Pos  " + allianceParkPosition);
        fileLogger.writeEvent(TAG, "Alliance Delay     " + delay);
        fileLogger.writeEvent(TAG, "Alliance Beacons   " + numBeacons);
        fileLogger.writeEvent(TAG, "Robot Config       " + robotConfig);
        Log.d(TAG, "Team #             " + teamNumber);
        Log.d(TAG, "Alliance Colour    " + allianceColor);
        Log.d(TAG, "Alliance Start Pos " + allianceStartPosition);
        Log.d(TAG, "Alliance Park Pos  " + allianceParkPosition);
        Log.d(TAG, "Alliance Delay     " + delay);
        Log.d(TAG, "Alliance Beacons   " + numBeacons);
        Log.d(TAG, "Robot Config       " + robotConfig);
    }

    telemetry.addData("Init3       ", "Loading Steps");
    telemetry.update();

    //load the sequence based on alliance colour and team
    switch (teamNumber) {
    case "5291":
        switch (allianceColor) {
        case "Red":
            LedState(LedOff, LedOn, LedOff, LedOff, LedOn, LedOff);
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291RedLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291RedRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Blue":
            LedState(LedOff, LedOff, LedOn, LedOff, LedOff, LedOn);
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Test":
            autonomousSteps = autonomousStepsFile.ReadStepFile("5291Test.csv", allianceParkPosition,
                    numBeacons);
            break;
        }
        break;

    case "11230":
        switch (allianceColor) {
        case "Red":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11230RedLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11230RedRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Blue":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Test":
            autonomousSteps = autonomousStepsFile.ReadStepFile("11230Test.csv", allianceParkPosition,
                    numBeacons);
            break;
        }
        break;

    case "11231":
        switch (allianceColor) {
        case "Red":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231RedLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231RedRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Blue":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231BleLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Test":
            autonomousSteps = autonomousStepsFile.ReadStepFile("11231Test.csv", allianceParkPosition,
                    numBeacons);
            break;
        }
        break;
    }

    //insert an initial delay step, based on user input, at the start of the sequence
    autonomousStepsFile.insertSteps(delay + 1, "DEL" + (delay * 1000), false, false, 0, 0, 0, 0, 0, 0, 0, 1);
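    //e.g. a user-selected delay of 5 inserts a "DEL5000" step (the parameter is in milliseconds)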

    // Set up the parameters with which we will use our IMU. Note that integration
    // algorithm here just reports accelerations to the logcat log; it doesn't actually
    // provide positional information.
    BNO055IMU.Parameters parametersAdafruitImu = new BNO055IMU.Parameters();
    parametersAdafruitImu.angleUnit = BNO055IMU.AngleUnit.DEGREES;
    parametersAdafruitImu.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
    parametersAdafruitImu.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode
    parametersAdafruitImu.loggingEnabled = true;
    parametersAdafruitImu.loggingTag = "IMU";
    parametersAdafruitImu.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();

    //don't crash the program if the gyro is faulty; just bypass it
    try {
        // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
        // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
        // and named "imu".
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parametersAdafruitImu);

        // get a reference to a Modern Robotics GyroSensor object.
        gyro = (ModernRoboticsI2cGyro) hardwareMap.gyroSensor.get("gyro");
        // calibrate the gyro, this takes a few seconds
        gyro.calibrate();
        telemetry.addData("Init4       ", "Calibrating Gyro");
        telemetry.update();
    } catch (Exception e) {
        if (debug >= 1) {
            fileLogger.writeEvent(TAG, "Gyro Error " + e.getMessage());
            Log.d(TAG, "Gyro Error " + e.getMessage());
        }
        gyroError = true;
    }

    telemetry.addData("Init5       ", "Line Sensors");
    telemetry.update();

    LineSensor1 = hardwareMap.get(AnalogInput.class, "linesensor1");
    LineSensor2 = hardwareMap.get(AnalogInput.class, "linesensor2");
    LineSensor3 = hardwareMap.get(AnalogInput.class, "linesensor3");
    LineSensor4 = hardwareMap.get(AnalogInput.class, "linesensor4");
    LineSensor5 = hardwareMap.get(AnalogInput.class, "linesensor5");
    /*
     * Initialize the drive system variables.
     * The init() method of the hardware class does all the work here.
     */

    telemetry.addData("Init6       ", "Base and Arm Motors");
    telemetry.update();

    robotDrive.init(hardwareMap);
    armDrive.init(hardwareMap);

    telemetry.addData("Init7       ", "Range Sensors");
    telemetry.update();

    RANGE1 = hardwareMap.i2cDevice.get("range1");
    RANGE1Reader = new I2cDeviceSynchImpl(RANGE1, RANGE1ADDRESS, false);
    RANGE1Reader.engage();
    RANGE2 = hardwareMap.i2cDevice.get("range2");
    RANGE2Reader = new I2cDeviceSynchImpl(RANGE2, RANGE2ADDRESS, false);
    RANGE2Reader.engage();

    // get a reference to our ColorSensor object.
    try {
        telemetry.addData("Init8       ", "Colour Sensor");
        telemetry.update();
        colorSensor = hardwareMap.colorSensor.get("sensorcolor");
    } catch (Exception e) {
        if (debug >= 1) {
            fileLogger.writeEvent(TAG, "colour Error " + e.getMessage());
            Log.d(TAG, "colour Error " + e.getMessage());
        }
        colourError = true;
    }

    telemetry.addData("Init9       ", "Servos");
    telemetry.update();

    //config the servos
    servoBeaconRight = hardwareMap.servo.get("servobeaconright");
    servoBeaconLeft = hardwareMap.servo.get("servobeaconleft");
    servoBeaconRight.setDirection(Servo.Direction.REVERSE);
    servoLifterRight = hardwareMap.servo.get("servoliftright");
    servoLifterLeft = hardwareMap.servo.get("servoliftleft");
    servoLifterRight.setDirection(Servo.Direction.REVERSE);
    //lock the arms up
    moveServo(servoLifterRight, 135, SERVOLIFTRIGHT_MIN_RANGE, SERVOLIFTRIGHT_MAX_RANGE);
    moveServo(servoLifterLeft, 135, SERVOLIFTLEFT_MIN_RANGE, SERVOLIFTLEFT_MAX_RANGE);

    // Move the beacon pushers to home
    moveServo(servoBeaconRight, SERVOBEACONRIGHT_HOME, SERVOBEACONRIGHT_MIN_RANGE, SERVOBEACONRIGHT_MAX_RANGE);
    moveServo(servoBeaconLeft, SERVOBEACONLEFT_HOME, SERVOBEACONLEFT_MIN_RANGE, SERVOBEACONLEFT_MAX_RANGE);

    // Send telemetry message to signify robot waiting;
    telemetry.addData("Status", "Resetting Encoders"); //

    robotDrive.leftMotor1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
    robotDrive.leftMotor2.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
    robotDrive.rightMotor1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
    robotDrive.rightMotor2.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);

    robotDrive.leftMotor1.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
    robotDrive.leftMotor2.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
    robotDrive.rightMotor1.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
    robotDrive.rightMotor2.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
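
    // A hedged refactor sketch (hypothetical helper, not part of the original source):
    // applying one RunMode to all four drive motors would remove the repetition above.
    //
    //     private void setDriveRunMode(DcMotor.RunMode mode) {
    //         robotDrive.leftMotor1.setMode(mode);
    //         robotDrive.leftMotor2.setMode(mode);
    //         robotDrive.rightMotor1.setMode(mode);
    //         robotDrive.rightMotor2.setMode(mode);
    //     }
    //
    // usage: setDriveRunMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);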

    mintCurStStep = stepState.STATE_INIT;
    mintCurStTankTurn = stepState.STATE_COMPLETE;
    mintCurStDrive = stepState.STATE_COMPLETE;
    mintCurStDriveHeading = stepState.STATE_COMPLETE;
    mintCurStPivotTurn = stepState.STATE_COMPLETE;
    mintCurStRadiusTurn = stepState.STATE_COMPLETE;
    mintCurStDelay = stepState.STATE_COMPLETE;
    mintCurStVuforiaLoc5291 = stepState.STATE_COMPLETE;
    mintCurStVuforiaMove5291 = stepState.STATE_COMPLETE;
    mintCurStVuforiaTurn5291 = stepState.STATE_COMPLETE;
    mintCurStAttackBeacon5291 = stepState.STATE_COMPLETE;
    mintCurStBeaconColour5291 = stepState.STATE_COMPLETE;
    mintCurStShootParticle5291 = stepState.STATE_COMPLETE;
    mintCurStSweeper5291 = stepState.STATE_COMPLETE;
    mintCurStEyelids5291 = stepState.STATE_COMPLETE;
    mintCurStTankTurnGyroHeading = stepState.STATE_COMPLETE;
    mintCurStLineFind5291 = stepState.STATE_COMPLETE;
    mintCurStGyroTurnEncoder5291 = stepState.STATE_COMPLETE;

    mint5291LEDStatus = LEDState.STATE_TEAM;

    mblnNextStepLastPos = false;

    if (!gyroError) {
        while (!isStopRequested() && gyro.isCalibrating()) {
            sleep(50);
            idle();
        }
        telemetry.addData("Init10      ", "Calibrated Gyro");
        telemetry.update();
    }

    if (debug >= 1) {
        fileLogger.writeEvent(TAG, "Init Complete");
        Log.d(TAG, "Init Complete");
    }

    //set up variable for our captured image
    Image rgb = null;

    //activate vuforia
    velocityVortex.activate();

    //show options on the driver station phone
    telemetry.addData("Init11     ", "Complete");
    telemetry.addData("Team #     ", teamNumber);
    telemetry.addData("Alliance   ", allianceColor);
    telemetry.addData("Start Pos  ", allianceStartPosition);
    telemetry.addData("Park Pos   ", allianceParkPosition);
    telemetry.addData("Start Del  ", delay);
    telemetry.addData("# Beacons  ", numBeacons);
    telemetry.addData("Robot      ", robotConfig);
    telemetry.update();

    // Wait for the game to start (driver presses PLAY)
    waitForStart();
    if (!gyroError) { //guard against the faulty-gyro case handled during init
        if (debug >= 1) {
            fileLogger.writeEvent(TAG, "Value of Gyro Before Reset " + gyro.getIntegratedZValue());
            Log.d(TAG, "Value of Gyro Before Reset " + gyro.getIntegratedZValue());
        }

        imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);
        gyro.resetZAxisIntegrator();
    }

    //the main loop.  this is where the action happens
    while (opModeIsActive()) {
        if (!mblnDisableVisionProcessing) {
            //start capturing frames for analysis
            if (mblnReadyToCapture) {

                boolean gotBeaconDims = false;
                boolean beacFound = false;
                Point beaconBotRight = new Point(0, 0);
                Point beaconTopLeft = new Point(0, 0);
                Point beaconMiddle = new Point(0, 0);

                if (mStateTime.milliseconds() < 1000) {
                    gotBeaconDims = true;
                    beacFound = false;
                }

                if (!gotBeaconDims) {
                    for (VuforiaTrackable beac : velocityVortex) {

                        OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

                        if (pose != null) {

                            Matrix34F rawPose = new Matrix34F();
                            float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                            rawPose.setData(poseData);

                            Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(-TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                            Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                            Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));
                            Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(-TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));

                            double dblMidPointTopx = (upperRight.getData()[0] + upperLeft.getData()[0]) / 2;
                            double dblMidPointTopy = (upperRight.getData()[1] + upperLeft.getData()[1]) / 2;
                            double dblMidPointBotx = (lowerRight.getData()[0] + lowerLeft.getData()[0]) / 2;
                            double dblMidPointBoty = (lowerRight.getData()[1] + lowerLeft.getData()[1]) / 2;

                            double width = Math.sqrt(
                                    (Math.pow((upperRight.getData()[1] - upperLeft.getData()[1]), 2)) + (Math
                                            .pow((upperRight.getData()[0] - upperLeft.getData()[0]), 2)));
                            double height = Math.sqrt((Math.pow((dblMidPointTopy - dblMidPointBoty), 2))
                                    + (Math.pow((dblMidPointTopx - dblMidPointBotx), 2)));

                            //target width is 254 mm; the beacon itself is about 220 mm wide and 150 mm high
                            //pixels per mm width, using a known size of the target
                            double dblWidthPixelsPermm = width / TARGET_WIDTH;
                            double dblHeightPixelsPermm = height / TARGET_HEIGHT;

                            //beacon base is about 25mm above top of target
                            beaconBotRight = new Point((dblMidPointTopx + (110 * dblWidthPixelsPermm)),
                                    dblMidPointTopy - (30 * dblHeightPixelsPermm));
                            beaconTopLeft = new Point((dblMidPointTopx - (110 * dblWidthPixelsPermm)),
                                    dblMidPointTopy - (160 * dblHeightPixelsPermm));

                            beaconMiddle.x = dblMidPointTopx;
                            beaconMiddle.y = dblMidPointTopy + (110 * dblHeightPixelsPermm);

                            gotBeaconDims = true;
                            beacFound = true;

                            if (debug >= 1) {
                                fileLogger.writeEvent("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);
                                Log.d("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                                Log.d("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "upperRight 1 " + upperRight.getData()[1]);
                                Log.d("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                                Log.d("Vuforia", "upperRight 1 " + upperRight.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);
                                Log.d("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                                Log.d("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);
                                Log.d("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                                Log.d("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                                fileLogger.writeEvent("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                                fileLogger.writeEvent("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                                fileLogger.writeEvent("Vuforia", "dblMidPointBoty " + dblMidPointBoty);
                                Log.d("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                                Log.d("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                                Log.d("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                                Log.d("Vuforia", "dblMidPointBoty " + dblMidPointBoty);

                                fileLogger.writeEvent("Vuforia", "width in pixels " + width);
                                fileLogger.writeEvent("Vuforia", "height in pixels " + height);
                                Log.d("Vuforia", "width in pixels " + width);
                                Log.d("Vuforia", "height in pixels " + height);
                            }
                        }
                    }
                }

                if (gotBeaconDims) {
                    VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take(); //takes the frame at the head of the queue
                    long numImages = frame.getNumImages();

                    for (int i = 0; i < numImages; i++) {
                        if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                            rgb = frame.getImage(i);
                            break;
                        }
                    }
                    /* rgb is now the Image object that we've used in the video */
                    Bitmap bm = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
                    bm.copyPixelsFromBuffer(rgb.getPixels());

                    //put the image into a MAT for OpenCV
                    Mat tmp = new Mat(rgb.getHeight(), rgb.getWidth(), CvType.CV_8UC4); // Mat takes (rows, cols) = (height, width)
                    Utils.bitmapToMat(bm, tmp);
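                    // Note: Utils.bitmapToMat re-creates tmp as a 4-channel RGBA Mat sized to
                    // the bitmap, so the CvType passed to the constructor above is only a placeholder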

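                    // clamp the projected beacon ROI to the frame bounds before cropping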
                    if (beaconTopLeft.x < 0)
                        beaconTopLeft.x = 0;
                    if (beaconTopLeft.y < 0)
                        beaconTopLeft.y = 0;
                    if (beaconBotRight.x > rgb.getWidth())
                        beaconBotRight.x = rgb.getWidth();
                    if (beaconBotRight.y > rgb.getHeight())
                        beaconBotRight.y = rgb.getHeight();

                    //close the frame, prevents memory leaks and crashing
                    frame.close();

                    //analyse the beacons
                    //Constants.BeaconColours Colour = beaconColour.beaconAnalysisOCV(tmp, loop));
                    //mColour = beaconColour.beaconAnalysisOCV(tmp, mintCaptureLoop);
                    mColour = beaconColour.beaconAnalysisOCV2(debug, tmp, mintCaptureLoop, beaconTopLeft,
                            beaconBotRight, beaconMiddle, beacFound);
                    if (debug >= 1) {
                        fileLogger.writeEvent("OPENCV", "Returned " + mColour);
                        Log.d("OPENCV", "Returned " + mColour);
                    }

                    telemetry.addData("Beacon ", mColour);
                    mintCaptureLoop++;
                }
            }

            //use Vuforia to get location information
            for (VuforiaTrackable trackable : allTrackables) {
                /**
                 * getUpdatedRobotLocation() will return null if no new information is available since
                 * the last time that call was made, or if the trackable is not currently visible.
                 * getRobotLocation() will return null if the trackable is not currently visible.
                 */
                telemetry.addData(trackable.getName(),
                        ((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible() ? "Visible"
                                : "Not Visible");

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) trackable
                        .getListener()).getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }
            /**
             * Provide feedback as to where the robot was last located (if we know).
             */
            if (lastLocation != null) {
                // Then you can extract the positions and angles using the getTranslation and getOrientation methods.
                VectorF trans = lastLocation.getTranslation();
                Orientation rot = Orientation.getOrientation(lastLocation, AxesReference.EXTRINSIC,
                        AxesOrder.XYZ, AngleUnit.DEGREES);
                // Robot position is defined by the standard Matrix translation (x and y)
                localisedRobotX = trans.get(0);
                localisedRobotY = trans.get(1);

                // Robot bearing (in the Cartesian system) is defined by the standard Matrix z rotation
                localisedRobotBearing = rot.thirdAngle;
                if (localisedRobotBearing < 0) {
                    localisedRobotBearing = 360 + localisedRobotBearing;
                }

                telemetry.addData("Pos X ", localisedRobotX);
                telemetry.addData("Pos Y ", localisedRobotY);
                telemetry.addData("Bear  ", localisedRobotBearing);
                telemetry.addData("Pos   ", format(lastLocation));
                localiseRobotPos = true;
            } else {
                localiseRobotPos = false;
                telemetry.addData("Pos   ", "Unknown");
            }
        }

        switch (mintCurStStep) {
        case STATE_INIT:

            if (debug >= 1) {
                fileLogger.writeEvent(TAG,
                        "mintCurStStep:- " + mintCurStStep + " mintCurStStep " + mintCurStStep);
                fileLogger.writeEvent(TAG, "About to check if step exists " + mintCurrentStep);
                Log.d(TAG, "mintCurStStep:- " + mintCurStStep + " mintCurStStep " + mintCurStStep);
                Log.d(TAG, "About to check if step exists " + mintCurrentStep);
            }
            // get step from hashmap, send it to the initStep for decoding
            if (autonomousSteps.containsKey(String.valueOf(mintCurrentStep))) {
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG,
                            "Step Exists TRUE " + mintCurrentStep + " about to get the values from the step");
                    Log.d(TAG,
                            "Step Exists TRUE " + mintCurrentStep + " about to get the values from the step");
                }
                //processingSteps = autonomousSteps.get(String.valueOf(mintCurrentStep));
                //if (debug >= 1)
                //{
                //    fileLogger.writeEvent(TAG, "Got the values for step " + mintCurrentStep + " about to decode");
                //    Log.d(TAG, "Got the values for step " + mintCurrentStep + " about to decode");
                //}
                //decode the step from hashmap
                //initStep(processingSteps);
                initStep();
            } else {
                mintCurStStep = stepState.STATE_FINISHED;
            }
            break;
        case STATE_START:

            break;
        case STATE_RUNNING:

            loadParallelSteps();
            for (String stKey : mintActiveStepsCopy.keySet()) {
                if (debug >= 1) {
                    fileLogger.writeEvent("STATE_RUNNING", "Looping through Parallel steps, found " + stKey);
                    Log.d("STATE_RUNNING", "Looping through Parallel steps, found " + stKey);
                }
                mintStepNumber = mintActiveStepsCopy.get(stKey);
                loadActiveStep(mintStepNumber);
                if (debug >= 1) {
                    fileLogger.writeEvent("STATE_RUNNING", "About to run " + mstrRobotCommand.substring(0, 3));
                    Log.d("STATE_RUNNING", "About to run " + mstrRobotCommand.substring(0, 3));
                }
                processSteps(mstrRobotCommand.substring(0, 3));
            }

            if ((mintCurStDelay == stepState.STATE_COMPLETE)
                    && (mintCurStBeaconColour5291 == stepState.STATE_COMPLETE)
                    && (mintCurStAttackBeacon5291 == stepState.STATE_COMPLETE)
                    && (mintCurStVuforiaTurn5291 == stepState.STATE_COMPLETE)
                    && (mintCurStVuforiaLoc5291 == stepState.STATE_COMPLETE)
                    && (mintCurStVuforiaMove5291 == stepState.STATE_COMPLETE)
                    && (mintCurStDrive == stepState.STATE_COMPLETE)
                    && (mintCurStDriveHeading == stepState.STATE_COMPLETE)
                    && (mintCurStPivotTurn == stepState.STATE_COMPLETE)
                    && (mintCurStTankTurn == stepState.STATE_COMPLETE)
                    && (mintCurStShootParticle5291 == stepState.STATE_COMPLETE)
                    && (mintCurStSweeper5291 == stepState.STATE_COMPLETE)
                    && (mintCurStEyelids5291 == stepState.STATE_COMPLETE)
                    && (mintCurStLineFind5291 == stepState.STATE_COMPLETE)
                    && (mintCurStGyroTurnEncoder5291 == stepState.STATE_COMPLETE)
                    && (mintCurStTankTurnGyroHeading == stepState.STATE_COMPLETE)
                    && (mintCurStRadiusTurn == stepState.STATE_COMPLETE)) {
                mintCurStStep = stepState.STATE_COMPLETE;
            }
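
            // A hedged refactor sketch (hypothetical, not in the original source): keeping these
            // per-command states in a List<stepState> and testing
            //     states.stream().allMatch(s -> s == stepState.STATE_COMPLETE)
            // would stop this conjunction drifting as new commands are added.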

            //make sure we load the current step to determine if it is parallel; if the steps are run out of order
            //and a previous step was parallel, things get mixed up and a step that isn't parallel can be treated as parallel
            loadActiveStep(mintCurrentStep);
            if (mblnParallel) {
                //mark this step as complete and move to the next one; the current step should continue to run.
                //Not all steps are compatible with being run in parallel (e.g. drive steps, turns, etc.).
                //"Drive forward and shoot" or "drive forward and detect beacon" are examples of when
                //parallel steps should be run; errors will occur if other combinations are run.
                //Only go to the next step if the current step equals the one being processed for parallelism.
                for (String stKey : mintActiveStepsCopy.keySet()) {
                    mintStepNumber = mintActiveStepsCopy.get(stKey);
                    if (mintCurrentStep == mintStepNumber)
                        mintCurStStep = stepState.STATE_COMPLETE;
                }
            }
            break;
        case STATE_PAUSE:
            break;
        case STATE_COMPLETE:
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "Step Complete - Current Step:- " + mintCurrentStep);
                Log.d(TAG, "Step Complete - Current Step:- " + mintCurrentStep);
            }

            //  Transition to a new state and next step.
            mintCurrentStep++;
            mintCurStStep = stepState.STATE_INIT;
            break;
        case STATE_TIMEOUT:
            setDriveMotorPower(0);
            //  Transition to a new state.
            mintCurStStep = stepState.STATE_FINISHED;
            break;
        case STATE_ERROR:
            telemetry.addData("STATE", "ERROR WAITING TO FINISH " + mintCurrentStep);
            break;
        case STATE_FINISHED:
            setDriveMotorPower(0);

            //deactivate vuforia
            velocityVortex.deactivate();

            telemetry.addData("STATE", "FINISHED " + mintCurrentStep);
            if (debug >= 1) {
                if (fileLogger != null) {
                    fileLogger.writeEvent(TAG, "Step FINISHED - FINISHED");
                    fileLogger.writeEvent(TAG, "Stopped");
                    Log.d(TAG, "FileLogger Stopped");
                    fileLogger.close();
                    fileLogger = null;
                }
            }
            break;
        case STATE_ASTAR_PRE_INIT:
            mintCurrentStepAStar = 1; //init the Step for AStar
            //get start point
            //get end point
            int startX = (int) processingSteps.getmRobotParm1();
            int startY = (int) processingSteps.getmRobotParm2();
            int startZ = (int) processingSteps.getmRobotParm3();
            int endX = (int) processingSteps.getmRobotParm4();
            int endY = (int) processingSteps.getmRobotParm5();
            int endDir = (int) processingSteps.getmRobotParm6();

            //before using the path in the command, let's check if we can localise
            if (lastLocation != null) {
                //let's get locations for AStar; direction is most important
                //x and y positions from Vuforia are in mm, AStar works in inches
                //counter-clockwise rotation (x,y) = (-x, y)
                //origin is the center of the field
                //AStar's origin is the top right, so add 6 feet (72 in) to each value
                startX = (int) (localisedRobotX / 25.4) + 72;
                startY = (int) (localisedRobotY / 25.4) + 72;
                //need to rotate the axis -90 degrees
                startZ = (int) localisedRobotBearing;

                if ((startZ > 357) || (startZ < 3)) //wrap-around range, so OR rather than AND
                    startZ = 90;
                else if ((startZ > 267) && (startZ < 273))
                    startZ = 0;
                else if ((startZ > 177) && (startZ < 183))
                    startZ = 270;
                else if ((startZ > 87) && (startZ < 93))
                    startZ = 180;

                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "AStar Init - Localised Values");
                    fileLogger.writeEvent(TAG, "AStar Init - localisedRobotX:        " + localisedRobotX);
                    fileLogger.writeEvent(TAG, "AStar Init - localisedRobotY:        " + localisedRobotY);
                    fileLogger.writeEvent(TAG, "AStar Init - localisedRobotBearing:  " + localisedRobotBearing);
                    fileLogger.writeEvent(TAG, "AStar Init - startX:                 " + startX);
                    fileLogger.writeEvent(TAG, "AStar Init - startY:                 " + startY);
                    fileLogger.writeEvent(TAG, "AStar Init - startZ:                 " + startZ);
                    Log.d(TAG, "AStar Init - Localised Values");
                    Log.d(TAG, "AStar Init - localisedRobotX:        " + localisedRobotX);
                    Log.d(TAG, "AStar Init - localisedRobotY:        " + localisedRobotY);
                    Log.d(TAG, "AStar Init - localisedRobotBearing:  " + localisedRobotBearing);
                    Log.d(TAG, "AStar Init - startX:                 " + startX);
                    Log.d(TAG, "AStar Init - startY:                 " + startY);
                    Log.d(TAG, "AStar Init - startZ:                 " + startZ);
                }
            }

            //process path
            pathValues = getPathValues.findPathAStar(startX, startY, startZ, endX, endY, endDir); //for enhanced
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "AStar Path - length:                 " + pathValues.length);
                Log.d(TAG, "AStar Path - length:                 " + pathValues.length);
            }

            String[][] mapComplete = new String[A0Star.FIELDWIDTH][A0Star.FIELDWIDTH];

            //write path to logfile to verify path
            for (int y = 0; y < a0Star.fieldLength; y++) {
                for (int x = 0; x < a0Star.fieldWidth; x++) {
                    switch (allianceColor) {
                    case "Red":
                        if (a0Star.walkableRed[y][x]) {
                            mapComplete[y][x] = "1";
                            if ((x == startX) && (y == startY))
                                mapComplete[y][x] = "S";
                            else if ((x == endX) && (y == endY))
                                mapComplete[y][x] = "E";
                        } else {
                            mapComplete[y][x] = "0";
                        }
                        break;

                    case "Blue":
                        if (a0Star.walkableBlue[y][x]) {
                            mapComplete[y][x] = "1";
                            if ((x == startX) && (y == startY))
                                mapComplete[y][x] = "S";
                            else if ((x == endX) && (y == endY))
                                mapComplete[y][x] = "E";
                        } else {
                            if ((x == startX) && (y == startY)) {
                                mapComplete[y][x] = "1";
                            } else {
                                mapComplete[y][x] = "0";
                            }
                        }
                        break;

                    default:
                        if (a0Star.walkable[y][x]) {
                            mapComplete[y][x] = "1";
                            if ((x == startX) && (y == startY))
                                mapComplete[y][x] = "S";
                            else if ((x == endX) && (y == endY))
                                mapComplete[y][x] = "E";
                        }
                        break;
                    }
                }
            }

            //plot out path..
            for (int i = 0; i < pathValues.length; i++) {
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                            + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                    Log.d(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                            + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                }
                if (((int) pathValues[i].val1 == 0) && ((int) pathValues[i].val3 == 0)
                        && ((int) pathValues[i].val2 == 0) && ((int) pathValues[i].val4 == 0))
                    break;
                mapComplete[(int) pathValues[i].val3][(int) pathValues[i].val2] = "P";
                if ((pathValues[i].val2 == startX) && (pathValues[i].val3 == startY)) {
                    mapComplete[(int) pathValues[i].val3][(int) pathValues[i].val2] = "S";
                }
            }
            mapComplete[endY][endX] = "E";
            fieldOutput = "";

            for (int y = 0; y < a0Star.fieldLength; y++) {
                for (int x = 0; x < a0Star.fieldWidth; x++) {
                    fieldOutput = "" + fieldOutput + mapComplete[y][x];
                }
                if (debug >= 2) {
                    fileLogger.writeEvent(TAG, fieldOutput);
                    Log.d(TAG, fieldOutput);
                }
                fieldOutput = "";
            }

            //load path in Hashmap
            boolean dirChanged;
            boolean processingAStarSteps = true;
            int startSegment = 1;
            int numberOfMoves;
            int key = 0;
            int lastDirection = 0;
            int lasti = 0;
            String strAngleChange = "RT00";

            while (processingAStarSteps) {
                numberOfMoves = 0;
                for (int i = startSegment; i < pathValues.length; i++) {
                    numberOfMoves++;
                    if (debug >= 2) {
                        fileLogger.writeEvent(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                        Log.d(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                    }
                    if (((int) pathValues[i].val1 == 0) && ((int) pathValues[i].val2 == 0)
                            && ((int) pathValues[i].val3 == 0)) {
                        if (debug >= 2) {
                            fileLogger.writeEvent(TAG, "End Detected");
                            Log.d(TAG, "End Detected");
                        }
                        //end of the sequence,
                        lastDirection = (int) pathValues[i - 1].val4;
                        processingAStarSteps = false;
                        lasti = i;
                    }
                    //need to check if the first step is in a different direction than the start
                    if (i == 1) {
                        if (startZ != pathValues[i].val4) { //need to turn
                            strAngleChange = getAngle(startZ, (int) pathValues[i].val4);
                            if (debug >= 2) {
                                fileLogger.writeEvent(TAG,
                                        "First Step Need to turn Robot " + strAngleChange + " Path "
                                                + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                                fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                        + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                                Log.d(TAG,
                                        "First Step Need to turn Robot " + strAngleChange + " Path "
                                                + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                                Log.d(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                        + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                            }
                            autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                    strAngleChange, false, false, 0, 0, 0, 0, 0, 0, 1));
                            key++;
                            dirChanged = true;
                        } else {
                            dirChanged = false; //no change in direction
                        }
                    } else {
                        //work out the sequence when it is not the first step
                        if (pathValues[i - 1].val4 != pathValues[i].val4) { //need to turn
                            strAngleChange = getAngle((int) pathValues[i - 1].val4, (int) pathValues[i].val4);
                            dirChanged = true;
                        } else {
                            dirChanged = false; //no change in direction
                        }
                    }
                    if ((dirChanged) || (!processingAStarSteps)) {
                        //found end of segment
                        int AStarPathAngle;
                        if (i == 1) {
                            AStarPathAngle = startZ;
                        } else {
                            AStarPathAngle = (int) pathValues[i - 1].val4;
                        }
                        switch (AStarPathAngle) {
                        case 0:
                        case 90:
                        case 180:
                        case 270:
                            if (debug >= 2) {
                                fileLogger.writeEvent(TAG,
                                        "Heading on a Straight line " + (numberOfMoves) + " Path");
                                fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + "FW"
                                        + (numberOfMoves) + ", false, false, 0, 0, 0, 0, 0, 0, 0.8, false) ");
                                Log.d(TAG, "Heading on a Straight line " + (numberOfMoves) + " Path");
                                Log.d(TAG, "Adding Command (" + key + ", 10, " + "FW" + (numberOfMoves)
                                        + ", false, false, 0, 0, 0, 0, 0, 0, 0.8, false) ");
                            }
                            autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                    "FW" + numberOfMoves, false, false, 0, 0, 0, 0, 0, 0, 0.8));
                            numberOfMoves = 0;
                            key++;
                            break;
                        case 45:
                        case 135:
                        case 225:
                        case 315:
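                            // diagonal grid moves cover sqrt(2) ≈ 1.4142 inches per cell,
                            // hence the scaling of numberOfMoves below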
                            if (debug >= 2) {
                                fileLogger.writeEvent(TAG, "Heading on a Straight line "
                                        + (int) ((numberOfMoves) * 1.4142) + " Path");
                                fileLogger.writeEvent(TAG,
                                        "Adding Command (" + key + ", 10, " + "FW"
                                                + (int) ((numberOfMoves) * 1.4142)
                                                + ", false, false, 0, 0, 0, 0, 0, 0, .8, false) ");
                                Log.d(TAG, "Heading on a Straight line " + (int) ((numberOfMoves) * 1.4142)
                                        + " Path");
                                Log.d(TAG,
                                        "Adding Command (" + key + ", 10, " + "FW"
                                                + (int) ((numberOfMoves) * 1.4142)
                                                + ", false, false, 0, 0, 0, 0, 0, 0, .8, false) ");
                            }
                            autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                    "FW" + (int) (numberOfMoves * 1.4142), false, false, 0, 0, 0, 0, 0, 0, 1));
                            numberOfMoves = 0;
                            key++;
                            break;
                        }
                        if (debug >= 2) {
                            fileLogger.writeEvent(TAG,
                                    "Need to turn Robot " + strAngleChange + " Path " + pathValues[i].val1 + " "
                                            + pathValues[i].val2 + " " + pathValues[i].val3 + " Dir:= "
                                            + pathValues[i].val4);
                            fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                    + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                            Log.d(TAG,
                                    "Need to turn Robot " + strAngleChange + " Path " + pathValues[i].val1 + " "
                                            + pathValues[i].val2 + " " + pathValues[i].val3 + " Dir:= "
                                            + pathValues[i].val4);
                            Log.d(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                    + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                        }
                        autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                strAngleChange, false, false, 0, 0, 0, 0, 0, 0, 0.4));
                        key++;
                    }
                    if (!processingAStarSteps)
                        break;

                }
                //need to work out the direction we are facing and the required direction
                if ((lastDirection != endDir) && (!processingAStarSteps)) {
                    if (debug >= 2) {
                        fileLogger.writeEvent(TAG, "Sraight Moves Robot End Of Sequence - Need to Trun Robot");
                        Log.d(TAG, "Sraight Moves Robot End Of Sequence - Need to Trun Robot");
                    }
                    strAngleChange = getAngle((int) pathValues[lasti - 1].val4, endDir);
                    fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                            + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                    Log.d(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                            + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                    autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                            strAngleChange, false, false, 0, 0, 0, 0, 0, 0, 0.4));
                    key++;
                }
            }
            mintCurStStep = stepState.STATE_ASTAR_INIT;

            break;
        case STATE_ASTAR_INIT: {
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "About to check if step exists " + mintCurrentStepAStar);
                Log.d(TAG, "About to check if step exists " + mintCurrentStepAStar);
            }
            // get step from hashmap, send it to the initStep for decoding
            if (autonomousStepsAStar.containsKey(String.valueOf(mintCurrentStepAStar))) {
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "Step Exists TRUE " + mintCurrentStepAStar
                            + " about to get the values from the step");
                    Log.d(TAG, "Step Exists TRUE " + mintCurrentStepAStar
                            + " about to get the values from the step");
                }
                processingSteps = autonomousStepsAStar.get(String.valueOf(mintCurrentStepAStar)); //read the step from the hashmap
                autonomousStepsAStar.remove(String.valueOf(mintCurrentStepAStar)); //remove the step from the hashmap
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "Got the values for step " + mintCurrentStepAStar
                            + " about to decode and removed them");
                    Log.d(TAG, "Got the values for step " + mintCurrentStepAStar
                            + " about to decode and removed them");
                }
                //decode the step from hashmap
                initAStarStep(processingSteps);
            } else {
                //if no steps left in hashmap then complete
                mintCurStStep = stepState.STATE_ASTAR_COMPLETE;
            }
        }
            break;
        case STATE_ASTAR_RUNNING: {
            //move the robot according to the AStar hashmap
            TankTurnStep();
            PivotTurnStep();
            RadiusTurnStep();
            DriveStepHeading();
            if ((mintCurStDriveHeading == stepState.STATE_COMPLETE)
                    && (mintCurStPivotTurn == stepState.STATE_COMPLETE)
                    && (mintCurStTankTurn == stepState.STATE_COMPLETE)
                    && (mintCurStRadiusTurn == stepState.STATE_COMPLETE)) {
                //increment the AStar steps counter
                mintCurrentStepAStar++;
                mintCurStStep = stepState.STATE_ASTAR_INIT;
            }

        }
            break;
        case STATE_ASTAR_ERROR: {
            //do something on error
        }
            break;
        case STATE_ASTAR_COMPLETE: {
            //empty hashmap ready for next AStar processing.
            //clear AStar step counter ready for next AStar process
            mintCurrentStepAStar = 0;

            //when complete, keep processing normal step
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "A* Path Completed:- " + mintCurrentStep);
                Log.d(TAG, "A* Path Completed:- " + mintCurrentStep);
            }

            //  Transition to a new state and next step.
            mintCurrentStep++;
            mintCurStStep = stepState.STATE_INIT;
        }
            break;
        }

        //process LED status
        //ERROR - FLASH RED 3 TIMES
        switch (mint5291LEDStatus) {
        case STATE_TEAM: //FLASH Alliance Colour
            switch (allianceColor) {
            case "Red":
                LedState(LedOff, LedOn, LedOff, LedOff, LedOn, LedOff);
                break;
            case "Blue":
                LedState(LedOff, LedOff, LedOn, LedOff, LedOff, LedOn);
                break;
            default:
                LedState(LedOn, LedOn, LedOn, LedOn, LedOn, LedOn);
                break;
            }
            break;
        case STATE_ERROR: //Flash RED 3 times Rapidly
            if ((!mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOff + 250))) {
                mdblLastOn = mStateTime.milliseconds();
                mblnLEDON = true;
                LedState(LedOff, LedOn, LedOff, LedOff, LedOn, LedOff);
            } else if ((mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOn + 750))) {
                mdblLastOff = mStateTime.milliseconds();
                mblnLEDON = false;
                LedState(LedOff, LedOff, LedOff, LedOff, LedOff, LedOff);
            }
            break;
        case STATE_SUCCESS: //Flash GREEN 3 times Rapidly
            if ((!mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOff + 250))) {
                mdblLastOn = mStateTime.milliseconds();
                mblnLEDON = true;
                LedState(LedOn, LedOff, LedOff, LedOn, LedOff, LedOff);
            } else if ((mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOn + 250))) {
                mdblLastOff = mStateTime.milliseconds();
                mblnLEDON = false;
                LedState(LedOff, LedOff, LedOff, LedOff, LedOff, LedOff);
                mintCounts++;
            }
            if (mintCounts >= 5) {
                mintCounts = 0;
                mint5291LEDStatus = LEDState.STATE_TEAM;
            }
            break;
        case STATE_BEACON: //

            if ((!mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOff + 500))) {
                mdblLastOn = mStateTime.milliseconds();
                mblnLEDON = true;
                if (mColour == Constants.BeaconColours.BEACON_BLUE_RED) { //means red is to the right
                    LedState(LedOff, LedOff, LedOn, LedOff, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED_BLUE) {
                    LedState(LedOff, LedOn, LedOff, LedOff, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_BLUE) {
                    LedState(LedOn, LedOff, LedOff, LedOff, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED) { //red-only beacon
                    LedState(LedOff, LedOn, LedOff, LedOff, LedOff, LedOff);
                }
            } else if ((mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOn + 500))) {
                mdblLastOff = mStateTime.milliseconds();
                mblnLEDON = false;
                if (mColour == Constants.BeaconColours.BEACON_BLUE_RED) { //means red is to the right
                    LedState(LedOff, LedOff, LedOff, LedOff, LedOn, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED_BLUE) {
                    LedState(LedOff, LedOff, LedOff, LedOff, LedOff, LedOn);
                } else if (mColour == Constants.BeaconColours.BEACON_BLUE) {
                    LedState(LedOff, LedOff, LedOff, LedOn, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED) { //red-only beacon
                    LedState(LedOff, LedOff, LedOff, LedOff, LedOn, LedOff);
                }
                mintCounts++;
            }
            if (mintCounts >= 10) {
                mintCounts = 0;
                mint5291LEDStatus = LEDState.STATE_TEAM;
            }
            break;
        case STATE_FINISHED: //Solid Green
            LedState(LedOn, LedOff, LedOff, LedOn, LedOff, LedOff);
            break;

        }
        telemetry.update();
    }
    //opmode not active anymore
}
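
The example above is long, but the part that actually exercises bitmapToMat is small. The following is a minimal sketch of that conversion path, not taken verbatim from the source above, assuming a Vuforia RGB565 Image named rgb has already been captured as in the loop:

    // Vuforia frame -> Android Bitmap -> OpenCV Mat
    Bitmap bm = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
    bm.copyPixelsFromBuffer(rgb.getPixels());
    Mat mat = new Mat(); // bitmapToMat allocates/resizes the destination as needed
    Utils.bitmapToMat(bm, mat); // result is a 4-channel RGBA Mat (CV_8UC4)
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2BGR); // optional, for BGR-based OpenCV code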

From source file:cn.xiongyihui.webcam.setup.java

License:Open Source License

@Override
public boolean onTouchEvent(MotionEvent event) {

    if (event.getAction() == MotionEvent.ACTION_DOWN) {
        try {
            final ImageView imageView = (ImageView) findViewById(R.id.imageView);

            int X = (int) event.getX();
            int Y = (int) event.getY();

            int[] coordinates = new int[2];//{0,0};
            imageView.getLocationOnScreen(coordinates);
            int viewTop = coordinates[1];
            int viewBottom = coordinates[1] + imageView.getHeight();
            try {
                int viewLeft = coordinates[0]; // index 0 is x; coordinates has length 2
                int viewRight = coordinates[0] + imageView.getWidth();
            } catch (Exception e) {
                Log.e(TAG, "getLocationOnScreen:Error!");
            }

            imageViewHeight = (double) viewBottom - viewTop;
            imageViewWidth = aspectRatio * imageViewHeight;

            int imageViewWidthINT = (int) imageViewWidth;
            int imageViewHeightINT = (int) imageViewHeight;

            Display display = getWindowManager().getDefaultDisplay();
            android.graphics.Point size = new android.graphics.Point(); // qualified to avoid clashing with org.opencv.core.Point
            display.getSize(size);
            int widthScreen = (int) size.x;
            int heightScreen = (int) size.y;

            int Yoffset = heightScreen - viewBottom;
            int Xoffset = widthScreen - imageView.getWidth();

            int virtualOriginX = (int) ((widthScreen - imageViewWidthINT + Xoffset) / 2);
            int virtualOriginY = (int) (heightScreen - imageViewHeightINT - Yoffset / 2);

            x0 = X - virtualOriginX;
            y0 = Y - virtualOriginY;

            double openCVratio = (double) bitmapHeight / imageViewHeight;

            x0final = (int) ((double) x0 * openCVratio);
            y0final = (int) ((double) y0 * openCVratio);
        } catch (Exception e) {
            Log.e(TAG, "Touch events are not working!");
        }
    }

    if (event.getAction() == MotionEvent.ACTION_UP) {
        try {
            final ImageView imageView = (ImageView) findViewById(R.id.imageView);

            int X = (int) event.getX();
            int Y = (int) event.getY();

            int[] coordinates = new int[2];//{0,0};
            imageView.getLocationOnScreen(coordinates);
            int viewTop = coordinates[1];
            int viewBottom = coordinates[1] + imageView.getHeight();
            try {
                int viewLeft = coordinates[0]; // left edge on screen; the original indexed coordinates[2], past the end of the two-element array
                int viewRight = coordinates[0] + imageView.getWidth();
            } catch (Exception e) {
                Log.e(TAG, "getLocationOnScreen:Error!");
            }

            imageViewHeight = (double) viewBottom - viewTop;
            imageViewWidth = aspectRatio * imageViewHeight;

            int imageViewWidthINT = (int) imageViewWidth;
            int imageViewHeightINT = (int) imageViewHeight;

            Display display = getWindowManager().getDefaultDisplay();
            android.graphics.Point size = new android.graphics.Point();
            display.getSize(size);
            int widthScreen = (int) size.x;
            int heightScreen = (int) size.y;

            int Yoffset = heightScreen - viewBottom;
            int Xoffset = widthScreen - imageView.getWidth();

            int virtualOriginX = (int) ((widthScreen - imageViewWidthINT + Xoffset) / 2);
            int virtualOriginY = (int) (heightScreen - imageViewHeightINT - Yoffset / 2);

            x1 = X - virtualOriginX;
            y1 = Y - virtualOriginY;

            double openCVratio = (double) bitmapHeight / imageViewHeight;

            x1final = (int) ((double) x1 * openCVratio);
            y1final = (int) ((double) y1 * openCVratio);

            bitmap = BitmapFactory.decodeFile(filePath);
            bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
            Mat frame = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4); // rows = height, cols = width; bitmapToMat fills it as 4-channel RGBA
            Utils.bitmapToMat(bitmap, frame);
            rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
            Core.rectangle(frame, rect.tl(), rect.br(), color, 3);
            Utils.matToBitmap(frame, bitmap);
            imageView.setImageBitmap(bitmap);
        } catch (Exception e) {
            Log.e(TAG, "Touch events are not working!");
        }
    }

    return true;
}
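
The two touch handlers above repeat the same view-to-bitmap coordinate mapping. A minimal sketch of that mapping factored into one helper; the method name is illustrative, and the assumption that the image is centred horizontally and letterboxed vertically comes from the arithmetic above, not from the original class:

// Maps a screen touch point to pixel coordinates in the scaled bitmap
// shown by the ImageView (a sketch; uses the same aspectRatio and
// bitmapHeight fields as the handlers above).
private android.graphics.Point screenToBitmap(ImageView imageView, int touchX, int touchY) {
    int[] coordinates = new int[2];
    imageView.getLocationOnScreen(coordinates);
    int viewTop = coordinates[1];
    int viewBottom = viewTop + imageView.getHeight();

    double viewHeight = viewBottom - viewTop;
    double viewWidth = aspectRatio * viewHeight;

    Display display = getWindowManager().getDefaultDisplay();
    android.graphics.Point size = new android.graphics.Point();
    display.getSize(size);

    int xOffset = size.x - imageView.getWidth();
    int yOffset = size.y - viewBottom;
    int originX = (int) ((size.x - viewWidth + xOffset) / 2);
    int originY = (int) (size.y - viewHeight - yOffset / 2);

    // scale from on-screen pixels to bitmap pixels, as openCVratio does above
    double ratio = (double) bitmapHeight / viewHeight;
    return new android.graphics.Point((int) ((touchX - originX) * ratio),
            (int) ((touchY - originY) * ratio));
}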

From source file:cn.xiongyihui.webcam.setup.java

License:Open Source License

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_setup);

    this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

    final Button cameraButton = (Button) findViewById(R.id.cameraButton);
    final Button selectButton = (Button) findViewById(R.id.selectButton);
    final Button templateButton = (Button) findViewById(R.id.templateButton);
    final Button instructionButton = (Button) findViewById(R.id.instructionButton);
    final ImageView imageView = (ImageView) findViewById(R.id.imageView);

    try {
        int NUMBER_OF_CORES = Runtime.getRuntime().availableProcessors();

        Toast.makeText(this, String.valueOf(NUMBER_OF_CORES), Toast.LENGTH_SHORT).show(); // a bare int would be treated as a string resource id and crash
    } catch (Exception e) {
        Log.e(TAG, "Processor-cores are not getting detected!");
    }

    try {
        final Toast toast = Toast.makeText(this,
                "Please capture image; \n" + "select image; \n"
                        + "Drag-and-drop, swipe on the desired region and confirm template!",
                Toast.LENGTH_LONG);
        final TextView v = (TextView) toast.getView().findViewById(android.R.id.message);
        instructionButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                if (v != null)
                    v.setGravity(Gravity.CENTER);
                toast.show();
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Instructions are not getting displayed!");
    }

    try {
        cameraButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
                startActivityForResult(intent, requestCode);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Camera is not working!");
    }

    try {
        selectButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                Intent i = new Intent(Intent.ACTION_PICK,
                        android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);

                startActivityForResult(i, requestCode);

                bitmap = BitmapFactory.decodeFile(filePath);
                imageView.setImageBitmap(bitmap);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Selection is not working!");
    }

    try {
        templateButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                if (imageView.getDrawable() == null) {
                    Log.e(TAG, "Null ImageView!");
                }
                Log.e(TAG, "Button is working.");
                try {
                    bitmap = BitmapFactory.decodeFile(filePath);
                    bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
                    Mat frame = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4); // rows = height, cols = width (swapped in the original)
                    Utils.bitmapToMat(bitmap, frame);

                    GlobalClass globalVariable = (GlobalClass) getApplicationContext();
                    globalVariable.setTemplateCapturedBitmapHeight(bitmapHeight);
                    globalVariable.setTemplateCapturedBitmapWidth(bitmapWidth);
                    Log.e(TAG, "Bitmap has been set successfully; Template is being generated!");

                    rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
                    Utils.matToBitmap(frame, bitmap);

                    if (x0final < x1final) {
                        x0display = x0final;
                        x1display = x1final;
                    }
                    if (x0final > x1final) {
                        x1display = x0final;
                        x0display = x1final;
                    }
                    if (y0final < y1final) {
                        y0display = y0final;
                        y1display = y1final;
                    }
                    if (y0final > y1final) {
                        y1display = y0final;
                        y0display = y1final;
                    }

                    long timeBegin = System.currentTimeMillis(); // the original cast to int, truncating the epoch time

                    bitmap = Bitmap.createBitmap(bitmap, x0display, y0display, x1display - x0display,
                            y1display - y0display);

                    /*String path = Environment.getExternalStorageDirectory().toString();
                            
                    Log.e(TAG, "File is about to be written!");
                            
                    //File file = new File(path, "TraQuad");
                    //bitmap.compress(Bitmap.CompressFormat.PNG, 100, fOutputStream);
                            
                    //Log.e(TAG, "Stored image successfully!");
                    //fOutputStream.flush();
                    //fOutputStream.close();
                            
                    //MediaStore.Images.Media.insertImage(getContentResolver(), file.getAbsolutePath(), file.getName(), file.getName());*/

                    /*Prominent colors code; This is not working in Android; OpenCV assertion error
                    Log.e(TAG, "Retrieved image successfully!");
                            
                    Imgproc.medianBlur(frame, frame, 3);
                    Log.e(TAG, "Filtered image successfully!");
                            
                    try {
                    Mat mask = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC1);
                    MatOfFloat range = new MatOfFloat(0f, 255f);
                    Mat hist = new Mat();
                    MatOfInt mHistSize = new MatOfInt(256);
                    List<Mat> lHsv = new ArrayList<Mat>(3);
                    Mat hsv = new Mat();
                    Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_RGB2HSV);
                    Core.split(frame, lHsv);
                    Mat mH = lHsv.get(0);
                    Mat mS = lHsv.get(1);
                    Mat mV = lHsv.get(2);
                    ArrayList<Mat> ListMat = new ArrayList<Mat>();
                    ListMat.add(mH);
                    Log.e(TAG, String.valueOf(ListMat));
                    MatOfInt channels = new MatOfInt(0, 1);
                    Imgproc.calcHist(Arrays.asList(mH), channels, mask, hist, mHistSize, range);
                    ListMat.clear();
                    }catch (Exception e){
                    Log.e(TAG, "Prominent colors are not getting detected!");
                    }*/

                    Mat colorFrame = frame.clone(); // keep a colour copy before frame is overwritten with the cropped bitmap

                    Utils.bitmapToMat(bitmap, frame);
                    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGB2GRAY);

                    Log.e(TAG, "Converted color successfully!");

                    int detectorType = FeatureDetector.ORB;
                    //int detectorType = FeatureDetector.SIFT; //SIFT and SURF are not working!
                    //int detectorType = FeatureDetector.SURF;
                    FeatureDetector featureDetector = FeatureDetector.create(detectorType);

                    Log.e(TAG, "Feature detection has begun!");

                    MatOfKeyPoint keypoints = new MatOfKeyPoint();

                    featureDetector.detect(frame, keypoints);

                    Log.e(TAG, "Feature detection has ended successfully!");

                    /*if (!featureDetector.empty()) {
                    //Draw the detected keypoints
                    int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
                    Features2d.drawKeypoints(frame, keypoints, frame, color, flagDraw);
                    Utils.matToBitmap(frame, bitmap);
                    }*/

                    imageView.setImageBitmap(bitmap);

                    Log.e(TAG, "Final bitmap has been loaded!");

                    KeyPoint[] referenceKeypoints = keypoints.toArray();

                    Log.e(TAG, "Number of keypoints detected is " + String.valueOf(referenceKeypoints.length));

                    int iterationMax = referenceKeypoints.length;
                    int iterate = 0;
                    double xFeaturePoint, yFeaturePoint;
                    double xSum = 0, ySum = 0;
                    double totalResponse = 0;
                    double keyPointResponse = 0;
                    double xTemplateCentroid = 0, yTemplateCentroid = 0;

                    DescriptorExtractor descriptorExtractor = DescriptorExtractor
                            .create(DescriptorExtractor.ORB);

                    Mat templateDescriptor = new Mat();

                    descriptorExtractor.compute(frame, keypoints, templateDescriptor);

                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;

                        if (keyPointResponse > 0) {
                            xSum = xSum + keyPointResponse * xFeaturePoint;
                            ySum = ySum + keyPointResponse * yFeaturePoint;
                            totalResponse = totalResponse + keyPointResponse;

                            //Log.e(TAG, "Feature " + String.valueOf(iterate) + ":" + String.valueOf(referenceKeypoints[iterate]));
                        }
                    }

                    xTemplateCentroid = xSum / totalResponse;
                    yTemplateCentroid = ySum / totalResponse;
                    Log.e(TAG, "Finished conversion of features to points!");
                    Log.e(TAG, "Centroid location is: (" + xTemplateCentroid + "," + yTemplateCentroid + ")");

                    double xSquareDistance = 0, ySquareDistance = 0;
                    double distanceTemplateFeatures = 0;
                    int numberOfPositiveResponses = 0;

                    double[] colorValue;
                    double rSum = 0, gSum = 0, bSum = 0;
                    double rCentral, gCentral, bCentral;

                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;

                        colorValue = colorFrame.get((int) yFeaturePoint, (int) xFeaturePoint);
                        rSum = rSum + colorValue[0];
                        gSum = gSum + colorValue[1];
                        bSum = bSum + colorValue[2];

                        if (keyPointResponse > 0) {
                            xSquareDistance = xSquareDistance
                                    + (xFeaturePoint - xTemplateCentroid) * (xFeaturePoint - xTemplateCentroid);
                            ySquareDistance = ySquareDistance
                                    + (yFeaturePoint - yTemplateCentroid) * (yFeaturePoint - yTemplateCentroid);
                            numberOfPositiveResponses++;
                        }
                    }

                    rCentral = rSum / iterationMax;
                    gCentral = gSum / iterationMax;
                    bCentral = bSum / iterationMax;

                    double deltaColor = 21;

                    double rLow = rCentral - deltaColor;
                    double rHigh = rCentral + deltaColor;
                    double gLow = gCentral - deltaColor;
                    double gHigh = gCentral + deltaColor;
                    double bLow = bCentral - deltaColor;
                    double bHigh = bCentral + deltaColor; // the original reused rCentral for all three channels (copy-paste slip)

                    Log.e(TAG, "Prominent color (R,G,B): (" + rCentral + "," + gCentral + "," + bCentral + ")");

                    distanceTemplateFeatures = Math
                            .sqrt((xSquareDistance + ySquareDistance) / numberOfPositiveResponses);

                    KeyPoint[] offsetCompensatedKeyPoints = keypoints.toArray();

                    double xMaxNormalisation, yMaxNormalisation;

                    xMaxNormalisation = x1display - x0display;
                    yMaxNormalisation = y1display - y0display;

                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        offsetCompensatedKeyPoints[iterate].pt.x = offsetCompensatedKeyPoints[iterate].pt.x
                                / xMaxNormalisation;
                        offsetCompensatedKeyPoints[iterate].pt.y = offsetCompensatedKeyPoints[iterate].pt.y
                                / yMaxNormalisation;

                        //Log.e(TAG, "Compensated: (" + String.valueOf(offsetCompensatedKeyPoints[iterate].pt.x) + "," + String.valueOf(offsetCompensatedKeyPoints[iterate].pt.y) + ")");
                    }

                    double xCentroidNormalised, yCentroidNormalised;

                    xCentroidNormalised = (xTemplateCentroid - x0display) / xMaxNormalisation;
                    yCentroidNormalised = (yTemplateCentroid - y0display) / yMaxNormalisation;

                    Log.e(TAG, "Normalised Centroid: (" + String.valueOf(xCentroidNormalised) + ","
                            + String.valueOf(yCentroidNormalised));

                    long timeEnd = System.currentTimeMillis();
                    Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + " milli-seconds!");

                    Log.e(TAG, "RMS distance is: " + distanceTemplateFeatures);

                    globalVariable.setDistanceTemplateFeatures(distanceTemplateFeatures);
                    globalVariable.setX0display(x0display);
                    globalVariable.setY0display(y0display);
                    globalVariable.setX1display(x1display);
                    globalVariable.setY1display(y1display);
                    globalVariable.setKeypoints(keypoints);
                    globalVariable.setXtemplateCentroid(xTemplateCentroid);
                    globalVariable.setYtemplateCentroid(yTemplateCentroid);
                    globalVariable.setTemplateDescriptor(templateDescriptor);
                    globalVariable.setNumberOfTemplateFeatures(iterationMax);
                    globalVariable.setNumberOfPositiveTemplateFeatures(numberOfPositiveResponses);
                    globalVariable.setRhigh(rHigh);
                    globalVariable.setRlow(rLow);
                    globalVariable.setGhigh(gHigh);
                    globalVariable.setGlow(gLow);
                    globalVariable.setBhigh(bHigh);
                    globalVariable.setBlow(bLow);
                    globalVariable.setXnormalisedCentroid(xCentroidNormalised);
                    globalVariable.setYnormalisedCentroid(yCentroidNormalised);
                    globalVariable.setNormalisedTemplateKeyPoints(offsetCompensatedKeyPoints);

                    Log.e(TAG, "Finished setting the global variables!");

                } catch (Exception e) {
                    Log.e(TAG, "Please follow instructions!");
                }
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Template is not working!");
    }

}
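
The template button handler above computes a response-weighted centroid of the ORB keypoints inline. The same calculation extracted into a standalone sketch, using the pre-OpenCV-4 Java API the example itself relies on (FeatureDetector was removed in OpenCV 4, where ORB.create() replaces it):

import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.FeatureDetector;
import org.opencv.features2d.KeyPoint; // org.opencv.core.KeyPoint from OpenCV 3 onwards

public final class TemplateCentroid {

    // Returns {x, y}: each keypoint contributes in proportion to its detector
    // response, so strong features dominate the centroid.
    public static double[] weightedCentroid(Mat grayFrame) {
        FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        detector.detect(grayFrame, keypoints);

        double xSum = 0, ySum = 0, totalResponse = 0;
        for (KeyPoint kp : keypoints.toArray()) {
            if (kp.response > 0) {
                xSum += kp.response * kp.pt.x;
                ySum += kp.response * kp.pt.y;
                totalResponse += kp.response;
            }
        }
        if (totalResponse == 0) return new double[] { 0, 0 }; // no responsive keypoints
        return new double[] { xSum / totalResponse, ySum / totalResponse };
    }
}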

From source file:com.dft.fingerwizardsampleapp.FingerWizardSample.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (FINGER_WIZARD_REQUEST_CODE == requestCode) {
        // If you want to display the preprocessed image on screen, put it in an ImageView
        // or similar
        ImageView preprocessedBitmapImageView = (ImageView) findViewById(R.id.preprocessed_bitmap_image_view);
        Bitmap processedBitmap = null;
        // Decode the bitmap from the file
        try {
            FileInputStream fis = new FileInputStream(mPreprocessedImageFile);
            InputStream buffer = new BufferedInputStream(fis);
            processedBitmap = BitmapFactory.decodeStream(buffer);
            buffer.close();
        } catch (IOException iox) {
            Log.e(TAG, "Cannot perform input of processedBitmapFile. " + iox);
        }
        // TODO: this is an example of how to show the preprocessedBitmap.  In a complete application, we'd need
        // to handle screen orientation changes
        preprocessedBitmapImageView.setImageBitmap(processedBitmap);

        /**
         * It is possible to use Onyx's built-in image pyramiding as follows
         */
        if (processedBitmap != null) {
            double[] imageScales = new double[] { 0.8, 1.0, 1.2 }; // 80%, 100%, and 120%
            ArrayList<byte[]> scaledWSQs = new ArrayList<byte[]>();

            Mat mat = new Mat();
            Utils.bitmapToMat(processedBitmap, mat);
            Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2GRAY); // ensure image is grayscale

            MatVector vector = core.pyramidImage(mat, imageScales);
            for (int i = 0; i < imageScales.length; i++) {
                scaledWSQs.add(core.matToWsq(vector.get(i)));
            }

            for (int i = 0; i < scaledWSQs.size(); i++) {
                // TODO: send scaledWSQs.get(i) to server for matching...
                File inputFile = new File(Environment.getExternalStorageDirectory(),
                        "matToWsQ" + System.currentTimeMillis() / 1000 + ".wsq");

                try {
                    FileOutputStream fos = new FileOutputStream(inputFile.getPath());
                    fos.write(scaledWSQs.get(i));
                    fos.close();
                } catch (IOException e) {
                    Log.e(TAG, e.getMessage());
                }
            }
        }

        // Get the EnrollmentMetric
        EnrollmentMetric em = null;
        if (data != null && data.hasExtra(Consts.EXTRA_ENROLLMENT_METRIC)) {
            em = (EnrollmentMetric) data.getSerializableExtra(Consts.EXTRA_ENROLLMENT_METRIC);
        }

        // Get the finger location
        if (em != null) {
            String fingerLocation = em.getFingerLocation().toString();
            Log.d(TAG, "The fingerLocation, " + fingerLocation + ", is the String "
                    + "representation of the finger in the enum, EnumFinger.");

            // If you want a fingerprint template for enrollment, and can be
            // matched using Onyx, get it in the following manner
            FingerprintTemplate ft = em.getFingerprintTemplateArray()[0];
            // The fingerprint template contains the NFIQ score of the pre-processed image
            // that was used to create it
            Log.d(TAG, "FingerprintTemplate NFIQ Score = " + ft.getNfiqScore());

            // The EnrollmentMetric also contains the NFIQ score
            int nfiqScore = em.getHighestNFIQScore();
            Log.d(TAG, "NFIQ Score = " + nfiqScore);
        }
    }
}
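
A note on the conversions used throughout these examples: Utils.bitmapToMat turns an ARGB_8888 bitmap into a 4-channel RGBA Mat (CV_8UC4), and Utils.matToBitmap accepts 1-, 3- or 4-channel 8-bit Mats. A minimal round trip, with an illustrative file path:

Bitmap bmp = BitmapFactory.decodeFile("/sdcard/sample.jpg"); // path is illustrative
Mat rgba = new Mat();
Utils.bitmapToMat(bmp, rgba); // rgba is CV_8UC4 (RGBA) afterwards

Mat gray = new Mat();
Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);
// ... single-channel processing goes here ...

Bitmap out = Bitmap.createBitmap(bmp.getWidth(), bmp.getHeight(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(gray, out);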

From source file:com.example.root.dipproj.MainActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == RESULT_OK) {
        if (requestCode == 1) {
            File f = new File(Environment.getExternalStorageDirectory().toString());
            for (File temp : f.listFiles()) {
                if (temp.getName().equals("temp.jpg")) {
                    f = temp;
                    break;
                }
            }
            try {
                Bitmap bitmap;
                BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
                bitmap = BitmapFactory.decodeFile(f.getAbsolutePath(), bitmapOptions);
                viewImage.setImageBitmap(bitmap);
                String path = android.os.Environment.getExternalStorageDirectory() + File.separator + "Phoenix"
                        + File.separator + "default";
                f.delete();
                OutputStream outFile = null;
                File file = new File(path, String.valueOf(System.currentTimeMillis()) + ".jpg");
                try {
                    outFile = new FileOutputStream(file);
                    bitmap.compress(Bitmap.CompressFormat.JPEG, 85, outFile);
                    outFile.flush();
                    outFile.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else if (requestCode == 2) {
            Uri selectedImage = data.getData();
            String[] filePath = { MediaStore.Images.Media.DATA };
            Cursor c = getContentResolver().query(selectedImage, filePath, null, null, null);
            c.moveToFirst();
            int columnIndex = c.getColumnIndex(filePath[0]);
            String picturePath = c.getString(columnIndex);
            c.close();
            Bitmap thumbnail = (BitmapFactory.decodeFile(picturePath));
            Log.w("path of image", picturePath + "");
            Mat imgMat = new Mat();
            Mat imgMat2 = new Mat();
            Mat imgMat3 = new Mat();
            Utils.bitmapToMat(thumbnail, imgMat);
            Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_RGB2GRAY);
            org.opencv.core.Size s = new Size(3, 3);
            Imgproc.createCLAHE(); // note: the returned CLAHE object is never applied here; see the sketch after this example
            Imgproc.GaussianBlur(imgMat, imgMat, s, 2);
            Imgproc.erode(imgMat, imgMat2, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
            Imgproc.dilate(imgMat2, imgMat3, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
            Imgproc.Sobel(imgMat, imgMat, CvType.CV_8UC1, 1, 0);
            Core.absdiff(imgMat, imgMat3, imgMat);
            Imgproc.threshold(imgMat, imgMat, 123, 255, Imgproc.THRESH_OTSU);
            Utils.matToBitmap(imgMat, thumbnail);
            viewImage.setImageBitmap(thumbnail);
            saveBitmaptoSDCard(thumbnail);
        }
    }
}
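
As noted in the comment above, Imgproc.createCLAHE() in this example constructs an equalizer whose result is discarded. A minimal sketch of applying it; the clip limit and tile size are illustrative values, not taken from the original:

// Contrast-limited adaptive histogram equalization on a grayscale Mat.
CLAHE clahe = Imgproc.createCLAHE(2.0, new Size(8, 8)); // clipLimit, tileGridSize
clahe.apply(imgMat, imgMat); // source and destination must be 8-bit single-channel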

From source file:com.team.formal.eyeshopping.ActivityFindingResults.java

License:Open Source License

private void callCloudVision(final Bitmap bitmap) throws IOException {
    // Do the real work in an async task, because we need to use the network anyway
    new AsyncTask<Object, Void, ArrayList<String>>() {
        final ProgressDialog asyncDialog = new ProgressDialog(ActivityFindingResults.this);

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            asyncDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
            asyncDialog.setMessage("Loading Products ...");
            asyncDialog.show();
        }

        @Override
        protected ArrayList<String> doInBackground(Object... params) {
            try {
                HttpTransport httpTransport = AndroidHttp.newCompatibleTransport();
                JsonFactory jsonFactory = GsonFactory.getDefaultInstance();

                VisionRequestInitializer requestInitializer = new VisionRequestInitializer(
                        CLOUD_VISION_API_KEY) {
                    /**
                     * We override this so we can inject important identifying fields into the HTTP
                     * headers. This enables use of a restricted cloud platform API key.
                     */
                    @Override
                    protected void initializeVisionRequest(VisionRequest<?> visionRequest) throws IOException {
                        super.initializeVisionRequest(visionRequest);

                        String packageName = getPackageName();
                        visionRequest.getRequestHeaders().set(ANDROID_PACKAGE_HEADER, packageName);

                        String sig = PackageManagerUtils.getSignature(getPackageManager(), packageName);

                        visionRequest.getRequestHeaders().set(ANDROID_CERT_HEADER, sig);
                    }
                };

                Vision.Builder builder = new Vision.Builder(httpTransport, jsonFactory, null);
                builder.setVisionRequestInitializer(requestInitializer);

                Vision vision = builder.build();

                BatchAnnotateImagesRequest batchAnnotateImagesRequest = new BatchAnnotateImagesRequest();
                batchAnnotateImagesRequest.setRequests(new ArrayList<AnnotateImageRequest>() {
                    {
                        AnnotateImageRequest annotateImageRequest = new AnnotateImageRequest();

                        // Add the image
                        Image base64EncodedImage = new Image();
                        // Convert the bitmap to a JPEG
                        // Just in case it's a format that Android understands but Cloud Vision doesn't
                        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, byteArrayOutputStream);
                        byte[] imageBytes = byteArrayOutputStream.toByteArray();

                        // Base64 encode the JPEG
                        base64EncodedImage.encodeContent(imageBytes);
                        annotateImageRequest.setImage(base64EncodedImage);
                        // add the features we want
                        annotateImageRequest.setFeatures(new ArrayList<Feature>() {
                            {
                                Feature webDetection = new Feature();
                                webDetection.setType("WEB_DETECTION");
                                webDetection.setMaxResults(20);
                                add(webDetection);
                            }
                        });

                        // Add the list of one thing to the request
                        add(annotateImageRequest);
                    }
                });

                Vision.Images.Annotate annotateRequest = vision.images().annotate(batchAnnotateImagesRequest);
                // Due to a bug: requests to Vision API containing large images fail when GZipped.
                annotateRequest.setDisableGZipContent(true);
                Log.d(TAG, "created Cloud Vision request object, sending request");

                BatchAnnotateImagesResponse response = annotateRequest.execute();

                return convertResponseToString(response);
            } catch (GoogleJsonResponseException e) {
                Log.d(TAG, "failed to make API request because " + e.getContent());
            } catch (IOException e) {
                Log.d(TAG, "failed to make API request because of other IOException " + e.getMessage());
            }
            return null;
        }

        @Override
        protected void onPostExecute(ArrayList<String> urls) {
            super.onPostExecute(urls);
            // log the URLs returned by Cloud Vision's web detection

            for (int i = 0; i < urls.size(); i++) {
                Log.d("pages", urls.get(i));
            }
            ArrayList<String[]> parsedUrl = urlParsing(urls);
            Map<String, Integer> map = new HashMap<String, Integer>();

            // 1. count how often each token appears across the parsed URLs
            for (int i = 0; i < parsedUrl.size(); i++) {
                for (int j = 0; j < parsedUrl.get(i).length; j++) {
                    System.out.println(parsedUrl.get(i)[j]);
                    Integer count = map.get(parsedUrl.get(i)[j]);
                    map.put(parsedUrl.get(i)[j], (count == null) ? 1 : count + 1);
                }
                System.out.println("");
            }

            // build random combinations of the most frequent keywords,
            // then query the Naver Shop API with each combination
            ArrayList<String> rankCount = new ArrayList<>();
            ArrayList<ArrayList<String>> resultArr = new ArrayList<ArrayList<String>>();

            for (Map.Entry<String, Integer> entry : map.entrySet()) {
                if (entry.getValue() >= 4) {
                    System.out.println("keyword : " + entry.getKey() + " Count : " + entry.getValue());
                    rankCount.add(entry.getKey());
                }
            }

            final ArrayList<String> coreKeywords = new ArrayList<>();

            if (!rankCount.isEmpty()) {
                for (int k = 0; k < 7; k++) {
                    int randomCount = randomRange(1, rankCount.size());
                    boolean checkDuplicate[] = new boolean[rankCount.size()];
                    String combiw = "";
                    for (int i = 0; i < randomCount; i++) {
                        int rand1;
                        while (checkDuplicate[(rand1 = randomRange(0, rankCount.size() - 1))]) {
                        }
                        combiw += rankCount.get(rand1) + "%20";
                        Log.d("combi", combiw);
                        checkDuplicate[rand1] = true;
                    }
                    coreKeywords.add(combiw);
                }
                for (int i = 0; i < coreKeywords.size(); i++) {
                    Log.d("coreKey", coreKeywords.get(i));
                }
                //                    for (String arr[] : parsedUrl) {
                //                        int count = 0;
                //                        boolean check[] = new boolean[arr.length];
                //                        ArrayList<String> strArr = new ArrayList<>();
                //
                //                        for (int i = 0; i < arr.length; i++) {
                //                            for (String rank : rankCount) {
                //                                if (arr[i].equals(rank)) {
                //                                    check[i] = true;
                //                                    strArr.add(arr[i]);
                //                                    count++;
                //                                }
                //                            }
                //                            Log.d("strArr", strArr.toString());
                //                        }
                //
                //                        int rand;
                //                        int randSize = randomRange(1, arr.length - count);
                //                        for (int i = 0; i < randSize; i++) {
                //                            while (check[(rand = randomRange(0, arr.length - 1))]) {
                //                            }
                //                            strArr.add(arr[rand]);
                //                            check[rand] = true;
                //                        }
                //                        resultArr.add(strArr);
                //                        Log.d("raa", resultArr.toString());
                //                    }
            } // end of isEmpty()

            new AsyncTask<Object, Void, List<Shop>>() {
                @Override
                protected List<Shop> doInBackground(Object... params) {
                    List<Shop> results = new ArrayList<>();

                    if (results.size() > 5) // no-op at this point (results is still empty); the effective cap is applied below
                        results = results.subList(0, 5);

                    for (int i = 0; i < coreKeywords.size(); i++) {
                        System.out.println(coreKeywords.get(i).toString().replaceAll(", ", "%20"));
                        Log.d("uri", coreKeywords.get(i).toString().replaceAll(", ", "%20"));

                        final String xmlRaw = coreKeywords.get(i).toString().replaceAll(", ", "%20");

                        // 1
                        URL url = null;
                        try {
                            url = new URL("https://openapi.naver.com/v1/search/shop.xml?query=" + xmlRaw
                                    + "&display=50");
                        } catch (MalformedURLException e) {
                            e.printStackTrace();
                        }

                        HttpURLConnection urlConnection = null;
                        try {
                            urlConnection = (HttpURLConnection) url.openConnection();
                            urlConnection.setRequestProperty("X-Naver-Client-ID", clientID);
                            urlConnection.setRequestProperty("X-Naver-Client-Secret", clientSecret);
                            urlConnection.setRequestProperty("User-Agent", "Mozilla/5.0 ( compatible ) ");
                            urlConnection.setRequestProperty("Accept", "*/*");
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        InputStream in = null;
                        try {
                            in = new BufferedInputStream(urlConnection.getInputStream());
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        String data = "";
                        String msg = null;

                        BufferedReader br = null;
                        try {
                            if (in != null) {
                                br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
                            }
                        } catch (UnsupportedEncodingException e) {
                            e.printStackTrace();
                        }

                        try {
                            if (br != null) {
                                while ((msg = br.readLine()) != null) {
                                    data += msg;
                                }
                            }
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        Log.i("msg of br: ", data);

                        // 2
                        String shopResult = data;
                        try {
                            List<Shop> parsingResult = parsingShopResultXml(shopResult);
                            if (parsingResult.size() > 5)
                                parsingResult = parsingResult.subList(0, 5);

                            for (final Shop shop : parsingResult) {
                                Bitmap thumbImg = getBitmapFromURL(shop.getImage());
                                if (thumbImg != null) {
                                    ArrayList<String> keywords = new ArrayList<>(Arrays.asList(coreKeywords
                                            .get(i).replace("[", "").replace("]", "").split("%20")));
                                    String combinationKeyword = coreKeywords.get(i).replace("[", "")
                                            .replace("]", "").replaceAll("%20", " ");

                                    shop.setThumbBmp(thumbImg); // thumbnail bitmap fetched from the shop's image URL
                                    shop.setCombinationKeyword(combinationKeyword);
                                    shop.setKeywords(keywords);
                                    results.add(shop);
                                }
                            }

                            if (results.size() > 10) // cap the list at 10 results
                                results = results.subList(0, 10);

                            for (Shop dummyShop : results) {
                                mNaverPrImg = dummyShop.getThumbBmp();
                                Mat userSelImgTarget = new Mat(userSelImg.height(), userSelImg.width(),
                                        CvType.CV_8UC4); // rows = height, cols = width (swapped in the original)
                                Mat naverPrImgTarget = new Mat(mNaverPrImg.getHeight(), mNaverPrImg.getWidth(),
                                        CvType.CV_8UC4);

                                Utils.bitmapToMat(mNaverPrImg, naverPrImgTarget);
                                Imgproc.cvtColor(userSelImg, userSelImgTarget, Imgproc.COLOR_BGR2RGB);
                                Imgproc.cvtColor(naverPrImgTarget, naverPrImgTarget, Imgproc.COLOR_RGBA2RGB);

                                int ret = AkazeFeatureMatching(userSelImg.getNativeObjAddr(),
                                        naverPrImgTarget.getNativeObjAddr());

                                if (ret == 1) { // find one!
                                    DecimalFormat df = new DecimalFormat("#,###");
                                    String num = df.format(dummyShop.getLprice());
                                    int exist_flag = 0;
                                    for (int ii = 0; ii < findingItems.size(); ii++) {
                                        if (findingItems.get(ii).getProductName()
                                                .equals(dummyShop.getTitle())) {
                                            exist_flag = 1;
                                            break;
                                        }
                                    }
                                    if (exist_flag == 0) {
                                        // price label: the original currency suffix was garbled in encoding; "KRW" is substituted here
                                        findingItems.add(new Results_GridItem(dummyShop.getTitle(), mNaverPrImg,
                                                " " + num + " KRW", dummyShop.getLprice(),
                                                dummyShop.getLink(), dummyShop.getKeywords(),
                                                dummyShop.getCombinationKeyword(), dummyShop.getImage()));
                                    }
                                }
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    } // end of for

                    return results;
                } // end of doinbackground

                @Override
                protected void onPostExecute(List<Shop> shops) {
                    super.onPostExecute(shops);

                    TextView t = (TextView) findViewById(R.id.loadingText);
                    t.setVisibility(View.GONE);
                    GridView g = (GridView) findViewById(R.id.list_view);
                    g.setVisibility(View.VISIBLE);

                    if (findingItems.size() == 0) {
                        TextView tLoad = (TextView) findViewById(R.id.loadingText);
                        tLoad.setText("   .");
                        tLoad.setVisibility(View.VISIBLE);
                        gridView.setVisibility(View.GONE);
                    } else {
                        Log.d(TAG, "finding Size!!!!" + Integer.toString(findingItems.size()));
                        Collections.sort(findingItems, new Comparator<Results_GridItem>() {
                            @Override
                            public int compare(Results_GridItem o1, Results_GridItem o2) {
                                if (o1.getPrice() > o2.getPrice()) {
                                    return 1;
                                } else if (o1.getPrice() < o2.getPrice()) {
                                    return -1;
                                } else {
                                    return 0;
                                }
                            }
                        });
                        for (int i = 0; i < findingItems.size(); i++) {
                            Log.d(TAG, "List !! " + Integer.toString(findingItems.get(i).getPrice()));
                        }
                        Log.d(TAG, "finding Size!!!!" + Integer.toString(findingItems.size()));
                        gridViewAdapter = new GridViewAdapter(getApplicationContext(), findingItems);
                        gridView.setAdapter(gridViewAdapter);
                    }
                    asyncDialog.dismiss();
                }
            }.execute();

        } // end of PostExcute
    }.execute();
}
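
AkazeFeatureMatching above is a native (JNI) routine: Mat.getNativeObjAddr() hands the address of the underlying cv::Mat across the JNI boundary. A plausible Java-side declaration; the library name is a guess, not taken from the source:

static {
    System.loadLibrary("native-matcher"); // hypothetical library name
}

// Returns 1 when the query image matches the candidate, as interpreted above.
public native int AkazeFeatureMatching(long queryMatAddr, long candidateMatAddr);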

From source file:com.wallerlab.processing.utilities.ImageUtils.java

License:BSD License

public static Mat toMat(Bitmap bmp) {
    Mat mat = new Mat();
    Utils.bitmapToMat(bmp, mat);
    return mat;
}
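
A matching inverse of this helper, plus a round-trip usage line; the toBitmap name is illustrative:

public static Bitmap toBitmap(Mat mat) {
    // matToBitmap needs an ARGB_8888 (or RGB_565) bitmap of the same size
    Bitmap bmp = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mat, bmp);
    return bmp;
}

// usage: Bitmap out = toBitmap(process(ImageUtils.toMat(in)));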