Example usage for org.opencv.core Size Size

List of usage examples for org.opencv.core Size Size

Introduction

In this page you can find the example usage for org.opencv.core Size Size.

Prototype

public Size() 

Source Link

Usage

From source file:net.pandorica.opencv.pano.PanoActivity.java

License:Open Source License

/**
 * Prepares the dialogs' dynamic content.
 */
@Override
@Override
protected void onPrepareDialog(int id, Dialog dialog) {
    switch (id) {
    case DIALOG_RESULTS:
        refreshView();

        // imread allocates and returns its own Mat, so pre-allocating one here
        // (as the original did) leaked a native Mat. Only mYuv needs a fresh Mat.
        Mat mIntermediate = Highgui.imread(mDirPath + mSubDir + mImagePrefix + mCurrentImage + mType);
        Mat mYuv = new Mat();

        // Rotate 90 degrees: transpose followed by a horizontal flip.
        Core.transpose(mIntermediate, mYuv);
        Core.flip(mYuv, mIntermediate, 1);

        // Downscale to 25% for the preview thumbnail; INTER_AREA suits shrinking.
        Imgproc.resize(mIntermediate, mYuv, new Size(), 0.25, 0.25, Imgproc.CV_INTER_AREA);

        /** Currently Not Working in OpenCV **/
        /*
        Bitmap jpg = Bitmap.createBitmap(mIntermediate.cols(), mIntermediate.rows(),
            Bitmap.Config.ARGB_8888);
        android.MatToBitmap(mIntermediate, jpg);
        */
        /** So we resort to this method: write the scaled image to disk and decode it back **/
        Highgui.imwrite(mDirPath + mSubDir + mImagePrefix + mCurrentImage + smallType, mYuv);
        Bitmap jpg = BitmapFactory.decodeFile(mDirPath + mSubDir + mImagePrefix + mCurrentImage + smallType);
        /** **/

        // Free native memory held by the Mats. The OpenCV Java API method is
        // release(); the original called a non-existent dispose().
        mIntermediate.release();
        mYuv.release();

        ImageView image = (ImageView) dialog.findViewById(R.id.image);
        image.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
        image.setAdjustViewBounds(true);
        image.setPadding(2, 2, 2, 2);
        image.setImageBitmap(jpg);

        // Capture advances to the next image index before taking a photo;
        // retake re-shoots the current index.
        Button capture = (Button) dialog.findViewById(R.id.capture);
        capture.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                mCurrentImage++;
                capturePhoto();
            }
        });
        Button retake = (Button) dialog.findViewById(R.id.retake);
        retake.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                capturePhoto();
            }
        });
        Button stitch = (Button) dialog.findViewById(R.id.stitch);
        stitch.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                new StitchPhotoTask().execute();
            }
        });

        break;
    case DIALOG_SUCCESS:
        // Show the stitched output image and offer share/close actions.
        final File img = new File(mDirPath + mSubDir + mOutputImage);
        Bitmap result = BitmapFactory.decodeFile(img.getAbsolutePath());

        ImageView png = (ImageView) dialog.findViewById(R.id.image);
        png.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
        png.setAdjustViewBounds(true);
        png.setPadding(3, 3, 3, 3);
        png.setImageBitmap(result);

        refreshImage(mGalleryImage);
        refreshView();

        Button share = (Button) dialog.findViewById(R.id.share);
        share.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                shareImage(img);
                dismissDialog(DIALOG_SUCCESS);
            }
        });

        Button exit = (Button) dialog.findViewById(R.id.close);
        exit.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                dismissDialog(DIALOG_SUCCESS);
            }
        });
        break;
    case DIALOG_FEATURE_COMPARISON:
        // Persist the "show tip" preference whenever the checkbox is toggled.
        CheckBox box = (CheckBox) dialog.findViewById(R.id.show_tip);
        box.setChecked(mShowTip);
        box.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                mShowTip = ((CheckBox) v).isChecked();
                SharedPreferences settings = getSharedPreferences(SETTINGS, 0);
                SharedPreferences.Editor editor = settings.edit();
                editor.putBoolean(SETTINGS_SHOW_TIP, mShowTip);
                editor.commit();
            }
        });

        Button cont = (Button) dialog.findViewById(R.id.tip_continue);
        cont.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(View v) {
                capturePhoto();
            }
        });
        break;
    default:
        super.onPrepareDialog(id, dialog);
    }
}

From source file:nz.ac.auckland.lablet.vision.CamShiftTracker.java

License:Open Source License

/**
 * Finds the dominant colour in an image, and returns two values in HSV colour space to represent similar colours,
 * e.g. so you can keep all colours similar to the dominant colour.
 *
 * How the algorithm works:
 *
 * 1. Scale the frame down so that algorithm doesn't take too long.
 * 2. Segment the frame into different colours (number of colours determined by k)
 * 3. Find dominant cluster (largest area) and get its central colour point.
 * 4. Get range (min max) to represent similar colours.
 *
 * @param bgr The input frame, in BGR colour space.
 * @param k The number of segments to use (2 works well).
 * @return The min and max HSV colour values, which represent the colours similar to the dominant colour.
 */
private Pair<Scalar, Scalar> getMinMaxHsv(Mat bgr, int k) {
    // NOTE(review): the original comment said "Convert to HSV", but the code
    // uses COLOR_BGR2BGRA (with dstCn = 3) — confirm whether COLOR_BGR2HSV
    // was intended; the rest of the method indexes channels as B,G,R.
    Mat input = new Mat();
    Imgproc.cvtColor(bgr, input, Imgproc.COLOR_BGR2BGRA, 3);

    // Scale the image down (preserving aspect ratio) so k-means stays fast.
    // Size fields are doubles, so the division below is floating-point.
    Size bgrSize = bgr.size();
    Size newSize = new Size();

    if (bgrSize.width > CamShiftTracker.KMEANS_IMG_SIZE || bgrSize.height > CamShiftTracker.KMEANS_IMG_SIZE) {

        if (bgrSize.width > bgrSize.height) {
            newSize.width = CamShiftTracker.KMEANS_IMG_SIZE;
            newSize.height = CamShiftTracker.KMEANS_IMG_SIZE / bgrSize.width * bgrSize.height;
        } else {
            newSize.width = CamShiftTracker.KMEANS_IMG_SIZE / bgrSize.height * bgrSize.width;
            newSize.height = CamShiftTracker.KMEANS_IMG_SIZE;
        }

        Imgproc.resize(input, input, newSize);
    }

    // Image quantization using k-means: each pixel becomes one float row,
    // normalized to [0,1]. See http://bit.ly/1JIvrlB for the algorithm.
    Mat clusterData = new Mat();

    Mat reshaped = input.reshape(1, input.rows() * input.cols());
    reshaped.convertTo(clusterData, CvType.CV_32F, 1.0 / 255.0);
    Mat labels = new Mat();
    Mat centres = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 50, 1);
    Core.kmeans(clusterData, k, labels, criteria, 1, Core.KMEANS_PP_CENTERS, centres);

    // Count how many pixels were assigned to each of the k clusters.
    int[] counts = new int[k];

    for (int i = 0; i < labels.rows(); i++) {
        int label = (int) labels.get(i, 0)[0];
        counts[label] += 1;
    }

    // Find the cluster with the most members (the dominant colour).
    // NOTE(review): if labels were empty, index would stay -1 and the
    // centres.get() calls below would fail — presumably the input frame is
    // never empty; confirm against callers.
    int maxCluster = 0;
    int index = -1;

    for (int i = 0; i < counts.length; i++) {
        int value = counts[i];

        if (value > maxCluster) {
            maxCluster = value;
            index = i;
        }
    }

    // Dominant cluster's centre colour, read in B,G,R channel order and
    // rescaled from [0,1] back to [0,255]. "sum" is the mean brightness.
    int r = (int) (centres.get(index, 2)[0] * 255.0);
    int g = (int) (centres.get(index, 1)[0] * 255.0);
    int b = (int) (centres.get(index, 0)[0] * 255.0);
    int sum = (r + g + b) / 3;

    // Derive an HSV min/max range around the dominant colour, with special
    // cases for near-white and near-black (where hue is unreliable).
    Scalar min;
    Scalar max;

    int rg = Math.abs(r - g);
    int gb = Math.abs(g - b);
    int rb = Math.abs(r - b);
    int maxDiff = Math.max(Math.max(rg, gb), rb);

    if (maxDiff < 35 && sum > 120) { //white: low saturation, any hue
        min = new Scalar(0, 0, 0);
        max = new Scalar(180, 40, 255);
    } else if (sum < 50 && maxDiff < 35) { //black: low value, any hue
        min = new Scalar(0, 0, 0);
        max = new Scalar(180, 255, 40);
    } else {
        // Convert the single dominant BGR colour to HSV to get its hue.
        Mat bgrColour = new Mat(1, 1, CvType.CV_8UC3, new Scalar(r, g, b));
        Mat hsvColour = new Mat();

        Imgproc.cvtColor(bgrColour, hsvColour, Imgproc.COLOR_BGR2HSV, 3);
        double[] hsv = hsvColour.get(0, 0);

        // If the lower hue bound underflows 0, widen the upper bound by the
        // overflow amount instead (OpenCV hue range is 0..180).
        int addition = 0;
        int minHue = (int) hsv[0] - colourRange;
        if (minHue < 0) {
            addition = Math.abs(minHue);
        }

        int maxHue = (int) hsv[0] + colourRange;

        min = new Scalar(Math.max(minHue, 0), 60, Math.max(35, hsv[2] - 30));
        max = new Scalar(Math.min(maxHue + addition, 180), 255, 255);
    }

    return new Pair<>(min, max);
}

From source file:org.designosaurs.opmode.DesignosaursAuto.java

/**
 * Autonomous entry point: configures hardware, Vuforia and OpenCV, waits for
 * the start signal, then runs the beacon-finding/scoring state machine until
 * the opmode stops.
 */
@Override
public void runOpMode() {
    appContext = hardwareMap.appContext;
    // NOTE(review): startTime is assigned below but never read within this
    // method — possibly leftover or used via a field elsewhere; confirm.
    long startTime;

    if (TEST_MODE) {
        DesignosaursHardware.hardwareEnabled = false;

        Log.i(TAG, "*** TEST MODE ENABLED ***");
        Log.i(TAG, "Hardware is disabled, skipping to beacon search state.");
        Log.i(TAG, "Web debugging interface can be found at http://"
                + DesignosaursUtils.getIpAddress(appContext) + ":9001/");

        autonomousState = STATE_SEARCHING;
    }

    setInitState("Configuring hardware...");
    robot.init(hardwareMap);

    setInitState("Initializing vuforia...");
    VuforiaLocalizer.Parameters params = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    params.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    params.vuforiaLicenseKey = VUFORIA_LICENCE_KEY;

    vuforia = new VuforiaLocalizerImplSubclass(params);

    // Vuforia tracks the images, we'll call them beacons
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("wheels");
    beacons.get(1).setName("tools");
    beacons.get(2).setName("legos");
    beacons.get(3).setName("gears");

    setInitState("Initializing OpenCV...");
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, appContext, mLoaderCallback);

    setInitState("Select team color using the gamepad.");

    // Init loop: let the drivers pick a team color until start is pressed.
    while (!isStarted() && !isStopRequested()) {
        updateTeamColor();

        robot.waitForTick(25);
    }

    if (DesignosaursHardware.hardwareEnabled) {
        buttonPusherManager.start();
        buttonPusherManager.setStatus(ButtonPusherManager.STATE_HOMING);
        shooterManager.start();
        shooterManager.setStatus(ShooterManager.STATE_AT_BASE);
    }

    if (HOTEL_MODE)
        setState(STATE_SEARCHING);

    beacons.activate();
    startTime = System.currentTimeMillis();
    robot.startTimer();

    long ticksSeeingImage = 0;
    // Main autonomous loop: track beacons via Vuforia, stream debug frames,
    // and advance the state machine once per iteration.
    while (opModeIsActive()) {
        String beaconName = "";

        // Detect and process images
        for (VuforiaTrackable beac : beacons) {
            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

            if (pose != null) {
                beaconName = beac.getName();

                if (beac.getName().equals(lastScoredBeaconName)) // fixes seeing the first beacon and wanting to go back to it
                    continue;

                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);
                lastPose = rawPose;

                recalculateCriticalPoints();
                boundPoints();
                centeredPos = center.y; // drive routines align based on this
            }
        }

        // Stream a half-size camera frame (and tracked points) to the web
        // debug interface, throttled to at most one frame per 50 ms.
        if (vuforia.rgb != null && ENABLE_CAMERA_STREAMING
                && System.currentTimeMillis() > (lastFrameSentAt + 50)) {
            lastFrameSentAt = System.currentTimeMillis();

            Bitmap bm = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            Bitmap resizedbitmap = DesignosaursUtils.resize(bm, bm.getWidth() / 2, bm.getHeight() / 2);
            FtcRobotControllerActivity.webServer.streamCameraFrame(resizedbitmap);

            if (center != null) {
                ArrayList<String> coords = new ArrayList<>(4);
                coords.add(start.toString());
                coords.add(end.toString());
                coords.add(center.toString());

                FtcRobotControllerActivity.webServer.streamPoints(coords);
            }
        }

        switch (autonomousState) {
        case STATE_INITIAL_POSITIONING:
            // Drive from the starting position toward the beacons; the path
            // differs per team color (field is mirrored).
            robot.startOrientationTracking();

            if (teamColor == TEAM_BLUE) {
                robot.rightMotor.setDirection(DcMotor.Direction.REVERSE);
                robot.leftMotor.setDirection(DcMotor.Direction.FORWARD);
            }

            if (teamColor == TEAM_RED) {
                robot.accel(0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.waitForTick(1400);
                robot.accel(0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.2, 0.8);

                updateRunningState("Initial turn...");
                robot.turn(-34, 0.3);

                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(2.8, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);

                updateRunningState("Secondary turn...");
                robot.turn(38, 0.2);
            } else {
                robot.goStraight(-0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.accel(-0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.8, 0.4);
                robot.setDrivePower(0);

                robot.turn(179, 0.45);

                updateRunningState("Initial turn...");
                robot.turn(22, 0.3);

                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(4.1, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);

                updateRunningState("Secondary turn...");
                robot.turn(-35, 0.3);
            }

            robot.setDrivePower(0);
            // Allow the camera time to focus:
            robot.waitForTick(1000);
            setState(STATE_SEARCHING);
            break;
        case STATE_SEARCHING:
            // Creep along the wall until a beacon image is centered, then
            // capture a frame, locate the beacon and classify its colors.
            if (Math.abs(getRelativePosition()) < BEACON_ALIGNMENT_TOLERANCE) {
                stateMessage = "Analysing beacon data...";
                robot.setDrivePower(0);
                ticksSeeingImage++;
                vuforia.disableFlashlight();

                // Wait a few loop ticks so the camera image settles.
                if (ticksSeeingImage < 5)
                    continue;

                byte pass = 0;
                boolean successful = false;
                BeaconPositionResult lastBeaconPosition = null;
                int[] range = { 0, 500 };
                Mat image = null;

                // Retry loop: nudge the robot back and forth until the beacon
                // finder reports a conclusive position (or we give up).
                while (!successful) {
                    image = getRegionAboveBeacon();
                    if (image == null || vuforia.rgb == null) {
                        Log.w(TAG, "No frame! _");
                        robot.setDrivePower(MIN_DRIVE_POWER);

                        continue;
                    }

                    lastBeaconPosition = beaconFinder.process(System.currentTimeMillis(), image, SAVE_IMAGES)
                            .getResult();
                    range = lastBeaconPosition.getRangePixels();

                    if (range[0] < 0)
                        range[0] = 0;

                    if (range[1] > image.width())
                        range[1] = image.width();

                    Log.i(TAG, "Beacon finder results: " + lastBeaconPosition.toString());

                    if (lastBeaconPosition.isConclusive())
                        successful = true;
                    else {
                        pass++;

                        Log.i(TAG, "Searching for beacon presence, pass #" + beaconsFound + ":" + pass + ".");

                        // We can't see both buttons, so move back and forth and run detection algorithm again
                        if (pass <= 3)
                            robot.goCounts(teamColor == TEAM_RED ? -400 : 400, 0.2);

                        if (pass <= 4 && pass >= 3)
                            robot.goCounts(teamColor == TEAM_RED ? 1000 : -1000, 0.2);

                        if (pass > 4) {
                            robot.goCounts(teamColor == TEAM_RED ? -1600 : 1600, 0.4);
                            successful = true;
                        }

                        // Allow camera time to autofocus:
                        robot.setDrivePower(0);
                        robot.waitForTick(100);
                    }
                }

                if (autonomousState != STATE_SEARCHING)
                    continue;

                // Change the values in the following line for how much off the larger image we crop (y-wise,
                // the x axis is controlled by where the robot thinks the beacon is, see BeaconFinder).
                // TODO: Tune this based on actual field
                Log.i(TAG, "Source image is " + image.height() + "px by " + image.width() + "px");

                int width = range[1] - range[0];
                Log.i(TAG, "X: " + range[0] + " Y: 0, WIDTH: " + width + ", HEIGHT: "
                        + (image.height() > 50 ? image.height() - 50 : image.height()));

                if (range[0] < 0)
                    range[0] = 0;

                if (width < 0)
                    width = image.width() - range[0];

                // Crop to the beacon region and halve its size before color
                // classification.
                Mat croppedImageRaw = new Mat(image,
                        new Rect(range[0], 0, width,
                                image.height() > 50 ? image.height() - 50 : image.height())),
                        croppedImage = new Mat();

                Imgproc.resize(croppedImageRaw, croppedImage, new Size(), 0.5, 0.5, Imgproc.INTER_LINEAR);

                // Optionally white-out the middle strip so the classifier only
                // sees the two button halves.
                if (OBFUSCATE_MIDDLE)
                    Imgproc.rectangle(croppedImage, new Point((croppedImage.width() / 2) - 35, 0),
                            new Point((croppedImage.width() / 2) + 55, croppedImage.height()),
                            new Scalar(255, 255, 255, 255), -1);

                BeaconColorResult lastBeaconColor = beaconProcessor
                        .process(System.currentTimeMillis(), croppedImage, SAVE_IMAGES).getResult();
                BeaconColorResult.BeaconColor targetColor = (teamColor == TEAM_RED
                        ? BeaconColorResult.BeaconColor.RED
                        : BeaconColorResult.BeaconColor.BLUE);

                Log.i(TAG, "*** BEACON FOUND ***");
                Log.i(TAG, "Target color: "
                        + (targetColor == BeaconColorResult.BeaconColor.BLUE ? "Blue" : "Red"));
                Log.i(TAG, "Beacon colors: " + lastBeaconColor.toString());

                // Beacon is already the right color:
                if (lastBeaconColor.getLeftColor() == targetColor
                        && lastBeaconColor.getRightColor() == targetColor) {
                    robot.accel(0.5, 1);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.2 : 0.7, 1);
                    robot.decel(0.5, DRIVE_POWER);
                    advanceToSecondBeacon(beaconName);

                    continue;
                }

                // Both sides of the beacon are the wrong color, so just score the left side:
                if (lastBeaconColor.getLeftColor() != targetColor
                        && lastBeaconColor.getRightColor() != targetColor) {
                    targetSide = SIDE_LEFT;
                    robot.setDrivePower(-DRIVE_POWER * 0.75);
                    setState(STATE_ALIGNING_WITH_BEACON);

                    continue;
                }

                // TODO: Replace goStraight call with proper combined distance from beacon offset + target side offset
                robot.goStraight(lastBeaconPosition.getOffsetFeet(), DRIVE_POWER);
                robot.setDrivePower(0);

                robot.resetDriveEncoders();
                targetSide = lastBeaconColor.getLeftColor() == targetColor ? SIDE_LEFT : SIDE_RIGHT;
                robot.setDrivePower(-DRIVE_POWER * 0.75);

                setState(STATE_ALIGNING_WITH_BEACON);
            } else if (Math.abs(getRelativePosition()) < SLOW_DOWN_AT) {
                // Beacon visible but not centered: creep toward it, direction
                // chosen by the sign of the relative position.
                stateMessage = "Beacon seen, centering (" + String.valueOf(getRelativePosition()) + ")...";
                robot.resetDriveEncoders();

                if (getRelativePosition() > 0 && getRelativePosition() != Integer.MAX_VALUE)
                    robot.setDrivePower(-MIN_DRIVE_POWER);
                else
                    robot.setDrivePower(MIN_DRIVE_POWER);
            }
            break;
        case STATE_ALIGNING_WITH_BEACON:
            // Back up a fixed encoder distance so the button pusher lines up
            // with the chosen side, then deploy it.
            stateMessage = "Positioning to deploy placer...";

            if (ticksInState > 450)
                robot.emergencyStop();

            double targetCounts = (targetSide == SIDE_LEFT) ? 1150 : 250;

            if (Math.max(Math.abs(robot.getAdjustedEncoderPosition(robot.leftMotor)),
                    Math.abs(robot.getAdjustedEncoderPosition(robot.rightMotor))) >= targetCounts) {
                Log.i(TAG, "//// DEPLOYING ////");

                robot.setDrivePower(0);
                robot.startOrientationTracking(true);
                buttonPusherManager.setStatus(ButtonPusherManager.STATE_SCORING);

                setState(STATE_WAITING_FOR_PLACER);
            }
            break;
        case STATE_WAITING_FOR_PLACER:
            // Wait for the button pusher to finish, then either stop (both
            // beacons scored / hotel mode) or drive on to the next beacon.
            stateMessage = "Waiting for placer to deploy.";

            if ((buttonPusherManager.getStatus() != ButtonPusherManager.STATE_SCORING
                    && buttonPusherManager.getTicksInState() >= 10)
                    || buttonPusherManager.getStatus() == ButtonPusherManager.STATE_AT_BASE) {
                advanceToSecondBeacon(beaconName);

                if (beaconsFound == 2 || HOTEL_MODE)
                    setState(STATE_FINISHED);
                else {
                    robot.turn(teamColor == TEAM_RED ? -3 : 3, 0.2);
                    robot.accel(0.5, 1);
                    robot.turn(teamColor == TEAM_RED ? 2 : -2, 0.2);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.5 : 1, 1);
                    robot.decel(0.5, DRIVE_POWER);

                    setState(STATE_SEARCHING);
                }
            }
        }

        ticksInState++;
        updateRunningState();
        robot.waitForTick(20);
    }
}