List of usage examples for the org.opencv.core Scalar constructor
public Scalar(double v0, double v1, double v2, double v3)
From source file:gab.opencv.OpenCV.java
License:Open Source License
/** * Adjust the brightness of the image. Works on color or black and white images. * /*from w w w .j a v a2 s .c om*/ * @param amt * The amount to brighten the image. Ranges -255 to 255. * **/ public void brightness(int amt) { Scalar modifier; if (useColor) { modifier = new Scalar(amt, amt, amt, 1); } else { modifier = new Scalar(amt); } Core.add(getCurrentMat(), modifier, getCurrentMat()); }
From source file:mx.iteso.desi.vision.WebCamStream.java
License:Apache License
private void detectFaces() { MatOfRect faces = new MatOfRect(); Mat grayFrame = new Mat(); processedFrame = frame.clone();// www . j a v a 2 s . c o m // convert the frame in gray scale Imgproc.cvtColor(processedFrame, grayFrame, Imgproc.COLOR_BGR2GRAY); // equalize the frame histogram to improve the result Imgproc.equalizeHist(grayFrame, grayFrame); // detect faces this.faceCascade.detectMultiScale(grayFrame, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, new Size(this.absoluteFaceSize, this.absoluteFaceSize), new Size()); // each rectangle in faces is a face: draw them! Rect[] facesArray = faces.toArray(); for (Rect face : facesArray) { Imgproc.rectangle(processedFrame, face.tl(), face.br(), new Scalar(0, 255, 0, 255), 3); } }
From source file:opencltest.YetAnotherTestT.java
private static void drawCross(double x, double y, Mat foundSquare) { Imgproc.line(foundSquare, new Point(x, 0), new Point(x, foundSquare.height()), new Scalar(255, 0, 0, 180), 1);//from w w w . j a v a2 s .c o m Imgproc.line(foundSquare, new Point(0, y), new Point(foundSquare.width(), y), new Scalar(255, 0, 0, 180), 1); }
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
@NonNull public static Mat createStripMat(@NonNull Mat mat, @NonNull Point centerPatch, boolean grouped, int maxWidth) { //done with lab schema, make rgb to show in image view // mat holds the strip image Imgproc.cvtColor(mat, mat, Imgproc.COLOR_Lab2RGB); int rightBorder = 0; double ratio; if (mat.width() < MIN_STRIP_WIDTH) { ratio = MAX_STRIP_HEIGHT / mat.height(); rightBorder = BORDER_SIZE;//from ww w. j ava 2 s . c o m } else { ratio = (double) (maxWidth - 10) / (double) mat.width(); } Imgproc.resize(mat, mat, new Size(mat.width() * ratio, mat.height() * ratio)); Core.copyMakeBorder(mat, mat, BORDER_SIZE + ARROW_TRIANGLE_HEIGHT, BORDER_SIZE * 2, 0, rightBorder, Core.BORDER_CONSTANT, new Scalar(MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE)); // Draw an arrow to the patch // only draw if this is not a 'grouped' strip if (!grouped) { double x = centerPatch.x * ratio; double y = 0; MatOfPoint matOfPoint = new MatOfPoint(new Point((x - ARROW_TRIANGLE_LENGTH), y), new Point((x + ARROW_TRIANGLE_LENGTH), y), new Point(x, y + ARROW_TRIANGLE_HEIGHT), new Point((x - ARROW_TRIANGLE_LENGTH), y)); Imgproc.fillConvexPoly(mat, matOfPoint, DARK_GRAY, Core.LINE_AA, 0); } return mat; }
From source file:org.designosaurs.opmode.DesignosaursAuto.java
/**
 * Main autonomous routine: initializes hardware, Vuforia, and OpenCV, waits
 * for start, then runs a state machine that searches for beacons, aligns with
 * them, and scores by pushing the correctly-colored button.
 *
 * States (per the switch below): INITIAL_POSITIONING -> SEARCHING ->
 * ALIGNING_WITH_BEACON -> WAITING_FOR_PLACER, looping back to SEARCHING for
 * the second beacon.
 */
@Override
public void runOpMode() {
    appContext = hardwareMap.appContext;
    long startTime;

    // Test mode disables hardware and jumps straight to beacon search.
    if (TEST_MODE) {
        DesignosaursHardware.hardwareEnabled = false;
        Log.i(TAG, "*** TEST MODE ENABLED ***");
        Log.i(TAG, "Hardware is disabled, skipping to beacon search state.");
        Log.i(TAG, "Web debugging interface can be found at http://"
                + DesignosaursUtils.getIpAddress(appContext) + ":9001/");
        autonomousState = STATE_SEARCHING;
    }

    setInitState("Configuring hardware...");
    robot.init(hardwareMap);

    setInitState("Initializing vuforia...");
    VuforiaLocalizer.Parameters params = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    params.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    params.vuforiaLicenseKey = VUFORIA_LICENCE_KEY;
    vuforia = new VuforiaLocalizerImplSubclass(params);

    // Vuforia tracks the images, we'll call them beacons
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("wheels");
    beacons.get(1).setName("tools");
    beacons.get(2).setName("legos");
    beacons.get(3).setName("gears");

    setInitState("Initializing OpenCV...");
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, appContext, mLoaderCallback);

    // Let the drivers pick a team color on the gamepad until start is pressed.
    setInitState("Select team color using the gamepad.");
    while (!isStarted() && !isStopRequested()) {
        updateTeamColor();
        robot.waitForTick(25);
    }

    if (DesignosaursHardware.hardwareEnabled) {
        buttonPusherManager.start();
        buttonPusherManager.setStatus(ButtonPusherManager.STATE_HOMING);
        shooterManager.start();
        shooterManager.setStatus(ShooterManager.STATE_AT_BASE);
    }

    if (HOTEL_MODE)
        setState(STATE_SEARCHING);

    beacons.activate();
    startTime = System.currentTimeMillis();
    robot.startTimer();

    long ticksSeeingImage = 0;

    while (opModeIsActive()) {
        String beaconName = "";

        // Detect and process images
        for (VuforiaTrackable beac : beacons) {
            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();
            if (pose != null) {
                beaconName = beac.getName();
                if (beac.getName().equals(lastScoredBeaconName)) // fixes seeing the first beacon and wanting to go back to it
                    continue;
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);
                lastPose = rawPose;
                recalculateCriticalPoints();
                boundPoints();
                centeredPos = center.y; // drive routines align based on this
            }
        }

        // Stream a half-size camera frame (throttled to ~20 fps) to the web debugger.
        if (vuforia.rgb != null && ENABLE_CAMERA_STREAMING && System.currentTimeMillis() > (lastFrameSentAt + 50)) {
            lastFrameSentAt = System.currentTimeMillis();
            Bitmap bm = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(), Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(vuforia.rgb.getPixels());
            Bitmap resizedbitmap = DesignosaursUtils.resize(bm, bm.getWidth() / 2, bm.getHeight() / 2);
            FtcRobotControllerActivity.webServer.streamCameraFrame(resizedbitmap);
            if (center != null) {
                ArrayList<String> coords = new ArrayList<>(4);
                coords.add(start.toString());
                coords.add(end.toString());
                coords.add(center.toString());
                FtcRobotControllerActivity.webServer.streamPoints(coords);
            }
        }

        switch (autonomousState) {
        case STATE_INITIAL_POSITIONING:
            robot.startOrientationTracking();
            if (teamColor == TEAM_BLUE) {
                robot.rightMotor.setDirection(DcMotor.Direction.REVERSE);
                robot.leftMotor.setDirection(DcMotor.Direction.FORWARD);
            }
            // Scripted opening motion sequence; red and blue mirror each other
            // but use hand-tuned distances/angles, so they are spelled out separately.
            if (teamColor == TEAM_RED) {
                robot.accel(0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.waitForTick(1400);
                robot.accel(0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.2, 0.8);
                updateRunningState("Initial turn...");
                robot.turn(-34, 0.3);
                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(2.8, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);
                updateRunningState("Secondary turn...");
                robot.turn(38, 0.2);
            } else {
                robot.goStraight(-0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.accel(-0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.8, 0.4);
                robot.setDrivePower(0);
                robot.turn(179, 0.45);
                updateRunningState("Initial turn...");
                robot.turn(22, 0.3);
                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(4.1, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);
                updateRunningState("Secondary turn...");
                robot.turn(-35, 0.3);
            }
            robot.setDrivePower(0);
            // Allow the camera time to focus:
            robot.waitForTick(1000);
            setState(STATE_SEARCHING);
            break;
        case STATE_SEARCHING:
            if (Math.abs(getRelativePosition()) < BEACON_ALIGNMENT_TOLERANCE) {
                stateMessage = "Analysing beacon data...";
                robot.setDrivePower(0);
                ticksSeeingImage++;
                vuforia.disableFlashlight();
                // Require a few consecutive sightings before trusting the image.
                if (ticksSeeingImage < 5)
                    continue;
                byte pass = 0;
                boolean successful = false;
                BeaconPositionResult lastBeaconPosition = null;
                int[] range = { 0, 500 };
                Mat image = null;
                // Keep re-running the beacon finder (nudging the robot between
                // passes) until it returns a conclusive position.
                while (!successful) {
                    image = getRegionAboveBeacon();
                    if (image == null || vuforia.rgb == null) {
                        Log.w(TAG, "No frame! _");
                        robot.setDrivePower(MIN_DRIVE_POWER);
                        continue;
                    }
                    lastBeaconPosition = beaconFinder.process(System.currentTimeMillis(), image, SAVE_IMAGES)
                            .getResult();
                    range = lastBeaconPosition.getRangePixels();
                    if (range[0] < 0)
                        range[0] = 0;
                    if (range[1] > image.width())
                        range[1] = image.width();
                    Log.i(TAG, "Beacon finder results: " + lastBeaconPosition.toString());
                    if (lastBeaconPosition.isConclusive())
                        successful = true;
                    else {
                        pass++;
                        Log.i(TAG, "Searching for beacon presence, pass #" + beaconsFound + ":" + pass + ".");
                        // We can't see both buttons, so move back and forth and run detection algorithm again
                        if (pass <= 3)
                            robot.goCounts(teamColor == TEAM_RED ? -400 : 400, 0.2);
                        if (pass <= 4 && pass >= 3)
                            robot.goCounts(teamColor == TEAM_RED ? 1000 : -1000, 0.2);
                        if (pass > 4) {
                            // Give up nudging after pass 4 and accept what we have.
                            robot.goCounts(teamColor == TEAM_RED ? -1600 : 1600, 0.4);
                            successful = true;
                        }
                        // Allow camera time to autofocus:
                        robot.setDrivePower(0);
                        robot.waitForTick(100);
                    }
                }
                if (autonomousState != STATE_SEARCHING)
                    continue;
                // Change the values in the following line for how much off the larger image we crop (y-wise,
                // the x axis is controlled by where the robot thinks the beacon is, see BeaconFinder).
                // TODO: Tune this based on actual field
                Log.i(TAG, "Source image is " + image.height() + "px by " + image.width() + "px");
                int width = range[1] - range[0];
                Log.i(TAG, "X: " + range[0] + " Y: 0, WIDTH: " + width + ", HEIGHT: "
                        + (image.height() > 50 ? image.height() - 50 : image.height()));
                if (range[0] < 0)
                    range[0] = 0;
                if (width < 0)
                    width = image.width() - range[0];
                Mat croppedImageRaw = new Mat(image,
                        new Rect(range[0], 0, width, image.height() > 50 ? image.height() - 50 : image.height())),
                        croppedImage = new Mat();
                Imgproc.resize(croppedImageRaw, croppedImage, new Size(), 0.5, 0.5, Imgproc.INTER_LINEAR);
                // Optionally blank out the middle strip of the beacon image.
                if (OBFUSCATE_MIDDLE)
                    Imgproc.rectangle(croppedImage, new Point((croppedImage.width() / 2) - 35, 0),
                            new Point((croppedImage.width() / 2) + 55, croppedImage.height()),
                            new Scalar(255, 255, 255, 255), -1);
                BeaconColorResult lastBeaconColor = beaconProcessor
                        .process(System.currentTimeMillis(), croppedImage, SAVE_IMAGES).getResult();
                BeaconColorResult.BeaconColor targetColor = (teamColor == TEAM_RED
                        ? BeaconColorResult.BeaconColor.RED : BeaconColorResult.BeaconColor.BLUE);
                Log.i(TAG, "*** BEACON FOUND ***");
                Log.i(TAG, "Target color: "
                        + (targetColor == BeaconColorResult.BeaconColor.BLUE ? "Blue" : "Red"));
                Log.i(TAG, "Beacon colors: " + lastBeaconColor.toString());
                // Beacon is already the right color:
                if (lastBeaconColor.getLeftColor() == targetColor && lastBeaconColor.getRightColor() == targetColor) {
                    robot.accel(0.5, 1);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.2 : 0.7, 1);
                    robot.decel(0.5, DRIVE_POWER);
                    advanceToSecondBeacon(beaconName);
                    continue;
                }
                // Both sides of the beacon are the wrong color, so just score the left side:
                if (lastBeaconColor.getLeftColor() != targetColor && lastBeaconColor.getRightColor() != targetColor) {
                    targetSide = SIDE_LEFT;
                    robot.setDrivePower(-DRIVE_POWER * 0.75);
                    setState(STATE_ALIGNING_WITH_BEACON);
                    continue;
                }
                // TODO: Replace goStraight call with proper combined distance from beacon offset + target side offset
                robot.goStraight(lastBeaconPosition.getOffsetFeet(), DRIVE_POWER);
                robot.setDrivePower(0);
                robot.resetDriveEncoders();
                targetSide = lastBeaconColor.getLeftColor() == targetColor ? SIDE_LEFT : SIDE_RIGHT;
                robot.setDrivePower(-DRIVE_POWER * 0.75);
                setState(STATE_ALIGNING_WITH_BEACON);
            } else if (Math.abs(getRelativePosition()) < SLOW_DOWN_AT) {
                // Close to the beacon but not aligned yet: creep toward center.
                stateMessage = "Beacon seen, centering (" + String.valueOf(getRelativePosition()) + ")...";
                robot.resetDriveEncoders();
                if (getRelativePosition() > 0 && getRelativePosition() != Integer.MAX_VALUE)
                    robot.setDrivePower(-MIN_DRIVE_POWER);
                else
                    robot.setDrivePower(MIN_DRIVE_POWER);
            }
            break;
        case STATE_ALIGNING_WITH_BEACON:
            stateMessage = "Positioning to deploy placer...";
            // Safety: bail out if we've been stuck in this state too long.
            if (ticksInState > 450)
                robot.emergencyStop();
            double targetCounts = (targetSide == SIDE_LEFT) ? 1150 : 250;
            if (Math.max(Math.abs(robot.getAdjustedEncoderPosition(robot.leftMotor)),
                    Math.abs(robot.getAdjustedEncoderPosition(robot.rightMotor))) >= targetCounts) {
                Log.i(TAG, "//// DEPLOYING ////");
                robot.setDrivePower(0);
                robot.startOrientationTracking(true);
                buttonPusherManager.setStatus(ButtonPusherManager.STATE_SCORING);
                setState(STATE_WAITING_FOR_PLACER);
            }
            break;
        case STATE_WAITING_FOR_PLACER:
            stateMessage = "Waiting for placer to deploy.";
            if ((buttonPusherManager.getStatus() != ButtonPusherManager.STATE_SCORING
                    && buttonPusherManager.getTicksInState() >= 10)
                    || buttonPusherManager.getStatus() == ButtonPusherManager.STATE_AT_BASE) {
                advanceToSecondBeacon(beaconName);
                if (beaconsFound == 2 || HOTEL_MODE)
                    setState(STATE_FINISHED);
                else {
                    // Line up for the second beacon and resume searching.
                    robot.turn(teamColor == TEAM_RED ? -3 : 3, 0.2);
                    robot.accel(0.5, 1);
                    robot.turn(teamColor == TEAM_RED ? 2 : -2, 0.2);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.5 : 1, 1);
                    robot.decel(0.5, DRIVE_POWER);
                    setState(STATE_SEARCHING);
                }
            }
        }
        ticksInState++;
        updateRunningState();
        robot.waitForTick(20);
    }
}
From source file:org.lasarobotics.vision.util.color.ColorRGBA.java
License:Open Source License
/**
 * Instantiate an RGB color with no transparency (alpha fixed at 255, fully opaque).
 *
 * @param r Red channel (0-255)
 * @param g Green channel (0-255)
 * @param b Blue channel (0-255)
 */
public ColorRGBA(int r, int g, int b) {
    super(new Scalar(r, g, b, 255));
}
From source file:org.lasarobotics.vision.util.color.ColorRGBA.java
License:Open Source License
/**
 * Instantiate a 4-channel RGBA color.
 *
 * @param r Red channel (0-255)
 * @param g Green channel (0-255)
 * @param b Blue channel (0-255)
 * @param a Alpha channel (0-255), where 0 is transparent and 255 is opaque
 */
public ColorRGBA(int r, int g, int b, int a) {
    super(new Scalar(r, g, b, a));
}
From source file:org.lasarobotics.vision.util.color.ColorRGBA.java
License:Open Source License
/**
 * Parses a hex color code (with or without a leading '#') into an RGBA Scalar.
 *
 * @param hexCode color code such as "#RRGGBB", "RRGGBB", "#AARRGGBB", or "AARRGGBB"
 * @return Scalar holding (red, green, blue, alpha) as extracted by android.graphics.Color
 * @throws IllegalArgumentException if the code is not 6 or 8 hex digits long
 */
private static Scalar parseHexCode(String hexCode) {
    // Prepend the '#' prefix if the caller omitted it, since
    // android.graphics.Color.parseColor requires it.
    if (!hexCode.startsWith("#"))
        hexCode = "#" + hexCode;
    // Ensure the length is correct: 7 or 9 including '#', i.e. 6 or 8 hex digits.
    if (hexCode.length() != 7 && hexCode.length() != 9)
        throw new IllegalArgumentException("Hex code must be of length 6 or 8 characters.");
    // Get the packed integer representation of the color.
    int color = android.graphics.Color.parseColor(hexCode);
    // Unpack into r, g, b, a channel values.
    return new Scalar(android.graphics.Color.red(color), android.graphics.Color.green(color),
            android.graphics.Color.blue(color), android.graphics.Color.alpha(color));
}
From source file:org.lasarobotics.vision.util.color.ColorRGBA.java
License:Open Source License
/**
 * Parse a scalar value into the colorspace.
 *
 * @param s Scalar value
 * @return Colorspace scalar value with exactly four components
 * @throws IllegalArgumentException if the scalar has fewer than 3 dimensions
 */
@Override
protected Scalar parseScalar(Scalar s) {
    if (s.val.length < 3) {
        throw new IllegalArgumentException("Scalar must have 3 or 4 dimensions.");
    }
    // Default alpha to fully opaque when only RGB is supplied; a supplied
    // alpha is truncated to an integer (cast preserved from the original).
    double alpha = (s.val.length >= 4) ? (int) s.val[3] : 255;
    return new Scalar(s.val[0], s.val[1], s.val[2], alpha);
}
From source file:org.openpnp.vision.FluentCv.java
License:Open Source License
/**
 * Converts an AWT {@link Color} to an OpenCV Scalar in (blue, green, red, alpha)
 * channel order, with alpha fixed at 255 (opaque).
 *
 * @param color source color
 * @return Scalar carrying the color's channels in B, G, R, A order
 */
public static Scalar colorToScalar(Color color) {
    // Note the blue-first channel ordering.
    return new Scalar(color.getBlue(), color.getGreen(), color.getRed(), 255);
}