Example usage for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usages for the org.opencv.core Scalar constructor.

Prototype

public Scalar(double[] vals) 
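
The array-based overload shown here fills the Scalar's four channel slots in order; in the OpenCV Java bindings, arrays shorter than four entries are padded with zeros. A minimal, self-contained sketch (the class name is illustrative):

import org.opencv.core.Scalar;

public class ScalarFromArrayDemo {
    public static void main(String[] args) {
        // Scalar is a plain Java value class, so no native library load is needed here.
        // Four channels, e.g. a BGR color plus an alpha entry:
        Scalar blue = new Scalar(new double[] { 255.0, 0.0, 0.0, 0.0 });
        // A shorter array is padded with zeros, i.e. equivalent to [150, 0, 0, 0]:
        Scalar gray = new Scalar(new double[] { 150.0 });
        System.out.println(blue + " / " + gray);
    }
}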

Usage

From source file:MainTextWatermark.java

public static void main(String[] args) {

    try {

        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat source = Highgui.imread("D://teste.png", Highgui.CV_LOAD_IMAGE_COLOR);

        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        Core.putText(source, "Tutorialspoint.com by DAC",
                new Point(source.rows() / 2, (source.cols() / 15 * 11)), // position of the text on the image
                Core.FONT_HERSHEY_PLAIN, 1.1, new Scalar(150));
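
        // Note that new Scalar(150) expands to [150, 0, 0, 0]; on a BGR image the
        // text is therefore drawn in dark blue rather than gray.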

        Highgui.imwrite("D://Watermarked.jpg", source);

    } catch (Exception e) {
        System.out.println("Exception:" + e.getMessage());
    }

}

From source file:angryhexclient.OurVision.java

License:Open Source License

/**
 * Detects the ground in the image.
 * @return A list of blocks representing the ground.
 */
public List<Block> detectGround() {
    Mat binaryImage = new Mat(new Size(_nWidth, _nHeight), CvType.CV_8U, new Scalar(1));

    // We only detect to the right of this margin. The slingshot contains some
    // ground colors and would otherwise partly be detected as ground, which is
    // not what we want: trajectories originate at the slingshot, and if ground
    // is detected at the slingshot, the agent will consider none of its
    // trajectories valid. We therefore start detecting directly to the right
    // of the slingshot.
    int startAtX = findSlingshot().x + findSlingshot().width * 2;

    // Now we create a binary image of the ground areas. White where there
    // is ground, black otherwise.
    for (int y = 0; y < _nHeight; y++) {
        for (int x = 0; x < _nWidth; x++) {
            if (x > startAtX && isGround(x, y))
                binaryImage.put(y, x, 255);
            else
                binaryImage.put(y, x, 0);
        }
    }

    Mat smoothedImage = new Mat(new Size(_nWidth, _nHeight), CvType.CV_8U, new Scalar(1));

    // This median filter improves the detection tremendously. There are a
    // whole lot of single pixels that carry ground colors spread all over
    // the image. We remove them here.
    Imgproc.medianBlur(binaryImage, smoothedImage, 7);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    // We use OpenCV to find the contours. Contours are lines that represent
    // the boundaries of the objects in the binary image.
    Imgproc.findContours(smoothedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    ArrayList<Block> result = new ArrayList<Block>();

    // Now we convert every contour into blocks for communication to DLV.
    for (MatOfPoint mp : contours) {
        org.opencv.core.Point[] pts = mp.toArray();

        for (int i = 0; i < pts.length - 1; i++) {
            Block b = new Block((int) pts[i].x, (int) pts[i].y);
            b.add((int) pts[i + 1].x, (int) pts[i + 1].y);
            result.add(b);
        }

        // One closing block from the last vertex back to the first.
        Block b = new Block((int) pts[pts.length - 1].x, (int) pts[pts.length - 1].y);
        b.add((int) pts[0].x, (int) pts[0].y);
        result.add(b);
    }

    return result;
}
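
In both Mat constructors above, the Scalar argument sets the initial value of every element, so the loop only needs to overwrite pixels that change. A minimal sketch of that idiom (the sizes and class name are illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;

public class ScalarFillDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // The Scalar passed to the constructor initializes every pixel to 0.
        Mat mask = new Mat(new Size(640, 480), CvType.CV_8U, new Scalar(0));
        // Mat.setTo(Scalar) refills an existing matrix the same way.
        mask.setTo(new Scalar(255));
        System.out.println(Core.sumElems(mask)); // [78336000.0, 0.0, 0.0, 0.0]
    }
}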

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void start(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);

    logger.info("My instance id is " + parameter.instanceId());
    if (heartbeatMonitor != null) {
        logger.info("Start waiting for arlocros id: " + (parameter.instanceId() - 1));
        while (true) {
            final Time currentTime = connectedNode.getCurrentTime();
            final Time lastHeartbeatTime = heartbeatMonitor.getLastTimeReceivedMessage();
            if (lastHeartbeatTime != null) {
                final Duration duration = currentTime.subtract(lastHeartbeatTime);
                if (duration.totalNsecs() > 3.0E8) {
                    logger.info("Not received any heartbeat for 300ms. Start running.");
                    break;
                }
            }
        }
    }

    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    final Mat image = Utils.matFromImage(message);
                    // add more contrast to the image by thresholding it to black and white
                    final Mat thresholdedImage = Utils.tresholdContrastBlackWhite(image,
                            parameter.filterBlockSize(), parameter.subtractedConstant(),
                            parameter.invertBlackWhiteColor());
                    image.release();
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    final Mat rvec = new Mat(3, 1, CvType.CV_64F);
                    final MatOfDouble tvec = new MatOfDouble(1.0, 1.0, 1.0);
                    final boolean hasPose = poseProcessor.computePose(rvec, tvec, thresholdedImage);

                    if (!hasPose) {
                        return;
                    }

                    thresholdedImage.release();

                    // publish pose
                    final QuaternionHelper q = new QuaternionHelper();

                    // convert the rotation vector from solvePnP to a rotation matrix
                    Mat R = new Mat(3, 3, CvType.CV_32FC1);
                    Calib3d.Rodrigues(rvec, R);
                    // see publishMapToOdom for documented versions of these steps
                    final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
                    R = R.t();
                    final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
                    final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
                    final double attitudeZ = Math.asin(R.get(1, 0)[0]);
                    q.setFromEuler(bankX, headingY, attitudeZ);
                    Core.multiply(R, new Scalar(-1), R);
                    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
                    R.release();
                    final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                            q.getX(), q.getY(), q.getZ(), q.getW());
                    final double x = tvec_map_cam.get(0, 0)[0];
                    final double y = tvec_map_cam.get(1, 0)[0];
                    final double z = tvec_map_cam.get(2, 0)[0];
                    tvec_map_cam.release();

                    final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x,
                            y, z);
                    final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                            translation, rotation);

                    // odom to camera_rgb_optical_frame
                    final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
                    final GraphName targetFrame = GraphName.of("base_link");
                    org.ros.rosjava_geometry.Transform transform_cam_base = null;

                    if (transformationService.canTransform(targetFrame, sourceFrame)) {
                        try {
                            transform_cam_base = transformationService.lookupTransform(targetFrame,
                                    sourceFrame);
                        } catch (Exception e) {
                            log.error(ExceptionUtils.getStackTrace(e));
                            log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                                    + "base_link! " + "However, will continue..");
                            // cancel this loop..no result can be computed
                            return;
                        }
                    } else {
                        log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                                + "base_link!" + " However, " + "will continue..");
                        // cancel this loop..no result can be computed
                        return;
                    }

                    // multiply results
                    org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform
                            .identity();
                    current_pose = current_pose.multiply(transform_map_cam);
                    current_pose = current_pose.multiply(transform_cam_base);

                    if (current_pose.getTranslation().getZ() < 0.5) {
                        return;
                    }

                    // check for plausibility of the pose by checking if movement
                    // exceeds max speed (defined) of the robot
                    if (parameter.badPoseReject()) {
                        Time current_timestamp = connectedNode.getCurrentTime();
                        // TODO Unfortunately, we do not have the tf timestamp at
                        // hand here. So we can only use the current timestamp.
                        double maxspeed = 5;
                        boolean goodpose = false;
                        // if (current_pose != null && current_timestamp != null) {
                        if ((last_pose != null && last_timestamp != null)
                                && !Double.isNaN(last_pose.getTranslation().getX())) {
                            // check speed of movement between last and current pose
                            double distance = PoseCompare.distance(current_pose, last_pose);
                            double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                            if ((distance / timedelta) < maxspeed) {
                                if (smoothing) {
                                    double xold = last_pose.getTranslation().getX();
                                    double yold = last_pose.getTranslation().getY();
                                    double zold = last_pose.getTranslation().getZ();
                                    double xnew = current_pose.getTranslation().getX();
                                    double ynew = current_pose.getTranslation().getY();
                                    double znew = current_pose.getTranslation().getZ();
                                    final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                            (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3,
                                            (zold * 2 + znew) / 3);
                                    current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                            current_pose.getRotationAndScale());
                                    last_pose = current_pose;
                                }
                                last_pose = current_pose;
                                last_timestamp = current_timestamp;
                                goodpose = true;
                            } else {
                                log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                                log.info("current pose: " + current_pose.getTranslation().getX() + " "
                                        + current_pose.getTranslation().getY() + " "
                                        + current_pose.getTranslation().getZ());
                                log.info("last pose: " + last_pose.getTranslation().getX() + " "
                                        + last_pose.getTranslation().getY() + " "
                                        + last_pose.getTranslation().getZ());
                            }

                        } else {
                            last_pose = current_pose;
                            last_timestamp = current_timestamp;
                        }
                        // }
                        // bad pose rejection
                        if (!goodpose) {
                            return;
                        }
                    }

                    // set information to message
                    final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
                    Pose pose = posestamped.getPose();
                    Quaternion orientation = pose.getOrientation();
                    Point point = pose.getPosition();

                    point.setX(current_pose.getTranslation().getX());

                    point.setY(current_pose.getTranslation().getY());

                    point.setZ(current_pose.getTranslation().getZ());

                    orientation.setW(current_pose.getRotationAndScale().getW());
                    orientation.setX(current_pose.getRotationAndScale().getX());
                    orientation.setY(current_pose.getRotationAndScale().getY());
                    orientation.setZ(current_pose.getRotationAndScale().getZ());

                    // frame_id too
                    posestamped.getHeader().setFrameId("map");
                    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
                    posePublisher.publish(posestamped);
                    mostRecentPose.set(posestamped);

                    //                publishCamFrameToMarkerFrame(rvec, tvec, tfPublisherCamToMarker, connectedNode);
                    //                publishMapToOdom(
                    //                    rvec, tvec, transformationService, tfPublisherMapToOdom, connectedNode);
                    rvec.release();
                    tvec.release();

                } catch (Exception e) {
                    logger.info("An exception occurs.", e);
                }
            }
        }
    });
}

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void publishMapToOdom(Mat rvec, Mat tvec, TransformationService transformationService,
        Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom, ConnectedNode connectedNode) {
    // compute transform map to odom from map to
    // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

    // map to camera_rgb_optical_frame
    Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
    QuaternionHelper q = new QuaternionHelper();
    // get rotation matrix R from solvePnP output rotation vector rvec
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // transpose R, because we need the transformation from
    // world(map) to camera
    R = R.t();
    // get rotation around X, Y, Z from R in radians
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    q.setFromEuler(bankX, headingY, attitudeZ);
    // compute translation vector from world (map) to cam
    // tvec_map_cam
    Core.multiply(R, new Scalar(-1), R); // R=-R
    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec
    R.release();

    org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(), q.getY(),
            q.getZ(), q.getW());
    double x = tvec_map_cam.get(0, 0)[0];
    double y = tvec_map_cam.get(1, 0)[0];
    double z = tvec_map_cam.get(2, 0)[0];
    tvec_map_cam.release();
    // create a Transform object that holds the transform from map to cam
    org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
    org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(translation,
            rotation);

    // odom to camera_rgb_optical_frame
    GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
    GraphName targetFrame = GraphName.of("odom");
    org.ros.rosjava_geometry.Transform transform_cam_odom = null;
    if (transformationService.canTransform(targetFrame, sourceFrame)) {
        try {
            transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
        } catch (Exception e) {
            log.error(ExceptionUtils.getStackTrace(e));
            log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! " + ""
                    + "" + "" + "" + "However, " + "will continue..");
            return;
        }
    } else {
        log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                + "However, will " + "continue..");
        // cancel this loop..no result can be computed
        return;
    }
    // multiply results
    org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
    result = result.multiply(transform_map_cam);
    result = result.multiply(transform_cam_odom);

    // set information to ROS message
    TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
    TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = transformStamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 vector = transform.getTranslation();
    vector.setX(result.getTranslation().getX());
    vector.setY(result.getTranslation().getY());
    vector.setZ(result.getTranslation().getZ());

    orientation.setW(result.getRotationAndScale().getW());
    orientation.setX(result.getRotationAndScale().getX());
    orientation.setY(result.getRotationAndScale().getY());
    orientation.setZ(result.getRotationAndScale().getZ());
    transformStamped.getHeader().setFrameId("map");
    transformStamped.setChildFrameId("odom");
    transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
    tfMessage.getTransforms().add(transformStamped);
    tfPublisherMapToOdom.publish(tfMessage);
}
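
Both pose methods use Core.multiply with a single-value Scalar to negate R element-wise, then Core.gemm to apply it to the translation vector. A minimal sketch of that pattern (the matrix values and class name are illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.Scalar;

public class ScalarMultiplyDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat r = Mat.ones(3, 3, CvType.CV_64FC1);
        // Per-element multiplication by a Scalar: every entry of r becomes -1.
        Core.multiply(r, new Scalar(-1), r);
        // gemm then computes -R * tvec, as in the pose code above.
        Mat tvec = new MatOfDouble(1.0, 2.0, 3.0);
        Mat out = new Mat();
        Core.gemm(r, tvec, 1, new Mat(), 0, out, 0);
        System.out.println(out.dump()); // [-6; -6; -6]
    }
}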

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.PreProcessor.java

License:Open Source License

public void normalize01(Mat norm) {
    Core.normalize(norm, norm, 0.0, 1.0, Core.NORM_MINMAX, CvType.CV_64FC1);
    Core.MinMaxLocResult minmax = Core.minMaxLoc(norm);
    Scalar min = new Scalar(minmax.minVal);
    Core.subtract(norm, min, norm);
    minmax = Core.minMaxLoc(norm);
    Scalar max = new Scalar(minmax.maxVal);
    Core.divide(norm, max, norm);
}
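
Here the Scalar carries the per-element operand: Core.subtract shifts the minimum to 0 and Core.divide scales the maximum to 1. A small driver for the same idiom (the matrix values and class name are illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class Normalize01Demo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat m = new Mat(2, 2, CvType.CV_64FC1);
        m.put(0, 0, 4.0, 8.0, 12.0, 16.0);
        Core.MinMaxLocResult mm = Core.minMaxLoc(m);
        Core.subtract(m, new Scalar(mm.minVal), m); // the minimum becomes 0
        mm = Core.minMaxLoc(m);
        Core.divide(m, new Scalar(mm.maxVal), m);   // the maximum becomes 1
        System.out.println(m.dump()); // values now span [0, 1]
    }
}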

From source file:classes.BlobsFinder.java

public void findBlobContours() {

    Mat grayImage = new Mat();
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    ImageUtils.saveImage(grayImage, outImageName + "_grayImage.png", request);

    Mat gaussianImage = new Mat();
    Imgproc.GaussianBlur(grayImage, gaussianImage, new Size(0, 0), 3);
    Core.addWeighted(grayImage, 1.5, gaussianImage, -1, 0, gaussianImage);
    ImageUtils.saveImage(gaussianImage, outImageName + "_gaussianGrayImage.png", request);

    Mat binaryImage = new Mat();
    Imgproc.adaptiveThreshold(gaussianImage, binaryImage, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY_INV, 15, 4);
    ImageUtils.saveImage(binaryImage, outImageName + "_binaryImage.png", request);

    Mat erodedImage = new Mat();

    binaryImage.copyTo(erodedImage);

    Mat structuringElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Point anchor = new Point(-1, -1);

    Imgproc.morphologyEx(erodedImage, erodedImage, Imgproc.MORPH_CLOSE, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedImage, outImageName + "_erodedImage.png", request);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.findContours(erodedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    Mat originalContoursImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Scalar contourColor = new Scalar(255);
    int thickness = -1; // a negative thickness draws the contours filled
    Imgproc.drawContours(originalContoursImage, contours, -1, contourColor, thickness); // Drawing all the contours found
    ImageUtils.saveImage(originalContoursImage, outImageName + "_originalContoursImage.png", request);

    Mat erodedContoursImage = new Mat();
    Imgproc.erode(originalContoursImage, erodedContoursImage, structuringElement, anchor, 1);
    ImageUtils.saveImage(erodedContoursImage, outImageName + "_erodedContoursImage.png", request);

    ArrayList<MatOfPoint> finalContours = new ArrayList<MatOfPoint>();
    Mat finalContourImage = new Mat(image.size(), CvType.CV_8UC1, new Scalar(0));
    Imgproc.findContours(erodedContoursImage, finalContours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    for (int i = 0; i < finalContours.size(); i++) {
        MatOfPoint currentContour = finalContours.get(i);
        double area = Imgproc.contourArea(currentContour);
        if (area > MIN_AREA) {

            validContours.add(currentContour);

            String fabricPath = generateFabricPathString(currentContour);
            contourPaths.add(fabricPath);

            Rect boundingRect = Imgproc.boundingRect(currentContour);
            topLeftCorners.add(boundingRect.tl());

            contoursAreas.add(area);
        }
    }

    // Drawing ALL the valid contours
    Imgproc.drawContours(finalContourImage, validContours, -1, contourColor, thickness);
    ImageUtils.saveImage(finalContourImage, outImageName + "_finalContourImage.png", request);

}

From source file:classes.ObjectFinder.java

private void applyMorphologicalFilters() {
    Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));
    Imgproc.erode(thresholdedBackprojection, morphologicalImage, element);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element,
            new Point(-1, -1), 2);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1),
            2);
}
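
The 3x3 kernel of ones built with new Mat(3, 3, CvType.CV_8U, new Scalar(1)) matches what Imgproc.getStructuringElement produces for a rectangular shape, which is the more idiomatic spelling (fragment; the usual org.opencv imports are assumed):

    // Equivalent 3x3 kernels of ones, usable interchangeably with erode/morphologyEx:
    Mat byScalar = new Mat(3, 3, CvType.CV_8U, new Scalar(1));
    Mat byHelper = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));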

From source file:classes.ObjectFinder.java

private void computeSearchWindow() {

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    // retrieve the external contours; with CHAIN_APPROX_NONE each contour
    // stores all the pixels of its boundary
    Imgproc.findContours(this.morphologicalImage.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_NONE);

    // Draw black contours on a white image
    this.contoursImage = new Mat(morphologicalImage.size(), CvType.CV_8U, new Scalar(255));

    if (contours.size() > 1) {

        int minContourWidth = 20;
        int minContourHeight = 20;
        int maxContourWidth = 6400 / 2;
        int maxContourHeight = 4800 / 2;

        contours = filterContours(contours, minContourWidth, minContourHeight, maxContourWidth, maxContourHeight);
    }

    if (contours.size() > 1) {
        Collections.sort(contours, new ContourComparator()); // sorting the contours to keep only the biggest one
    }

    computedSearchWindow = new Rect();
    massCenter = new Point(-1, -1);

    if (contours.size() > 0) {

        this.firstContour = contours.get(0);

        Mat contouredImage = this.firstContour;

        // draw the biggest contour in black with a thickness of 2
        Scalar color = new Scalar(0);
        int thickness = 2;
        Imgproc.drawContours(contoursImage, contours, 0, color, thickness);

        // testing the bounding box
        computedSearchWindow = Imgproc.boundingRect(this.firstContour);

        topLeftCorner = computedSearchWindow.tl();

        // compute all moments
        Moments mom = Imgproc.moments(contouredImage);

        massCenter = new Point(mom.get_m10() / mom.get_m00(), mom.get_m01() / mom.get_m00());

        // draw black dot
        Core.circle(contoursImage, massCenter, 4, color, 8);
    }
}

From source file:classes.TextExtractor.java

public void extractText(Rect roi, double roiAngle) throws Exception {

    Point roiTopLeft = roi.tl();

    double radians = Math.toRadians(roiAngle);
    double sin = Math.abs(Math.sin(radians));
    double cos = Math.abs(Math.cos(radians));

    int newWidth = (int) (image.width() * cos + image.height() * sin);
    int newHeight = (int) (image.width() * sin + image.height() * cos);

    int[] newWidthHeight = { newWidth, newHeight };

    int pivotX = newWidthHeight[0] / 2;
    int pivotY = newWidthHeight[1] / 2;

    Point center = new Point(pivotX, pivotY);

    Size targetSize = new Size(newWidthHeight[0], newWidthHeight[1]);

    Mat intermediateImage = new Mat(targetSize, image.type());

    int offsetX = (newWidthHeight[0] - image.width()) / 2;
    int offsetY = (newWidthHeight[1] - image.height()) / 2;

    Point paddedTopLeft = new Point(roiTopLeft.x + offsetX, roiTopLeft.y + offsetY);

    Mat containerImage = intermediateImage.submat(offsetY, offsetY + image.height(), offsetX,
            offsetX + image.width());
    image.copyTo(containerImage);

    Mat rotationMatrix = Imgproc.getRotationMatrix2D(center, roiAngle, 1.0);

    Point transformedTopLeft = transformPoint(paddedTopLeft, rotationMatrix);

    Mat rotatedImage = new Mat();
    Imgproc.warpAffine(intermediateImage, rotatedImage, rotationMatrix, targetSize, Imgproc.INTER_LINEAR,
            Imgproc.BORDER_CONSTANT, new Scalar(0));

    ImageUtils.saveImage(rotatedImage, imageID + "_rotatedImage.png", request);

    double adjustedWidth = roi.size().width;
    double adjustedHeight = roi.size().height;

    if (transformedTopLeft.x + adjustedWidth > rotatedImage.width()) {
        adjustedWidth = rotatedImage.width() - transformedTopLeft.x;
    }

    if (transformedTopLeft.y + adjustedHeight > rotatedImage.height()) {
        adjustedHeight = rotatedImage.height() - transformedTopLeft.y;
    }

    Rect newROI = new Rect(transformedTopLeft, new Size(adjustedWidth, adjustedHeight));

    Mat extractedROI = new Mat(rotatedImage, newROI);

    String fileName = ImageUtils.saveImage(extractedROI, imageID + "_ROI.png", request);

    extractText(fileName);
}
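
The Scalar passed with Imgproc.BORDER_CONSTANT above is the fill value for destination pixels that fall outside the rotated source, so new Scalar(0) pads with black. A fragment showing the same call with white padding on a 3-channel image (values are illustrative; the usual org.opencv imports are assumed):

    Mat src = new Mat(100, 100, CvType.CV_8UC3, new Scalar(40, 40, 40));
    Mat rotation = Imgproc.getRotationMatrix2D(new Point(50, 50), 30.0, 1.0);
    Mat dst = new Mat();
    Imgproc.warpAffine(src, dst, rotation, new Size(140, 140), Imgproc.INTER_LINEAR,
            Imgproc.BORDER_CONSTANT, new Scalar(255, 255, 255)); // white padding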

From source file:cmib_4_4.Countour.java

public static void main(String args[]) {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat image = Highgui.imread("input1.jpg", Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat image1 = Highgui.imread("input1.jpg", Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat image4 = Highgui.imread("input1.jpg");
    Imgproc.threshold(image1, image1, 0, 255, THRESH_OTSU);
    // The original passed threshold-type flags as Canny's hysteresis
    // thresholds; explicit low/high values are used here instead.
    Imgproc.Canny(image1, image1, 100, 200);
    Mat image2 = Mat.zeros(image.rows() + 2, image.cols() + 2, CV_8U);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(image1, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    for (int i = 0; i < contours.size(); i++) {

        if (Imgproc.contourArea(contours.get(i)) > 100) {

            Rect rect = Imgproc.boundingRect(contours.get(i));
            Imgproc.floodFill(image1, image2, new Point(150, 150), new Scalar(255));
            Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
            Mat image_roi_rgb = new Mat(image4, rectCrop);
            Highgui.imwrite("crop2.jpg", image_roi_rgb);
            if (rect.height > 28) {

                Core.rectangle(image, new Point(rect.x, rect.y),
                        new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
            }
        }
    }
    Highgui.imwrite("falciparum2.jpg", image);

}