Example usage for org.opencv.core MatOfDouble release

List of usage examples for org.opencv.core MatOfDouble release

Introduction

On this page you can find example usages for org.opencv.core MatOfDouble release.

Prototype

public void release() 

Source Link

Usage

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

/**
 * Initializes the pose estimator on the given ROS node: loads OpenCV, reads the
 * marker/camera configuration, optionally waits for a preceding instance's
 * heartbeat to stop, then subscribes to the camera image topic and publishes a
 * map-frame PoseStamped for every image in which a marker pose can be computed.
 *
 * @param connectedNode the ROS node providing log, time, subscribers and publishers
 */
private void start(final ConnectedNode connectedNode) {
    // Load the OpenCV native shared library before any Mat is created.
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // Read configuration variables from the ROS runtime (configured in the launch file).
    log = connectedNode.getLog();

    // Read marker configuration (marker poses and pattern files).
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    camp = getCameraInfo(connectedNode, parameter);

    // Listen to /tf so the transformer can look up transforms later.
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to the camera image topic.
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);

    logger.info("My instance id is " + parameter.instanceId());
    if (heartbeatMonitor != null) {
        logger.info("Start waiting for arlocros id: " + (parameter.instanceId() - 1));
        // Wait until the preceding instance has been silent for > 300 ms.
        while (true) {
            final Time currentTime = connectedNode.getCurrentTime();
            final Time lastHeartbeatTime = heartbeatMonitor.getLastTimeReceivedMessage();
            if (lastHeartbeatTime != null) {
                final Duration duration = currentTime.subtract(lastHeartbeatTime);
                if (duration.totalNsecs() > 3.0E8) {
                    logger.info("Not received any heartbeat for 300ms. Start running.");
                    break;
                }
            }
            // FIX: the original loop busy-waited at 100% CPU; poll at a small interval.
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
    }

    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            // Only rgb8 images are supported; anything else is a fatal misconfiguration.
            if (!message.getEncoding().equalsIgnoreCase("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp == null) {
                return;
            }
            try {
                final Mat image = Utils.matFromImage(message);
                // Threshold to black/white to make marker detection more robust.
                final Mat thresholdedImage;
                try {
                    thresholdedImage = Utils.tresholdContrastBlackWhite(image, parameter.filterBlockSize(),
                            parameter.subtractedConstant(), parameter.invertBlackWhiteColor());
                } finally {
                    image.release();
                }

                // Output vectors of solvePnP: rotation (rvec) and translation (tvec).
                final Mat rvec = new Mat(3, 1, CvType.CV_64F);
                final MatOfDouble tvec = new MatOfDouble(1.0, 1.0, 1.0);
                try {
                    final boolean hasPose;
                    try {
                        hasPose = poseProcessor.computePose(rvec, tvec, thresholdedImage);
                    } finally {
                        // FIX: was only released on the hasPose path, leaking native
                        // memory for every image without a detected marker.
                        thresholdedImage.release();
                    }
                    if (!hasPose) {
                        return;
                    }

                    final QuaternionHelper q = new QuaternionHelper();

                    // Convert the rotation vector from solvePnP to a rotation matrix.
                    Mat R = new Mat(3, 3, CvType.CV_32FC1);
                    Calib3d.Rodrigues(rvec, R);
                    final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
                    // Invert the camera->marker transform: map->cam = R^T, -R^T * tvec.
                    R = R.t();
                    final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
                    final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
                    final double attitudeZ = Math.asin(R.get(1, 0)[0]);
                    q.setFromEuler(bankX, headingY, attitudeZ);
                    Core.multiply(R, new Scalar(-1), R);
                    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
                    R.release();
                    final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                            q.getX(), q.getY(), q.getZ(), q.getW());
                    final double x = tvec_map_cam.get(0, 0)[0];
                    final double y = tvec_map_cam.get(1, 0)[0];
                    final double z = tvec_map_cam.get(2, 0)[0];
                    tvec_map_cam.release();

                    final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x,
                            y, z);
                    final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                            translation, rotation);

                    // Look up camera_frame -> base_link; without it no pose can be computed.
                    final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
                    final GraphName targetFrame = GraphName.of("base_link");
                    org.ros.rosjava_geometry.Transform transform_cam_base = null;

                    if (transformationService.canTransform(targetFrame, sourceFrame)) {
                        try {
                            transform_cam_base = transformationService.lookupTransform(targetFrame,
                                    sourceFrame);
                        } catch (Exception e) {
                            log.error(ExceptionUtils.getStackTrace(e));
                            log.info("Could not get transformation from " + parameter.cameraFrameName() + " to "
                                    + "base_link! " + "However, will continue..");
                            // cancel this loop..no result can be computed
                            return;
                        }
                    } else {
                        log.info("Could not get transformation from " + parameter.cameraFrameName() + " to "
                                + "base_link!" + " However, " + "will continue..");
                        // cancel this loop..no result can be computed
                        return;
                    }

                    // Chain map->cam and cam->base to get the robot pose in the map frame.
                    org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform
                            .identity();
                    current_pose = current_pose.multiply(transform_map_cam);
                    current_pose = current_pose.multiply(transform_cam_base);

                    // Reject implausible poses too close to the ground plane.
                    if (current_pose.getTranslation().getZ() < 0.5) {
                        return;
                    }

                    // Plausibility check: reject poses that would imply the robot
                    // moved faster than maxspeed since the last accepted pose.
                    if (parameter.badPoseReject()) {
                        Time current_timestamp = connectedNode.getCurrentTime();
                        // TODO Unfortunately, we do not have the tf timestamp at
                        // hand here. So we can only use the current timestamp.
                        double maxspeed = 5;
                        boolean goodpose = false;
                        if ((last_pose != null && last_timestamp != null)
                                && !Double.isNaN(last_pose.getTranslation().getX())) {
                            double distance = PoseCompare.distance(current_pose, last_pose);
                            double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                            if ((distance / timedelta) < maxspeed) {
                                if (smoothing) {
                                    // Exponential-style smoothing: 2/3 old position, 1/3 new.
                                    double xold = last_pose.getTranslation().getX();
                                    double yold = last_pose.getTranslation().getY();
                                    double zold = last_pose.getTranslation().getZ();
                                    double xnew = current_pose.getTranslation().getX();
                                    double ynew = current_pose.getTranslation().getY();
                                    double znew = current_pose.getTranslation().getZ();
                                    final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                            (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3,
                                            (zold * 2 + znew) / 3);
                                    current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                            current_pose.getRotationAndScale());
                                }
                                // FIX: removed redundant duplicate assignment of last_pose
                                // inside the smoothing branch.
                                last_pose = current_pose;
                                last_timestamp = current_timestamp;
                                goodpose = true;
                            } else {
                                log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                                log.info("current pose: " + current_pose.getTranslation().getX() + " "
                                        + current_pose.getTranslation().getY() + " "
                                        + current_pose.getTranslation().getZ());
                                log.info("last pose: " + last_pose.getTranslation().getX() + " "
                                        + last_pose.getTranslation().getY() + " "
                                        + last_pose.getTranslation().getZ());
                            }

                        } else {
                            // First usable pose: just record it as reference.
                            last_pose = current_pose;
                            last_timestamp = current_timestamp;
                        }
                        // bad pose rejection
                        if (!goodpose) {
                            return;
                        }
                    }

                    // Fill and publish the PoseStamped message in the map frame.
                    final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
                    Pose pose = posestamped.getPose();
                    Quaternion orientation = pose.getOrientation();
                    Point point = pose.getPosition();

                    point.setX(current_pose.getTranslation().getX());
                    point.setY(current_pose.getTranslation().getY());
                    point.setZ(current_pose.getTranslation().getZ());

                    orientation.setW(current_pose.getRotationAndScale().getW());
                    orientation.setX(current_pose.getRotationAndScale().getX());
                    orientation.setY(current_pose.getRotationAndScale().getY());
                    orientation.setZ(current_pose.getRotationAndScale().getZ());

                    posestamped.getHeader().setFrameId("map");
                    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
                    posePublisher.publish(posestamped);
                    mostRecentPose.set(posestamped);
                } finally {
                    // FIX: early returns above previously leaked rvec/tvec native memory;
                    // they were only released at the very end of the success path.
                    rvec.release();
                    tvec.release();
                }
            } catch (Exception e) {
                logger.info("An exception occurs.", e);
            }
        }
    });
}

From source file:qupath.opencv.tools.WandToolCV.java

License:Open Source License

/**
 * Computes the wand-tool selection shape for a click at (x, y) in image
 * coordinates: paints the currently-displayed region around the click into a
 * small patch, smooths it, flood-fills from the center with a tolerance derived
 * from the patch's standard deviation, and converts the resulting mask contours
 * back into a Path2D in full-resolution image space.
 *
 * @param x        click x in full-resolution image coordinates
 * @param y        click y in full-resolution image coordinates
 * @param useTiles unused here (see overridden method's contract)
 * @return the selected region as a path; an empty path if nothing can be selected
 */
@Override
protected Shape createShape(double x, double y, boolean useTiles) {

    // Lazily allocate the working images: a w x w RGB patch, and a flood-fill
    // mask which OpenCV requires to be 2 pixels larger in each dimension.
    if (mat == null)
        mat = new Mat(w, w, CvType.CV_8UC3);
    if (matMask == null)
        matMask = new Mat(w + 2, w + 2, CvType.CV_8U);

    // Ignore clicks within ~2 pixels of the previous one.
    if (pLast != null && pLast.distanceSq(x, y) < 4)
        return new Path2D.Float();

    long startTime = System.currentTimeMillis();

    QuPathViewer viewer = getViewer();
    if (viewer == null)
        return new Path2D.Float();

    double downsample = viewer.getDownsampleFactor();

    DefaultImageRegionStore regionStore = viewer.getImageRegionStore();

    // Paint the image as it is currently being viewed into imgTemp, centered on
    // (x, y), scaled so the w x w patch covers w * downsample image pixels.
    Graphics2D g2d = imgTemp.createGraphics();
    g2d.setColor(Color.BLACK);
    g2d.fillRect(0, 0, w, w);
    bounds.setFrame(x - w * downsample * .5, y - w * downsample * .5, w * downsample, w * downsample);
    g2d.scale(1.0 / downsample, 1.0 / downsample);
    g2d.translate(-bounds.getX(), -bounds.getY());
    regionStore.paintRegionCompletely(viewer.getServer(), g2d, bounds, viewer.getZPosition(),
            viewer.getTPosition(), viewer.getDownsampleFactor(), null, viewer.getImageDisplay(), 250);
    g2d.dispose();

    // Copy the raster pixels into the OpenCV image.
    byte[] buffer = ((DataBufferByte) imgTemp.getRaster().getDataBuffer()).getData();
    mat.put(0, 0, buffer);

    // Smooth a little; kernel size must be odd and large enough for the sigma.
    double blurSigma = Math.max(0.5, getWandSigmaPixels());
    double size = Math.ceil(blurSigma * 2) * 2 + 1;
    blurSize.width = size;
    blurSize.height = size;
    Imgproc.GaussianBlur(mat, mat, blurSize, blurSigma);

    // Flood-fill tolerance: 0.4 * per-channel standard deviation of the patch.
    MatOfDouble mean = new MatOfDouble();
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(mat, mean, stddev);
    double[] stddev2 = stddev.toArray();
    double scale = .4;
    for (int i = 0; i < stddev2.length; i++)
        stddev2[i] = stddev2[i] * scale;
    threshold.set(stddev2);
    mean.release();
    stddev.release();

    // Flood fill from the center seed, mask-only with a fixed range,
    // 4-connectivity, writing value 2 into the mask (hence 2 << 8); the circle
    // drawn with value 1 limits the fill to a radius of w/2.
    matMask.setTo(zero);
    Imgproc.circle(matMask, seed, w / 2, one);
    Imgproc.floodFill(mat, matMask, seed, one, null, threshold, threshold,
            4 | (2 << 8) | Imgproc.FLOODFILL_MASK_ONLY | Imgproc.FLOODFILL_FIXED_RANGE);
    Core.subtract(matMask, one, matMask);

    // Close small gaps in the mask before tracing contours.
    if (strel == null)
        strel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(matMask, matMask, Imgproc.MORPH_CLOSE, strel);

    contours.clear();
    if (contourHierarchy == null)
        contourHierarchy = new Mat();
    Imgproc.findContours(matMask, contours, contourHierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Convert contours (mask coordinates; mask has a 1-pixel border, hence -1)
    // back into a path in full-resolution image space.
    Path2D path = new Path2D.Float();
    boolean isOpen = false;
    for (MatOfPoint contour : contours) {
        try {
            // Discard single pixels / lines
            if (contour.size().height <= 2)
                continue;

            boolean firstPoint = true;
            for (Point p : contour.toArray()) {
                double xx = (p.x - w / 2 - 1) * downsample + x;
                double yy = (p.y - w / 2 - 1) * downsample + y;
                if (firstPoint) {
                    path.moveTo(xx, yy);
                    firstPoint = false;
                    isOpen = true;
                } else
                    path.lineTo(xx, yy);
            }
        } finally {
            // FIX: release each contour's native Mat memory explicitly instead of
            // waiting for GC finalization (these accumulate on every wand stroke).
            contour.release();
        }
    }
    // NOTE(review): closePath() closes only the most recent subpath; earlier
    // contours stay open (Java2D fill treats them as closed anyway) — confirm
    // whether each contour should be closed individually.
    if (isOpen)
        path.closePath();

    long endTime = System.currentTimeMillis();
    logger.trace(getClass().getSimpleName() + " time: " + (endTime - startTime));

    if (pLast == null)
        pLast = new Point2D.Double(x, y);
    else
        pLast.setLocation(x, y);

    return path;

}