Example usage for java.lang Math asin

List of usage examples for java.lang Math asin

Introduction

On this page you can find example usage for java.lang Math asin.

Prototype

public static double asin(double a) 

Document

Returns the arc sine of a value; the returned angle is in the range -pi/2 through pi/2.
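
The domain of asin is [-1, 1]; arguments outside that range yield NaN rather than throwing. A minimal standalone sketch:

    public class AsinDemo {
        public static void main(String[] args) {
            System.out.println(Math.asin(0.0));  // 0.0
            System.out.println(Math.asin(1.0));  // 1.5707963267948966 (pi/2)
            System.out.println(Math.asin(-1.0)); // -1.5707963267948966 (-pi/2)
            System.out.println(Math.asin(2.0));  // NaN: 2.0 is outside [-1, 1]
        }
    }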

Usage

From source file:org.wso2.carbon.iot.android.sense.event.streams.location.LocationDataReader.java

public double CalculationByDistance(double lat1, double lon1, double lat2, double lon2) {
    double Radius = EARTH_RADIUS;
    double dLat = Math.toRadians(lat2 - lat1);
    double dLon = Math.toRadians(lon2 - lon1);
    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) + Math.cos(Math.toRadians(lat1))
            * Math.cos(Math.toRadians(lat2)) * Math.sin(dLon / 2) * Math.sin(dLon / 2);
    double c = 2 * Math.asin(Math.sqrt(a));
    return Radius * c;
}
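
This is the haversine great-circle formula, with Math.asin recovering the central angle from the haversine term. As a sanity check, here is a self-contained sketch of the same computation, assuming an Earth radius of 6371 km (the value of EARTH_RADIUS is not shown in the excerpt) and illustrative Berlin/Paris coordinates:

    public class HaversineDemo {
        public static void main(String[] args) {
            double lat1 = 52.5200, lon1 = 13.4050; // Berlin
            double lat2 = 48.8566, lon2 = 2.3522;  // Paris
            double radius = 6371.0;                // assumed Earth radius in km
            double dLat = Math.toRadians(lat2 - lat1);
            double dLon = Math.toRadians(lon2 - lon1);
            double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) + Math.cos(Math.toRadians(lat1))
                    * Math.cos(Math.toRadians(lat2)) * Math.sin(dLon / 2) * Math.sin(dLon / 2);
            double c = 2 * Math.asin(Math.sqrt(a)); // central angle in radians
            System.out.println(radius * c);         // roughly 878 km for these cities
        }
    }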

From source file:com.github.rosjava_catkin_package_a.ARLocROS.ARLoc.java

@Override
public void onStart(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();
    log.info("Reading parameters");
    this.parameter = Parameter.createFromParameterTree(connectedNode.getParameterTree());

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    // setup rotation vector and translation vector storing output of the
    // localization
    rvec = new Mat(3, 1, CvType.CV_64F);
    tvec = new MatOfDouble(1.0, 1.0, 1.0);

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    //Utils.tresholdContrastBlackWhite(image, 600);
                    Imgproc.threshold(image, image, 200, 255, Imgproc.THRESH_BINARY);
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    if (poseProcessor.computePose(rvec, tvec, image)) {
                        // notify publisher threads (pose and tf, see below)
                        synchronized (tvec) {
                            tvec.notifyAll();
                        }
                    }

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    });

    // publish tf CAMERA_FRAME_NAME --> MARKER_FRAME_NAME
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            synchronized (tvec) {
                tvec.wait();
            }

            QuaternionHelper q = new QuaternionHelper();

            /*
             * http://euclideanspace.com/maths/geometry/rotations/
             * conversions/matrixToEuler/index.htm
             * http://stackoverflow.com/questions/12933284/rodrigues-into-
             * eulerangles-and-vice-versa
             * 
             * heading = atan2(-m20,m00) attitude = asin(m10) bank =
             * atan2(-m12,m11)
             */
            // convert output rotation vector rvec to rotation matrix R
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // get rotations around X,Y,Z from rotation matrix R
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            // convert Euler angles to quaternion
            q.setFromEuler(bankX, headingY, attitudeZ);

            // set information to message
            TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
            TransformStamped posestamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = posestamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 point = transform.getTranslation();
            point.setX(tvec.get(0, 0)[0]);
            point.setY(tvec.get(1, 0)[0]);
            point.setZ(tvec.get(2, 0)[0]);

            orientation.setW(q.getW());
            orientation.setX(q.getX());
            orientation.setY(q.getY());
            orientation.setZ(q.getZ());
            posestamped.getHeader().setFrameId(parameter.cameraFrameName());
            posestamped.setChildFrameId(parameter.markerFrameName());
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfmessage.getTransforms().add(posestamped);
            tfPublisherCamToMarker.publish(tfmessage);
        }
    });

    // publish Markers
    final Publisher<visualization_msgs.Marker> markerPublisher = connectedNode.newPublisher("markers",
            visualization_msgs.Marker._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // publish markers every 500ms
            Thread.sleep(500);
            // get marker points from markerConfig, each marker has 4
            // vertices
            List<Point3> points3dlist = markerConfig.getUnordered3DPointList();
            int i = 0;
            for (Point3 p : points3dlist) {
                Marker markermessage = markerPublisher.newMessage();
                // FIXME If the markers are published into an existing frame
                // (e.g. map or odom) the node will consume very high CPU
                // and will fail after a short time. The markers are
                // probably published in the wrong way.
                markermessage.getHeader().setFrameId(parameter.markerFrameName());
                markermessage.setId(i);
                i++;
                markermessage.setType(visualization_msgs.Marker.SPHERE);
                markermessage.setAction(visualization_msgs.Marker.ADD);
                // position
                double x = p.x;
                markermessage.getPose().getPosition().setX(x);
                double y = p.y;
                markermessage.getPose().getPosition().setY(y);
                double z = p.z;
                markermessage.getPose().getPosition().setZ(z);
                // orientation
                markermessage.getPose().getOrientation().setX(0);
                markermessage.getPose().getOrientation().setY(0);
                markermessage.getPose().getOrientation().setZ(0);
                markermessage.getPose().getOrientation().setW(1);
                // pattern size
                markermessage.getScale().setX(0.1);
                markermessage.getScale().setY(0.1);
                markermessage.getScale().setZ(0.1);
                // color
                markermessage.getColor().setA(1);
                markermessage.getColor().setR(1);
                markermessage.getColor().setG(0);
                markermessage.getColor().setB(0);

                markerPublisher.publish(markermessage);
            }
        }
    });

    // publish tf map --> odom
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait to be notified if new
            // image was processed
            synchronized (tvec) {
                tvec.wait();
            }

            // compute transform map to odom from map to
            // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

            // map to camera_rgb_optical_frame
            Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            QuaternionHelper q = new QuaternionHelper();
            // get rotation matrix R from solvepnp output rotation vector
            // rvec
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // transpose R, because we need the transformation from
            // world(map) to camera
            R = R.t();
            // get rotation around X,Y,Z from R in radians
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            // compute translation vector from world (map) to cam
            // tvec_map_cam
            Core.multiply(R, new Scalar(-1), R); // R=-R
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec

            org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(),
                    q.getY(), q.getZ(), q.getW());
            double x = tvec_map_cam.get(0, 0)[0];
            double y = tvec_map_cam.get(1, 0)[0];
            double z = tvec_map_cam.get(2, 0)[0];
            // create a Transform object that holds the transform from map to camera
            org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            GraphName targetFrame = GraphName.of("odom");
            org.ros.rosjava_geometry.Transform transform_cam_odom = null;
            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "odom! " + "However, " + "will continue..");
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                        + "However, will " + "continue..");
                // cancel this loop..no result can be computed
                return;
            }
            // multiply results
            org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
            result = result.multiply(transform_map_cam);
            result = result.multiply(transform_cam_odom);

            // set information to ROS message
            TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
            TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = transformStamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 vector = transform.getTranslation();
            vector.setX(result.getTranslation().getX());
            vector.setY(result.getTranslation().getY());
            vector.setZ(result.getTranslation().getZ());

            orientation.setW(result.getRotationAndScale().getW());
            orientation.setX(result.getRotationAndScale().getX());
            orientation.setY(result.getRotationAndScale().getY());
            orientation.setZ(result.getRotationAndScale().getZ());
            transformStamped.getHeader().setFrameId("map");
            transformStamped.setChildFrameId("odom");
            transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfMessage.getTransforms().add(transformStamped);
            tfPublisherMapToOdom.publish(tfMessage);
            // System.exit(0);
        }
    });

    // Publish Pose

    final Publisher<geometry_msgs.PoseStamped> posePublisher = connectedNode
            .newPublisher(parameter.poseTopicName(), geometry_msgs.PoseStamped._TYPE);

    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait here to be notified if
            // new image was processed
            synchronized (tvec) {
                tvec.wait();
            }
            final QuaternionHelper q = new QuaternionHelper();

            // convert rotation vector result of solvepnp to rotation matrix
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // see publishers before for documentation
            final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            R = R.t();
            final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            final double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            Core.multiply(R, new Scalar(-1), R);
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
            final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                    q.getX(), q.getY(), q.getZ(), q.getW());
            final double x = tvec_map_cam.get(0, 0)[0];
            final double y = tvec_map_cam.get(1, 0)[0];
            final double z = tvec_map_cam.get(2, 0)[0];

            final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            final GraphName targetFrame = GraphName.of("base_link");
            org.ros.rosjava_geometry.Transform transform_cam_base = null;

            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_base = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "base_link! " + "However, will continue..");
                    // cancel this loop..no result can be computed
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "base_link!" + " However, " + "will continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform.identity();
            current_pose = current_pose.multiply(transform_map_cam);
            current_pose = current_pose.multiply(transform_cam_base);

            // check for plausibility of the pose by checking if movement
            // exceeds max speed (defined) of the robot
            if (parameter.badPoseReject()) {
                Time current_timestamp = connectedNode.getCurrentTime();
                // TODO Unfortunately, we do not have the tf timestamp at
                // hand here. So we can only use the current timestamp.
                double maxspeed = 5;
                boolean goodpose = false;
                // if (current_pose != null && current_timestamp != null) {
                if (last_pose != null && last_timestamp != null) {
                    // check speed of movement between last and current pose
                    double distance = PoseCompare.distance(current_pose, last_pose);
                    double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                    if ((distance / timedelta) < maxspeed) {
                        if (smoothing) {
                            double xold = last_pose.getTranslation().getX();
                            double yold = last_pose.getTranslation().getY();
                            double zold = last_pose.getTranslation().getZ();
                            double xnew = current_pose.getTranslation().getX();
                            double ynew = current_pose.getTranslation().getY();
                            double znew = current_pose.getTranslation().getZ();
                            final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                    (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3, (zold * 2 + znew) / 3);
                            current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                    current_pose.getRotationAndScale());
                            last_pose = current_pose;
                        }
                        last_pose = current_pose;
                        last_timestamp = current_timestamp;
                        goodpose = true;
                    } else {
                        log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                    }

                } else {
                    last_pose = current_pose;
                    last_timestamp = current_timestamp;
                }
                // }
                // bad pose rejection
                if (!goodpose) {
                    return;
                }
            }

            // set information to message
            geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
            Pose pose = posestamped.getPose();
            Quaternion orientation = pose.getOrientation();
            Point point = pose.getPosition();

            point.setX(current_pose.getTranslation().getX());

            point.setY(current_pose.getTranslation().getY());

            point.setZ(current_pose.getTranslation().getZ());

            orientation.setW(current_pose.getRotationAndScale().getW());
            orientation.setX(current_pose.getRotationAndScale().getX());
            orientation.setY(current_pose.getRotationAndScale().getY());
            orientation.setZ(current_pose.getRotationAndScale().getZ());

            // frame_id too
            posestamped.getHeader().setFrameId("map");
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            posePublisher.publish(posestamped);

        }
    });

}
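
In the pose-publishing loops above, Math.asin(R.get(1, 0)[0]) extracts the rotation about Z (attitude) from matrix element m10, following the euclideanspace.com convention cited in the comments. A minimal standalone check, using the fact that m10 = sin(attitude) for a pure Z rotation:

    public class AttitudeFromMatrixDemo {
        public static void main(String[] args) {
            double attitude = 0.4;           // rotation about Z, in radians
            double m10 = Math.sin(attitude); // element (1,0) of a pure Z rotation matrix
            System.out.println(Math.asin(m10)); // 0.4, the angle recovered
        }
    }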

From source file:org.esa.nest.eo.GeoUtils.java

/**
 * Convert Cartesian to Polar coordinates.
 * <p>
 * <b>Definitions:</b>
 *  <p>Latitude: angle from the XY-plane towards the +Z-axis.</p>
 *  <p>Longitude: angle in the XY-plane measured from the +X-axis towards the +Y-axis.</p>
 * </p>
 * <p>
 *  Implementation Details: Unlike the rest of the utility methods, the GeoPos class container is not used for storing
 *  polar coordinates. GeoPos fields are declared as floats and can introduce numerical errors, especially in radius/height.
 * </p>
 * <p>
 *  Note: Apache's FastMath is used in the implementation.
 * </p>
 * @param xyz Array of x, y, and z coordinates.
 * @param phiLamHeight Array of latitude (in radians), longitude (in radians), and radius (in meters).
 *
 * @author Petar Marikovic, PPO.labs
 */
public static void cartesian2polar(final double[] xyz, final double[] phiLamHeight) {

    phiLamHeight[2] = Math.sqrt(xyz[0] * xyz[0] + xyz[1] * xyz[1] + xyz[2] * xyz[2]);
    phiLamHeight[1] = Math.atan2(xyz[1], xyz[0]);
    phiLamHeight[0] = Math.asin(xyz[2] / phiLamHeight[2]);

}
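
As a usage sketch (assuming org.esa.nest.eo.GeoUtils is on the classpath), a point on the +Z axis should come back with latitude pi/2 and the full vector length as the radius:

    import org.esa.nest.eo.GeoUtils;

    public class Cartesian2PolarDemo {
        public static void main(String[] args) {
            double[] xyz = {0.0, 0.0, 6371000.0};  // a point on the +Z axis
            double[] phiLamHeight = new double[3];
            GeoUtils.cartesian2polar(xyz, phiLamHeight);
            System.out.println(phiLamHeight[0]);   // pi/2 (latitude)
            System.out.println(phiLamHeight[1]);   // 0.0 (longitude, atan2(0, 0))
            System.out.println(phiLamHeight[2]);   // 6371000.0 (radius)
        }
    }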

From source file:com.qasp.diego.arsp.indice_localFrag.java

public double FormuladHaversine(double lat1, double long1, double lat2, double long2) {

    final double RAIO_DA_TERRA = 6371; // in km
    // conversion to radians
    double radlat1 = Math.toRadians(lat1);
    double radlong1 = Math.toRadians(long1);
    double radlat2 = Math.toRadians(lat2);
    double radlong2 = Math.toRadians(long2);
    // Parameters TODO: BigDecimal?
    double radlat = radlat2 - radlat1;
    double radlong = radlong2 - radlong1;
    // Formula
    double d = Math.pow(Math.sin(radlat * 0.5), 2)
            + Math.cos(radlat1) * Math.cos(radlat2) * Math.pow(Math.sin(radlong * 0.5), 2);
    return 2 * RAIO_DA_TERRA * Math.asin(Math.sqrt(d));
}

From source file:uk.ac.diamond.scisoft.analysis.diffraction.powder.PixelIntegrationUtils.java

public static Dataset[] generateMinMaxRadialArray(int[] shape, QSpace qSpace, XAxis xAxis) {

    if (qSpace == null)
        return null;

    double[] beamCentre = qSpace.getDetectorProperties().getBeamCentreCoords();

    Dataset radialArrayMax = DatasetFactory.zeros(shape, Dataset.FLOAT64);
    Dataset radialArrayMin = DatasetFactory.zeros(shape, Dataset.FLOAT64);

    PositionIterator iter = radialArrayMax.getPositionIterator();
    int[] pos = iter.getPos();

    double[] vals = new double[4];
    double w = qSpace.getWavelength();
    while (iter.hasNext()) {

        //FIXME or not fix me, but I would expect centre to be +0.5, but this
        //clashes with much of the rest of DAWN

        if (xAxis != XAxis.PIXEL) {
            vals[0] = qSpace.qFromPixelPosition(pos[1], pos[0]).length();
            vals[1] = qSpace.qFromPixelPosition(pos[1] + 1, pos[0]).length();
            vals[2] = qSpace.qFromPixelPosition(pos[1], pos[0] + 1).length();
            vals[3] = qSpace.qFromPixelPosition(pos[1] + 1, pos[0] + 1).length();
        } else {
            vals[0] = Math.hypot(pos[1] - beamCentre[0], pos[0] - beamCentre[1]);
            vals[1] = Math.hypot(pos[1] + 1 - beamCentre[0], pos[0] - beamCentre[1]);
            vals[2] = Math.hypot(pos[1] - beamCentre[0], pos[0] + 1 - beamCentre[1]);
            vals[3] = Math.hypot(pos[1] + 1 - beamCentre[0], pos[0] + 1 - beamCentre[1]);
        }

        Arrays.sort(vals);

        switch (xAxis) {
        case ANGLE:
            radialArrayMax.set(Math.toDegrees(Math.asin(vals[3] * w / (4 * Math.PI)) * 2), pos);
            radialArrayMin.set(Math.toDegrees(Math.asin(vals[0] * w / (4 * Math.PI)) * 2), pos);
            break;
        case Q:
        case PIXEL:
            radialArrayMax.set(vals[3], pos);
            radialArrayMin.set(vals[0], pos);
            break;
        case RESOLUTION:
            radialArrayMax.set((2 * Math.PI) / vals[0], pos);
            radialArrayMin.set((2 * Math.PI) / vals[3], pos);
            break;
        }
    }
    return new Dataset[] { radialArrayMin, radialArrayMax };
}
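
The ANGLE branch inverts the standard scattering relation q = (4*pi/lambda)*sin(theta) to recover the scattering angle 2*theta in degrees. A standalone round-trip sketch with an illustrative wavelength:

    public class QToTwoThetaDemo {
        public static void main(String[] args) {
            double wavelength = 1.0;   // illustrative wavelength
            double twoThetaDeg = 30.0; // pick an angle...
            double q = 4 * Math.PI / wavelength * Math.sin(Math.toRadians(twoThetaDeg / 2));
            // ...and recover it exactly as the ANGLE case does:
            double recovered = Math.toDegrees(Math.asin(q * wavelength / (4 * Math.PI)) * 2);
            System.out.println(recovered); // 30.0, up to floating-point rounding
        }
    }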

From source file:com.georgeme.Act_Circle.java

public double CalculationByDistance(LatLng StartP, LatLng EndP) {
    int Radius = 6371; // radius of earth in km
    double lat1 = StartP.latitude;
    double lat2 = EndP.latitude;
    double lon1 = StartP.longitude;
    double lon2 = EndP.longitude;
    double dLat = Math.toRadians(lat2 - lat1);
    double dLon = Math.toRadians(lon2 - lon1);
    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) + Math.cos(Math.toRadians(lat1))
            * Math.cos(Math.toRadians(lat2)) * Math.sin(dLon / 2) * Math.sin(dLon / 2);
    double c = 2 * Math.asin(Math.sqrt(a));
    return Radius * c * 1000;
}

From source file:Quaternion.java

public static final Vector3 anguarVelocity(Quaternion q0, Quaternion q1) {
    Quaternion q0inv = new Quaternion(q0.s, q0.v.multiply(-1)).multiply(1 / q0.dot(q0));
    Quaternion r = q0inv.multiply(q1);

    double sinomeganormhalf = r.v.norm();

    //zero angular velocity
    if (sinomeganormhalf < 1e-7) {
        return new Vector3();
    }

    Vector3 n = r.v.multiply(1 / sinomeganormhalf);

    double omegaNorm = Math.asin(sinomeganormhalf) * 2;

    return n.multiply(omegaNorm);
}
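
Here Math.asin inverts |v| = sin(theta/2): for a unit quaternion, the norm of the vector part is the sine of half the rotation angle. A standalone numeric sketch of that inversion:

    public class QuaternionAngleDemo {
        public static void main(String[] args) {
            double theta = 0.8;                   // rotation angle, in radians
            double sinHalf = Math.sin(theta / 2); // norm of the quaternion's vector part
            System.out.println(2 * Math.asin(sinHalf)); // 0.8, the angle recovered
        }
    }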

From source file:arlocros.ArMarkerPoseEstimator.java

private void start(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    // setup rotation vector and translation vector storing output of the
    // localization
    rvec = new Mat(3, 1, CvType.CV_64F);
    tvec = new MatOfDouble(1.0, 1.0, 1.0);

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    if (parameter.blackWhiteContrastLevel() > 0) {
                        log.trace("using BlackWhiteContrastLevel");
                        Utils.tresholdContrastBlackWhite(image, parameter.blackWhiteContrastLevel(),
                                parameter.invertBlackWhiteColor());
                    }
                    if (parameter.useThreshold()) {
                        Imgproc.threshold(image, image, 200, 255, Imgproc.THRESH_BINARY);
                    }
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    if (poseProcessor.computePose(rvec, tvec, image)) {
                        // notify publisher threads (pose and tf, see below)
                        synchronized (tvec) {
                            tvec.notifyAll();
                        }
                    }

                } catch (Exception e) {
                    logger.info("An exception occurs.", e);
                }
            }
        }
    });

    // publish tf CAMERA_FRAME_NAME --> MARKER_FRAME_NAME
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            synchronized (tvec) {
                tvec.wait();
            }

            QuaternionHelper q = new QuaternionHelper();

            /*
             * http://euclideanspace.com/maths/geometry/rotations/
             * conversions/matrixToEuler/index.htm
             * http://stackoverflow.com/questions/12933284/rodrigues-into-
             * eulerangles-and-vice-versa
             *
             * heading = atan2(-m20,m00) attitude = asin(m10) bank =
             * atan2(-m12,m11)
             */
            // convert output rotation vector rvec to rotation matrix R
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // get rotations around X,Y,Z from rotation matrix R
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            // convert Euler angles to quaternion
            q.setFromEuler(bankX, headingY, attitudeZ);

            // set information to message
            TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
            TransformStamped posestamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = posestamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 point = transform.getTranslation();
            point.setX(tvec.get(0, 0)[0]);
            point.setY(tvec.get(1, 0)[0]);
            point.setZ(tvec.get(2, 0)[0]);

            orientation.setW(q.getW());
            orientation.setX(q.getX());
            orientation.setY(q.getY());
            orientation.setZ(q.getZ());
            posestamped.getHeader().setFrameId(parameter.cameraFrameName());
            posestamped.setChildFrameId(parameter.markerFrameName());
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfmessage.getTransforms().add(posestamped);
            tfPublisherCamToMarker.publish(tfmessage);
        }
    });

    // publish Markers
    final Publisher<visualization_msgs.Marker> markerPublisher = connectedNode.newPublisher("markers",
            visualization_msgs.Marker._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // publish markers every 500ms
            Thread.sleep(500);
            // get marker points from markerConfig, each marker has 4
            // vertices
            List<Point3> points3dlist = markerConfig.getUnordered3DPointList();
            int i = 0;
            for (Point3 p : points3dlist) {
                Marker markermessage = markerPublisher.newMessage();
                // FIXME If the markers are published into an existing frame
                // (e.g. map or odom) the node will consume very high CPU
                // and will fail after a short time. The markers are
                // probably published in the wrong way.
                markermessage.getHeader().setFrameId(parameter.markerFrameName());
                markermessage.setId(i);
                i++;
                markermessage.setType(visualization_msgs.Marker.SPHERE);
                markermessage.setAction(visualization_msgs.Marker.ADD);
                // position
                double x = p.x;
                markermessage.getPose().getPosition().setX(x);
                double y = p.y;
                markermessage.getPose().getPosition().setY(y);
                double z = p.z;
                markermessage.getPose().getPosition().setZ(z);
                // orientation
                markermessage.getPose().getOrientation().setX(0);
                markermessage.getPose().getOrientation().setY(0);
                markermessage.getPose().getOrientation().setZ(0);
                markermessage.getPose().getOrientation().setW(1);
                // pattern size
                markermessage.getScale().setX(0.1);
                markermessage.getScale().setY(0.1);
                markermessage.getScale().setZ(0.1);
                // color
                markermessage.getColor().setA(1);
                markermessage.getColor().setR(1);
                markermessage.getColor().setG(0);
                markermessage.getColor().setB(0);

                markerPublisher.publish(markermessage);
            }
        }
    });

    // publish tf map --> odom
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait to be notified if new
            // image was processed
            synchronized (tvec) {
                tvec.wait();
            }

            // compute transform map to odom from map to
            // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

            // map to camera_rgb_optical_frame
            Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            QuaternionHelper q = new QuaternionHelper();
            // get rotation matrix R from solvepnp output rotation vector
            // rvec
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // transpose R, because we need the transformation from
            // world(map) to camera
            R = R.t();
            // get rotation around X,Y,Z from R in radians
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            // compute translation vector from world (map) to cam
            // tvec_map_cam
            Core.multiply(R, new Scalar(-1), R); // R=-R
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec

            org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(),
                    q.getY(), q.getZ(), q.getW());
            double x = tvec_map_cam.get(0, 0)[0];
            double y = tvec_map_cam.get(1, 0)[0];
            double z = tvec_map_cam.get(2, 0)[0];
            // create a Transform object that holds the transform from map to camera
            org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            GraphName targetFrame = GraphName.of("odom");
            org.ros.rosjava_geometry.Transform transform_cam_odom = null;
            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "odom! " + "However, " + "will continue..");
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                        + "However, will " + "continue..");
                // cancel this loop..no result can be computed
                return;
            }
            // multiply results
            org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
            result = result.multiply(transform_map_cam);
            result = result.multiply(transform_cam_odom);

            // set information to ROS message
            TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
            TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = transformStamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 vector = transform.getTranslation();
            vector.setX(result.getTranslation().getX());
            vector.setY(result.getTranslation().getY());
            vector.setZ(result.getTranslation().getZ());

            orientation.setW(result.getRotationAndScale().getW());
            orientation.setX(result.getRotationAndScale().getX());
            orientation.setY(result.getRotationAndScale().getY());
            orientation.setZ(result.getRotationAndScale().getZ());
            transformStamped.getHeader().setFrameId("map");
            transformStamped.setChildFrameId("odom");
            transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfMessage.getTransforms().add(transformStamped);
            tfPublisherMapToOdom.publish(tfMessage);
            // System.exit(0);
        }
    });

    // Publish Pose

    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait here to be notified if
            // new image was processed
            synchronized (tvec) {
                tvec.wait();
            }
            final QuaternionHelper q = new QuaternionHelper();

            // convert rotation vector result of solvepnp to rotation matrix
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // see publishers before for documentation
            final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            R = R.t();
            final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            final double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            Core.multiply(R, new Scalar(-1), R);
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
            final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                    q.getX(), q.getY(), q.getZ(), q.getW());
            final double x = tvec_map_cam.get(0, 0)[0];
            final double y = tvec_map_cam.get(1, 0)[0];
            final double z = tvec_map_cam.get(2, 0)[0];

            final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            final GraphName targetFrame = GraphName.of("base_link");
            org.ros.rosjava_geometry.Transform transform_cam_base = null;

            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_base = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "base_link! " + "However, will continue..");
                    // cancel this loop..no result can be computed
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "base_link!" + " However, " + "will continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform.identity();
            current_pose = current_pose.multiply(transform_map_cam);
            current_pose = current_pose.multiply(transform_cam_base);

            // check for plausibility of the pose by checking if movement
            // exceeds max speed (defined) of the robot
            if (parameter.badPoseReject()) {
                Time current_timestamp = connectedNode.getCurrentTime();
                // TODO Unfortunately, we do not have the tf timestamp at
                // hand here. So we can only use the current timestamp.
                double maxspeed = 5;
                boolean goodpose = false;
                // if (current_pose != null && current_timestamp != null) {
                if (last_pose != null && last_timestamp != null) {
                    // check speed of movement between last and current pose
                    double distance = PoseCompare.distance(current_pose, last_pose);
                    double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                    if ((distance / timedelta) < maxspeed) {
                        if (smoothing) {
                            double xold = last_pose.getTranslation().getX();
                            double yold = last_pose.getTranslation().getY();
                            double zold = last_pose.getTranslation().getZ();
                            double xnew = current_pose.getTranslation().getX();
                            double ynew = current_pose.getTranslation().getY();
                            double znew = current_pose.getTranslation().getZ();
                            final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                    (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3, (zold * 2 + znew) / 3);
                            current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                    current_pose.getRotationAndScale());
                            last_pose = current_pose;
                        }
                        last_pose = current_pose;
                        last_timestamp = current_timestamp;
                        goodpose = true;
                    } else {
                        log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                    }

                } else {
                    last_pose = current_pose;
                    last_timestamp = current_timestamp;
                }
                // }
                // bad pose rejection
                if (!goodpose) {
                    return;
                }
            }

            // set information to message
            final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
            Pose pose = posestamped.getPose();
            Quaternion orientation = pose.getOrientation();
            Point point = pose.getPosition();

            point.setX(current_pose.getTranslation().getX());

            point.setY(current_pose.getTranslation().getY());

            point.setZ(current_pose.getTranslation().getZ());

            orientation.setW(current_pose.getRotationAndScale().getW());
            orientation.setX(current_pose.getRotationAndScale().getX());
            orientation.setY(current_pose.getRotationAndScale().getY());
            orientation.setZ(current_pose.getRotationAndScale().getZ());

            // frame_id too
            posestamped.getHeader().setFrameId("map");
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            posePublisher.publish(posestamped);
            mostRecentPose.set(posestamped);
        }
    });

}

From source file:org.esa.beam.util.math.FastMathPerformance.java

public void testAsin() {
    System.gc();
    double x = 0;
    long time = System.nanoTime();
    for (int i = 0; i < RUNS; i++)
        x += StrictMath.asin(i / 10000000.0);
    long strictTime = System.nanoTime() - time;

    System.gc();
    double y = 0;
    time = System.nanoTime();
    for (int i = 0; i < RUNS; i++)
        y += FastMath.asin(i / 10000000.0);
    long fastTime = System.nanoTime() - time;

    System.gc();
    double z = 0;
    time = System.nanoTime();
    for (int i = 0; i < RUNS; i++)
        z += Math.asin(i / 10000000.0);
    long mathTime = System.nanoTime() - time;

    report("asin", x + y + z, strictTime, fastTime, mathTime);
}

From source file:com.itemanalysis.psychometrics.distribution.BivariateNormalDistribution.java

public double bivnor(double ah, double ak, double r) {
    double a2;
    double ap;
    double b;
    double cn;
    double con;
    double conex;
    double ex;
    double g2;
    double gh;
    double gk;
    double gw = 0;
    double h2;
    double h4;
    int i;
    int idig = 15;
    int is = 0;
    double rr;
    double s1;
    double s2;
    double sgn;
    double sn;
    double sp;
    double sqr;
    double t;
    double twopi = 6.283185307179587;
    double w2 = 0;
    double wh = 0;
    double wk = 0;

    b = 0.0;

    gh = gauss(-ah) / 2.0;
    gk = gauss(-ak) / 2.0;

    if (r == 0.0) {
        b = 4.00 * gh * gk;
        b = r8_max(b, 0.0);
        b = r8_min(b, 1.0);
        return b;
    }

    rr = (1.0 + r) * (1.0 - r);

    if (rr < 0.0) {
        throw new IllegalStateException("BIVNOR - Fatal error!");
        //    cerr << "\n";
        //    cerr << "BIVNOR - Fatal error!\n";
        //    cerr << "  1 < |R|.\n";
        //    exit ( 0 );
    }

    if (rr == 0.0) {
        if (r < 0.0) {
            if (ah + ak < 0.0) {
                b = 2.0 * (gh + gk) - 1.0;
            }
        } else {
            if (ah - ak < 0.0) {
                b = 2.0 * gk;
            } else {
                b = 2.0 * gh;
            }
        }
        b = r8_max(b, 0.0);
        b = r8_min(b, 1.0);
        return b;
    }

    sqr = Math.sqrt(rr);

    if (idig == 15) {
        con = twopi * 1.0E-15 / 2.0;
    } else {
        con = twopi / 2.0;
        for (i = 1; i <= idig; i++) {
            con = con / 10.0;
        }
    }
    //
    //  (0,0)
    //
    if (ah == 0.0 && ak == 0.0) {
        b = 0.25 + Math.asin(r) / twopi;
        b = r8_max(b, 0.0);
        b = r8_min(b, 1.0);
        return b;
    }
    //
    //  (0,nonzero)
    //
    if (ah == 0.0 && ak != 0.0) {
        b = gk;
        wh = -ak;
        wk = (ah / ak - r) / sqr;
        gw = 2.0 * gk;
        is = 1;
    }
    //
    //  (nonzero,0)
    //
    else if (ah != 0.0 && ak == 0.0) {
        b = gh;
        wh = -ah;
        wk = (ak / ah - r) / sqr;
        gw = 2.0 * gh;
        is = -1;
    }
    //
    //  (nonzero,nonzero)
    //
    else if (ah != 0.0 && ak != 0.0) {
        b = gh + gk;
        if (ah * ak < 0.0) {
            b = b - 0.5;
        }
        wh = -ah;
        wk = (ak / ah - r) / sqr;
        gw = 2.0 * gh;
        is = -1;
    }

    for (;;) {
        sgn = -1.0;
        t = 0.0;

        if (wk != 0.0) {
            if (r8_abs(wk) == 1.0) {
                t = wk * gw * (1.0 - gw) / 2.0;
                b = b + sgn * t;
            } else {
                if (1.0 < r8_abs(wk)) {
                    sgn = -sgn;
                    wh = wh * wk;
                    g2 = gauss(wh);
                    wk = 1.0 / wk;

                    if (wk < 0.0) {
                        b = b + 0.5;
                    }
                    b = b - (gw + g2) / 2.0 + gw * g2;
                }
                h2 = wh * wh;
                a2 = wk * wk;
                h4 = h2 / 2.0;
                ex = Math.exp(-h4);
                w2 = h4 * ex;
                ap = 1.0;
                s2 = ap - ex;
                sp = ap;
                s1 = 0.0;
                sn = s1;
                conex = r8_abs(con / wk);

                for (;;) {
                    cn = ap * s2 / (sn + sp);
                    s1 = s1 + cn;

                    if (r8_abs(cn) <= conex) {
                        break;
                    }
                    sn = sp;
                    sp = sp + 1.0;
                    s2 = s2 - w2;
                    w2 = w2 * h4 / sp;
                    ap = -ap * a2;
                }
                t = (Math.atan(wk) - wk * s1) / twopi;
                b = b + sgn * t;
            }
        }
        if (0 <= is) {
            break;
        }
        if (ak == 0.0) {
            break;
        }
        wh = -ak;
        wk = (ah / ak - r) / sqr;
        gw = 2.0 * gk;
        is = 1;
    }

    b = r8_max(b, 0.0);
    b = r8_min(b, 1.0);

    return b;
}
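
The (ah, ak) = (0, 0) branch uses the classical orthant-probability closed form 1/4 + asin(r)/(2*pi) for standard bivariate normals with correlation r. A standalone check:

    public class BivnorZeroZeroDemo {
        public static void main(String[] args) {
            double r = 0.5;
            double p = 0.25 + Math.asin(r) / (2 * Math.PI);
            System.out.println(p); // ~0.3333: asin(0.5) = pi/6 gives 1/4 + 1/12 = 1/3
        }
    }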