Example usage for org.opencv.core Scalar Scalar

Introduction

On this page you can find usage examples for the org.opencv.core.Scalar constructor.

Prototype

public Scalar(double[] vals) 
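
A minimal sketch of this constructor in isolation. Scalar is a plain Java value class, so no native library load is needed just to construct one; the class name below is hypothetical:

import org.opencv.core.Scalar;

public class ScalarFromArrayDemo {
    public static void main(String[] args) {
        // A four-element array maps directly onto the scalar's components.
        Scalar color = new Scalar(new double[] { 255, 0, 0, 255 });
        System.out.println(color); // [255.0, 0.0, 0.0, 255.0]

        // Shorter arrays are zero-padded for the missing components.
        Scalar gray = new Scalar(new double[] { 128 });
        System.out.println(gray); // [128.0, 0.0, 0.0, 0.0]
    }
}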

Usage

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Applies thresholding to a color image.
 *
 * @param src - color source image.
 * @param r - threshold value for the red channel.
 * @param g - threshold value for the green channel.
 * @param b - threshold value for the blue channel.
 * @return the thresholded color image.
 */
public static Mat threshold(Mat src, int r, int g, int b) {
    if (src.channels() < 3)
        return src;

    Mat dest = new Mat();
    Mat srcBin = new Mat();

    Imgproc.threshold(src, srcBin, 1, 255, Imgproc.THRESH_BINARY);
    Core.inRange(src, new Scalar(0), new Scalar(r, g, b), dest);
    dest = invert(dest);
    cvtColor(dest, dest, Imgproc.COLOR_GRAY2BGR);

    dest = xor(srcBin, dest);
    dest = and(src, dest);
    return dest;
}
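
A usage sketch for the method above, assuming the CoreOperations class is on the classpath and a 3-channel color image is available; the file names are hypothetical:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class ThresholdDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical input; any 3-channel color image works.
        Mat src = Imgcodecs.imread("cells.png");

        // Apply the per-channel color threshold defined above.
        Mat masked = CoreOperations.threshold(src, 200, 180, 160);
        Imgcodecs.imwrite("cells_thresholded.png", masked);
    }
}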

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Removes from a binary image all contours whose area is less than the specified threshold.
 *
 * @param src - binary source image.
 * @param thresh - minimum contour area to keep.
 * @return the source image with all contours of area less than {@code thresh} removed.
 */
public static Mat clearContours(Mat src, int thresh) {
    if (src.channels() > 1)
        return src;

    Mat dest = src.clone();
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    findContours(src, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_TC89_L1);

    Mat maskWhite = new Mat(src.rows(), src.cols(), CvType.CV_8UC1, new Scalar(255));
    Mat maskBlack = maskWhite.clone();

    for (int i = 0; i < contours.size(); i++) {
        double contourArea = contourArea(contours.get(i));

        if (contourArea < thresh) {
            int pixelColor = averageIntensity(src, contours.get(i));
            drawContours(pixelColor > 127 ? maskWhite : maskBlack, contours, i, new Scalar(0), Core.FILLED);
        }
    }

    maskWhite = erode(maskWhite, 2);
    maskBlack = erode(maskBlack, 2);
    dest = and(maskWhite, dest);
    dest = or(invert(maskBlack), dest);

    return dest;
}
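
A usage sketch, assuming a single-channel image that is binarized first; the file names and the 50-pixel area threshold are arbitrary:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class ClearContoursDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat gray = Imgcodecs.imread("blobs.png", Imgcodecs.IMREAD_GRAYSCALE);
        Mat binary = new Mat();
        Imgproc.threshold(gray, binary, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);

        // Drop every contour whose area is below 50 pixels.
        Mat cleaned = CoreOperations.clearContours(binary, 50);
        Imgcodecs.imwrite("blobs_cleaned.png", cleaned);
    }
}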

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Draws all contours of the source image onto the destination image.
 *
 * @param src - source image.
 * @param dest - destination image that receives the white contours.
 * @return the number of contours found.
 */
public static int drawAllContours(Mat src, Mat dest) {
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    findContours(grayscale(src), contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_TC89_L1);
    for (int i = 0; i < contours.size(); i++) {
        drawContours(dest, contours, i, new Scalar(255));
    }

    return contours.size();
}
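
A usage sketch; the destination Mat must be allocated by the caller, here as an all-black single-channel canvas built with the same Scalar pattern (file names are hypothetical):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;

public class DrawAllContoursDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat src = Imgcodecs.imread("shapes.png");

        // Black canvas; drawAllContours paints contours with new Scalar(255).
        Mat canvas = new Mat(src.rows(), src.cols(), CvType.CV_8UC1, new Scalar(0));
        int count = CoreOperations.drawAllContours(src, canvas);
        System.out.println("Contours drawn: " + count);
        Imgcodecs.imwrite("shapes_contours.png", canvas);
    }
}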

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void findNeurons() {
    if (this.preparedImage == null) {
        makePreparedImage();
    }

    int minNeuronRadius = 12;
    int maxNeuronRadius = 27;
    int stepSize = 3;

    neurons = new ArrayList<Neuron>();

    for (int step = maxNeuronRadius; step >= minNeuronRadius; step -= stepSize) {
        Mat stepImage = CoreOperations.erode(this.preparedImage, step);

        for (Neuron neuron : neurons) {
            Scalar blackColor = new Scalar(0);
            Imgproc.circle(stepImage, neuron.getCenter(), (int) (1.8f * neuron.getRadius()), blackColor,
                    Core.FILLED);
        }

        List<Neuron> neuronsInStep = findNeuronsInStep(stepImage, step);

        for (Neuron neuron : neuronsInStep) {
            //TODO: check for astrocyte collision
            if (!neurons.contains(neuron)) {
                neurons.add(neuron);
            }
        }
    }
}
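
The loop above masks out already-detected neurons by drawing filled black circles. A standalone sketch of that Imgproc.circle idiom, with made-up geometry:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class MaskCircleDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // White single-channel image.
        Mat image = new Mat(100, 100, CvType.CV_8UC1, new Scalar(255));
        // Filled black circle at (50, 50) with radius 20.
        Imgproc.circle(image, new Point(50, 50), 20, new Scalar(0), Core.FILLED);
        System.out.println(Core.countNonZero(image)); // fewer than 100 * 100
    }
}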

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private Mat applyRayCastingSegmentation() {
    //Mat cannyEdges = CoreOperations.cannyFilter(sourceImage, 26, 58);
    Mat contours = new Mat(preparedImage.rows(), preparedImage.cols(), CvType.CV_32S);
    int contoursCount = /*neurons.size();*/ CoreOperations
            .drawAllContours(CoreOperations.erode(preparedImage, 5), contours);
    Mat result = new Mat(preparedImage.rows(), preparedImage.cols(), preparedImage.type());//CoreOperations.or(CoreOperations.and(cannyEdges, CoreOperations.grayscale(preparedImage)), contours);
    //cannyEdges.release();

    //Mat markers = new Mat(contours.rows(), contours.cols(), CvType.CV_32S);
    //contours.copyTo(markers);
    contours.convertTo(contours, CvType.CV_32S);

    for (Neuron neuron : neurons) {
        int x = (int) neuron.getCenter().x;
        int y = (int) neuron.getCenter().y;
        int color = (int) preparedImage.get(y, x)[0];
        /*contours.put(y, x, color);
        contours.put(y - 2, x, color);
        contours.put(y + 2, x, color);
        contours.put(y, x - 2, color);
        contours.put(y, x + 2, color);*/
        Imgproc.circle(contours, neuron.getCenter(), (int) (0.4f * neuron.getRadius()), new Scalar(color), -1);
    }

    Imgproc.watershed(sourceImage, contours);

    for (int i = 0; i < contours.rows(); i++) {
        for (int j = 0; j < contours.cols(); j++) {
            int index = (int) contours.get(i, j)[0];
            if (index == -1) {
                result.put(i, j, 0, 0, 0);
            } else if (index <= 0 || index > contoursCount) {
                result.put(i, j, 0, 0, 0);
            } else {
                if (index == 255) {
                    result.put(i, j, 0, 0, 0/*sourceImage.get(i, j)*/);
                } else {
                    result.put(i, j, index, index, index);
                }
            }
        }
    }

    result = CoreOperations.erode(result, 2);
    result = CoreOperations.dilate(result, 3);

    contours.release();

    contours = sourceImage.clone();
    CoreOperations.drawAllContours(result, contours);

    return contours;
}

From source file:com.example.thibautg.libreaudioview.VideoProcessing.java

License:Open Source License

/**
 * @param audioOutput - audio output used for sonification.
 */
public VideoProcessing(AudioOutput audioOutput) {
    mInputMat320240 = new Mat(Globals.acquisitionFrameHeight + Globals.acquisitionFrameHeight / 2,
            Globals.acquisitionFrameWidth, CvType.CV_8UC1);
    mInputGray320240 = new Mat(Globals.acquisitionFrameHeight, Globals.acquisitionFrameWidth, CvType.CV_8UC1);

    mInputMat = new Mat(height + height / 2, width, CvType.CV_8UC1);
    mRgba = new Mat(height, width, CvType.CV_8U, new Scalar(4));
    mPreviousMat = new Mat(height, width, CvType.CV_8UC1);
    mDiffMat2 = new Mat(height, width, CvType.CV_8UC1);
    mInputGray = new Mat(height, width, CvType.CV_8UC1);
    mOutputGrayMat = new Mat(height, width, CvType.CV_8UC1);
    mSonifier = new Sonifier(audioOutput);
}

From source file:com.example.thibautg.libreaudioview.VideoProcessing.java

License:Open Source License

/**
 * @param data - raw camera frame bytes.
 */
public void processFrame(byte[] data) {

    int detectionThreshold = 50;
    mInputMat320240.put(0, 0, data);

    mInputGray320240 = mInputMat320240.submat(0, mInputMat320240.height() * 2 / 3, 0, mInputMat320240.width());
    Imgproc.resize(mInputGray320240, mInputGray, new Size(160, 120));

    //mInputMat.put(0, 0, data);
    //mInputGray = mInputMat.submat(0, mInputMat.height() * 2 / 3, 0, mInputMat.width());
    Imgproc.GaussianBlur(mInputGray, mInputGray, windowSize, 0.6, 0.6);
    Core.absdiff(mInputGray, mPreviousMat, mDiffMat2);
    mInputGray.copyTo(mPreviousMat);
    Imgproc.threshold(mDiffMat2, mOutputGrayMat, detectionThreshold, 255, 0);

    if (mBoolFirstImage) {
        mOutputGrayMat.setTo(new Scalar(0));
        mBoolFirstImage = false;
    }

    mSonifier.sonifyFrame(mOutputGrayMat);
}
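
The setTo(new Scalar(0)) call above blanks the output on the first frame. A minimal standalone sketch of that idiom:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class SetToScalarDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat frame = Mat.ones(120, 160, CvType.CV_8UC1);
        // For a single-channel Mat only the first Scalar component matters,
        // so new Scalar(0) zeroes the whole image.
        frame.setTo(new Scalar(0));
        System.out.println(Core.countNonZero(frame)); // prints 0
    }
}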

From source file:com.github.rosjava_catkin_package_a.ARLocROS.ARLoc.java

License:Apache License

@Override
public void onStart(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();
    log.info("Reading parameters");
    this.parameter = Parameter.createFromParameterTree(connectedNode.getParameterTree());

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    // setup rotation vector and translation vector storing output of the
    // localization
    rvec = new Mat(3, 1, CvType.CV_64F);
    tvec = new MatOfDouble(1.0, 1.0, 1.0);

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    //Utils.tresholdContrastBlackWhite(image, 600);
                    Imgproc.threshold(image, image, 200, 255, Imgproc.THRESH_BINARY);
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    if (poseProcessor.computePose(rvec, tvec, image)) {
                        // notify publisher threads (pose and tf, see below)
                        synchronized (tvec) {
                            tvec.notifyAll();
                        }
                    }

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    });

    // publish tf CAMERA_FRAME_NAME --> MARKER_FRAME_NAME
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            synchronized (tvec) {
                tvec.wait();
            }

            QuaternionHelper q = new QuaternionHelper();

            /*
             * http://euclideanspace.com/maths/geometry/rotations/
             * conversions/matrixToEuler/index.htm
             * http://stackoverflow.com/questions/12933284/rodrigues-into-
             * eulerangles-and-vice-versa
             * 
             * heading = atan2(-m20,m00) attitude = asin(m10) bank =
             * atan2(-m12,m11)
             */
            // convert output rotation vector rvec to rotation matrix R
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // get rotations around X,Y,Z from rotation matrix R
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            // convert Euler angles to quaternion
            q.setFromEuler(bankX, headingY, attitudeZ);

            // set information to message
            TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
            TransformStamped posestamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = posestamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 point = transform.getTranslation();
            point.setX(tvec.get(0, 0)[0]);
            point.setY(tvec.get(1, 0)[0]);
            point.setZ(tvec.get(2, 0)[0]);

            orientation.setW(q.getW());
            orientation.setX(q.getX());
            orientation.setY(q.getY());
            orientation.setZ(q.getZ());
            posestamped.getHeader().setFrameId(parameter.cameraFrameName());
            posestamped.setChildFrameId(parameter.markerFrameName());
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfmessage.getTransforms().add(posestamped);
            tfPublisherCamToMarker.publish(tfmessage);
        }
    });

    // publish Markers
    final Publisher<visualization_msgs.Marker> markerPublisher = connectedNode.newPublisher("markers",
            visualization_msgs.Marker._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // publish markers every 500ms
            Thread.sleep(500);
            // get marker points from markerConfig, each marker has 4
            // vertices
            List<Point3> points3dlist = markerConfig.getUnordered3DPointList();
            int i = 0;
            for (Point3 p : points3dlist) {
                Marker markermessage = markerPublisher.newMessage();
                // FIXME If the markers are published into an existing frame
                // (e.g. map or odom) the node will consume very high CPU
                // and will fail after a short time. The markers are
                // probably published in the wrong way.
                markermessage.getHeader().setFrameId(parameter.markerFrameName());
                markermessage.setId(i);
                i++;
                markermessage.setType(visualization_msgs.Marker.SPHERE);
                markermessage.setAction(visualization_msgs.Marker.ADD);
                // position
                double x = p.x;
                markermessage.getPose().getPosition().setX(x);
                double y = p.y;
                markermessage.getPose().getPosition().setY(y);
                double z = p.z;
                markermessage.getPose().getPosition().setZ(z);
                // orientation
                markermessage.getPose().getOrientation().setX(0);
                markermessage.getPose().getOrientation().setY(0);
                markermessage.getPose().getOrientation().setZ(0);
                markermessage.getPose().getOrientation().setW(1);
                // pattern size
                markermessage.getScale().setX(0.1);
                markermessage.getScale().setY(0.1);
                markermessage.getScale().setZ(0.1);
                // color
                markermessage.getColor().setA(1);
                markermessage.getColor().setR(1);
                markermessage.getColor().setG(0);
                markermessage.getColor().setB(0);

                markerPublisher.publish(markermessage);
            }
        }
    });

    // publish tf map --> odom
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait to be notified when a new
            // image has been processed
            synchronized (tvec) {
                tvec.wait();
            }

            // compute transform map to odom from map to
            // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

            // map to camera_rgb_optical_frame
            Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            QuaternionHelper q = new QuaternionHelper();
            // get rotation matrix R from solvepnp output rotation vector
            // rvec
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // transpose R, because we need the transformation from
            // world(map) to camera
            R = R.t();
            // get rotation around X,Y,Z from R in radians
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            // compute translation vector from world (map) to cam
            // tvec_map_cam
            Core.multiply(R, new Scalar(-1), R); // R=-R
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec

            org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(),
                    q.getY(), q.getZ(), q.getW());
            double x = tvec_map_cam.get(0, 0)[0];
            double y = tvec_map_cam.get(1, 0)[0];
            double z = tvec_map_cam.get(2, 0)[0];
            // create a Transform object that holds the transform from map to cam
            org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            GraphName targetFrame = GraphName.of("odom");
            org.ros.rosjava_geometry.Transform transform_cam_odom = null;
            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "odom! " + "However, " + "will continue..");
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                        + "However, will " + "continue..");
                // cancel this loop..no result can be computed
                return;
            }
            // multiply results
            org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
            result = result.multiply(transform_map_cam);
            result = result.multiply(transform_cam_odom);

            // set information to ROS message
            TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
            TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = transformStamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 vector = transform.getTranslation();
            vector.setX(result.getTranslation().getX());
            vector.setY(result.getTranslation().getY());
            vector.setZ(result.getTranslation().getZ());

            orientation.setW(result.getRotationAndScale().getW());
            orientation.setX(result.getRotationAndScale().getX());
            orientation.setY(result.getRotationAndScale().getY());
            orientation.setZ(result.getRotationAndScale().getZ());
            transformStamped.getHeader().setFrameId("map");
            transformStamped.setChildFrameId("odom");
            transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfMessage.getTransforms().add(transformStamped);
            tfPublisherMapToOdom.publish(tfMessage);
            // System.exit(0);
        }
    });

    // Publish Pose

    final Publisher<geometry_msgs.PoseStamped> posePublisher = connectedNode
            .newPublisher(parameter.poseTopicName(), geometry_msgs.PoseStamped._TYPE);

    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait here to be notified when a
            // new image has been processed
            synchronized (tvec) {
                tvec.wait();
            }
            final QuaternionHelper q = new QuaternionHelper();

            // convert rotation vector result of solvepnp to rotation matrix
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // see publishers before for documentation
            final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            R = R.t();
            final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            final double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            Core.multiply(R, new Scalar(-1), R);
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
            final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                    q.getX(), q.getY(), q.getZ(), q.getW());
            final double x = tvec_map_cam.get(0, 0)[0];
            final double y = tvec_map_cam.get(1, 0)[0];
            final double z = tvec_map_cam.get(2, 0)[0];

            final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            final GraphName targetFrame = GraphName.of("base_link");
            org.ros.rosjava_geometry.Transform transform_cam_base = null;

            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_base = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "base_link! " + "However, will continue..");
                    // cancel this loop..no result can be computed
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "base_link!" + " However, " + "will continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform.identity();
            current_pose = current_pose.multiply(transform_map_cam);
            current_pose = current_pose.multiply(transform_cam_base);

            // check for plausibility of the pose by checking if movement
            // exceeds max speed (defined) of the robot
            if (parameter.badPoseReject()) {
                Time current_timestamp = connectedNode.getCurrentTime();
                // TODO Unfortunately, we do not have the tf timestamp at
                // hand here. So we can only use the current timestamp.
                double maxspeed = 5;
                boolean goodpose = false;
                // if (current_pose != null && current_timestamp != null) {
                if (last_pose != null && last_timestamp != null) {
                    // check speed of movement between last and current pose
                    double distance = PoseCompare.distance(current_pose, last_pose);
                    double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                    if ((distance / timedelta) < maxspeed) {
                        if (smoothing) {
                            double xold = last_pose.getTranslation().getX();
                            double yold = last_pose.getTranslation().getY();
                            double zold = last_pose.getTranslation().getZ();
                            double xnew = current_pose.getTranslation().getX();
                            double ynew = current_pose.getTranslation().getY();
                            double znew = current_pose.getTranslation().getZ();
                            final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                    (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3, (zold * 2 + znew) / 3);
                            current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                    current_pose.getRotationAndScale());
                            last_pose = current_pose;
                        }
                        last_pose = current_pose;
                        last_timestamp = current_timestamp;
                        goodpose = true;
                    } else {
                        log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                    }

                } else {
                    last_pose = current_pose;
                    last_timestamp = current_timestamp;
                }
                // }
                // bad pose rejection
                if (!goodpose) {
                    return;
                }
            }

            // set information to message
            geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
            Pose pose = posestamped.getPose();
            Quaternion orientation = pose.getOrientation();
            Point point = pose.getPosition();

            point.setX(current_pose.getTranslation().getX());

            point.setY(current_pose.getTranslation().getY());

            point.setZ(current_pose.getTranslation().getZ());

            orientation.setW(current_pose.getRotationAndScale().getW());
            orientation.setX(current_pose.getRotationAndScale().getX());
            orientation.setY(current_pose.getRotationAndScale().getY());
            orientation.setZ(current_pose.getRotationAndScale().getZ());

            // frame_id too
            posestamped.getHeader().setFrameId("map");
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            posePublisher.publish(posestamped);

        }
    });

}
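
The Core.multiply(R, new Scalar(-1), R) calls above use the Scalar overload of Core.multiply to negate a one-channel matrix element-wise. A minimal sketch of that idiom:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class NegateMatrixDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat m = Mat.eye(3, 3, CvType.CV_32FC1);
        // Element-wise multiply by -1, in place: m = -m.
        Core.multiply(m, new Scalar(-1), m);
        System.out.println(m.dump());
    }
}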

From source file:com.minio.io.alice.XPly.java

License:Open Source License

public void setScalar(double scalar) {
    this.scalar = new Scalar(scalar);
}
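
For context, the one-argument constructor used here fills only the first of the four components; a tiny sketch (no native library is needed just to construct a Scalar):

import org.opencv.core.Scalar;

public class SingleValueScalarDemo {
    public static void main(String[] args) {
        Scalar s = new Scalar(5);
        System.out.println(s); // [5.0, 0.0, 0.0, 0.0]
    }
}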

From source file:com.projecttango.examples.java.pointcloud.MainActivity.java

License:Open Source License

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_point_cloud);

    //IOIO
    pwm_speed = 1500;
    pwm_steering = 1500;

    mDetector = new ColorBlobDetector();
    mSpectrum = new Mat();
    mBlobColorRgba = new Scalar(255);
    CONTOUR_COLOR = new Scalar(255, 0, 0, 255);
    //To set color, find HSV values of desired color and convert each value to 1-255 scale
    //mDetector.setHsvColor(new Scalar(7, 196, 144)); // red
    //mDetector.setHsvColor(new Scalar(253.796875,222.6875,195.21875));
    mDetector.setHsvColor(new Scalar(7.015625, 255.0, 239.3125)); //bucket orange

    mSurfaceView = (RajawaliSurfaceView) findViewById(R.id.gl_surface_view);

    textToSpeech = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int i) {
            textToSpeech.setLanguage(Locale.US);
        }
    });

    mPointCloudManager = new TangoPointCloudManager();
    mTangoUx = setupTangoUxAndLayout();
    mRenderer = new Scene(this);
    setupRenderer();

    //Set as topdown
    mRenderer.setTopDownView();
    mRenderer.renderVirtualObjects(true);

    tangoCameraPreview = (TangoTextureCameraPreview) findViewById(R.id.cameraPreview);

    mapInfo = new MapInfo();

    primaryColor = Color.parseColor("#FF3F51B5");
    primaryDark = Color.parseColor("#FF303F9F");

    mapInfo.setGrid(new int[GRID_SIZE][GRID_SIZE]);
    mapInfo.setCurrentCell(1, 3, 4);

    Window window = this.getWindow();
    window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);

    window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
    // setStatusBarColor requires API 21 and will error with the MIN API level at 19
    window.setStatusBarColor(primaryDark);

    final Toolbar mToolBar = (Toolbar) findViewById(R.id.mainToolBar);
    //setSupportActionBar(mToolBar);
    //getSupportActionBar().setDisplayShowTitleEnabled(false);
    mToolBar.setTitleTextColor(Color.WHITE);
    mToolBar.setBackgroundColor(primaryColor);
    mToolBar.setTitle("");

    Button startPointButton = (Button) findViewById(R.id.setStartPoint);
    startPointButton.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            // Do something in response to button click
            mRenderer.setStartPoint(getCurrentPose());
            textToSpeech.speak("Start Point Set", TextToSpeech.QUEUE_FLUSH, null);
            Log.d("StartPoint", "Startpoint Set at: " + getCurrentPose());
        }
    });

    Button endPointButton = (Button) findViewById(R.id.setEndPoint);
    endPointButton.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            // Do something in response to button click
            mRenderer.setEndPoint(getCurrentPose());
            Log.d("EndPoint", "Endpoint Set at: " + getCurrentPose());
        }
    });

    ToggleButton toggle = (ToggleButton) findViewById(R.id.togglePointCloud);
    toggle.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            if (isChecked) {
                mRenderer.setThirdPersonView();
                mRenderer.drawLineBtwnBuckets();
            } else {
                mRenderer.setTopDownView();
                mRenderer.removeLineBtwnBuckets();
            }
        }
    });

    ToggleButton toggleMotors = (ToggleButton) findViewById(R.id.toggleMotors);
    toggleMotors.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            if (isChecked) {
                toast("Speed" + ": " + get_speed() + "Steer" + ": " + get_steering());
                set_speed(1500 + 800);
                set_steering(1500);
            } else {
                toast("Speed" + ": " + get_speed() + "Steer" + ": " + get_steering());
                set_speed(1500);
                set_steering(1500);
            }
        }
    });

    mMotorbar = (SeekBar) findViewById(R.id.motorBar); // make seekbar object
    mMotorbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {

        @Override
        public void onStopTrackingTouch(SeekBar seekBar) {
            // TODO Auto-generated method stub
            toast("MotorVal: " + motorSliderVal);
            set_speed(1500 + motorSliderVal);
            set_steering(1500);

        }

        @Override
        public void onStartTrackingTouch(SeekBar seekBar) {
            // TODO Auto-generated method stub

        }

        @Override
        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
            // TODO Auto-generated method stub
            motorSliderVal = progress;

        }
    });

    DisplayManager displayManager = (DisplayManager) getSystemService(DISPLAY_SERVICE);
    if (displayManager != null) {
        displayManager.registerDisplayListener(new DisplayManager.DisplayListener() {
            @Override
            public void onDisplayAdded(int displayId) {
            }

            @Override
            public void onDisplayChanged(int displayId) {
                synchronized (this) {
                    setDisplayRotation();
                    mMapView.setFloorPlanData(mRenderer.getFloorPlanData());
                    setAndroidOrientation();
                }
            }

            @Override
            public void onDisplayRemoved(int displayId) {
            }
        }, null);
        if (hasPermission()) {
            if (null == savedInstanceState) {

                //Instantiates the TensorflowView
                //setFragment();
            }
        } else {
            requestPermission();
        }

    }
}