Example usage for org.opencv.core Scalar all

List of usage examples for org.opencv.core Scalar all

Introduction

In this page you can find the example usage for org.opencv.core Scalar all.

Prototype

public static Scalar all(double v) 

Source Link

Usage

From source file:com.example.colordetector.CamMainActivity.java

License:Apache License

/**
 * Processes each camera frame: filters it down to the chosen colour and, in
 * automatic mode, drives an on-screen countdown that ends by taking a picture.
 *
 * @param inputFrame the frame currently captured by the camera
 * @return the colour-filtered frame in manual mode, otherwise the raw RGBA frame
 *         (possibly overlaid with the countdown digits)
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    // The frame currently captured by the camera, converted in the color RGBA
    rgbaFrame = inputFrame.rgba();

    // Convert the frame to the HSV color space, to be able to identify the color with the thresholds
    Imgproc.cvtColor(rgbaFrame, rgbFrame, Imgproc.COLOR_RGBA2RGB); // Can't convert directly rgba->hsv
    Imgproc.cvtColor(rgbFrame, hsvFrame, Imgproc.COLOR_RGB2HSV);

    // Create a mask with ONLY zones of the chosen color on the frame currently captured
    Core.inRange(hsvFrame, thresMin, thresMax, inRangeMask);
    filteredFrame.setTo(new Scalar(0, 0, 0));
    rgbFrame.copyTo(filteredFrame, inRangeMask);

    // If the shooting method is set to manual, exit and return the filtered image...
    if (!methodAuto) {
        return filteredFrame;
    }

    // ...else the automatic method was selected, so continue
    // Check the H channel of the image to see if the searched color is present on the frame
    Core.extractChannel(filteredFrame, hChannel, 0);

    /* There are two methods to verify the color presence; below a little explanation */

    /* checkRange: if at least one pixel of the searched color is found, continue with the countdown
     * Pro -> fast.
     * Cons -> less accurate, possible false positives depending on the quality of the camera
     * if(!Core.checkRange(hChannel, true, 0, 1)){ */

    /* Percentage: count the pixels of the searched color, and if at least
     * 0.1% of the total pixels of the frame have the searched color, continue with the countdown
     * Pro: more accurate, lower risk of false positives
     * Cons: slower than checkRange
     * N.B.: the threshold percentage is set to a low value, otherwise small objects would not be seen */

    int perc = Core.countNonZero(hChannel); // number of pixels of the searched color
    if (perc > (frameDim * 0.001)) {
        // If the shooting method is set to 'immediate', the photo is returned now;
        // otherwise continue with the countdown
        if (!countDown) {
            takePicture();
            return rgbaFrame;
        }

        // 'point' is where the countdown will be visualized; in this case at
        // a quarter of the height and width from the top-left corner
        Point point = new Point(rgbaFrame.cols() >> 2, rgbaFrame.rows() >> 2);

        // Update the OSD countdown every 75*8 ms (if the searched color is present).
        // Use the division in 75 ms steps because a higher value would give the user the feeling of a 'blocked' screen/app.
        if (timeToElapse % 8 == 0) {
            // timeToElapse >> 3 converts the remaining ticks into whole seconds
            if (osdSecond.isEmpty())
                osdSecond = Integer.toString(timeToElapse >> 3);
            else
                osdSecond = osdSecond.concat(".." + Integer.toString(timeToElapse >> 3));
            Core.putText(rgbaFrame, osdSecond, point, 1, 3, Scalar.all(255));
        }
        timeToElapse -= 1;

        // the user has framed an object for more than 3 seconds; shoot the photo
        if (timeToElapse <= 0) {
            timeToElapse = 24;
            takePicture();
        }
        // the user has framed an object for less than 3 seconds; wait
        else {
            try {
                synchronized (this) {
                    wait(75);
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
                // Restore the interrupt status instead of swallowing it,
                // so callers up the stack can still observe the interruption
                Thread.currentThread().interrupt();
            }
        }
    }
    // the user has NOT framed a color-searched object; reset the OSD
    else {
        timeToElapse = 24;
        osdSecond = "";
    }
    return rgbaFrame;
}

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Compute the rectangle where the searched picture is and the rotation angle between both images
 * Throw {@link ImageSearchException} if picture is not found
 * @return/*from w  w w. j a v  a 2s . c  o  m*/
 * @Deprecated Kept here for information, but open CV 3 does not include SURF anymore for java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {

    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}

From source file:digimesh.xbee.gui.SensorMap.java

/**
 * Draws a sensor on the map: its name label, the currently selected
 * measurement (if any) and a status circle coloured according to how the
 * measured value compares to its configured limits.
 *
 * @param sensor the sensor to render on the map
 */
private void drawSensor(SmartSensor sensor) {
    Point sensorLocation = new Point(sensor.getPositionXY().positionX - 50,
            sensor.getPositionXY().positionY + 30);

    int measNr = sensor.getMeasurmentToDraw();
    String id = sensor.getId();
    // Show only the last 4 characters of the sensor id
    String sTextName = "Sensor ID : " + id.substring(id.length() - 4);
    int fontFace = Core.FONT_HERSHEY_PLAIN;
    double fontScale = 0.7;
    Imgproc.putText(map, sTextName, sensorLocation, fontFace, fontScale, Scalar.all(255));
    Scalar circleColor = new Scalar(255, 255, 255); // default: white (no measurement)
    if (sensor.hasMeasurements) {
        double value = sensor.m_measurments.get(measNr).value;
        String measName = sensor.m_measurments.get(measNr).name;
        String unit = sensor.m_measurments.get(measNr).unit;

        String sTextMeas = measName + " = " + value + "[" + unit + "]";
        Imgproc.putText(map, sTextMeas,
                new Point(sensor.getPositionXY().positionX - 50, (sensor.getPositionXY().positionY + 50)),
                fontFace, fontScale, Scalar.all(255));
        double upperLimit = sensor.m_measurments.get(measNr).upperLimit;
        double lowerLimit = sensor.m_measurments.get(measNr).lowerLimit;

        // Check the value against its limits: red = too high, blue = too low, green = OK
        // (colours are in the map's channel order — presumably BGR; verify against how 'map' is created)
        if (value > upperLimit) {
            circleColor = new Scalar(0, 0, 255);
        } else if (value < lowerLimit) {
            circleColor = new Scalar(255, 0, 0);
        } else {
            circleColor = new Scalar(0, 255, 0);
        }
    } else {
        Imgproc.putText(map, HUB_LABEL,
                new Point(sensor.getPositionXY().positionX - 50, (sensor.getPositionXY().positionY + 12)),
                fontFace, fontScale, Scalar.all(255));
    }

    // BUG FIX: the original passed 'fontFace' (a font constant, value 1) as the
    // lineType argument, which is not a valid OpenCV line type (valid: 4, 8, 16).
    Imgproc.circle(map, sensorLocation, 10, circleColor, 2, Imgproc.LINE_8, 0);

}

From source file:fuzzycv.MainFrame.java

/**
 * Applies the Canny edge detector to the given frame and returns the frame's
 * original colours shown only along the detected edges, on a black background.
 *
 * @param frame the input colour frame (converted internally with COLOR_BGR2GRAY)
 * @return a new Mat containing the edge-masked copy of {@code frame}
 */
private Mat docanny(Mat frame) {
    int treshValue = thresholdSlider.getValue();
    Mat cvtImg = new Mat();
    Mat detectedEdges = new Mat();

    // Grayscale + blur to reduce noise before edge detection
    Imgproc.cvtColor(frame, cvtImg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.blur(cvtImg, detectedEdges, new Size(3.0, 3.0));

    // Upper threshold set to 3x the lower one, per the usual Canny recommendation
    Imgproc.Canny(detectedEdges, detectedEdges, treshValue, treshValue * 3, 3, false);

    // BUG FIX: the original did "Core.add(mask, Scalar.all(0), mask)" on an empty
    // Mat, which is a no-op and leaves the destination unallocated. Explicitly
    // allocate a zeroed (black) canvas so the background is well-defined.
    Mat mask = Mat.zeros(frame.size(), frame.type());
    frame.copyTo(mask, detectedEdges);

    return mask;
}

From source file:imageprocess.HistogramProcessor.java

/**
 * Renders the gray-level histogram of an image as a picture: one vertical
 * black bar per bin on a white background, scaled so the tallest bin reaches
 * 90% of the 256-pixel height.
 *
 * @param image the image whose gray histogram is computed (via getGrayHistogram)
 * @return a square CV_8U Mat (image.rows() x image.rows()) with the histogram drawn
 */
public static Mat getHistogramImage(Mat image) {

    // Compute the histogram first
    Mat hist = getGrayHistogram(image);

    // Get the maximum bin value, used to scale all bars
    MinMaxLocResult locPeak = Core.minMaxLoc(hist);
    double maxVal = locPeak.maxVal;

    // Image on which to display the histogram (white background).
    // NOTE(review): both dimensions use image.rows(); confirm a square canvas is intended.
    Mat histImg = new Mat(image.rows(), image.rows(), CV_8U, new Scalar(255));

    // ROBUSTNESS FIX: an all-zero histogram would divide by zero below;
    // return the blank canvas instead
    if (maxVal <= 0) {
        return histImg;
    }

    // set highest point at 90% of nbins
    int hpt = (int) (0.9 * 256);

    // Draw a vertical line for each bin, scaled by the tallest bin
    for (int h = 0; h < 256; h++) {

        double[] f = hist.get(h, 0);
        float binVal = (float) f[0];
        int intensity = (int) (binVal * hpt / maxVal);
        Core.line(histImg, new Point(h, 256.0d), new Point(h, 256.0d - intensity), Scalar.all(0));
    }
    return histImg;
}

From source file:src.model.filters.CannyFilter.java

/**
 * Servlet entry point of the Canny filter: reads the image file resolved from
 * the request, applies Canny edge detection, writes the result next to the
 * source as "&lt;name&gt;_CA_temp.jpg" and publishes it back to the client.
 *
 * @param request  the HTTP request; must carry a "name" parameter with the image name
 * @param response the HTTP response the resulting image is published to
 * @throws ServletException per the servlet contract
 * @throws IOException      per the servlet contract
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________CANNY_______________**");

    try {

        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________

        Mat source = Imgcodecs.imread(savePath);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        Mat det = new Mat(source.rows(), source.cols(), source.type());

        // Grayscale + blur to reduce noise, then Canny edge detection
        Imgproc.cvtColor(source, destination, Imgproc.COLOR_BGR2GRAY);
        Imgproc.blur(destination, det, new Size(3, 3));
        Imgproc.Canny(det, det, 5, 15, 3, false);

        // BUG FIX: the original "Core.add(dest, Scalar.all(0), dest)" on an empty
        // Mat is a no-op; explicitly allocate a zeroed (black) canvas instead.
        Mat dest = Mat.zeros(source.size(), source.type());
        source.copyTo(dest, det);

        // Derive the output names from the input name. (The original re-read the
        // "name" parameter here — a dead store; imgInput already holds it.)
        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_CA_temp.jpg";
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_CA_temp.jpg";
        Imgcodecs.imwrite(output, dest);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        // BUG FIX: keep the original message but also emit the stack trace so
        // failures are diagnosable instead of reduced to a one-line message
        System.out.println("Error: " + e.getMessage());
        e.printStackTrace();
    }
}