Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usage for org.opencv.core Scalar Scalar.

Prototype

public Scalar(double[] vals) 

Source Link

Usage

From source file:org.lasarobotics.vision.util.color.ColorGRAY.java

License:Open Source License

/**
 * Instantiate a Grayscale (8-bit) color from an integer.
 *
 * @param v Value (0-255); passed straight through to the underlying Scalar
 */
public ColorGRAY(int v) {
    super(new Scalar(v));
}

From source file:org.lasarobotics.vision.util.color.ColorGRAY.java

License:Open Source License

/**
 * Parse an arbitrary scalar into this grayscale colorspace.
 *
 * @param s Scalar to parse; must carry at least one component
 * @return A single-component grayscale scalar built from the first component
 * @throws IllegalArgumentException if the scalar has no components
 */
@Override
protected Scalar parseScalar(Scalar s) {
    if (s.val.length >= 1) {
        return new Scalar(s.val[0]);
    }
    throw new IllegalArgumentException("Scalar must have 1 dimension.");
}

From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java

License:Open Source License

/**
 * Attempt to find matches of the given template within the current camera
 * frame. Matches are returned as TemplateMatch objects which contain
 * a Location in Camera coordinates. The results are sorted best score
 * to worst score./*  w  w  w. ja  va2 s .  co m*/
 * @param template
 * @return
 */
public List<TemplateMatch> getTemplateMatches(BufferedImage template) {
    // TODO: ROI
    BufferedImage image = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    template = OpenCvUtils.convertBufferedImage(template, BufferedImage.TYPE_BYTE_GRAY);
    image = OpenCvUtils.convertBufferedImage(image, BufferedImage.TYPE_BYTE_GRAY);

    Mat templateMat = OpenCvUtils.toMat(template);
    Mat imageMat = OpenCvUtils.toMat(image);
    Mat resultMat = new Mat();

    Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);

    Mat debugMat = null;
    if (logger.isDebugEnabled()) {
        debugMat = imageMat.clone();
    }

    MinMaxLocResult mmr = Core.minMaxLoc(resultMat);
    double maxVal = mmr.maxVal;

    // TODO: Externalize?
    double threshold = 0.7f;
    double corr = 0.85f;

    double rangeMin = Math.max(threshold, corr * maxVal);
    double rangeMax = maxVal;

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    for (Point point : matMaxima(resultMat, rangeMin, rangeMax)) {
        TemplateMatch match = new TemplateMatch();
        int x = point.x;
        int y = point.y;
        match.score = resultMat.get(y, x)[0] / maxVal;

        if (logger.isDebugEnabled()) {
            Core.rectangle(debugMat, new org.opencv.core.Point(x, y),
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()), new Scalar(255));
            Core.putText(debugMat, "" + match.score,
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255));
        }

        Location offsets = getPixelCenterOffsets(x + (templateMat.cols() / 2), y + (templateMat.rows() / 2));
        match.location = camera.getLocation().subtract(offsets);
        matches.add(match);
    }

    Collections.sort(matches, new Comparator<TemplateMatch>() {
        @Override
        public int compare(TemplateMatch o1, TemplateMatch o2) {
            return ((Double) o2.score).compareTo(o1.score);
        }
    });

    saveDebugImage("template", templateMat);
    saveDebugImage("camera", imageMat);
    saveDebugImage("result", resultMat);
    saveDebugImage("debug", debugMat);

    return matches;
}

From source file:org.surmon.pattern.editor2d.components.Mapping.java

/**
 * Builds a particle-density map from the given particles and extracts its
 * outer contours: plot particles, blur, equalize, Otsu-threshold, Canny,
 * then trace external contours.
 *
 * @param source    reference image whose size defines the working canvas
 * @param particles particles to plot
 * @return external contours of the processed particle map
 */
public static List<MatOfPoint> process(Mat source, List<Particle> particles) {
    Mat partImage = new Mat(source.size(), CvType.CV_8UC1);

    // Plot each particle as a small filled circle.
    for (Particle part : particles) {
        Point p = new Point(part.getPosition().toArray());
        Core.circle(partImage, p, 1, new Scalar(255));
    }

    // Smooth with a large Gaussian kernel to spread particle influence.
    Mat blured = new Mat();
    Imgproc.GaussianBlur(partImage, blured, new Size(101, 101), -1, -1);

    // Equalize the histogram of every channel independently.
    List<Mat> channels = new ArrayList<>();
    Core.split(blured, channels);
    List<Mat> eqChannels = new ArrayList<>();
    for (Mat channel : channels) {
        Mat eqImage = new Mat();
        Imgproc.equalizeHist(channel, eqImage);
        eqChannels.add(eqImage);
    }
    Mat eqResult = new Mat();
    Core.merge(eqChannels, eqResult);

    // Binarize using Otsu's automatically chosen threshold.
    Mat bin = new Mat();
    Imgproc.threshold(eqResult, bin, 0, 255, Imgproc.THRESH_OTSU);

    // Edge-detect, then trace the external contours.
    Mat imMat = bin.clone();
    Mat cannyOutput = new Mat();
    Mat hierarchy = new Mat();
    int thresh = 100;
    List<MatOfPoint> borders = new ArrayList<>();
    Imgproc.Canny(imMat, cannyOutput, thresh, thresh * 2);
    Imgproc.findContours(cannyOutput, borders, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    return borders;
}

From source file:org.vinesrobotics.bot.utils.opencv.OpenCvManager.java

License:Open Source License

/**
 * Converts a single HSV color value to its RGBA equivalent.
 *
 * @param hsvColor HSV color to convert
 * @return the same color expressed as an RGBA scalar
 */
public Scalar converScalarHsv2Rgba(Scalar hsvColor) {
    // Use a 1x1 Mat so cvtColor performs the colorspace conversion for us;
    // dstCn = 4 requests a four-channel (RGBA) destination.
    Mat hsvPixel = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
    Mat rgbaPixel = new Mat();
    Imgproc.cvtColor(hsvPixel, rgbaPixel, Imgproc.COLOR_HSV2RGB_FULL, 4);
    return new Scalar(rgbaPixel.get(0, 0));
}

From source file:qupath.opencv.classify.NeuralNetworksClassifier.java

License:Open Source License

/**
 * Builds and trains a multilayer-perceptron (ANN_MLP) classifier from the
 * training data currently held by this object.
 *
 * The network has three layers: one input node per measurement, a
 * configurable hidden layer, and one output node per path class. Responses
 * are one-hot encoded before training.
 */
@Override
protected void createAndTrainClassifier() {
    // Pack the flat training array into an (n samples) x (n measurements) Mat.
    int nMeasurements = measurements.size();
    Mat matTraining = new Mat(arrayTraining.length / nMeasurements, nMeasurements, CvType.CV_32FC1);
    matTraining.put(0, 0, arrayTraining);

    // Parse parameters
    ParameterList params = getParameterList();
    int nHidden = Math.max(2, params.getIntParameterValue("nHidden"));
    int termIterations = params.getIntParameterValue("termCritMaxIterations");
    double termEPS = params.getDoubleParameterValue("termCritEPS");
    TermCriteria crit = createTerminationCriteria(termIterations, termEPS);

    // Create & train the classifier
    classifier = createClassifier();
    ANN_MLP nnet = (ANN_MLP) classifier;
    Mat layers = new Mat(3, 1, CvType.CV_32F);
    int n = arrayTraining.length / nMeasurements;
    layers.put(0, 0, nMeasurements);      // input layer size
    layers.put(1, 0, nHidden);            // hidden layer size
    layers.put(2, 0, pathClasses.size()); // output layer size
    if (crit != null)
        nnet.setTermCriteria(crit);
    else
        crit = nnet.getTermCriteria();
    nnet.setLayerSizes(layers);

    // One-hot encode the responses: row i gets a 1 in its class's column.
    Mat matResponses = new Mat(n, pathClasses.size(), CvType.CV_32F);
    matResponses.setTo(new Scalar(0));
    for (int i = 0; i < n; i++) {
        matResponses.put(i, arrayResponses[i], 1);
    }
    nnet.setActivationFunction(ANN_MLP.SIGMOID_SYM, 1, 1);
    nnet.train(matTraining, Ml.ROW_SAMPLE, matResponses);
}

From source file:qupath.opencv.processing.OpenCVTools.java

License:Open Source License

/**
 * Labels connected components of a binary image by filling each contour in
 * matLabels with a unique integer value. Labels start at 2 and increase by
 * one per contour.
 *
 * @param matBinary   binary input image (modified by findContours)
 * @param matLabels   destination label image
 * @param contourType contour retrieval mode, e.g. Imgproc.RETR_CCOMP
 */
public static void labelImage(Mat matBinary, Mat matLabels, int contourType) {
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(matBinary, contours, hierarchy, contourType, Imgproc.CHAIN_APPROX_SIMPLE);
    // Drawing contours one at a time is *much* faster than passing the full
    // list, which the OpenCV 2.4.9 Java bindings copy on every call.
    List<MatOfPoint> single = new ArrayList<>(1);
    int label = 2;
    for (int ind = 0; ind < contours.size(); ind++) {
        single.clear();
        single.add(contours.get(ind));
        Imgproc.drawContours(matLabels, single, 0, new Scalar(label), -1, 8, hierarchy.col(ind), 2,
                new Point(0, 0));
        label++;
    }
}

From source file:qupath.opencv.processing.OpenCVTools.java

License:Open Source License

/**
 * Splits touching objects in a binary image by applying the watershed
 * transform to the image's distance transform.
 *
 * matBinary is modified in place: watershed lines are removed from the
 * foreground regions.
 *
 * @param matBinary       binary image to split (modified in place; foreground
 *                        assumed to be 255 — see the background mask below)
 * @param maxFilterRadius radius of the max-filter used to locate
 *                        distance-transform maxima; larger values merge
 *                        nearby seeds
 */
public static void watershedDistanceTransformSplit(Mat matBinary, int maxFilterRadius) {
    Mat matWatershedSeedsBinary;

    // Create a background mask (pixels that are not foreground, i.e. != 255)
    Mat matBackground = new Mat();
    Core.compare(matBinary, new Scalar(255), matBackground, Core.CMP_NE);

    // Separate by shape using the watershed transform
    Mat matDistanceTransform = new Mat();
    Imgproc.distanceTransform(matBinary, matDistanceTransform, Imgproc.CV_DIST_L2,
            Imgproc.CV_DIST_MASK_PRECISE);
    // Find local maxima: a pixel is a maximum where it equals the dilated
    // (max-filtered) distance transform
    matWatershedSeedsBinary = new Mat();
    Imgproc.dilate(matDistanceTransform, matWatershedSeedsBinary,
            OpenCVTools.getCircularStructuringElement(maxFilterRadius));
    Core.compare(matDistanceTransform, matWatershedSeedsBinary, matWatershedSeedsBinary, Core.CMP_EQ);
    // Suppress maxima that fall on the background
    matWatershedSeedsBinary.setTo(new Scalar(0), matBackground);
    // Dilate slightly to merge nearby maxima
    Imgproc.dilate(matWatershedSeedsBinary, matWatershedSeedsBinary,
            OpenCVTools.getCircularStructuringElement(2));

    // Create labels for watershed
    Mat matLabels = new Mat(matDistanceTransform.size(), CvType.CV_32F, new Scalar(0));
    labelImage(matWatershedSeedsBinary, matLabels, Imgproc.RETR_CCOMP);

    // Remove everything outside the thresholded region
    matLabels.setTo(new Scalar(0), matBackground);

    // Do watershed
    // 8-connectivity is essential for the watershed lines to be preserved - otherwise OpenCV's findContours could not be used
    ProcessingCV.doWatershed(matDistanceTransform, matLabels, 0.1, true);

    // Update the binary image to remove the watershed lines
    Core.multiply(matBinary, matLabels, matBinary, 1, matBinary.type());
}

From source file:qupath.opencv.processing.OpenCVTools.java

License:Open Source License

/**
 * Creates a filled circular (disk) structuring element of the given radius.
 *
 * @param radius disk radius in pixels
 * @return a (2*radius+1)-square CV_8UC1 Mat with 1s inside the disk, 0s outside
 */
public static Mat getCircularStructuringElement(int radius) {
    int size = radius * 2 + 1;
    Mat strel = new Mat(size, size, CvType.CV_8UC1, new Scalar(0));
    // Fill a circle of 1s centered in the element.
    Imgproc.circle(strel, new Point(radius, radius), radius, new Scalar(1), -1);
    return strel;
}

From source file:qupath.opencv.processing.OpenCVTools.java

License:Open Source License

/**
 * Inverts a binary image: pixels equal to 0 become 255 in matDest, and all
 * other pixels become 0 (via Core.compare with CMP_EQ against 0).
 *
 * @param matBinary source binary image
 * @param matDest   destination for the inverted image
 */
public static void invertBinary(Mat matBinary, Mat matDest) {
    Core.compare(matBinary, new Scalar(0), matDest, Core.CMP_EQ);
}