Example usage for org.opencv.core Scalar Scalar

List of usage examples for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usage for org.opencv.core Scalar Scalar.

Prototype

public Scalar(double v0, double v1, double v2) 

Source Link

Usage

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Capture button has been pressed. Obtain the high resolution image and the low resolution
 * data. Once captured, the images are corrected (fold flattening, optionally followed by
 * extension correction), displayed, saved to disk, run through ABBYY OCR, and the
 * recognized text is spoken via TTS.
 */
public void captureEvent() {
    long t0, t1;
    t0 = System.currentTimeMillis();
    t1 = t0;
    byte[][][] img = getHidefImage();
    System.out.println("HiRez Capture: " + (System.currentTimeMillis() - t0) / 1000.0);
    new IViewer("HiRez", ImageManipulation.getBufferedImage(img));

    // Locate the document in the high-resolution image.
    t0 = System.currentTimeMillis();
    Rect bound = null;
    try {
        bound = highRes(BReaderTools.byteArrayToMat(ITools.toGrayscale(img)));
    } catch (java.lang.Exception e) {
        // Detection failure is handled by the null check below; log instead of swallowing.
        System.err.println("High-res bounding box detection failed: " + e);
    }
    System.out.println("First bounding box: " + (System.currentTimeMillis() - t0) / 1000.0);

    //      Mat imgMat = BReaderTools.byteArrayToMat(img);
    //      Imgproc.rectangle(imgMat, bound.tl(), bound.br(), new Scalar(255,255,0), 8);

    // Locate the document in the low-resolution (height-map) data.
    byte[][] low = ITools.normalize(normImgCropped);
    t0 = System.currentTimeMillis();
    Rect boundLow = null;
    try {
        boundLow = lowResDist(BReaderTools.byteArrayToMat(low));
    } catch (java.lang.Exception e) {
        System.err.println("Low-res bounding box detection failed: " + e);
    }
    System.out.println("second bounding box: " + (System.currentTimeMillis() - t0) / 1000.0);

    if (bound == null || boundLow == null) {
        tts.doTTS("Document outside field of view. Please realign and press capture again.");
        return;
    }

    // Reject captures whose bounding box (plus a 100 px margin) would leave the frame.
    if ((bound.x + bound.width + 100) >= img[0][0].length || (bound.y + bound.height + 100) >= img[0].length) {
        tts.doTTS("Document outside field of view. Please realign and press capture again.");
        return;
    }

    //Show the cropped height map with the bounding box
    Mat color = new Mat();
    Imgproc.cvtColor(BReaderTools.byteArrayToMat(low), color, Imgproc.COLOR_GRAY2BGR);
    Imgproc.rectangle(color, boundLow.tl(), boundLow.br(), new Scalar(255, 255, 0), 1);
    new IViewer("LowRes Bounding Box", BReaderTools.bufferedImageFromMat(color));

    Imgproc.cvtColor(BReaderTools.byteArrayToMat(ITools.toGrayscale(img)), color, Imgproc.COLOR_GRAY2BGR);
    Imgproc.rectangle(color, bound.tl(), bound.br(), new Scalar(255, 255, 0), 8);
    new IViewer("HighRes Bounding Box", BReaderTools.bufferedImageFromMat(color));

    //      System.out.println(bound.height+", "+bound.width+": "+(double)bound.width/(double)bound.height);
    //      System.out.println(boundLow.height+", "+boundLow.width+": "+(double)boundLow.width/(double)boundLow.height);

    // Scale factor between the two bounding boxes; the smaller ratio wins so the scaled
    // low-res box fully covers the high-res one, with xO/yO centering the excess.
    double rW = (double) bound.width / (double) boundLow.width;
    double rH = (double) bound.height / (double) boundLow.height;
    int h = 0, w = 0, yO = 0, xO = 0;
    double s = 0;

    if (rH < rW) {
        s = rH;
        h = boundLow.height;
        w = (int) (bound.width / rH);
        if ((w - boundLow.width) % 2 == 0) {
            xO = (boundLow.width - w) / 2;
        }
    } else {
        s = rW;
        h = (int) (bound.height / rW);
        w = boundLow.width;
        if ((h - boundLow.height) % 2 == 0) {
            yO = (boundLow.height - h) / 2;
        }
    }

    //show the high resolution image cropped
    byte[][][] hiRez = new byte[img.length][][];
    t0 = System.currentTimeMillis();
    for (int i = 0; i < img.length; i++) {
        hiRez[i] = ITools.crop(bound.x, bound.y, bound.x + bound.width, bound.y + bound.height, img[i]);
    }
    System.out.println("Cropping HiRez: " + (System.currentTimeMillis() - t0) / 1000.0);

    //Show the IR amplitude image cropped
    //      byte[][] amp = ITools.normalize(amplitudes);
    //      byte[][] ampRez = resize(amp, (float)s);
    //      int x0 = (int) ((boundLow.x+xO+40)*s), y0 = (int) ((boundLow.y+yO+25)*s);
    //      ampRez = ITools.crop(x0, y0, x0+bound.width, y0+bound.height, ampRez);
    //      new IViewer(ImageManipulation.getGrayBufferedImage(ampRez));

    //Show the Amplitude image in bounding box
    //      Rect nBound = new Rect(boundLow.x+xO+40, boundLow.y+yO+25, w, h);
    //      Mat gray = new Mat();
    //      Imgproc.cvtColor(BReaderTools.byteArrayToMat(ITools.normalize(amplitudes)), gray,Imgproc.COLOR_GRAY2BGR);
    //      Imgproc.rectangle(gray, nBound.tl(), nBound.br(), new Scalar(255,255,0), 1);
    //      new IViewer(BReaderTools.bufferedImageFromMat(gray));

    //Crop the distance image and prepare for correction
    float[][] distRez;
    Mat destRezM = new Mat();
    // Upscale the normalized distance map to high-res scale with the user-selected filter.
    switch (disp.getInterpolationMethod()) {
    case 1:
        Imgproc.resize(BReaderTools.floatArrayToMat(normImg), destRezM, new Size(0, 0), s, s,
                Imgproc.INTER_LINEAR);//resize image
        break;
    case 2:
        Imgproc.resize(BReaderTools.floatArrayToMat(normImg), destRezM, new Size(0, 0), s, s,
                Imgproc.INTER_CUBIC);//resize image
        break;
    case 3:
        Imgproc.resize(BReaderTools.floatArrayToMat(normImg), destRezM, new Size(0, 0), s, s,
                Imgproc.INTER_LANCZOS4);//resize image
        break;
    }
    distRez = BReaderTools.matToFloatArray(destRezM);
    int xCentOff = (img[0][0].length - bound.width) / 2 - bound.x;
    int yCentOff = (img[0].length - bound.height) / 2 - bound.y;
    // NOTE(review): the +40/+25 offsets look like fixed sensor-alignment constants — confirm.
    int x0 = (int) ((boundLow.x + xO + 40) * s), y0 = (int) ((boundLow.y + yO + 25) * s);
    distRez = ITools.crop(x0, y0, x0 + bound.width, y0 + bound.height, distRez);
    distRez = multiply(distRez, -100);

    // Flatten each channel of the cropped high-res image using the distance map.
    byte[][][] foldCorrected = new byte[hiRez.length][][];
    t0 = System.currentTimeMillis();
    for (int i = 0; i < hiRez.length; i++) {
        foldCorrected[i] = BReaderTools.foldCorrection(hiRez[i], distRez, xCentOff, yCentOff);
    }
    System.out.println("Fold Correction: " + (System.currentTimeMillis() - t0) / 1000.0);

    float[][] distRezPushed = BReaderTools.foldCorrection(distRez,
            (distRez[0].length - boundLow.width) / 2 - boundLow.x,
            (distRez.length - boundLow.height) / 2 - boundLow.y);

    byte[][][] extensionCorrected = new byte[hiRez.length][][];
    t0 = System.currentTimeMillis();
    for (int i = 0; i < hiRez.length; i++) {
        extensionCorrected[i] = LuWang.extentionWithLinearInterpolation(foldCorrected[i], distRez);
    }
    System.out.println("Extension Correction: " + (System.currentTimeMillis() - t0) / 1000.0);

    new IViewer("Heigths", ImageManipulation.getGrayBufferedImage(ITools.normalize(distRez)));
    new IViewer("HiRez", ImageManipulation.getBufferedImage(hiRez));
    //      new IViewer("Corrected",ImageManipulation.getBufferedImage(foldCorrected));
    //      new IViewer("Heigths",ImageManipulation.getGrayBufferedImage(ITools.normalize(distRezPushed)));
    //      new IViewer("Flat",ImageManipulation.getBufferedImage(foldCorrected));
    //      new IViewer("Extension",ImageManipulation.getBufferedImage(extensionCorrected));
    System.out.println("Overall time: " + (System.currentTimeMillis() - t1) / 1000.0);

    // yyyy = calendar year and HH = 24-hour clock. The previous pattern used "YYYY"
    // (week-based year, wrong around New Year) and "hh" (12-hour, ambiguous filenames).
    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
    String time = format.format(new Date(System.currentTimeMillis()));

    // Save Corrected High Rez.
    String imgPath = saveDir + "/correctedImage-" + time + ".tiff";
    switch (disp.getCorrectionMethod()) {
    case 1: {
        ImageManipulation.writeImage(hiRez, imgPath);
        new IViewer("Correction Results: None", ImageManipulation.getBufferedImage(hiRez));
    }
        break;
    case 2: {
        ImageManipulation.writeImage(foldCorrected, imgPath);
        new IViewer("Correction Results: Flattening", ImageManipulation.getBufferedImage(foldCorrected));
    }
        break;
    case 3: {
        ImageManipulation.writeImage(extensionCorrected, imgPath);
        new IViewer("Correction Results: Flattening + Extension",
                ImageManipulation.getBufferedImage(extensionCorrected));
    }
        break;
    }

    // OCR the saved image; abbyy.processImage throws NPE when the license is expired.
    try {
        String text = abbyy.processImage(imgPath, saveDir + "/text-" + time + ".txt");
        System.out.println("Done!!!!");
        tts.doTTS(text);
    } catch (java.lang.NullPointerException e) {
        tts.doTTS("ABBYY License expired.");
    }
    saveData(time, img, hiRez, distRez, boundLow, bound);

}

From source file:edu.soict.hust.k57.mmdb.components.HistogramImageBulder.java

/**
 * Renders a single-channel histogram as a small step plot and returns it as an icon.
 *
 * @param hist histogram values, one row per bin
 * @param bin  number of bins to draw
 * @param c    channel the histogram belongs to; selects the line color (BGR order)
 * @return the rendered plot encoded as a PNG-backed {@link ImageIcon}
 */
private ImageIcon createImageIcon(Mat hist, int bin, Channel c) {
    // Fixed dimensions of the rendered plot.
    final int plotWidth = 150;
    final int plotHeight = 100;
    int binWidth = (int) Math.round(plotWidth * 1.0 / bin);

    // Dark-gray background canvas; histogram values rescaled to the plot height.
    Mat plot = new Mat(plotHeight, plotWidth, CvType.CV_8UC3, new Scalar(80, 60, 60));
    Mat scaled = hist.clone();
    Core.normalize(scaled, scaled, 0, plot.rows(), Core.NORM_MINMAX, -1, new Mat());

    Scalar color = null;
    switch (c) {
    case B:
        color = new Scalar(255, 0, 0);
        break;
    case G:
        color = new Scalar(0, 255, 0);
        break;
    case R:
        color = new Scalar(0, 0, 255);
    }

    for (int i = 1; i < bin; i++) {
        double prevY = plotHeight - Math.round(scaled.get(i - 1, 0)[0]);
        double currY = plotHeight - Math.round(scaled.get(i, 0)[0]);
        // Horizontal step at the previous bin's height...
        Imgproc.line(plot, new Point(binWidth * (i - 1), prevY), new Point(binWidth * (i), prevY), color, 1, 8, 0);
        // ...then a vertical jump to the current bin's height.
        Imgproc.line(plot, new Point(binWidth * (i), prevY), new Point(binWidth * (i), currY), color, 1, 8, 0);
    }
    MatOfByte encoded = new MatOfByte();
    Imgcodecs.imencode(".png", plot, encoded);
    return new ImageIcon(encoded.toArray());
}

From source file:edu.sust.cse.util.Histogram.java

/**
 * Computes a grayscale intensity histogram of {@code image} and renders it as a
 * 512x600 single-channel line plot.
 *
 * @param image input image; expected in RGB order (converted with COLOR_RGB2GRAY)
 * @return the rendered histogram image, or {@code null} if any OpenCV call fails
 */
public static Mat getHistogram(Mat image) {

    try {
        // Single-channel destination for the grayscale conversion. (The original
        // declared CV_8UC2 here, which was misleading; cvtColor reallocates anyway.)
        Mat src = new Mat(image.height(), image.width(), CvType.CV_8UC1);
        Imgproc.cvtColor(image, src, Imgproc.COLOR_RGB2GRAY);
        ArrayList<Mat> bgr_planes = new ArrayList<>();
        Core.split(src, bgr_planes);

        MatOfInt histSize = new MatOfInt(256);

        final MatOfFloat histRange = new MatOfFloat(0f, 256f);

        boolean accumulate = false;

        Mat b_hist = new Mat();

        Imgproc.calcHist(bgr_planes, new MatOfInt(0), new Mat(), b_hist, histSize, histRange, accumulate);

        int hist_w = 512;
        int hist_h = 600;
        // Divide in floating point before rounding; the original rounded the result
        // of an integer division, which silently truncates for non-multiple widths.
        long bin_w = Math.round((double) hist_w / 256);

        Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC1);

        Core.normalize(b_hist, b_hist, 3, histImage.rows(), Core.NORM_MINMAX);

        // Connect consecutive bin heights with line segments.
        for (int i = 1; i < 256; i++) {

            Core.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(b_hist.get(i - 1, 0)[0])),
                    new Point(bin_w * (i), hist_h - Math.round(b_hist.get(i, 0)[0])),
                    new Scalar(255, 0, 0), 2, 8, 0);

        }

        return histImage;
    } catch (Exception ex) {
        // Boundary catch: report and signal failure to the caller via null.
        System.out.println("[HISTOGRAM][ERROR][" + ex.getMessage() + "]");
        return null;
    }
}

From source file:edu.ucue.tfc.Modelo.VideoProcessor.java

/**
 * Processes {@code firstFrame} together with the next frame read from {@code video}:
 * computes the per-pixel frame difference, thresholds it, extracts contours, tracks the
 * largest moving blob, and updates the left/center/right crossing flags and the
 * detected-car counter.
 *
 * @param firstFrame the first frame of a cycle.
 */
private void processFrame(Mat firstFrame) {
    double contourArea = 0;
    int position = 0;
    try {
        /**
         * Resize the current frame to the working frame size.
         */
        Imgproc.resize(firstFrame, firstFrame, frameSize);

        /**
         * Convert the frame to grayscale.
         */
        Imgproc.cvtColor(firstFrame, firstGrayImage, Imgproc.COLOR_BGR2GRAY);

        /**
         * Read the next frame, resize it and convert it to grayscale.
         */
        video.read(secondFrame);

        Imgproc.resize(secondFrame, secondFrame, frameSize);

        Imgproc.cvtColor(secondFrame, secondGrayImage, Imgproc.COLOR_BGR2GRAY);

        /**
         * Absolute per-pixel difference of the two grayscale frames; threshold,
         * blur and re-threshold to get a clean motion mask.
         */
        Core.absdiff(firstGrayImage, secondGrayImage, differenceOfImages);
        Imgproc.threshold(differenceOfImages, thresholdImage, 25, 255, Imgproc.THRESH_BINARY);
        Imgproc.blur(thresholdImage, thresholdImage, new Size(12, 12));
        Imgproc.threshold(thresholdImage, thresholdImage, 20, 255, Imgproc.THRESH_BINARY);
        // Release last cycle's contours before refilling the shared list.
        for (int i = 0; i < contours.size(); ++i) {
            contours.get(i).release();
        }
        contours.clear();

        /**
         * The horizontal reference line.
         * NOTE(review): Imgproc.LINE_4 is passed as the thickness argument here
         * (its value is 4), not as a line type — confirm this is intentional.
         */
        Imgproc.line(firstFrame, controlPoints.get(6), controlPoints.get(7), new Scalar(255, 0, 0),
                Imgproc.LINE_4);
        Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_TREE,
                Imgproc.CHAIN_APPROX_SIMPLE);

        for (int i = 0; i < hullPoints.size(); ++i) {
            hullPoints.get(i).release();
        }
        hullPoints.clear();

        // Convex hull of every contour (stored but not used further in this method).
        for (int i = 0; i < contours.size(); i++) {
            MatOfInt tmp = new MatOfInt();
            Imgproc.convexHull(contours.get(i), tmp, false);
            hullPoints.add(tmp);
        }

        /**
         * Find the contour with the largest area and remember its bounding box.
         */
        if (contours.size() > 0) {
            for (int i = 0; i < contours.size(); i++) {
                if (Imgproc.contourArea(contours.get(i)) > contourArea) {
                    contourArea = Imgproc.contourArea(contours.get(i));
                    position = i;
                    boundingRectangle = Imgproc.boundingRect(contours.get(i));
                }

            }
        }
        secondFrame.release();
        hierarchy.release();
        secondGrayImage.release();
        firstGrayImage.release();
        thresholdImage.release();
        differenceOfImages.release();
    } catch (Exception e) {
        // NOTE(review): only the message is printed and processing continues with the
        // previous boundingRectangle — confirm this best-effort behavior is intended.
        System.out.println(e.getMessage());
    }

    // Red line while the blob covers a control point, green otherwise.
    // boundingRectangle persists across calls, so these checks use the last known blob.
    if (controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 0, 255), 2);
        wasAtLeftPoint = true;
    } else if (!controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 255, 0), 2);
    }

    if (controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 0, 255), 2);
        wasAtCenterPoint = true;
    } else if (!controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 255, 0), 2);
    }

    if (controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 0, 255), 2);
        wasAtRightPoint = true;
    } else if (!controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 255, 0), 2);
    }

    // A vehicle is counted once it has touched all three control points.
    if (wasAtCenterPoint && wasAtLeftPoint && wasAtRightPoint) {
        detectedCarsCount++;
        wasDetected = true;
        wasAtCenterPoint = false;
        wasAtLeftPoint = false;
        wasAtRightPoint = false;
    }

    // Outline the tracked blob when it is large enough to be a vehicle.
    if (contourArea > 3000) {
        Imgproc.drawContours(frame, contours, position, new Scalar(255, 255, 255));
    }
}

From source file:edu.wpi.first.wpilibj.examples.axiscamera.Robot.java

License:Open Source License

@Override
public void robotInit() {
    m_visionThread = new Thread(() -> {
        // Register the Axis camera with the CameraServer and fix its resolution.
        AxisCamera camera = CameraServer.getInstance().addAxisCamera("axis-camera.local");
        camera.setResolution(640, 480);

        // Sink captures frames from the camera; source streams them to the Dashboard.
        CvSink sink = CameraServer.getInstance().getVideo();
        CvSource output = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Allocate one Mat and reuse it every iteration — Mats are memory expensive.
        Mat image = new Mat();

        // Loop on the interrupt flag (never `while (true)`) so the robot can stop
        // this thread when code is restarted or redeployed.
        while (!Thread.interrupted()) {
            // Grab the next frame; on failure, report the error and retry.
            if (sink.grabFrame(image) == 0) {
                output.notifyError(sink.getError());
                continue;
            }
            // Overlay a rectangle, then hand the annotated frame to the stream.
            Imgproc.rectangle(image, new Point(100, 100), new Point(400, 400), new Scalar(255, 255, 255), 5);
            output.putFrame(image);
        }
    });
    m_visionThread.setDaemon(true);
    m_visionThread.start();
}

From source file:edu.wpi.first.wpilibj.examples.intermediatevision.Robot.java

License:Open Source License

@Override
public void robotInit() {
    m_visionThread = new Thread(() -> {
        // Start capturing from the first USB camera and fix its resolution.
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        camera.setResolution(640, 480);

        // Sink captures frames from the camera; source streams them to the Dashboard.
        CvSink sink = CameraServer.getInstance().getVideo();
        CvSource output = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

        // Allocate one Mat and reuse it every iteration — Mats are memory expensive.
        Mat image = new Mat();

        // Loop on the interrupt flag (never `while (true)`) so the robot can stop
        // this thread when code is restarted or redeployed.
        while (!Thread.interrupted()) {
            // Grab the next frame; on failure, report the error and retry.
            if (sink.grabFrame(image) == 0) {
                output.notifyError(sink.getError());
                continue;
            }
            // Overlay a rectangle, then hand the annotated frame to the stream.
            Imgproc.rectangle(image, new Point(100, 100), new Point(400, 400), new Scalar(255, 255, 255), 5);
            output.putFrame(image);
        }
    });
    m_visionThread.setDaemon(true);
    m_visionThread.start();
}

From source file:emotion.Eyebrow.java

/**
 * Locates the inner and outer eyebrow corner in {@code img} (an eye-region crop) and
 * stores the result, offset by the eye rectangle, into the {@code EyeRegion} statics.
 *
 * @param img          eye-region image; corner crosses are drawn onto it as a side effect
 * @param rightEyeFlag true for the right eye (writes right* fields), false for the left
 */
public static void Harris(Mat img, boolean rightEyeFlag) {
    // Binarize dark structures, clean the mask with a morphological open (erode+dilate).
    Mat harrisTestimg;
    harrisTestimg = img.clone();
    cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY);
    threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV);
    Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    erode(harrisTestimg, harrisTestimg, struct);
    dilate(harrisTestimg, harrisTestimg, struct);
    imwrite("intermediateHaaris.jpg", harrisTestimg);
    harrisTestimg.convertTo(harrisTestimg, CV_8UC1);
    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    // Keep only contours in the upper half that are long enough to be an eyebrow;
    // repaint them white, erase everything else.
    float[] averageY = new float[contours.size()];
    for (int i = 0; i < contours.size(); ++i) {
        // Materialize the point array once per contour; the original called
        // toArray() once per point, which is accidentally O(n^2).
        Point[] pts = contours.get(i).toArray();
        // Mean Y coordinate of this contour.
        for (int j = 0; j < pts.length; ++j) {
            int val = (int) pts[j].y;
            averageY[i] += val;
        }
        averageY[i] /= contours.get(i).total();

        if (averageY[i] <= img.height() / 2 && //We consider just up half of an image
                contours.get(i).total() >= img.width()) //and longer than threshold
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255));
        else
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0));
    }

    MatOfPoint features = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0);

    // Keep the two extreme upper-half corners: leftmost in [0], rightmost in [1].
    // Initialized before the loop so an empty feature set no longer NPEs in drawCross.
    Point eyebrowsPoints[] = new Point[2];
    eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0);
    eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0);
    Point[] featurePts = features.toArray();
    for (int i = 0; i < featurePts.length; i++) {
        if (featurePts[i].x < eyebrowsPoints[0].x
                && featurePts[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[0] = featurePts[i];
        }
        if (featurePts[i].x > eyebrowsPoints[1].x
                && featurePts[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[1] = featurePts[i];
        }
    }
    StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS);
    StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS);
    imwrite("testHaris.jpg", img);
    // Translate from crop-local to face coordinates and publish to EyeRegion.
    if (rightEyeFlag) {
        EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y;

        EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y;
    } else {
        EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y;

        EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y;
    }
}

From source file:emotion.EyeRegion.java

/**
 * Detects expression wrinkles between the eyebrows: crops the inter-eyebrow region,
 * applies a vertical-edge gradient, thresholds it, and counts white pixels. Sets
 * {@code wrinklesFactor}; draws a rectangle on {@code _face} when the count exceeds
 * {@code wrinklesThreshold}.
 */
public static void areEyebrowsWrinkles() {
    // Region between the eyebrows, derived from previously detected corner points.
    int height = (int) (abs(rightInnerEyebrowsCorner.y - rightInnerEyeCorner.y) * 1.2);
    int width = (int) (rightInnerEyeCorner.x - leftInnerEyeCorner.x);
    int y = (int) (rightInnerEyebrowsCorner.y - height / 2);
    int x = (int) leftInnerEyebrowsCorner.x;

    Rect wrinklesRect = new Rect(x, y, width, height);
    Mat wrinklesArea = new Mat(_face, wrinklesRect).clone();

    // Threshold scales with the region size (8.5% of its pixel count).
    wrinklesThreshold = (int) (wrinklesArea.width() * wrinklesArea.height() * 0.085);
    // Wrinkles between eyebrows are vertical, so use a vertical-edge 3x3 kernel
    // (row-major). Array initializer replaces nine element-by-element assignments.
    int[] gradientMask = {
            -1, 0, 1,
            -5, 0, 5,
            -1, 0, 1
    };

    wrinklesArea.convertTo(wrinklesArea, CvType.CV_32F);
    Imgproc.cvtColor(wrinklesArea, wrinklesArea, Imgproc.COLOR_BGR2GRAY);
    // Mild gamma boost before the gradient.
    Core.pow(wrinklesArea, 1.09, wrinklesArea);
    imwrite("wrinklesArea.jpg", wrinklesArea);

    wrinklesArea = StaticFunctions.convolution(gradientMask, wrinklesArea);
    threshold(wrinklesArea, wrinklesArea, 110, 255, Imgproc.THRESH_BINARY);
    imwrite("wrinklesAreaGradiented.jpg", wrinklesArea);

    // Count white (edge) pixels in the thresholded gradient image.
    long wrinklesPoints = 0;
    for (int i = 0; i < wrinklesArea.width(); i++) {
        for (int j = 0; j < wrinklesArea.height(); j++) {
            if (wrinklesArea.get(j, i)[0] == 255) {
                wrinklesPoints++;
            }
        }
    }
    EyeRegion.wrinklesFactor = wrinklesPoints;
    //        System.out.println("Wrinkles factor: "+wrinklesPoints);
    if (wrinklesPoints >= wrinklesThreshold) {
        // Expression wrinkles detected: mark the region on the face image.
        Imgproc.rectangle(EyeRegion._face, wrinklesRect.br(), wrinklesRect.tl(), new Scalar(0, 50, 205));
    }
}

From source file:emotion.EyeRegion.java

/**
 * Draws a legend onto {@code EyeRegion._face}: one sample cross per feature type,
 * each followed by a white text label.
 */
public static void showLegend() {
    Scalar white = new Scalar(255, 255, 255);

    StaticFunctions.drawCross(EyeRegion._face, new Point(5, 5), StaticFunctions.Features.EYEBROWS_ENDS);
    Imgproc.putText(EyeRegion._face, "Eyebrows ends", new Point(12, 7), Core.FONT_HERSHEY_SIMPLEX, 0.3, white);

    StaticFunctions.drawCross(EyeRegion._face, new Point(5, 15), StaticFunctions.Features.EYE_CORNERS);
    Imgproc.putText(EyeRegion._face, "Eyes' corners", new Point(12, 17), Core.FONT_HERSHEY_SIMPLEX, 0.3, white);

    StaticFunctions.drawCross(EyeRegion._face, new Point(5, 25), StaticFunctions.Features.EYELIDS);
    Imgproc.putText(EyeRegion._face, "Eyelids", new Point(12, 27), Core.FONT_HERSHEY_SIMPLEX, 0.3, white);
}

From source file:emotion.StaticFunctions.java

/**
 * Draws a small cross (two 10 px lines) centered at {@code pt} on {@code img},
 * colored by feature type (BGR order).
 *
 * @param img  image to draw on
 * @param pt   center of the cross
 * @param feat feature type selecting the color; unknown types fall back to white
 */
static void drawCross(Mat img, Point pt, Features feat) {
    Scalar col;
    switch (feat) {
    case EYEBROWS_ENDS:
        col = new Scalar(0, 255, 0);
        break;
    case EYELIDS:
        col = new Scalar(100, 210, 255);
        break;
    case EYE_CORNERS:
        col = new Scalar(220, 180, 30);
        break;
    case WHITE_MARK:
        col = new Scalar(255, 255, 255);
        // break was missing: WHITE_MARK fell through to default. Harmless only
        // because both assign the same color, but the fall-through was a bug trap.
        break;
    default:
        col = new Scalar(255, 255, 255);

    }
    // Vertical stroke.
    Imgproc.line(img, new Point(pt.x, pt.y - 5), new Point(pt.x, pt.y + 5), col, 1);
    // Horizontal stroke.
    Imgproc.line(img, new Point(pt.x - 5, pt.y), new Point(pt.x + 5, pt.y), col, 1);
}