Example usage for org.opencv.core Scalar Scalar

Introduction

On this page you can find example usages of the org.opencv.core Scalar constructor, collected from open-source projects.

Prototype

public Scalar(double v0, double v1, double v2) 
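
The three-argument constructor creates a scalar whose first three channels are v0, v1 and v2; the fourth channel defaults to 0. When a Scalar is used as a colour, OpenCV's Java API expects BGR channel order, so new Scalar(0, 0, 255) is red and new Scalar(0, 255, 0) is green. A minimal, self-contained sketch (not taken from the examples below):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ScalarDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // A 100x100, 3-channel image with every pixel set to pure red (BGR: 0, 0, 255).
        Mat red = new Mat(100, 100, CvType.CV_8UC3, new Scalar(0, 0, 255));
        System.out.println(red.get(0, 0)[2]); // prints 255.0, the red channel of pixel (0, 0)
    }
}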

Usage

From source file:bollettini.BullettinCompiler.java

private void writeAddress(String address) {
    String temp = "";
    int offset = 24;
    if (address.length() > offset) {
        temp = address.substring(0, offset);
    } else {
        temp = address;
    }
    Core.putText(bullettin, temp, new Point(95, 529), Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));
    Core.putText(bullettin, temp, new Point(569, 529), Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));

    Point A = new Point(1278, 395);
    for (int i = 0; i < 46 && i < address.length(); i++) {
        Core.putText(bullettin, String.valueOf(address.charAt(i)), A, Core.FONT_HERSHEY_COMPLEX, 0.8,
                new Scalar(0, 0, 0));
        A.x += 25;
    }
}

From source file:bollettini.BullettinCompiler.java

private void writeCap(String cap) {
    String temp = "";
    int offset = 5;
    if (cap.length() > offset) {
        temp = cap.substring(0, offset);
    } else {
        temp = cap;
    }
    Core.putText(bullettin, temp, new Point(43, 557), Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));
    Core.putText(bullettin, temp, new Point(516, 557), Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));

    Point A = new Point(1278, 442);
    for (int i = 0; i < 5 && i < cap.length(); i++) {
        Core.putText(bullettin, String.valueOf(cap.charAt(i)), A, Core.FONT_HERSHEY_COMPLEX, 0.8,
                new Scalar(0, 0, 0));
        A.x += 25;
    }
}

From source file:bollettini.BullettinCompiler.java

private void writeLocality(String locality) {
    String temp = "";
    int offset = 17;
    if (locality.length() > offset) {
        temp = locality.substring(0, offset);
    } else {
        temp = locality;
    }
    Core.putText(bullettin, temp, new Point(205, 557), Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));
    Core.putText(bullettin, temp, new Point(678, 557), Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));

    Point A = new Point(1430, 442);
    for (int i = 0; i < 17 && i < locality.length(); i++) {
        Core.putText(bullettin, String.valueOf(locality.charAt(i)), A, Core.FONT_HERSHEY_COMPLEX, 0.8,
                new Scalar(0, 0, 0));
        A.x += 25;
    }
}
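
The three BullettinCompiler methods above repeat the same clamp-then-print pattern with different length limits and coordinates. A hypothetical refactoring (the helper name and signature are illustrative, not part of the original source):

/** Truncates text to maxLen characters and prints it at each of the given anchor points. */
private void writeClamped(String text, int maxLen, Point... anchors) {
    String clamped = text.length() > maxLen ? text.substring(0, maxLen) : text;
    for (Point p : anchors) {
        Core.putText(bullettin, clamped, p, Core.FONT_HERSHEY_COMPLEX, 0.8, new Scalar(0, 0, 0));
    }
}

writeAddress, for instance, would reduce to writeClamped(address, 24, new Point(95, 529), new Point(569, 529)) followed by its per-character loop.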

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License: Open Source License

public void startTracking() throws Exception {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    mountFrames();

    // Matrices for image processing.
    Mat image = new Mat();
    Mat thresholdedImage = new Mat();
    Mat hsvImage = new Mat();

    // Opens camera capture flow.
    VideoCapture capture = null;
    String imagesource = PropertiesLoaderImpl.getValor("multipleObjectTracking.imagesource");
    if (imagesource.equalsIgnoreCase("webcam")) {
        capture = new VideoCapture(0);
    } else if (imagesource.equalsIgnoreCase("ipcam")) {
        String ipcamAddress = PropertiesLoaderImpl
                .getValor("multipleObjectTracking.imagesource.ipcam.address");
        capture = new VideoCapture(ipcamAddress);
    }

    if (capture == null) {
        throw new Exception("Could not conect to camera.");
    }

    // Captures one image, for starting the process.
    try {
        capture.read(image);
    } catch (Exception e) {
        throw new Exception("Could not read from camera. Maybe the URL is not correct.");
    }

    setFramesSizes(image);

    if (capture.isOpened()) {

        while (true) {
            capture.read(image);

            if (!image.empty()) {
                Imgproc.cvtColor(image, hsvImage, Imgproc.COLOR_BGR2HSV);

                if (calibrationMode) {
                    thresholdedImage = processImage(hsvImage,
                            new Scalar(calibrationWindow.getMinHValue(), calibrationWindow.getMinSValue(),
                                    calibrationWindow.getMinVValue()),
                            new Scalar(calibrationWindow.getMaxHValue(), calibrationWindow.getMaxSValue(),
                                    calibrationWindow.getMaxVValue()));
                    trackFilteredObject(null, thresholdedImage, image);
                    updateFrames(image, thresholdedImage);
                } else {
                    Ball redBall = new Ball(Ball.Colours.RED);
                    Ball greenBall = new Ball(Ball.Colours.GREEN);
                    Ball blueBall = new Ball(Ball.Colours.BLUE);

                    ArrayList<Ball> balls = new ArrayList<Ball>();
                    balls.add(redBall);
                    balls.add(greenBall);
                    balls.add(blueBall);

                    for (Ball ball : balls) {
                        thresholdedImage = processImage(hsvImage, ball.getHsvMin(), ball.getHsvMax());
                        trackFilteredObject(ball, thresholdedImage, image);
                        updateFrames(image, thresholdedImage);
                    }

                }

            } else {
                throw new Exception("Could not read camera image.");
            }

        }

    } else {
        throw new Exception("Could not read from camera.");
    }

}
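
The processImage method called above is not shown on this page. Given that it takes an HSV image plus minimum and maximum Scalar bounds and returns a binary image, a plausible sketch is a Core.inRange threshold; the morphological clean-up below is an assumption, not confirmed by the source:

private Mat processImage(Mat hsvImage, Scalar hsvMin, Scalar hsvMax) {
    Mat thresholded = new Mat();
    // Keep only the pixels whose H, S and V all fall within [hsvMin, hsvMax].
    Core.inRange(hsvImage, hsvMin, hsvMax, thresholded);
    // Erode away speckle noise, then dilate to restore the surviving blobs (assumed step).
    Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8));
    Imgproc.erode(thresholded, thresholded, erodeElement);
    Imgproc.dilate(thresholded, thresholded, dilateElement);
    return thresholded;
}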

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License: Open Source License

private void trackFilteredObject(Ball theBall, Mat threshold, Mat cameraFeed) {
    List<Ball> balls = new ArrayList<Ball>();

    Mat temp = new Mat();
    threshold.copyTo(temp);

    // The two containers below receive the output of findContours.
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // Find the contours of the filtered image using OpenCV's findContours function.
    Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);

    // use moments method to find our filtered object
    boolean objectFound = false;

    if (contours.size() > 0) {
        int numObjects = contours.size();

        // if the number of objects is greater than MAX_NUM_OBJECTS, the filter is too noisy
        if (numObjects < MAX_NUM_OBJECTS) {

            for (int i = 0; i < contours.size(); i++) {
                Moments moment = Imgproc.moments(contours.get(i));
                double area = moment.get_m00();

                // if the area is less than 20 px by 20 px, it is probably just noise
                // if the area is about 3/2 of the image size, it is probably a bad filter
                // we only want the object with the largest area, so we save a reference area each
                // iteration and compare it to the area in the next iteration.
                if (area > MIN_OBJECT_AREA) {
                    Ball ball = new Ball();
                    ball.setXPos((int) (moment.get_m10() / area));
                    ball.setYPos((int) (moment.get_m01() / area));

                    if (theBall != null) {
                        ball.setType(theBall.getType());
                        ball.setColour(theBall.getColour());
                    }

                    balls.add(ball);

                    objectFound = true;
                }
                // Do not reset objectFound in an else branch here: a small trailing
                // contour would otherwise discard a hit found earlier in the loop.

            }

            //let user know you found an object
            if (objectFound) {
                //draw object location on screen
                drawObject(balls, cameraFeed);
            }

        } else {
            Core.putText(cameraFeed, "TOO MUCH NOISE! ADJUST FILTER", new Point(0, 50), 1, 2,
                    new Scalar(0, 0, 255), 2);
        }

    }

}
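
The centroid arithmetic in trackFilteredObject is the standard image-moments formula: for a blob with moments m00 (its area in pixels), m10 and m01, the centroid is (m10 / m00, m01 / m00), which is exactly what setXPos and setYPos receive.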

From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java

License: Open Source License

private void drawObject(List<Ball> theBalls, Mat frame) {

    for (int i = 0; i < theBalls.size(); i++) {
        Ball theBall = theBalls.get(i);

        Core.circle(frame, new Point(theBall.getXPos(), theBall.getYPos()), 10, new Scalar(0, 0, 255));
        Core.putText(frame, theBall.getXPos() + " , " + theBall.getYPos(),
                new Point(theBall.getXPos(), theBall.getYPos() + 20), 1, 1, new Scalar(0, 255, 0));
        Core.putText(frame, theBall.getType().toString(), new Point(theBall.getXPos(), theBall.getYPos() - 30),
                1, 2, theBall.getColour());
    }

}

From source file:by.zuyeu.deyestracker.core.detection.DemoPanel.java

public static void main(String arg[]) throws DEyesTrackerException, InterruptedException, ExecutionException {
    LOG.info("main - start;");
    final String windowName = "Capture - Face detection";
    final JFrame frame = new JFrame(windowName);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    final DemoPanel demoPanel = new DemoPanel();
    frame.setContentPane(demoPanel);
    frame.setVisible(true);

    //-- 2. Read the video stream
    final FaceInfoSampler sampler = new FaceInfoSampler();
    final IFrameCapture capture = sampler.getCapture();
    final Scalar faceRegionColor = new Scalar(0, 255, 0);
    final Scalar eyesRegionColor = new Scalar(120, 120, 120);

    final ExecutorService executorService = Executors.newSingleThreadExecutor();
    FutureTask<DetectFaceSample> detectFaceTask = TaskUtils.wrapFutureAnd(new DetectTask(sampler),
            executorService);
    DetectFaceSample sample = new DetectFaceSample();
    while (true) {
        final Mat webcamImage = capture.getNextFrame();
        if (webcamImage != null && !webcamImage.empty()) {
            frame.setSize(webcamImage.width() + 40, webcamImage.height() + 60);

            if (detectFaceTask.isDone()) {
                sample = detectFaceTask.get();

                detectFaceTask = TaskUtils.wrapFutureAnd(new DetectTask(sampler), executorService);
            }

            if (sample.getFace() != null) {
                addRectangleToImage(sample.getFace(), webcamImage, faceRegionColor);
            }
            if (sample.getLeftEye() != null) {
                addRectangleToImage(sample.getLeftEye(), webcamImage, eyesRegionColor);
            }
            if (sample.getRightEye() != null) {
                addRectangleToImage(sample.getRightEye(), webcamImage, eyesRegionColor);
            }
            if (sample.getLeftPupil() != null) {
                drawCircle(webcamImage, sample.getLeftPupil());
            }
            if (sample.getRightPupil() != null) {
                drawCircle(webcamImage, sample.getRightPupil());
            }

            //-- 4. Display the image
            demoPanel.convertMatToBufferedImage(webcamImage); // We could look at the error...
            demoPanel.repaint();
        }
    }
}

From source file:car_counter.counting.opencv.OpencvCarCounter.java

License: Apache License

@Override
public Collection<DetectedVehicle> processVideo(Path video, DateTime startDateTime) {
    CascadeClassifier carDetector = new CascadeClassifier("/Users/luke/working/car-counter/data/cars3.xml");

    VideoCapture videoCapture = new VideoCapture();
    videoCapture.open("/Users/luke/working/car-counter/data/video1.m4v");

    int index = 0;

    while (true) {
        if (!videoCapture.read(image)) {
            break;
        }

        System.out.print(".");

        //processFrame();

        MatOfRect carDetections = new MatOfRect();
        carDetector.detectMultiScale(image, carDetections);

        System.out.println(String.format("Detected %s cars", carDetections.toArray().length));

        // Draw a bounding box around each hit
        for (Rect rect : carDetections.toArray()) {
            Core.rectangle(image, new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
        }

        String file = String.format("/Users/luke/working/car-counter/data/out/out-%03d.jpg", index++);
        org.opencv.highgui.Highgui.imwrite(file, image);
    }

    return null;
}
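
This snippet targets the OpenCV 2.4 Java API, where the drawing helpers live in Core and image I/O in Highgui. On OpenCV 3.x and later the same calls moved to Imgproc and Imgcodecs; a minimal sketch of the equivalent loop body (same logic, updated classes):

import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

// Inside the capture loop, for OpenCV 3.x+:
for (Rect rect : carDetections.toArray()) {
    Imgproc.rectangle(image, new Point(rect.x, rect.y),
            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
}
Imgcodecs.imwrite(file, image);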

From source file:ch.hslu.pren.t37.camera.BildAuswertungKorb.java

public int bildAuswerten() {

    // Image in which to search
    String inFile = "../camera.jpg";
    // Template image that is searched for inside the input image
    String templateFile = "../Bilder/korb.jpg";
    // The result is rendered into this image
    String outFile = "../LoesungsBild.jpg";
    // Select the matching method
    int match_method = Imgproc.TM_CCOEFF_NORMED;

    // Load the original image and the template
    Mat img = Highgui.imread(inFile, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat templ = Highgui.imread(templateFile, Highgui.CV_LOAD_IMAGE_COLOR);

    // Create the result matrix
    int result_cols = img.cols() - templ.cols() + 1;
    int result_rows = img.rows() - templ.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    // Match and normalize
    Imgproc.matchTemplate(img, templ, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // Find the best match via min/max logic
    Core.MinMaxLocResult mmr = Core.minMaxLoc(result);

    Point matchLoc;
    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
        matchLoc = mmr.minLoc;
    } else {
        matchLoc = mmr.maxLoc;
    }

    // Draw the match
    Core.rectangle(img, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()),
            new Scalar(0, 255, 0), 10);

    // Store all four corner points
    Point topLeft = new Point(matchLoc.x, matchLoc.y);
    Point topRight = new Point(matchLoc.x + templ.cols(), matchLoc.y);
    Point downLeft = new Point(matchLoc.x, matchLoc.y + templ.rows());
    Point downRight = new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows());

    // Save the result image
    Highgui.imwrite(outFile, img);

    // Compute the centers
    double mittePicture;
    double mitteKorb;
    double differnez;

    Mat sol = Highgui.imread(outFile, Highgui.CV_LOAD_IMAGE_COLOR);

    mittePicture = sol.width() / 2;
    mitteKorb = (topRight.x - topLeft.x) / 2;
    mitteKorb = topLeft.x + mitteKorb;
    differnez = mitteKorb - mittePicture;

    logger.log(PrenLogger.LogLevel.DEBUG, "Basket center: " + mitteKorb);
    logger.log(PrenLogger.LogLevel.DEBUG, "Image center: " + mittePicture);
    logger.log(PrenLogger.LogLevel.DEBUG,
            "Difference: " + differnez + "\nIf the difference is negative, turn right");

    return (int) differnez;
}
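
As a worked example: if the solution image is 1280 px wide (mittePicture = 640) and the template match spans x = 700 to x = 900 (mitteKorb = 800), the method returns 160; as the log message notes, a negative difference would mean the robot has to turn right.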

From source file:classes.FaceDetector.java

public static String detectFace(String filePath) {

    //        String dirName = "C:/Users/ggm/Documents/NetBeansProjects/MyWebApplication";
    String dirName = "C:/Users/Gonzalo/Documents/NetBeansProjects/MyWebApplication";
    //        String dirName = "/Users/ggmendez/Development/MyWebApplication";

    System.out.println(dirName);

    String frontalfaceFile = dirName + "/data/lbpcascades/lbpcascade_frontalface.xml";

    System.out.println(frontalfaceFile);

    CascadeClassifier faceDetector = new CascadeClassifier(frontalfaceFile);

    Mat image = Highgui.imread(filePath);

    System.out.println(image);

    // Detect faces in the image 
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }

    // Save the visualized detection.            
    Date date = new Date();
    // Note: yyyy (calendar year) and HH (24-hour clock); YYYY/hh would give week-year and ambiguous 12-hour values.
    Format formatter = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
    String filename = dirName + "/imgs/out_" + formatter.format(date) + ".png";

    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);

    Gson gson = new Gson();
    String jsonResponse = gson.toJson(faceDetections.toArray());
    // Rename the Rect fields for the client. This only works because neither the
    // remaining keys ("width", "height") nor the numeric values contain an "x" or "y".
    jsonResponse = jsonResponse.replaceAll("x", "left").replaceAll("y", "top");

    return jsonResponse;

}
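
A more robust variant would build the response explicitly instead of renaming keys with string replacement. A minimal sketch (the left/top field names are carried over from the replaceAll above; everything else is standard Gson):

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

JsonArray response = new JsonArray();
for (Rect rect : faceDetections.toArray()) {
    JsonObject box = new JsonObject();
    box.addProperty("left", rect.x);
    box.addProperty("top", rect.y);
    box.addProperty("width", rect.width);
    box.addProperty("height", rect.height);
    response.add(box);
}
return new Gson().toJson(response);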