List of usage examples for the org.opencv.core.Scalar constructor
public Scalar(double v0, double v1, double v2)
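A minimal sketch of the constructor on its own, separate from the detection examples below: in the Java bindings a Scalar holds up to four channel values, and with OpenCV's default BGR channel order new Scalar(0, 255, 0) is pure green and new Scalar(0, 0, 255) is pure red, the colors used throughout the examples that follow. The image size and the ScalarDemo class name here are arbitrary illustration choices, not part of any source file listed below.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ScalarDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // load the native OpenCV library

        Scalar green = new Scalar(0, 255, 0);           // B = 0, G = 255, R = 0
        Mat canvas = new Mat(240, 320, CvType.CV_8UC3); // small 3-channel BGR image
        canvas.setTo(green);                            // fill every pixel with the color

        System.out.println(green);                      // [0.0, 255.0, 0.0, 0.0] (unset 4th channel defaults to 0)
    }
}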
From source file:vista.frmPrincipal.java
private void initCamara() {
    hilo = new Thread() {
        public void run() {
            if (cap.isOpened()) {
                while (true) {
                    try {
                        //Thread.sleep(100);
                        cap.read(imagen);
                        if (!imagen.empty()) {
                            Core.putText(imagen, Extra.retornaFecha(), new Point(10, 25), 1, 2, new Scalar(0, 255, 0), 3);
                            Core.putText(imagen, Extra.retornaHora(), new Point(10, 55), 1, 2, new Scalar(0, 255, 0), 3);
                            faceDetector.detectMultiScale(imagen, faceDetections);
                            for (Rect rect : faceDetections.toArray()) {
                                //Core.rectangle(imagen, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                                Core.rectangle(imagen, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0), 2);
                                Core.line(imagen, new Point(rect.x + rect.width / 2, rect.y + rect.height), new Point(imagen.width() / 2, imagen.height()), new Scalar(0, 255, 0), 3);
                                // Imgproc.adaptiveThreshold(imagen, imagen, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY_INV, 15, 4);
                                // Imgproc.Canny(imagen, imagen, 15, 34);
                                // Imgproc.threshold(imagen, imagen, -1, 255, 1);
                                System.out.println("valor?");
                                // Imgproc.threshold(imagen, imagen, -1, 255, Imgproc.THRESH_OTSU);
                                setPlacaImage(convertir(Sub_Image(imagen, rect)));
                                setPlacaFiltradaImage(threshold(convertirBufferedImage(Sub_Image(imagen, rect))));
                                try {
                                    String result = "ABC-0123";
                                    // result = instance.doOCR(convertirBufferedImage(Sub_Image(imagen, rect)));
                                    // result = "XBU-445 ";
                                    System.out.println(result);
                                    result = Extra.posible_placa(result);
                                    // result = Extra.posible_placa(result);
                                    lista_placas.add(result);
                                    // System.out.println(result);
                                } catch (Exception e) {
                                    // System.err.println(e.getMessage());
                                }
                            }
                            setImage(convertir(imagen));
                        }
                    } catch (Exception ex) {
                        // Logger.getLogger(frmPrincipal.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
    };
    hilo.start();
}
From source file:vista.frmPrincipalConteoCars.java
private void initVideo() {
    hilo_initCamara = new Thread() {
        public void run() {
            Point p1 = new Point(12, 110);
            Point p2 = new Point(220, 110);
            Point pfs1 = new Point(30, 150);
            Point pfs2 = new Point(30, 300);
            Point pfb1 = new Point(620, 150);
            Point pfb2 = new Point(620, 300);
            Point pc = new Point();
            boolean baja = false;
            video.open("video1.avi");
            video.read(imagen);
            while (!imagen.empty()) {
                try {
                    video.read(imagen);
                    Core.putText(imagen, Extra.retornaFecha(), new Point(10, 10), 1, 1, new Scalar(0, 255, 0), 2);
                    Core.putText(imagen, Extra.retornaHora(), new Point(10, 30), 1, 1, new Scalar(0, 255, 0), 2);
                    Core.putText(imagen, "# Suben: " + contador_sube, new Point(150, 10), 1, 1, new Scalar(0, 255, 0), 2);
                    Core.putText(imagen, "# Bajan: " + contador_baja, new Point(150, 30), 1, 1, new Scalar(0, 255, 0), 2);
                    faceDetector.detectMultiScale(imagen, faceDetections);
                    Core.line(imagen, p1, p2, new Scalar(0, 255, 0), 3);
                    int n_face = 0;
                    Point anterior = new Point(0, 0);
                    for (Rect rect : faceDetections.toArray()) {
                        n_face++;
                        //Core.rectangle(imagen, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                        pc.x = rect.x + rect.width / 2;
                        pc.y = rect.y + rect.height / 2;
                        haz_puntos.add(pc);
                        Core.putText(imagen, "#: " + n_face, new Point(rect.x - 2, rect.y - 2), 1, 1, new Scalar(0, 255, 0), 2);
                        Core.rectangle(imagen, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0), 1);
                        Core.circle(imagen, pc, 2, new Scalar(0, 0, 255), 3);
                        // Core.line(imagen, new Point(rect.x + rect.width / 2, rect.y + rect.height), new Point(imagen.width() / 2, imagen.height()), new Scalar(0, 255, 0), 3);
                        if (pc.y < p1.y) {
                            baja = true;
                        }
                        if (pc.y > p1.y) {
                            baja = false;
                        }
                        if ((baja == true) && (pc.y == p1.y)) {
                            contador_baja++;
                        }
                        if ((pc.y == p1.y) && (baja == false)) {
                            contador_sube++;
                        }
                        anterior = pc;
                    }
                    setImage(convertir(imagen));
                    Thread.sleep(50);
                } catch (InterruptedException ex) {
                    // Logger.getLogger(frmPrincipalConteoCars.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    };
    hilo_initCamara.start();
}
From source file:vista.frmPrincipalConteoCars.java
private void initCamara() {
    hilo = new Thread() {
        public void run() {
            Point p1 = new Point(50, 400);
            Point p2 = new Point(600, 400);
            Point pfs1 = new Point(30, 150);
            Point pfs2 = new Point(30, 300);
            Point pfb1 = new Point(620, 150);
            Point pfb2 = new Point(620, 300);
            Point pc = new Point();
            boolean baja = false;
            if (camara.isOpened()) {
                while (true) {
                    try {
                        //Thread.sleep(100);
                        camara.read(imagen);
                        if (!imagen.empty()) {
                            Core.putText(imagen, Extra.retornaFecha(), new Point(10, 25), 1, 2, new Scalar(0, 255, 0), 3);
                            Core.putText(imagen, Extra.retornaHora(), new Point(10, 55), 1, 2, new Scalar(0, 255, 0), 3);
                            Core.putText(imagen, "# Suben: " + contador_sube, new Point(250, 25), 1, 2, new Scalar(0, 255, 0), 3);
                            Core.putText(imagen, "# Bajan: " + contador_baja, new Point(250, 55), 1, 2, new Scalar(0, 255, 0), 3);
                            faceDetector.detectMultiScale(imagen, faceDetections);
                            Core.line(imagen, p1, p2, new Scalar(0, 255, 0), 3);
                            int n_face = 0;
                            Point anterior = new Point(0, 0);
                            for (Rect rect : faceDetections.toArray()) {
                                n_face++;
                                //Core.rectangle(imagen, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                                pc.x = rect.x + rect.width / 2;
                                pc.y = rect.y + rect.height / 2;
                                haz_puntos.add(pc);
                                Core.putText(imagen, "#: " + n_face, new Point(rect.x - 20, rect.y - 20), 1, 2, new Scalar(0, 255, 0), 3);
                                Core.rectangle(imagen, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0), 2);
                                Core.circle(imagen, pc, 5, new Scalar(0, 0, 255), 6);
                                // Core.line(imagen, new Point(rect.x + rect.width / 2, rect.y + rect.height), new Point(imagen.width() / 2, imagen.height()), new Scalar(0, 255, 0), 3);
                                if (pc.y < p1.y) {
                                    baja = true;
                                }
                                if (pc.y > p1.y) {
                                    baja = false;
                                }
                                if ((baja == true) && (pc.y == p1.y)) {
                                    Core.line(imagen, anterior, pc, new Scalar(0, 0, 255), 3);
                                    Core.line(imagen, p1, p2, new Scalar(0, 0, 255), 3);
                                    Core.line(imagen, pfs1, pfs2, new Scalar(255, 0, 0), 3);
                                    Core.line(imagen, pfb1, pfb2, new Scalar(255, 0, 0), 3);
                                    Core.line(imagen, pfs2, new Point(pfs2.x + 10, pfs2.y - 10), new Scalar(255, 0, 0), 3);
                                    Core.line(imagen, pfb2, new Point(pfb2.x + 10, pfb2.y - 10), new Scalar(255, 0, 0), 3);
                                    Core.line(imagen, pfs2, new Point(pfs2.x - 10, pfs2.y - 10), new Scalar(255, 0, 0), 3);
                                    Core.line(imagen, pfb2, new Point(pfb2.x - 10, pfb2.y - 10), new Scalar(255, 0, 0), 3);
                                    contador_baja++;
                                }
                                if ((pc.y == p1.y) && (baja == false)) {
                                    Core.line(imagen, p1, p2, new Scalar(0, 0, 255), 5);
                                    Core.line(imagen, pfs1, pfs2, new Scalar(0, 0, 255), 5);
                                    Core.line(imagen, pfb1, pfb2, new Scalar(0, 0, 255), 5);
                                    Core.line(imagen, pfb1, new Point(pfb1.x + 10, pfb1.y + 10), new Scalar(0, 0, 255), 3);
                                    Core.line(imagen, pfs1, new Point(pfs1.x + 10, pfs1.y + 10), new Scalar(0, 0, 255), 3);
                                    Core.line(imagen, pfb1, new Point(pfb1.x - 10, pfb1.y + 10), new Scalar(0, 0, 255), 3);
                                    Core.line(imagen, pfs1, new Point(pfs1.x - 10, pfs1.y + 10), new Scalar(0, 0, 255), 3);
                                    contador_sube++;
                                }
                                anterior = pc;
                            }
                            setImage(convertir(imagen));
                        }
                    } catch (Exception ex) {
                        // Logger.getLogger(frmPrincipal.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
        }
    };
    hilo.start();
}
From source file:webcamfacedetect.Processor.java
public Mat detect(Mat inputframe) {
    Mat mRgba = new Mat();
    Mat mGrey = new Mat();
    MatOfRect faces = new MatOfRect();
    inputframe.copyTo(mRgba);
    inputframe.copyTo(mGrey);
    Imgproc.cvtColor(mRgba, mGrey, Imgproc.COLOR_BGR2GRAY);
    Imgproc.equalizeHist(mGrey, mGrey);
    face_cascade.detectMultiScale(mGrey, faces);
    System.out.println(String.format("Detected %s faces.", faces.toArray().length));
    for (Rect rect : faces.toArray()) {
        Point center = new Point(rect.x + rect.width * 0.5, rect.y + rect.height * 0.5);
        Imgproc.ellipse(mRgba, center, new Size(rect.width * 0.5, rect.height * 0.5), 0, 0, 360,
                new Scalar(255, 0, 255), 4, 8, 0);
    }
    return mRgba;
}
From source file:xored.vpn.fixer.TokenDetector.java
public boolean run() throws IOException, InterruptedException {
    saveImage();
    Mat source = Highgui.imread(workspace + "original.jpg", Highgui.CV_LOAD_IMAGE_COLOR);
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    List<Mat> mats = new ArrayList<>();
    mats.add(new Mat());
    mats.add(new Mat());
    mats.add(new Mat());
    Core.split(source, mats);
    mats.get(1).convertTo(destination, -1, 2);
    imwrite(workspace + "green-ajusted.jpg", destination);
    Mat image = Highgui.imread(workspace + "green-ajusted.jpg", Imgproc.COLOR_RGB2GRAY);
    Rect rect = new Rect(176, 265, 178, 52);
    Mat imageB = threshold(image.submat(rect), 15, 2);
    int iWidth = 28;
    int iHeight = 45;
    List<Mat> matList = Arrays.asList(imageB,
            imageB.submat(new Rect(0, 0, iWidth, iHeight)),
            imageB.submat(new Rect(28, 2, iWidth, iHeight)),
            imageB.submat(new Rect(54, 2, iWidth, iHeight)),
            imageB.submat(new Rect(95, 5, iWidth, iHeight)),
            imageB.submat(new Rect(122, 6, iWidth, iHeight)),
            imageB.submat(new Rect(150, 7, iWidth, iHeight)));
    imwrite(temp + "1.jpg", matList.get(1));
    imwrite(temp + "2.jpg", matList.get(2));
    imwrite(temp + "3.jpg", matList.get(3));
    imwrite(temp + "4.jpg", matList.get(4));
    imwrite(temp + "5.jpg", matList.get(5));
    imwrite(temp + "6.jpg", matList.get(6));
    Mat m = addTo(source, matList);
    Core.rectangle(m, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
            new Scalar(0, 0, 255));
    String code = recognize();
    imwrite(workspace + "ResultsDebug/" + code + ".jpg", m);
    Main.log("Using code: " + code);
    Main.log("Using username: " + Main.username);
    Main.log("Using pin: " + Main.pin);
    String command = Main.nclauncher + " -url https://vpn.spirent.com/xored -r \"Contractor - Xored\" -u "
            + Main.username + " -p " + Main.pin + code;
    String response = Main.execCmd(command);
    Main.log(response);
    return response.indexOf("is already running") >= 0;
}