List of usage examples for the org.opencv.core.Scalar constructor
public Scalar(double v0, double v1, double v2)
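Before the collected examples below, here is a minimal, self-contained sketch of the three-argument constructor. It is not taken from any of the listed source files; the class name ScalarExample and the 100x100 image size are illustrative, and it assumes the OpenCV Java bindings are on the classpath with the native library available. A three-element Scalar typically supplies one value per channel, for example a B, G, R color:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ScalarExample {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is on java.library.path.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // One value per channel: a solid green 100x100 BGR image.
        Mat green = new Mat(100, 100, CvType.CV_8UC3, new Scalar(0, 255, 0));
        // Core.mean also returns a Scalar; expect (0, 255, 0, 0) here.
        System.out.println("Mean color: " + Core.mean(green));
    }
}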
From source file: emotion.StaticFunctions.java

public static Mat gabor(Mat image) {
    Mat img = image.clone();
    double ksize = 15;
    double sigma = 4;
    double gamma = 1;
    double psi = 50;
    int lambd[] = new int[] { 5, 6, 7, 10 /*, 15, 13, 2 */ };
    double theta[] = new double[] { 180, 200 };
    ArrayList<Mat> kernels = new ArrayList<>();
    for (int i = 0; i < theta.length; i++) {
        for (int j = 0; j < lambd.length; j++) {
            kernels.add(Imgproc.getGaborKernel(new Size(ksize, ksize), sigma, theta[i], lambd[j], gamma, psi,
                    CvType.CV_32F));
        }
    }
    // Accumulate the responses of all Gabor kernels into one image.
    Mat result = new Mat(img.height(), img.width(), img.type(), new Scalar(0, 0, 0));
    for (Mat kernel : kernels) {
        Mat temp = new Mat(img.height(), img.width(), img.type(), new Scalar(0, 0, 0));
        Imgproc.filter2D(img, temp, -1, kernel);
        Core.add(result, temp, result);
    }
    // imwrite("gaborResult.jpg", result);
    return result;
}
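A possible way to call the helper above. This is a hypothetical driver fragment, not part of emotion.StaticFunctions.java; the file names are placeholders and it assumes the OpenCV 3.x Imgcodecs API:

// Hypothetical usage sketch for StaticFunctions.gabor (file names assumed).
Mat input = Imgcodecs.imread("face.jpg", Imgcodecs.IMREAD_GRAYSCALE);
Mat filtered = StaticFunctions.gabor(input);
Imgcodecs.imwrite("gaborSum.jpg", filtered);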
From source file: es.ugr.osgiliart.drawer.OpenCVCollageDrawer.java
License: Open Source License

@Override
public void draw(ArtisticIndividual artistic) {
    int imageWidth = (Integer) this.getAlgorithmParameters().getParameter(ArtisticParameters.IMAGE_WIDTH);
    int imageHeight = (Integer) this.getAlgorithmParameters().getParameter(ArtisticParameters.IMAGE_HEIGHT);
    String imageType = (String) this.getAlgorithmParameters().getParameter(ArtisticParameters.IMAGE_TYPE);
    String folderPath = (String) this.getAlgorithmParameters().getParameter(ArtisticParameters.DATA_FOLDER);

    List<Primitive> primitives = ((ArtisticGenome) artistic.getGenome()).getPrimitives();
    Mat orig = new Mat(imageWidth, imageHeight, CvType.CV_8UC3, new Scalar(255, 255, 255));

    for (Primitive p : primitives) {
        Patch patch = (Patch) p;
        Mat pm = patch.getMat();
        int posCol = (int) (patch.getLocation().x * orig.cols());
        int posRow = (int) (patch.getLocation().y * orig.rows());
        int finalCol = posCol + pm.cols();
        int finalRow = posRow + pm.rows();
        if (finalCol > orig.cols())
            finalCol = orig.cols();
        if (finalRow > orig.rows())
            finalRow = orig.rows();
        // System.out.println("Placing image of size " + pm.rows() + "," + pm.cols() + " at " + posRow + ","
        //         + posCol + " up to " + finalRow + "," + finalCol);
        Mat bSubmat = orig.submat(posRow, finalRow, posCol, finalCol);
        pm.copyTo(bSubmat);
    }

    /* draw image */
    /* save image */
    String imageExtension = null;
    if (imageType.equalsIgnoreCase(IMAGE_TYPE_JPEG)) {
        imageExtension = "jpg";
    } else if (imageType.equalsIgnoreCase(IMAGE_TYPE_PNG)) {
        imageExtension = "png";
    }
    if (imageExtension != null) {
        String imagePath = String.format("%s/%s.%s", folderPath, artistic.getId(), imageExtension);
        // System.out.println("Saving... " + imagePath + " primitives: " + primitives.size());
        // graphics.save(imagePath);
        // applet.save(imagePath);
        Highgui.imwrite(imagePath, orig);
        artistic.setImagePath(imagePath);
    }
}
From source file: eu.fpetersen.robobrain.behavior.followobject.OrbObjectDetector.java
License: Open Source License

public void process(Mat image) {
    Mat tempImage = new Mat();
    Imgproc.cvtColor(image, tempImage, Imgproc.COLOR_RGBA2RGB);
    MatOfKeyPoint keypoints = detectInImage(tempImage);
    Mat descriptors = extractDescriptors(keypoints, tempImage);

    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors, originalDescriptors, matches);
    KeyPoint[] keypointArray = keypoints.toArray();
    KeyPoint[] originalKeypointArray = originalKeypoints.toArray();

    float min = 40.0f;
    float max = 1000.0f;
    for (DMatch match : matches.toList()) {
        if (match.distance < min) {
            min = match.distance;
        } else if (match.distance > max) {
            max = match.distance;
        }
    }

    float threshold = 1.5f * min;
    List<KeyPoint> matchedKeyPoints = new ArrayList<KeyPoint>();
    List<Point> matchedPoints = new ArrayList<Point>();
    List<Point> matchedOriginalPoints = new ArrayList<Point>();
    for (DMatch match : matches.toList()) {
        if (match.distance < threshold) {
            KeyPoint matchedKeypoint = keypointArray[match.queryIdx];
            matchedKeyPoints.add(matchedKeypoint);
            matchedPoints.add(matchedKeypoint.pt);
            KeyPoint matchedOriginalKeypoint = originalKeypointArray[match.trainIdx];
            matchedOriginalPoints.add(matchedOriginalKeypoint.pt);
        }
    }

    if (matchedKeyPoints.size() > 10) {
        Mat H = Calib3d.findHomography(
                new MatOfPoint2f(matchedOriginalPoints.toArray(new Point[matchedOriginalPoints.size()])),
                new MatOfPoint2f(matchedPoints.toArray(new Point[matchedPoints.size()])), Calib3d.RANSAC, 10);

        List<Point> originalCorners = new ArrayList<Point>();
        originalCorners.add(new Point(0, 0));
        originalCorners.add(new Point(originalImage.cols(), 0));
        originalCorners.add(new Point(originalImage.cols(), originalImage.rows()));
        originalCorners.add(new Point(0, originalImage.rows()));

        List<Point> corners = new ArrayList<Point>();
        for (int i = 0; i < 4; i++) {
            corners.add(new Point(0, 0));
        }
        Mat objectCorners = Converters.vector_Point2f_to_Mat(corners);
        Core.perspectiveTransform(Converters.vector_Point2f_to_Mat(originalCorners), objectCorners, H);
        corners.clear();
        Converters.Mat_to_vector_Point2f(objectCorners, corners);

        Core.line(tempImage, corners.get(0), corners.get(1), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(1), corners.get(2), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(2), corners.get(3), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(3), corners.get(0), new Scalar(0, 255, 0), 4);
    }
    Features2d.drawKeypoints(tempImage,
            new MatOfKeyPoint(matchedKeyPoints.toArray(new KeyPoint[matchedKeyPoints.size()])), tempImage);
    Imgproc.cvtColor(tempImage, image, Imgproc.COLOR_RGB2RGBA);
}
From source file: eu.fpetersen.robobrain.ui.CameraViewActivity.java
License: Open Source License

public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mDetector = new ColorBlobDetector(new Scalar(3.109375, 241, 186.640625));
    // ExternalStorageManager manager = new ExternalStorageManager(this);
    // File root = manager.getRoboBrainRoot();
    // File images = new File(root, "images");
    // File objectPicture = new File(images, "card2.jpg");
    // BitmapFactory.Options options = new BitmapFactory.Options();
    // options.inPreferredConfig = Bitmap.Config.ARGB_8888;
    // Bitmap bitmap = BitmapFactory.decodeFile(objectPicture.getAbsolutePath(), options);
    // Mat objectImage = new Mat();
    // Utils.bitmapToMat(bitmap, objectImage);
    // mDetector = new OrbObjectDetector(objectImage);
}
From source file: facedetection.FaceDetector.java

public void findFaces() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(
            "D:\\opencv\\sources\\data\\lbpcascades\\lbpcascade_frontalface.xml");

    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(img, faceDetections);
    System.out.println(faceDetections);
    for (Rect rect : faceDetections.toArray()) {
        faceList.add(rect);
        Imgproc.rectangle(img, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height),
                new Scalar(0, 255, 0));
    }
}
From source file: faceDetectionV1.FaceDetection.java

public void detectFaces(File file, ImagePanel imagePanel) {
    Mat image = Imgcodecs.imread(file.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    MatOfRect facedetections = new MatOfRect();
    cascadeClassifier.detectMultiScale(image, facedetections);
    for (Rect rect : facedetections.toArray()) {
        Imgproc.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(100, 100, 250), 10);
    }
    BufferedImage bufferedImage = convertMatToImage(image);
    imagePanel.updateImage(bufferedImage);
}
From source file: facerecognition.sample1.java

private static Rect find_enclosing_rectangle(double[][] puntos, File image_file) {
    Mat image = Imgcodecs.imread(image_file.getAbsolutePath());
    int i = 0;
    Mat img2 = image.clone();
    for (CascadeClassifier faceDetector : faceDetectors) {
        // Detect faces in the image.
        // MatOfRect is a special container class for Rect.
        MatOfRect faceDetections = new MatOfRect();
        faceDetector.detectMultiScale(image, faceDetections);
        System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

        // Draw a bounding box around each face.
        // double percent = 0.4;
        for (Rect rect : faceDetections.toArray()) {
            Rect piv = rect.clone();
            // expand the rectangle
            int h = piv.height, w = piv.width;
            piv.x -= w * percent / 2;
            piv.y -= h * percent / 2;
            piv.height *= (1 + percent);
            piv.width *= (1 + percent);
            // Mat croped = new Mat(image, rect);
            // Imgcodecs.imwrite("face" + (++i) + ".png", croped);
            Imgproc.rectangle(img2, new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
            int r = 10;
            boolean dentro = true;
            for (double[] punto : puntos) {
                // Imgproc.circle(img2, new Point(rect.x, rect.y), r, new Scalar(0, 255, 0));
                if (piv.contains(new Point(punto)) == false) {
                    dentro = false;
                    // break;
                }
            }
            if (dentro) {
                // Imgcodecs.imwrite(urlHelen + "\\face" + (Math.random()) + ".png", img2);
                return piv;
            }
        }
    }
    // Imgcodecs.imwrite(urlHelen + "\\face" + (Math.random()) + ".png", img2);
    return null;
}
From source file: facerecognition.sample1.java

private static void draw_initial_points() {
    // PrintWriter pw = null;
    // try {
    faceDetectors = new CascadeClassifier[] { new CascadeClassifier("haarcascade_frontalface_alt_tree.xml"),
            new CascadeClassifier("haarcascade_frontalface_alt2.xml"),
            new CascadeClassifier("haarcascade_profileface.xml") };
    File[] image_files = get_images();
    int index = 0;
    int contador = 0;
    // File resumen = new File(urlHelen + "\\summary.sum");
    // pw = new PrintWriter(resumen);
    double[][] mask = leer_mask();
    for (File image_file : image_files) {
        System.out.println("Analyzing image " + (++index) + " of " + image_files.length);
        // BufferedImage img = convert_to_BufferedImage(image_file);
        // File puntos_file = get_puntos_file(image_file);
        // double[][] puntos = LWF.leerpuntos(puntos_file);
        Mat image = Imgcodecs.imread(image_file.getAbsolutePath());
        Mat img2 = image.clone();
        for (CascadeClassifier faceDetector : faceDetectors) {
            // Detect faces in the image.
            // MatOfRect is a special container class for Rect.
            MatOfRect faceDetections = new MatOfRect();
            faceDetector.detectMultiScale(image, faceDetections);
            System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

            // Draw a bounding box around each face.
            for (Rect rect : faceDetections.toArray()) {
                Rect piv = rect.clone();
                // expand the rectangle
                int h = piv.height, w = piv.width;
                piv.x -= w * percent / 2;
                piv.y -= h * percent / 2;
                piv.height *= (1 + percent);
                piv.width *= (1 + percent);
                // Mat croped = new Mat(image, rect);
                // Imgcodecs.imwrite("face" + (++i) + ".png", croped);
                Imgproc.rectangle(img2, new Point(piv.x, piv.y),
                        new Point(piv.x + piv.width, piv.y + piv.height), new Scalar(0, 255, 0));
                for (double[] punto : mask) {
                    Imgproc.circle(img2,
                            new Point(piv.x + piv.width * punto[0], piv.y + piv.height * punto[1]), 5,
                            new Scalar(0, 255, 0));
                }
            }
        }
        // pw.close();
        Imgcodecs.imwrite(urlHelen + "\\face" + (Math.random()) + ".png", img2);
    }
}
From source file: frclib.FrcFaceDetector.java
License: Open Source License

/**
 * This method updates the video stream with the detected faces overlaid on the image as rectangles.
 */
public void putFrame() {
    if (currImage != null) {
        super.putFrame(currImage, faceRects, new Scalar(0, 255, 0), 0);
    }
}
From source file: frclib.FrcVisionTarget.java
License: Open Source License

/**
 * This method updates the video stream with the detected targets overlaid on the image as rectangles.
 */
public void putFrame() {
    if (currImage != null) {
        super.putFrame(currImage, objectRects, new Scalar(0, 255, 0), 0);
    }
}