List of usage examples for org.opencv.imgcodecs Imgcodecs imread
public static Mat imread(String filename, int flags)
From source file:ImagetoPDF.java
public static void enhance(String fileName) throws IOException { Mat source = Imgcodecs.imread(fileName, Imgcodecs.CV_LOAD_IMAGE_COLOR); Mat destination = new Mat(source.rows(), source.cols(), source.type()); // Imgproc.cvtColor(source, destination, Imgproc.COLOR_BGR2GRAY); Imgproc.cvtColor(source, source, Imgproc.COLOR_BGR2GRAY); Mat imageMat = source;//from w ww .j a v a 2 s .co m Imgproc.GaussianBlur(imageMat, imageMat, new Size(3, 3), 0); Imgproc.adaptiveThreshold(imageMat, imageMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 5, 4); Imgcodecs.imwrite(fileName, imageMat); }
From source file:Questao3.java
void desvioPadrao() { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); /**/* w w w . j ava2 s. co m*/ * Array temporario que ir armazenar os valores de um pixel * para todas as imagens de entrada. */ double[] tmp = new double[arquivos.length]; /** * Tranforma imagens em matrizes em escala de cinza */ for (String s : arquivos) { images.add(Imgcodecs.imread(s, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE)); } /** * Matriz que ir armazenar os valores de desvio padro */ double[][] stdDev = new double[images.get(0).rows()][images.get(0).cols()]; /** * Calculo do desvio padro para cada pixel de todas as imagens. */ for (int i = 0; i < images.get(0).rows(); i++) { for (int j = 0; j < images.get(0).cols(); j++) { for (int k = 0; k < arquivos.length; k++) { tmp[k] = images.get(k).get(i, j)[0]; } double tmpDev = Math.sqrt(somatorio(tmp) / arquivos.length); stdDev[i][j] = tmpDev; } } /** * Cria uma matriz opencv do tipo escala de cinza */ ruido = new Mat(new Size((int) images.get(0).cols(), (int) images.get(0).rows()), CvType.CV_8UC1); /** * Matriz que ira receber a matriz de desvio padro normalizada * Maior rudo = 255 * Ausncia de rudo = 0 */ double[][] d = normalizacao(stdDev); /** * Salva o resultado na imagem ruido.jpg */ Imgcodecs.imwrite("ruido.jpg", ruido); showResult("ruido.jpg"); }
From source file:br.com.prj.TelaPrincipal.java
private void btnProcurarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnProcurarActionPerformed // remove todos os labels do componente jPanel1.removeAll();/*from www. j a va 2 s . c o m*/ // reposicionao o primeiro label boundX = 12; boundY = 22; CascadeClassifier faceDetector = new CascadeClassifier(URL_LIB_FACE); imagemCarregada = Imgcodecs.imread(selectedFile.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR); // imagem com retangulo dos rostos encontrados imagemDest = new Mat(imagemCarregada.rows(), imagemCarregada.cols(), imagemCarregada.type()); MatOfRect faceDetections = new MatOfRect(); faceDetector.detectMultiScale(imagemCarregada, faceDetections); // tentar verificar se o rect encontrado possui olhos Rect[] faceEncontrada = new Rect[faceDetections.toArray().length]; int i = 0; for (Rect rect : faceDetections.toArray()) { faceEncontrada[i] = new Rect(new Point(rect.x - PAD_LATERAL + 5, rect.y - PAD_SUPERIOR + 5), new Point(rect.x + rect.width + PAD_LATERAL, (rect.y + rect.height + PAD_SUPERIOR) - 5)); adicionarLabel(convertMatToImage(new Mat(imagemCarregada, faceEncontrada[i])), faceEncontrada[i]); // ADICIONA RETANGULO DO ROSTO NA IMAGEM // Imgproc.rectangle(imagemDest, // new Point(rect.x - PAD_LATERAL, rect.y - PAD_SUPERIOR), // new Point(rect.x + rect.width + PAD_LATERAL, (rect.y + rect.height + PAD_SUPERIOR) - 5), // new Scalar(0, 255, 0)); i++; } if (faceDetections.toArray().length == 0) { totalRostos.setText("No foi possvel identificar nenhum rosto na imagem selecionada."); } else { totalRostos .setText("Identificamos " + faceDetections.toArray().length + " rosto(s) na imagem carregada."); } }
From source file:com.jiminger.image.ImageFile.java
License:Open Source License
/**
 * Reads an image file into a {@link BufferedImage}, trying ImageIO first and
 * falling back to OpenCV for formats ImageIO cannot decode (e.g. JPEG 2000).
 *
 * @param filename path of the image file to read
 * @return the decoded image, never null
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException if reading fails
 * @throws IllegalArgumentException if neither ImageIO nor OpenCV can decode the file
 */
public static BufferedImage readBufferedImageFromFile(final String filename) throws IOException {
    LOGGER.trace("Reading image from {}", filename);
    final File f = new File(filename);
    if (!f.exists())
        throw new FileNotFoundException(filename);
    BufferedImage ret = ImageIO.read(f);
    if (ret == null) {
        LOGGER.info("Failed to read '{}' using ImageIO", filename);
        try (Closer closer = new Closer()) {
            final Mat mat = Imgcodecs.imread(filename, IMREAD_UNCHANGED);
            // BUG FIX: imread never returns null on failure; it returns an
            // EMPTY Mat, so the previous '== null' check could never trigger
            // and empty mats fell through to the conversion below.
            if (mat == null || mat.empty())
                throw new IllegalArgumentException("Can't read '" + filename
                        + "' as an image. No codec available in either ImageIO or OpenCv");
            // BUG FIX: the channel COUNT was being passed through
            // CvType.channels(), which expects a full type code and therefore
            // returned 1 for any real count; compare the count directly.
            // JPEG 2000 decodes as RGB, so swap to OpenCV's BGR ordering.
            if (filename.endsWith(".jp2") && mat.channels() > 1)
                Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGR);
            ret = Utils.mat2Img(mat);
        }
    }
    LOGGER.trace("Read {} from {}", ret, filename);
    return ret;
}
From source file:com.jiminger.image.ImageFile.java
License:Open Source License
/**
 * Reads an image file into a {@code CvRaster}, trying OpenCV first and
 * falling back to ImageIO when OpenCV has no codec for the format.
 *
 * @param filename path of the image file to read
 * @param closer   resource tracker that takes ownership of the returned raster's Mat
 * @return the decoded raster
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException if reading fails in the ImageIO fallback
 */
public static CvRaster readMatFromFile(final String filename, final Closer closer) throws IOException {
    LOGGER.trace("Reading image from {}", filename);
    final File f = new File(filename);
    if (!f.exists())
        throw new FileNotFoundException(filename);
    final CvRaster ret;
    try (Closer cx = new Closer()) {
        final Mat mat = Imgcodecs.imread(filename, IMREAD_UNCHANGED);
        // BUG FIX: imread signals failure with an EMPTY Mat, never null, so
        // the previous '== null' test silently skipped the ImageIO fallback.
        if (mat == null || mat.empty()) {
            LOGGER.debug("Failed to read '" + filename + "' using OpenCV");
            ret = Utils.img2CvRaster(ImageIO.read(f));
        } else {
            // BUG FIX: the channel COUNT was being passed through
            // CvType.channels(), which expects a full type code; compare the
            // count directly. JPEG 2000 decodes as RGB -> convert to BGR.
            if (filename.endsWith(".jp2") && mat.channels() > 1)
                Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGR);
            ret = CvRaster.move(mat, closer);
        }
    }
    LOGGER.trace("Read {} from {}", ret, filename);
    return ret;
}
From source file:com.Linguist.model.sharpeningClass.java
public File imagePreprocessing(String imgeNme, String extnsn) { File sharpen = null;//from w w w .j a va2 s . co m try { // System.loadLibrary(Core.NATIVE_LIBRARY_NAME); Mat source = Imgcodecs.imread( "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + imgeNme, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE); Mat destination = new Mat(source.rows(), source.cols(), source.type()); Imgproc.equalizeHist(source, destination); Imgcodecs.imwrite("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\contrast.jpg", destination); sharpen = new File("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\contrast.jpg"); } catch (Exception e) { System.out.println("error: " + e.getMessage()); } return sharpen; }
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Compute the rectangle where the searched picture is and the rotation angle between both images
 * Throw {@link ImageSearchException} if picture is not found
 * @return
 * @Deprecated Kept here for information, but open CV 3 does not include SURF anymore for java build
 */
public void detectCorrespondingZone() {
    // Load both pictures in colour (CV_LOAD_IMAGE_COLOR is the legacy alias of IMREAD_COLOR).
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);

    // Detect SURF keypoints in both the searched object and the scene.
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    // Compute SURF descriptors for the detected keypoints.
    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    // Dump the object keypoints to a temporary picture for diagnostics;
    // failures here are intentionally ignored (debug aid only).
    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    // A uniformly coloured picture yields no keypoints, making matching impossible.
    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }

    // FLANN-based matching requires CV_32F descriptors.
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    // Track min/max match distances (logged for diagnostics only).
    double maxDist = 0;
    double minDist = 10000;
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }
    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    // Keep only matches whose descriptor distance is below the configured threshold.
    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    // Draw the retained matches side by side into imgMatch (used below in debug mode).
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    // Collect matched point pairs: queryIdx indexes the object, trainIdx the scene.
    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();
    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();
    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }
    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    // Project the four corners of the object into scene coordinates with the homography.
    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners so they land on the scene half of the side-by-side imgMatch
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Detect the object inside the scene
 * We also search the scale of the scene from 20% to 120% scale by steps
 * steps are 10%, with 0.6 accuracy
 * then when a good match is found, we search around by 5% scale steps with 0.7 accuracy
 * then when a good match is found, we search around by 2.5% scale steps with 0.8 accuracy
 *
 * example:
 * first pass: scales are: 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200
 * good matches are found around 600 and 700
 * second pass: scales are 550, 600, 650, 700, 750
 * good matches are found at 650
 * third pass: scales are 625, 650, 675
 *
 * The best match is at 675
 */
public void detectExactZoneWithScale() {
    // Template matching is done in grayscale (CV_LOAD_IMAGE_GRAYSCALE = legacy IMREAD_GRAYSCALE).
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);

    // Matches accumulated across passes; written from worker threads, hence synchronized.
    List<TemplateMatchProperties> matches = Collections.synchronizedList(new ArrayList<>());

    // Refinement schedule: scale step (per-mille) -> matching threshold for that pass.
    Map<Integer, Double> scaleSteps = new LinkedHashMap<>();
    scaleSteps.put(100, 0.6);
    scaleSteps.put(50, 0.7);
    scaleSteps.put(25, 0.8);

    int currentStep = 100;
    // Scales already tried, to avoid recomputing them in later passes.
    Set<Integer> computedScales = new HashSet<>();
    while (currentStep >= 25) {
        final double currentThreshold = scaleSteps.get(currentStep);

        // Scales (per-mille of original scene size) to evaluate in this pass.
        Set<Integer> localScales = Collections.synchronizedSet(new HashSet<>());
        if (currentStep == 100) {
            // first loop: coarse sweep from 20% to 120%
            for (int scale = 200; scale < 1200; scale += currentStep) {
                localScales.add(scale);
            }
        } else {
            if (matches.isEmpty()) {
                throw new ImageSearchException("no matches");
            }
            // Refine around every still-active match from the previous pass.
            for (TemplateMatchProperties tmpM : matches) {
                if (tmpM.isActive()) {
                    localScales.add(tmpM.getMatchScale() - currentStep);
                    localScales.add(tmpM.getMatchScale() + currentStep);
                }
            }
        }

        // One template-matching task per candidate scale, spread over all cores.
        ExecutorService executorService = Executors
                .newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        for (int scale : localScales) {
            if (computedScales.contains(scale)) {
                continue;
            }
            computedScales.add(scale);

            // resize to scale factor
            final int localScale = scale;
            Size sz = new Size(sceneImageMat.cols() * scale / 1000.0,
                    sceneImageMat.rows() * localScale / 1000.0);

            // skip if resized image is smaller than object
            if (sz.width < objectImageMat.cols() || sz.height < objectImageMat.rows()) {
                continue;
            }

            executorService.submit(() -> {
                Mat resizeSceneImageMat = new Mat();
                Imgproc.resize(sceneImageMat, resizeSceneImageMat, sz);

                try {
                    TemplateMatchProperties match = detectExactZone2(resizeSceneImageMat, objectImageMat,
                            localScale, currentThreshold);
                    matches.add(match);
                } catch (ImageSearchException e) {
                    // No match at this scale: expected outcome, deliberately ignored.
                }
            });
        }
        executorService.shutdown();
        try {
            executorService.awaitTermination(10, TimeUnit.SECONDS);
        } catch (Exception e) {
            logger.info("Could not compute scale within 10 seconds", e);
        }

        // shortcut if we find a very good match
        double cleanThreshold = currentThreshold;
        // Sort best-first by match value.
        matches.sort((TemplateMatchProperties t1, TemplateMatchProperties t2) -> -(t1.getMatchValue()
                .compareTo(t2.getMatchValue())));
        if (!matches.isEmpty() && matches.get(0).getMatchValue() > 0.9) {
            cleanThreshold = 0.9;
            currentStep = Math.min(currentStep, 50);
        }
        currentStep = currentStep / 2;

        // clean matches from too low matching values
        for (TemplateMatchProperties t : matches) {
            if (t.getMatchValue() < cleanThreshold) {
                t.setActive(false);
            }
        }
    }

    // get the best match
    matches.sort((TemplateMatchProperties t1, TemplateMatchProperties t2) -> -(t1.getMatchValue()
            .compareTo(t2.getMatchValue())));
    if (!matches.isEmpty()) {
        TemplateMatchProperties bestMatch = matches.get(0);
        if (bestMatch.getMatchValue() < 1 - detectionThreshold) {
            throw new ImageSearchException(
                    String.format("No match found for threshold %.2f, match found with value %.2f",
                            1 - detectionThreshold, bestMatch.getMatchValue()));
        }

        // Convert the best match location/size back to un-scaled scene coordinates.
        // NOTE(review): rows() is passed as the rectangle WIDTH and cols() as its
        // HEIGHT, which looks swapped for non-square templates, yet sizeRatio
        // below divides width by cols() — confirm the intended convention before changing.
        detectedRectangle = new Rectangle((int) (bestMatch.getMatchLoc().x / bestMatch.getDoubleScale()),
                (int) (bestMatch.getMatchLoc().y / bestMatch.getDoubleScale()),
                (int) (objectImageMat.rows() / bestMatch.getDoubleScale()),
                (int) (objectImageMat.cols() / bestMatch.getDoubleScale()));

        if (debug) {
            try {
                Imgproc.rectangle(sceneImageMat, new Point(detectedRectangle.x, detectedRectangle.y),
                        new Point(detectedRectangle.x + detectedRectangle.width,
                                detectedRectangle.y + detectedRectangle.height),
                        new Scalar(0, 255, 0));
                showResultingPicture(sceneImageMat);
            } catch (IOException e) {
            }
        }

        rotationAngle = 0;
        sizeRatio = detectedRectangle.width / (double) objectImageMat.cols();
    } else {
        throw new ImageSearchException("no matching has been found");
    }
}
From source file:contador_de_moedas.Circulo.java
private void baseImageList() { File dir = new File("baseConhecimento/"); if (dir.isDirectory()) { String[] arqs = dir.list(); vetMatFile = new Mat[arqs.length]; for (int i = 0; i < arqs.length; i++) { String nome = "baseConhecimento/" + arqs[i]; vetMatFile[i] = Imgcodecs.imread(nome, CvType.CV_8UC3); }//w w w .jav a 2 s.c o m } }
From source file:faceDetectionV1.FaceDetection.java
public void detectFaces(File file, ImagePanel imagePanel) { Mat image = Imgcodecs.imread(file.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR); MatOfRect facedetections = new MatOfRect(); cascadeClassifier.detectMultiScale(image, facedetections); for (Rect rect : facedetections.toArray()) { Imgproc.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(100, 100, 250), 10); }// w w w. j a va 2s.c om BufferedImage bufferedImage = convertMatToImage(image); imagePanel.updateImage(bufferedImage); }