Example usage for org.opencv.imgproc Imgproc resize

List of usage examples for org.opencv.imgproc Imgproc resize

Introduction

On this page you can find example usages for org.opencv.imgproc Imgproc.resize.

Prototype

public static void resize(Mat src, Mat dst, Size dsize) 

Source Link

Usage

From source file:frmMain.java

/**
 * Resizes the given image to 640x480, JPEG-encodes it, and displays it in a new
 * Swing frame.
 *
 * @param opencvImage the image to show; resized in place to 640x480
 */
public static void showResult(Mat img) {
    // Normalize to a fixed preview size before encoding (resize works in place here).
    Imgproc.resize(img, img, new Size(640, 480));
    MatOfByte matOfByte = new MatOfByte();
    Highgui.imencode(".jpg", img, matOfByte);
    byte[] byteArray = matOfByte.toArray();
    // try-with-resources closes the stream even if ImageIO.read throws.
    try (InputStream in = new ByteArrayInputStream(byteArray)) {
        BufferedImage bufImage = ImageIO.read(in);
        if (bufImage == null) {
            // ImageIO.read returns null (no exception) when no reader understands the bytes.
            System.err.println("showResult: could not decode encoded image bytes");
            return;
        }
        JFrame frame = new JFrame();
        // Dispose the window on close instead of the HIDE default, so it does not leak.
        frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        frame.getContentPane().add(new JLabel(new ImageIcon(bufImage)));
        frame.pack();
        frame.setVisible(true);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:OctoEye.java

License:Open Source License

/**
 * Scales {@code dst} up by a factor of two into the {@code dst2} field and returns it.
 *
 * @return the freshly computed 2x-upscaled Mat stored in {@code dst2}
 */
public Mat getDst2x() {
    // Pre-allocation is largely redundant — resize (re)creates dst2 with the target
    // size and the source's type — but it guarantees dst2 is a non-null Mat.
    dst2 = new Mat(src.rows() * 2, src.cols() * 2, CvType.CV_8UC3);
    // NOTE(review): the target size is derived from `src`, but the pixels being
    // scaled come from `dst` — confirm the two Mats share dimensions.
    Imgproc.resize(dst, dst2, new Size(src.cols() * 2, src.rows() * 2));
    return dst2;
}

From source file:Retrive.java

/**
 * Normalizes both images in place to a common 400x400 size so they can be
 * compared directly.
 *
 * @param query      the query image, resized in place
 * @param img_corpse the corpus image, resized in place
 */
public void Preprocess(Mat query, Mat img_corpse) {
    Size target = new Size(400, 400);
    Imgproc.resize(query, query, target);
    Imgproc.resize(img_corpse, img_corpse, target);
}

From source file:arlocros.Imshow.java

License:Apache License

/**
 * Displays an OpenCV Mat in the shared {@code Imshow} window, creating the
 * window lazily on first call.
 *
 * @param opencvImage the image to display; may be resized in place when the
 *                    window uses a custom size
 */
public static void show(Mat opencvImage) {

    // NOTE(review): Dimension(width, height) receives rows() (height) first and
    // cols() (width) second. The later Imshow("", height, width) call reads the
    // fields back swapped, which may compensate — verify against the Imshow class.
    Dimension frameSize = new Dimension(opencvImage.rows(), opencvImage.cols());
    if (frame == null) {
        frame = new Imshow("", frameSize.height, frameSize.width);
        frame.Window.setVisible(true);

        frame.Window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        if (frame.SizeCustom) {
            // NOTE(review): Size takes (width, height) but is passed (Height, Width)
            // — confirm this is intentional.
            Imgproc.resize(opencvImage, opencvImage, new Size(frame.Height, frame.Width));
        }
    }
    BufferedImage bufImage = null;
    try {

        // Choose a BufferedImage layout matching the Mat's channel count.
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (opencvImage.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        // Copy the raw Mat bytes straight into the BufferedImage's backing array;
        // assumes the Mat is continuous 8-bit data — TODO confirm for all callers.
        int bufferSize = opencvImage.channels() * opencvImage.cols() * opencvImage.rows();
        byte[] b = new byte[bufferSize];
        opencvImage.get(0, 0, b);
        BufferedImage bufferedImage = new BufferedImage(opencvImage.cols(), opencvImage.rows(), type);
        final byte[] targetPixels = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
        System.arraycopy(b, 0, targetPixels, 0, b.length);
        bufImage = bufferedImage;
        frame.image.setImage(bufImage);
        frame.Window.pack();
        frame.label.updateUI();
        //frame.Window.setVisible(true);
    } catch (RuntimeException e) {
        logger.info("Exception while visualizing.", e);
    }
}

From source file:Beans.Imagen.java

/**
 * Converts the stored {@code fotografia} BufferedImage into an OpenCV Mat and
 * resizes it to 480 (width) x 640 (height).
 *
 * @return a new CV_8UC3 Mat containing the resized photograph
 */
public Mat getMatFotografia() {
    // Raw bytes of the underlying raster; assumes a byte-backed 3-bytes-per-pixel
    // buffer compatible with CV_8UC3 — TODO confirm the BufferedImage type.
    byte[] pixels = ((DataBufferByte) fotografia.getRaster().getDataBuffer()).getData();

    // Create a Matrix the same size of image (presumably alto = rows, ancho = cols — verify).
    Mat image = new Mat(alto, ancho, CvType.CV_8UC3);
    // Fill Matrix with image values
    image.put(0, 0, pixels);
    // Size(width, height): target is 480 wide by 640 tall.
    Imgproc.resize(image, image, new Size(480, 640));
    return image;
}

From source file:bollettini.BullettinCompiler.java

/**
 * Renders the bullettin Mat as a fixed-size grayscale Swing image and opens it
 * in a new {@code View} window.
 */
public void show() {
    // Scale the bullettin to the fixed on-screen preview size.
    Mat preview = new Mat();
    Imgproc.resize(bullettin, preview, new Size(1100, 335));

    // Copy the raw pixel bytes into the AWT image's backing buffer.
    int imageType = BufferedImage.TYPE_BYTE_GRAY;
    byte[] pixels = new byte[preview.channels() * preview.cols() * preview.rows()];
    preview.get(0, 0, pixels); // get all the pixels
    BufferedImage awtImage = new BufferedImage(preview.cols(), preview.rows(), imageType);
    byte[] raster = ((DataBufferByte) awtImage.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, raster, 0, pixels.length);

    // Hand the rendered icon to the Swing view and show it.
    View view = new View();
    view.init(this);
    view.setIcon(new ImageIcon(awtImage));
    view.setVisible(true);
    view.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.Contours.LocalBinaryPattern.java

License:Open Source License

/**
 * Computes a Local Binary Pattern (LBP) image for every input image: each
 * interior pixel is replaced by a value derived from which of its 8 neighbours
 * are brighter than it.
 *
 * @param preProcessor carrier of the input images; its image list is replaced
 *                     with the LBP results
 * @return the same preProcessor instance, for chaining
 */
@Override
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        // Resize for Performance enhancement
        Size size = new Size(preProcessor.getN(), preProcessor.getN());
        Imgproc.resize(img, img, size);
        // The output skips the 1-pixel border, hence -2 on each dimension.
        Mat lbp = new Mat(img.rows() - 2, img.cols() - 2, img.type());
        for (int i = 1; i < img.rows() - 1; i++) {
            for (int j = 1; j < img.cols() - 1; j++) {
                // One bit per neighbour, clockwise starting at the top-left.
                BitSet out = new BitSet(8);
                double cen = img.get(i, j)[0];
                if (img.get(i - 1, j - 1)[0] > cen)
                    out.set(0);
                if (img.get(i - 1, j)[0] > cen)
                    out.set(1);
                if (img.get(i - 1, j + 1)[0] > cen)
                    out.set(2);
                if (img.get(i, j + 1)[0] > cen)
                    out.set(3);
                if (img.get(i + 1, j + 1)[0] > cen)
                    out.set(4);
                if (img.get(i + 1, j)[0] > cen)
                    out.set(5);
                if (img.get(i + 1, j - 1)[0] > cen)
                    out.set(6);
                if (img.get(i, j - 1)[0] > cen)
                    out.set(7);
                // Fold the set bits into a decimal value by hopping from one set
                // bit to the next (out.length() is highest set bit + 1).
                // NOTE(review): the weight 2^(out.length()-1-index) is relative to
                // the highest set bit rather than a fixed 8-bit position, so
                // distinct patterns can collide (e.g. {bit0} and {bit7} both map
                // to 1). This differs from canonical LBP weighting — confirm it
                // is intentional before changing, as trained models may depend on it.
                int value = 0;
                for (int k = 0; k < out.length(); k++) {
                    int index = out.nextSetBit(k);
                    value += Math.pow(2, out.length() - 1 - index);
                    k = index;
                }
                lbp.put(i - 1, j - 1, value);
            }
        }
        processed.add(lbp);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.StandardPostprocessing.Resize.java

License:Open Source License

/**
 * Resizes every image in place to the square N x N size configured on the
 * preprocessor.
 *
 * @param preProcessor carrier of the input images; its image list is replaced
 *                     with the resized Mats
 * @return the same preProcessor instance, for chaining
 */
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> resized = new ArrayList<Mat>();
    for (Mat image : preProcessor.getImages()) {
        Imgproc.resize(image, image, new Size(preProcessor.getN(), preProcessor.getN()));
        resized.add(image);
    }
    preProcessor.setImages(resized);
    return preProcessor;
}

From source file:ch.zhaw.facerecognitionlibrary.Recognition.TensorFlow.java

License:Open Source License

/**
 * Runs the TensorFlow classifier on the given image and returns its output as
 * a float-vector Mat.
 *
 * @param img the input image; resized in place to inputSize x inputSize
 * @return a Mat holding the parsed feature vector
 */
public Mat getFeatureVector(Mat img) {
    // The network expects a square input of inputSize x inputSize pixels.
    Imgproc.resize(img, img, new Size(inputSize, inputSize));

    Bitmap bitmap = Bitmap.createBitmap(inputSize, inputSize, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(img, bitmap);

    // Classify and split the textual output into individual feature values.
    String[] parts = classifyImageBmp(inputLayer, outputLayer, outputSize, bitmap).split(STRING_SPLIT_CHARACTER);

    System.out.println(parts.length);

    List<Float> features = new ArrayList<>();
    for (String part : parts) {
        features.add(Float.parseFloat(part));
    }

    return Converters.vector_float_to_Mat(features);
}

From source file:classes.FloodFiller.java

/**
 * Flood-fills the region around {@code seed}, cleans the resulting mask with
 * morphological filters, extracts the largest contour, and stores the
 * intermediate artifacts (mean color, filled area, contour images, mass
 * center, SVG-style path, search window) in the enclosing object's fields.
 *
 * @param seed                starting point of the flood fill, in image coordinates
 * @param lo                  lower color-difference tolerance for the fill
 * @param up                  upper color-difference tolerance for the fill
 * @param backgroundColor     background color of the generated contour images
 * @param contourFillingColor color used to draw/fill the contours
 */
private void fillFrom(Point seed, int lo, int up, Scalar backgroundColor, Scalar contourFillingColor) {

    // Mean color of a 10x10 patch around the seed, kept for later color reasoning.
    Mat object = ObjectGenerator.extract(image, seed.x, seed.y, 10, 10);
    this.meanColor = Core.mean(object);

    Rect ccomp = new Rect();
    // floodFill requires a mask two pixels larger than the source image.
    Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1);

    int connectivity = 4;
    int newMaskVal = 255;
    int ffillMode = 1;

    // The mask fill value is encoded in bits 8-15 of the flags word.
    int flags = connectivity + (newMaskVal << 8) + (ffillMode == 1 ? Imgproc.FLOODFILL_FIXED_RANGE : 0);

    Scalar newVal = new Scalar(0.299, 0.587, 0.114);

    // NOTE(review): mask was just zero-initialized, so this threshold appears to
    // be a no-op (every pixel stays 0) — confirm it was not meant for another Mat.
    Imgproc.threshold(mask, mask, 1, 128, Imgproc.THRESH_BINARY);

    // The fill runs on a clone, so `image` itself stays untouched; the outputs of
    // interest are the filled pixel count and the region recorded in `mask`.
    filledArea = Imgproc.floodFill(image.clone(), mask, seed, newVal, ccomp, new Scalar(lo, lo, lo),
            new Scalar(up, up, up), flags);

    //        Highgui.imwrite("mask.png", mask);
    ImageUtils.saveImage(mask, "mask.png", request);

    morphologicalImage = new Mat(image.size(), CvType.CV_8UC3);

    // 3x3 all-ones structuring element for the morphology passes below.
    Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));

    // Replicate the single-channel mask into a 3-channel image in place.
    ArrayList<Mat> mask3 = new ArrayList<Mat>();
    mask3.add(mask);
    mask3.add(mask);
    mask3.add(mask);
    Core.merge(mask3, mask);

    // Applying morphological filters: erode, then close (fill holes), then open (drop specks).
    Imgproc.erode(mask, morphologicalImage, element);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element,
            new Point(-1, -1), 9);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1),
            2);
    // The mask is 2px larger than the image, so scale back to the image size.
    Imgproc.resize(morphologicalImage, morphologicalImage, image.size());

    //        Highgui.imwrite("morphologicalImage.png", morphologicalImage);
    ImageUtils.saveImage(morphologicalImage, "morphologicalImage.png", request);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    // Take one channel of the merged mask as the binary image for contour search.
    Core.split(mask, mask3);
    Mat binarymorphologicalImage = mask3.get(0);

    Imgproc.findContours(binarymorphologicalImage.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_NONE);

    contoursImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor);

    int thickness = -1; // Thickness should be lower than zero in order to draw the filled contours
    Imgproc.drawContours(contoursImage, contours, -1, contourFillingColor, thickness); // Drawing all the contours found
    //        Highgui.imwrite("allContoursImage.png", contoursImage);
    ImageUtils.saveImage(contoursImage, "allContoursImage.png", request);

    if (contours.size() > 1) {

        // Discard contours outside these pixel bounds before picking the biggest.
        int minContourWith = 20;
        int minContourHeight = 20;
        int maxContourWith = 6400 / 2;
        int maxContourHeight = 4800 / 2;

        contours = filterContours(contours, minContourWith, minContourHeight, maxContourWith, maxContourHeight);
    }

    if (contours.size() > 0) {

        MatOfPoint biggestContour = contours.get(0); // getting the biggest contour
        contourArea = Imgproc.contourArea(biggestContour);

        if (contours.size() > 1) {
            biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there are more than one
        }

        // Serialize the contour as an SVG-style path ("M x y L x y ... Z").
        Point[] points = biggestContour.toArray();
        path = "M " + (int) points[0].x + " " + (int) points[0].y + " ";
        for (int i = 1; i < points.length; ++i) {
            Point v = points[i];
            path += "L " + (int) v.x + " " + (int) v.y + " ";
        }
        path += "Z";

        biggestContourImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor);

        Imgproc.drawContours(biggestContourImage, contours, 0, contourFillingColor, thickness);

        //            Highgui.imwrite("biggestContourImage.png", biggestContourImage);
        ImageUtils.saveImage(biggestContourImage, "biggestContourImage.png", request);

        Mat maskForColorExtraction = biggestContourImage.clone();

        // Grow or shrink the mask depending on the background polarity so the
        // extraction avoids picking up border pixels.
        if (isWhite(backgroundColor)) {
            Imgproc.dilate(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3);
        } else {
            Imgproc.erode(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3);
        }

        //            Highgui.imwrite("maskForColorExtraction.png", maskForColorExtraction);
        ImageUtils.saveImage(maskForColorExtraction, "maskForColorExtraction.png", request);

        Mat extractedColor = new Mat();

        // AND keeps pixels inside a white-on-black mask; OR is used otherwise.
        if (isBlack(backgroundColor) && isWhite(contourFillingColor)) {
            Core.bitwise_and(maskForColorExtraction, image, extractedColor);

        } else {
            Core.bitwise_or(maskForColorExtraction, image, extractedColor);
        }

        //            Highgui.imwrite("extractedColor.png", extractedColor);
        ImageUtils.saveImage(extractedColor, "extractedColor.png", request);

        computedSearchWindow = Imgproc.boundingRect(biggestContour);
        topLeftCorner = computedSearchWindow.tl();

        // Crop to the bounding box (minus one pixel on each dimension).
        Rect croppingRect = new Rect(computedSearchWindow.x, computedSearchWindow.y,
                computedSearchWindow.width - 1, computedSearchWindow.height - 1);

        Mat imageForTextRecognition = new Mat(extractedColor.clone(), croppingRect);
        //            Highgui.imwrite(outImageName, imageForTextRecognition);
        ImageUtils.saveImage(imageForTextRecognition, outImageName, request);

        // The disabled block below was an OCR pass over the cropped region; kept
        // for reference.
        //            
        //
        //            Mat data = new Mat(imageForTextRecognition.size(), CvType.CV_8UC3, backgroundColor);
        //            imageForTextRecognition.copyTo(data);
        //            data.convertTo(data, CvType.CV_8UC3);
        //
        //            // The meanColor variable represents the color in the GBR space, the following line transforms this to the RGB color space, which
        //            // is assumed in the prepareImage method of the TextRecognitionPreparer class
        //            Scalar userColor = new Scalar(meanColor.val[2], meanColor.val[1], meanColor.val[0]);
        //
        //            ArrayList<String> recognizableImageNames = TextRecognitionPreparer.generateRecognizableImagesNames(data, backgroundColor, userColor);
        //            for (String imageName : recognizableImageNames) {
        //
        //                try {
        //                    // First recognition step
        //                    String recognizedText = TextRecognizer.recognize(imageName, true).trim();
        //                    if (recognizedText != null && !recognizedText.isEmpty()) {
        //                        recognizedStrings.add(recognizedText);
        //                    }
        //                    // Second recognition step
        //                    recognizedText = TextRecognizer.recognize(imageName, false).trim();
        //                    if (recognizedText != null && !recognizedText.isEmpty()) {
        //                        recognizedStrings.add(recognizedText);
        //                    }
        //                    
        //                } catch (Exception e) {
        //                }
        //            }
        //            

        // compute all moments
        Moments mom = Imgproc.moments(biggestContour);
        massCenter = new Point(mom.get_m10() / mom.get_m00(), mom.get_m01() / mom.get_m00());

        // draw black dot
        Core.circle(contoursImage, massCenter, 4, contourFillingColor, 8);
    }

}