Example usage for org.opencv.core CvType CV_8UC1

List of usage examples for org.opencv.core CvType CV_8UC1

Introduction

On this page you can find example usages of org.opencv.core CvType CV_8UC1.

Prototype

int CV_8UC1

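Before the project-specific examples, here is a minimal, self-contained sketch of the constant in action. It only assumes that the OpenCV Java bindings are on the classpath and that the matching native library can be loaded; the class name, matrix size and fill value are made up for illustration.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class Cv8uc1Demo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // e.g. opencv_java300, depending on the build

        // A 4x4 single-channel 8-bit unsigned matrix, filled with 128.
        Mat gray = new Mat(4, 4, CvType.CV_8UC1, new Scalar(128));

        // CV_8U data maps to byte[] on the Java side; mask with 0xFF to read the unsigned value.
        byte[] pixel = new byte[1];
        gray.get(0, 0, pixel);
        System.out.println("pixel (0,0) = " + (pixel[0] & 0xFF));

        gray.release();
    }
}

CV_8UC1 is the type used throughout the examples below whenever a plain grayscale or binary image is needed.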

Usage

From source file:com.Linguist.model.grayscaleClass.java

@Override
public File imagePreprocessing(String image, String extnsn) {

    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;

    try {

        // loadOpenCV_Lib();
        //String path = "opencv\\build\\java\\x64\\opencv_java300.dll";
        FileInputStream fileName = new FileInputStream(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + image);
        InputStream input = fileName;
        bImge = ImageIO.read(input);
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY); // note: ImageIO typically delivers BGR-ordered bytes, so COLOR_BGR2GRAY would match the actual channel order
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) (mat2.elemSize())];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);

        String extn = null;
        switch (extnsn) {
        case ".jpg":
            extn = "jpg";
            break;
        case ".png":
            extn = "png";
            break;
        case ".pdf":
            extn = "pdf";
            break;
        case ".tiff":
            extn = "tif";
            break;

        }
        //writing the grayscale image to the folder
        grayscle = new File(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\grayscale" + "." + extn);
        ImageIO.write(bImage2, "jpg", grayscle);
    } catch (IOException ex) {
        System.out.println("" + ex.getMessage());
    } catch (Exception ex) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return grayscle;

}
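
The BufferedImage round-trip above can be avoided by letting OpenCV handle the file I/O as well. A minimal sketch of that variant, assuming OpenCV 3.x (where image I/O lives in org.opencv.imgcodecs.Imgcodecs) and hypothetical file paths:

static void writeGrayscale(String inputPath, String outputPath) {
    Mat color = Imgcodecs.imread(inputPath);              // imread delivers pixels in BGR order
    Mat gray = new Mat(color.size(), CvType.CV_8UC1);
    Imgproc.cvtColor(color, gray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite(outputPath, gray);                  // the encoder is chosen from the file extension
    color.release();
    gray.release();
}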

From source file:com.mitzuli.core.ocr.OcrPreprocessor.java

License:Open Source License

/**
 * Binarizes and cleans the input image for OCR, saving debugging images in the given directory.
 *
 * @param input the input image, which is recycled by this method, so the caller should make a defensive copy of it if necessary.
 * @param debugDir the directory to write the debugging images to, or null to disable debugging.
 * @return the preprocessed image.
 */
static Image preprocess(final Image input, final File debugDir) {
    // TODO Temporary workaround to allow manually enabling debugging (the global final variable should be used instead)
    boolean DEBUG = debugDir != null;

    // Initialization
    final Mat mat = input.toGrayscaleMat();
    final Mat debugMat = DEBUG ? input.toRgbMat() : null;
    input.recycle();
    final Mat aux = new Mat(mat.size(), CvType.CV_8UC1);
    final Mat binary = new Mat(mat.size(), CvType.CV_8UC1);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "1_input.jpg"));

    // Binarize the input image in mat through adaptive Gaussian thresholding
    Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 51,
            13);
    // Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 31, 7);

    // Edge detection
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_3X3); // Open
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_3X3); // Close
    Core.addWeighted(mat, 0.5, aux, 0.5, 0, mat); // Average
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_GRADIENT, KERNEL_3X3); // Gradient
    Imgproc.threshold(mat, mat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU); // Edge map
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "2_edges.jpg"));

    // Extract word level connected-components from the dilated edge map
    Imgproc.dilate(mat, mat, KERNEL_3X3);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "3_dilated_edges.jpg"));
    final List<MatOfPoint> wordCCs = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mat, wordCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter word level connected-components individually and calculate their average attributes
    final List<MatOfPoint> individuallyFilteredWordCCs = new ArrayList<MatOfPoint>();
    final List<MatOfPoint> removedWordCCs = new ArrayList<MatOfPoint>();
    double avgWidth = 0, avgHeight = 0, avgArea = 0;
    for (MatOfPoint cc : wordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.height >= 6 // bounding box height >= 6
                && boundingBox.area() >= 50 // bounding box area >= 50
                && (double) boundingBox.width / (double) boundingBox.height >= 0.25 // bounding box aspect ratio >= 1:4
                && boundingBox.width <= 0.75 * mat.width() // bounding box width <= 0.75 image width
                && boundingBox.height <= 0.75 * mat.height()) // bounding box height <= 0.75 image height
        {
            individuallyFilteredWordCCs.add(cc);
            avgWidth += boundingBox.width;
            avgHeight += boundingBox.height;
            avgArea += boundingBox.area();
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    wordCCs.clear();
    avgWidth /= individuallyFilteredWordCCs.size();
    avgHeight /= individuallyFilteredWordCCs.size();
    avgArea /= individuallyFilteredWordCCs.size();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, removedWordCCs, -1, BLUE, -1);
        removedWordCCs.clear();
    }

    // Filter word level connected-components in relation to their average attributes
    final List<MatOfPoint> filteredWordCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint cc : individuallyFilteredWordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.width >= 0.125 * avgWidth // bounding box width >= 0.125 average width
                && boundingBox.width <= 8 * avgWidth // bounding box width <= 8 average width
                && boundingBox.height >= 0.25 * avgHeight // bounding box height >= 0.25 average height
                && boundingBox.height <= 4 * avgHeight) // bounding box height <= 4 average height
        {
            filteredWordCCs.add(cc);
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    individuallyFilteredWordCCs.clear();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, filteredWordCCs, -1, GREEN, -1);
        Imgproc.drawContours(debugMat, removedWordCCs, -1, PURPLE, -1);
        removedWordCCs.clear();
    }

    // Extract paragraph level connected-components
    mat.setTo(BLACK);
    Imgproc.drawContours(mat, filteredWordCCs, -1, WHITE, -1);
    final List<MatOfPoint> paragraphCCs = new ArrayList<MatOfPoint>();
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_30X30);
    Imgproc.findContours(aux, paragraphCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter paragraph level connected-components according to the word level connected-components inside
    final List<MatOfPoint> textCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint paragraphCC : paragraphCCs) {
        final List<MatOfPoint> wordCCsInParagraphCC = new ArrayList<MatOfPoint>();
        aux.setTo(BLACK);
        Imgproc.drawContours(aux, Collections.singletonList(paragraphCC), -1, WHITE, -1);
        Core.bitwise_and(mat, aux, aux);
        Imgproc.findContours(aux, wordCCsInParagraphCC, new Mat(), Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);
        final Rect boundingBox = Imgproc.boundingRect(paragraphCC);
        final double center = mat.size().width / 2;
        final double distToCenter = center > boundingBox.x + boundingBox.width
                ? center - boundingBox.x - boundingBox.width
                : center < boundingBox.x ? boundingBox.x - center : 0.0;
        if (DEBUG) {
            System.err.println("****************************************");
            System.err.println("\tArea:                " + boundingBox.area());
            System.err.println("\tDistance to center:  " + distToCenter);
            System.err.println("\tCCs inside:          " + wordCCsInParagraphCC.size());
        }
        if ((wordCCsInParagraphCC.size() >= 10 || wordCCsInParagraphCC.size() >= 0.3 * filteredWordCCs.size())
                && mat.size().width / distToCenter >= 4) {
            textCCs.addAll(wordCCsInParagraphCC);
            if (DEBUG) {
                System.err.println("\tText:                YES");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_GREEN, 5);
            }
        } else {
            if (DEBUG) {
                System.err.println("\tText:                NO");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_RED, 5);
            }
        }
    }
    filteredWordCCs.clear();
    paragraphCCs.clear();
    mat.setTo(WHITE);
    Imgproc.drawContours(mat, textCCs, -1, BLACK, -1);
    textCCs.clear();
    if (DEBUG)
        Image.fromMat(debugMat).write(new File(debugDir, "4_filtering.jpg"));

    // Obtain the final text mask from the filtered connected-components
    Imgproc.erode(mat, mat, KERNEL_15X15);
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_30X30);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "5_text_mask.jpg"));

    // Apply the text mask to the binarized image
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "6_binary.jpg"));
    binary.setTo(WHITE, mat);
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "7_binary_text.jpg"));

    // Dewarp the text using Leptonica
    Pix pixs = Image.fromMat(binary).toGrayscalePix();
    Pix pixsDewarp = Dewarp.dewarp(pixs, 0, Dewarp.DEFAULT_SAMPLING, 5, true);
    final Image result = Image.fromGrayscalePix(pixsDewarp);
    if (DEBUG)
        result.write(new File(debugDir, "8_dewarp.jpg"));

    // Clean up
    pixs.recycle();
    mat.release();
    aux.release();
    binary.release();
    if (debugMat != null)
        debugMat.release();

    return result;
}
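
A possible call site, shown only as a hedged sketch: Image is the project's own wrapper class rather than an OpenCV type, the copy() helper and the capturedImage variable are hypothetical, and the debug directory is made up for illustration.

// The input is recycled by preprocess(), so keep a copy if the original is still needed afterwards.
Image copyForOcr = capturedImage.copy();                                      // hypothetical copy helper
File debugDir = new File(System.getProperty("java.io.tmpdir"), "ocr_debug");  // hypothetical location
debugDir.mkdirs();
Image binarized = OcrPreprocessor.preprocess(copyForOcr, debugDir);           // pass null instead of debugDir to disable debugging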

From source file:com.orange.documentare.core.image.Binarization.java

License:Open Source License

public static Mat getFrom(Mat mat) {
    Mat greyscaleMat = isGreyscale(mat) ? mat : getGreyscaleImage(mat);
    Mat binaryMat = new Mat(greyscaleMat.size(), CvType.CV_8UC1);
    Imgproc.adaptiveThreshold(greyscaleMat, binaryMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY, ADAPTIVE_BLOCK_SIZE, ADAPTIVE_MEAN_ADJUSTMENT);
    return binaryMat;
}
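
A short usage sketch for this helper, assuming OpenCV 3.x Imgcodecs for reading and writing; the paths are hypothetical:

Mat page = Imgcodecs.imread("scans/page_001.png");   // hypothetical input scan
Mat binary = Binarization.getFrom(page);             // CV_8UC1 result of the adaptive threshold
Imgcodecs.imwrite("scans/page_001_bin.png", binary);
page.release();
binary.release();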

From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java

License:Open Source License

private static Mat bytesToMat(byte[] bytes, int rows, int columns, boolean raw) {
    int simDocLineExtra = raw ? 1 : 0;
    Mat mat = new Mat(rows, columns, CvType.CV_8UC1);
    byte[] dat = new byte[1];
    for (int y = 0; y < rows; y++) {
        for (int x = 0; x < columns; x++) {
            dat[0] = bytes[y * (columns + simDocLineExtra) + x];
            mat.put(y, x, dat);
        }
    }
    return mat;
}
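
When there is no per-line extra byte (raw == false, so simDocLineExtra is 0), the per-pixel loop can be collapsed into a single bulk copy, because a freshly created CV_8UC1 Mat is continuous. A minimal sketch of that variant, not part of the original class:

private static Mat bytesToMatBulk(byte[] bytes, int rows, int columns) {
    Mat mat = new Mat(rows, columns, CvType.CV_8UC1);
    mat.put(0, 0, bytes);    // one native call copies rows * columns bytes row-major
    return mat;
}

The loop (or a per-row put) is still needed for the raw case, where each source line carries one extra byte that must be skipped.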

From source file:com.shootoff.camera.Camera.java

License:Open Source License

public static Mat colorTransfer(Mat source, Mat target) {
    Mat src = new Mat();
    Mat dst = new Mat();

    Imgproc.cvtColor(source, src, Imgproc.COLOR_BGR2Lab);
    Imgproc.cvtColor(target, dst, Imgproc.COLOR_BGR2Lab);

    ArrayList<Mat> src_channels = new ArrayList<Mat>();
    ArrayList<Mat> dst_channels = new ArrayList<Mat>();
    Core.split(src, src_channels);
    Core.split(dst, dst_channels);

    for (int i = 0; i < 3; i++) {
        MatOfDouble src_mean = new MatOfDouble(), src_std = new MatOfDouble();
        MatOfDouble dst_mean = new MatOfDouble(), dst_std = new MatOfDouble();
        Core.meanStdDev(src_channels.get(i), src_mean, src_std);
        Core.meanStdDev(dst_channels.get(i), dst_mean, dst_std);

        dst_channels.get(i).convertTo(dst_channels.get(i), CvType.CV_64FC1);
        Core.subtract(dst_channels.get(i), dst_mean, dst_channels.get(i));
        Core.divide(dst_std, src_std, dst_std);
        Core.multiply(dst_channels.get(i), dst_std, dst_channels.get(i));
        Core.add(dst_channels.get(i), src_mean, dst_channels.get(i));
        dst_channels.get(i).convertTo(dst_channels.get(i), CvType.CV_8UC1);
    }

    Core.merge(dst_channels, dst);

    Imgproc.cvtColor(dst, dst, Imgproc.COLOR_Lab2BGR);

    return dst;
}
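
A hedged usage sketch showing how the method might be called; the frame paths are hypothetical and Imgcodecs is used only for illustration:

Mat reference = Imgcodecs.imread("frames/reference.png"); // image providing the colour statistics
Mat current = Imgcodecs.imread("frames/current.png");     // image to be adjusted
Mat adjusted = Camera.colorTransfer(reference, current);
Imgcodecs.imwrite("frames/current_adjusted.png", adjusted);

The per-channel convertTo back to CvType.CV_8UC1 is what allows Core.merge and the final Lab-to-BGR conversion to produce a standard 8-bit image.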

From source file:com.sikulix.core.SXElement.java

License:Open Source License

protected static Mat makeMat(BufferedImage bImg) {
    Mat aMat = new Mat();
    if (bImg.getType() == BufferedImage.TYPE_INT_RGB) {
        log.trace("makeMat: INT_RGB (%dx%d)", bImg.getWidth(), bImg.getHeight());
        int[] data = ((DataBufferInt) bImg.getRaster().getDataBuffer()).getData();
        ByteBuffer byteBuffer = ByteBuffer.allocate(data.length * 4);
        IntBuffer intBuffer = byteBuffer.asIntBuffer();
        intBuffer.put(data);
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC4);
        aMat.put(0, 0, byteBuffer.array());
        Mat oMatBGR = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        Mat oMatA = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC1);
        java.util.List<Mat> mixIn = new ArrayList<Mat>(Arrays.asList(new Mat[] { aMat }));
        java.util.List<Mat> mixOut = new ArrayList<Mat>(Arrays.asList(new Mat[] { oMatA, oMatBGR }));
        //A 0 - R 1 - G 2 - B 3 -> A 0 - B 1 - G 2 - R 3
        Core.mixChannels(mixIn, mixOut, new MatOfInt(0, 0, 1, 3, 2, 2, 3, 1));
        return oMatBGR;
    } else if (bImg.getType() == BufferedImage.TYPE_3BYTE_BGR) {
        log.error("makeMat: 3BYTE_BGR (%dx%d)", bImg.getWidth(), bImg.getHeight());
        byte[] data = ((DataBufferByte) bImg.getRaster().getDataBuffer()).getData();
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        aMat.put(0, 0, data);
        return aMat;
    } else if (bImg.getType() == BufferedImage.TYPE_4BYTE_ABGR) {
        log.trace("makeMat: TYPE_4BYTE_ABGR (%dx%d)", bImg.getWidth(), bImg.getHeight());
        byte[] data = ((DataBufferByte) bImg.getRaster().getDataBuffer()).getData();
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC4);
        aMat.put(0, 0, data);
        Mat oMatBGR = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        Mat oMatA = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC1);
        java.util.List<Mat> mixIn = new ArrayList<Mat>(Arrays.asList(new Mat[] { aMat }));
        java.util.List<Mat> mixOut = new ArrayList<Mat>(Arrays.asList(new Mat[] { oMatA, oMatBGR }));
        //A 0 - R 1 - G 2 - B 3 -> A 0 - B 1 - G 2 - R 3
        Core.mixChannels(mixIn, mixOut, new MatOfInt(0, 0, 1, 1, 2, 2, 3, 3));
        return oMatBGR;
    } else {
        log.error("makeMat: Type not supported: %d (%dx%d)", bImg.getType(), bImg.getWidth(), bImg.getHeight());
    }
    return aMat;
}
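
A typical follow-up to makeMat is reducing the returned BGR Mat to a single CV_8UC1 channel before matching. A minimal sketch, written as if inside the same class (bImg stands for any BufferedImage, e.g. a screen capture):

Mat bgr = makeMat(bImg);
Mat gray = new Mat(bgr.size(), CvType.CV_8UC1);
Imgproc.cvtColor(bgr, gray, Imgproc.COLOR_BGR2GRAY);
bgr.release();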

From source file:com.sikulix.core.Visual.java

License:Open Source License

protected static Mat makeMat(BufferedImage bImg) {
    Mat aMat = null;
    if (bImg.getType() == BufferedImage.TYPE_INT_RGB) {
        vLog.trace("makeMat: INT_RGB (%dx%d)", bImg.getWidth(), bImg.getHeight());
        int[] data = ((DataBufferInt) bImg.getRaster().getDataBuffer()).getData();
        ByteBuffer byteBuffer = ByteBuffer.allocate(data.length * 4);
        IntBuffer intBuffer = byteBuffer.asIntBuffer();
        intBuffer.put(data);
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC4);
        aMat.put(0, 0, byteBuffer.array());
        Mat oMatBGR = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        Mat oMatA = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC1);
        List<Mat> mixIn = new ArrayList<Mat>(Arrays.asList(new Mat[] { aMat }));
        List<Mat> mixOut = new ArrayList<Mat>(Arrays.asList(new Mat[] { oMatA, oMatBGR }));
        //A 0 - R 1 - G 2 - B 3 -> A 0 - B 1 - G 2 - R 3
        Core.mixChannels(mixIn, mixOut, new MatOfInt(0, 0, 1, 3, 2, 2, 3, 1));
        return oMatBGR;
    } else if (bImg.getType() == BufferedImage.TYPE_3BYTE_BGR) {
        vLog.error("makeMat: 3BYTE_BGR (%dx%d)", bImg.getWidth(), bImg.getHeight());
    } else {
        vLog.error("makeMat: Type not supported: %d (%dx%d)", bImg.getType(), bImg.getWidth(),
                bImg.getHeight());
    }
    return aMat;
}

From source file:com.trandi.opentld.tld.Util.java

License:Apache License

static byte getByte(final int row, final int col, final Mat mat) {
    if (CvType.CV_8UC1 != mat.type())
        throw new IllegalArgumentException(
                "Expected type is CV_8UC1, we found: " + CvType.typeToString(mat.type()));

    mat.get(row, col, _byteBuff1);
    return _byteBuff1[0];
}

From source file:com.trandi.opentld.tld.Util.java

License:Apache License

/**
 * The corresponding Java primitive array type depends on the Mat type:
 * CV_8U and CV_8S -> byte[]
 * CV_16U and CV_16S -> short[]
 * CV_32S -> int[]
 * CV_32F -> float[]
 * CV_64F-> double[]
 */
static byte[] getByteArray(final Mat mat) {
    if (CvType.CV_8UC1 != mat.type())
        throw new IllegalArgumentException(
                "Expected type is CV_8UC1, we found: " + CvType.typeToString(mat.type()));

    final int size = (int) (mat.total() * mat.channels());
    if (_byteBuff.length != size) {
        _byteBuff = new byte[size];
    }
    mat.get(0, 0, _byteBuff); // 0 for row and col means the WHOLE Matrix
    return _byteBuff;
}
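
A worked sketch of reading one pixel out of the flat array returned above: for a CV_8UC1 Mat the data is packed row-major with one byte per pixel, and the byte must be masked because Java bytes are signed while CV_8U is unsigned. The mat variable stands for any CV_8UC1 Mat and the coordinates are hypothetical.

byte[] data = getByteArray(mat);
int row = 10, col = 20;                              // hypothetical pixel coordinates
int value = data[row * mat.cols() + col] & 0xFF;     // 0..255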

From source file:com.wallerlab.compcellscope.calcDPCTask.java

License:BSD License

protected Long doInBackground(Mat... matrix_list) {
    //int count = urls.length;
    Mat in1 = matrix_list[0];
    Mat in2 = matrix_list[1];
    Mat outputMat = matrix_list[2];

    Mat Mat1 = new Mat(in1.width(), in1.height(), in1.type());
    Mat Mat2 = new Mat(in2.width(), in2.height(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.width(), Mat1.height(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);

    /*
    Log.d(TAG,String.format("Mat1 format is %.1f-%.1f, type: %d",Mat1.size().width,Mat1.size().height,Mat1.type()));
    Log.d(TAG,String.format("Mat2 format is %.1f-%.1f, type: %d",Mat2.size().width,Mat2.size().height,Mat2.type()));
    */

    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);
    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Shift the ratio from [-1, 1] to [0, 2]
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Scale to roughly [0, 220]
    dpcImgF.convertTo(output, CvType.CV_8UC1); // Convert to 8-bit grayscale
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4); // Expand back to RGBA

    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(outputMat, maskedImg);
    output.release();
    maskedImg.release();
    return null;
}
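
The fixed shift-and-scale above maps the DPC ratio from [-1, 1] to roughly [0, 220]. If the full 8-bit range is wanted regardless of the actual spread of values, the add/multiply/convertTo sequence could be replaced by a single Core.normalize call; a hedged alternative sketch:

// Stretch whatever range dpcImgF actually covers to 0-255 and emit a CV_8UC1 Mat in one step.
Mat dpc8u = new Mat();
Core.normalize(dpcImgF, dpc8u, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
Imgproc.cvtColor(dpc8u, dpc8u, Imgproc.COLOR_GRAY2RGBA, 4);

As a side note, the Mat constructors in this example pass (width, height) where Mat expects (rows, cols); this happens to be harmless only because copyTo and the arithmetic functions reallocate their destination Mats to the correct size.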