Example usage for org.opencv.core Core bitwise_or

Introduction

This page lists usage examples for the org.opencv.core Core.bitwise_or method.

Prototype

public static void bitwise_or(Mat src1, Mat src2, Mat dst) 
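
A minimal sketch of calling bitwise_or directly, assuming two single-channel mask images on disk (the file names are placeholders): src1 and src2 must have the same size and type, and the per-element OR is written to dst.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class BitwiseOrExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before using any Mat operations
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Placeholder file names; both inputs must have the same size and type
        Mat src1 = Imgcodecs.imread("maskA.png", Imgcodecs.IMREAD_GRAYSCALE);
        Mat src2 = Imgcodecs.imread("maskB.png", Imgcodecs.IMREAD_GRAYSCALE);

        // Per-element bitwise OR: dst(i) = src1(i) | src2(i)
        Mat dst = new Mat();
        Core.bitwise_or(src1, src2, dst);

        Imgcodecs.imwrite("or.png", dst);
    }
}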

Usage

From source file:Questao1.java

void or() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    imagemBinaria();
    Core.bitwise_or(image1bin, image2bin, output);
    normalizarBinario();

    Imgcodecs.imwrite("or.jpg", output);
    showResult("or.jpg");
}

From source file:classes.FloodFiller.java

private void fillFrom(Point seed, int lo, int up, Scalar backgroundColor, Scalar contourFillingColor) {

    Mat object = ObjectGenerator.extract(image, seed.x, seed.y, 10, 10);
    this.meanColor = Core.mean(object);

    Rect ccomp = new Rect();
    Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1);

    int connectivity = 4;
    int newMaskVal = 255;
    int ffillMode = 1;

    int flags = connectivity + (newMaskVal << 8) + (ffillMode == 1 ? Imgproc.FLOODFILL_FIXED_RANGE : 0);

    Scalar newVal = new Scalar(0.299, 0.587, 0.114);

    Imgproc.threshold(mask, mask, 1, 128, Imgproc.THRESH_BINARY);

    filledArea = Imgproc.floodFill(image.clone(), mask, seed, newVal, ccomp, new Scalar(lo, lo, lo),
            new Scalar(up, up, up), flags);

    //        Highgui.imwrite("mask.png", mask);
    ImageUtils.saveImage(mask, "mask.png", request);

    morphologicalImage = new Mat(image.size(), CvType.CV_8UC3);

    Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1));

    ArrayList<Mat> mask3 = new ArrayList<Mat>();
    mask3.add(mask);
    mask3.add(mask);
    mask3.add(mask);
    Core.merge(mask3, mask);

    // Applying morphological filters
    Imgproc.erode(mask, morphologicalImage, element);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_CLOSE, element,
            new Point(-1, -1), 9);
    Imgproc.morphologyEx(morphologicalImage, morphologicalImage, Imgproc.MORPH_OPEN, element, new Point(-1, -1),
            2);
    Imgproc.resize(morphologicalImage, morphologicalImage, image.size());

    //        Highgui.imwrite("morphologicalImage.png", morphologicalImage);
    ImageUtils.saveImage(morphologicalImage, "morphologicalImage.png", request);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Core.split(mask, mask3);
    Mat binarymorphologicalImage = mask3.get(0);

    Imgproc.findContours(binarymorphologicalImage.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_NONE);

    contoursImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor);

    int thickness = -1; // Thickness must be negative to draw filled contours
    Imgproc.drawContours(contoursImage, contours, -1, contourFillingColor, thickness); // Drawing all the contours found
    //        Highgui.imwrite("allContoursImage.png", contoursImage);
    ImageUtils.saveImage(contoursImage, "allContoursImage.png", request);

    if (contours.size() > 1) {

        int minContourWith = 20;
        int minContourHeight = 20;
        int maxContourWith = 6400 / 2;
        int maxContourHeight = 4800 / 2;

        contours = filterContours(contours, minContourWith, minContourHeight, maxContourWith, maxContourHeight);
    }

    if (contours.size() > 0) {

        MatOfPoint biggestContour = contours.get(0); // start with the first contour
        contourArea = Imgproc.contourArea(biggestContour);

        if (contours.size() > 1) {
            biggestContour = Collections.max(contours, new ContourComparator()); // pick the biggest contour when there is more than one
        }

        Point[] points = biggestContour.toArray();
        path = "M " + (int) points[0].x + " " + (int) points[0].y + " ";
        for (int i = 1; i < points.length; ++i) {
            Point v = points[i];
            path += "L " + (int) v.x + " " + (int) v.y + " ";
        }
        path += "Z";

        biggestContourImage = new Mat(image.size(), CvType.CV_8UC3, backgroundColor);

        Imgproc.drawContours(biggestContourImage, contours, 0, contourFillingColor, thickness);

        //            Highgui.imwrite("biggestContourImage.png", biggestContourImage);
        ImageUtils.saveImage(biggestContourImage, "biggestContourImage.png", request);

        Mat maskForColorExtraction = biggestContourImage.clone();

        if (isWhite(backgroundColor)) {
            Imgproc.dilate(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3);
        } else {
            Imgproc.erode(maskForColorExtraction, maskForColorExtraction, new Mat(), new Point(-1, -1), 3);
        }

        //            Highgui.imwrite("maskForColorExtraction.png", maskForColorExtraction);
        ImageUtils.saveImage(maskForColorExtraction, "maskForColorExtraction.png", request);

        Mat extractedColor = new Mat();

        if (isBlack(backgroundColor) && isWhite(contourFillingColor)) {
            Core.bitwise_and(maskForColorExtraction, image, extractedColor);

        } else {
            Core.bitwise_or(maskForColorExtraction, image, extractedColor);
        }

        //            Highgui.imwrite("extractedColor.png", extractedColor);
        ImageUtils.saveImage(extractedColor, "extractedColor.png", request);

        computedSearchWindow = Imgproc.boundingRect(biggestContour);
        topLeftCorner = computedSearchWindow.tl();

        Rect croppingRect = new Rect(computedSearchWindow.x, computedSearchWindow.y,
                computedSearchWindow.width - 1, computedSearchWindow.height - 1);

        Mat imageForTextRecognition = new Mat(extractedColor.clone(), croppingRect);
        //            Highgui.imwrite(outImageName, imageForTextRecognition);
        ImageUtils.saveImage(imageForTextRecognition, outImageName, request);

        //            
        //
        //            Mat data = new Mat(imageForTextRecognition.size(), CvType.CV_8UC3, backgroundColor);
        //            imageForTextRecognition.copyTo(data);
        //            data.convertTo(data, CvType.CV_8UC3);
        //
        //            // The meanColor variable represents the color in the GBR space, the following line transforms this to the RGB color space, which
        //            // is assumed in the prepareImage method of the TextRecognitionPreparer class
        //            Scalar userColor = new Scalar(meanColor.val[2], meanColor.val[1], meanColor.val[0]);
        //
        //            ArrayList<String> recognizableImageNames = TextRecognitionPreparer.generateRecognizableImagesNames(data, backgroundColor, userColor);
        //            for (String imageName : recognizableImageNames) {
        //
        //                try {
        //                    // First recognition step
        //                    String recognizedText = TextRecognizer.recognize(imageName, true).trim();
        //                    if (recognizedText != null && !recognizedText.isEmpty()) {
        //                        recognizedStrings.add(recognizedText);
        //                    }
        //                    // Second recognition step
        //                    recognizedText = TextRecognizer.recognize(imageName, false).trim();
        //                    if (recognizedText != null && !recognizedText.isEmpty()) {
        //                        recognizedStrings.add(recognizedText);
        //                    }
        //                    
        //                } catch (Exception e) {
        //                }
        //            }
        //            
        ////            ArrayList<BufferedImage> recognizableBufferedImages = TextRecognitionPreparer.generateRecognizableBufferedImages(data, backgroundColor, userColor);
        ////            for (BufferedImage bufferedImage : recognizableBufferedImages) {
        ////                try {
        ////                    // First recognition step
        ////                    String recognizedText = TextRecognizer.recognize(bufferedImage, true).trim();
        ////                    if (recognizedText != null && !recognizedText.isEmpty()) {
        ////                        recognizedStrings.add(recognizedText);
        ////                    }
        ////                    // Second recognition step
        ////                    recognizedText = TextRecognizer.recognize(bufferedImage, false).trim();
        ////                    if (recognizedText != null && !recognizedText.isEmpty()) {
        ////                        recognizedStrings.add(recognizedText);
        ////                    }
        ////                    
        ////                } catch (Exception e) {
        ////                }
        ////            }
        //
        //            
        //            

        // compute all moments
        Moments mom = Imgproc.moments(biggestContour);
        massCenter = new Point(mom.get_m10() / mom.get_m00(), mom.get_m01() / mom.get_m00());

        // draw black dot
        Core.circle(contoursImage, massCenter, 4, contourFillingColor, 8);
    }

}

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

public static Mat or(Mat first, Mat second) {
    Mat dest = new Mat();
    Core.bitwise_or(first, second, dest);
    return dest;
}

From source file:ctPrincipal.Operacoes.java

String realizarOperacoes(int op) {
    String resultImgOutput = "";
    switch (op) {
    case 1://and
        imagemBinaria();
        Core.bitwise_and(image1bin, image2bin, output);
        normalizarBinario();
        Imgcodecs.imwrite("OutputImg/and.jpg", output);
        resultImgOutput = "OutputImg/and.jpg";
        break;
    case 2://or
        imagemBinaria();
        Core.bitwise_or(image1bin, image2bin, output);
        normalizarBinario();
        Imgcodecs.imwrite("OutputImg/or.jpg", output);
        resultImgOutput = "OutputImg/or.jpg";
        break;
    case 3://xor
        imagemBinaria();
        Core.bitwise_xor(image1bin, image2bin, output);
        normalizarBinario();
        Imgcodecs.imwrite("OutputImg/xor.jpg", output);
        resultImgOutput = "OutputImg/xor.jpg";
        break;
    case 4://not
        Core.bitwise_not(image1bin, output);
        Imgcodecs.imwrite("OutputImg/not.jpg", output);
        resultImgOutput = "OutputImg/not.jpg";
        break;
    case 5://soma
        Core.add(image1, image2, output);
        Imgcodecs.imwrite("OutputImg/soma.jpg", output);
        resultImgOutput = "OutputImg/soma.jpg";
        break;
    case 6://subtracao
        Core.subtract(image1, image2, output);
        Imgcodecs.imwrite("OutputImg/subtracao.jpg", output);
        resultImgOutput = "OutputImg/subtracao.jpg";
        break;
    case 7:// multiplicacao
        Core.multiply(image1, image2, output);
        Imgcodecs.imwrite("OutputImg/multiplicacao.jpg", output);
        resultImgOutput = "OutputImg/multiplicacao.jpg";
        break;
    case 8://divisao
        Core.divide(image1, image2, output);
        Imgcodecs.imwrite("OutputImg/divisao.jpg", output);
        resultImgOutput = "OutputImg/divisao.jpg";
        break;
    }

    return resultImgOutput;
}

From source file:logic.featurepointextractor.EyeBrowsFPE.java

/**
 * getSkeleton obtains a thin, one-pixel-wide region from a contour.
 * @param src   input binary image
 * @return      binary skeleton image
 */

private Mat getSkeleton(Mat src) {
    Mat skel = new Mat(src.rows(), src.cols(), CV_8UC1, new Scalar(0));
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    Mat tmp = new Mat();
    Mat eroded = new Mat();
    boolean done = false;

    do {
        // Peel one layer: erode, then dilate the eroded image (a morphological opening)
        Imgproc.morphologyEx(src, eroded, Imgproc.MORPH_ERODE, element);
        Imgproc.morphologyEx(eroded, tmp, Imgproc.MORPH_DILATE, element);
        // The difference is this iteration's contribution to the skeleton
        Core.subtract(src, tmp, tmp);
        Core.bitwise_or(skel, tmp, skel);
        eroded.copyTo(src);

        done = (Core.countNonZero(src) == 0);
    } while (!done);

    return skel;
}

From source file:opencv.fark.ResimSecMainFrame.java

private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
    Mat sonuc = new Mat(resim1.rows(), resim1.cols(), CvType.CV_8UC1);
    Mat gray1 = new Mat(resim1.rows(), resim1.cols(), CvType.CV_8UC1);
    Mat gray2 = new Mat(resim1.rows(), resim1.cols(), CvType.CV_8UC1);

    Imgproc.cvtColor(resim1, gray1, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(resim2, gray2, Imgproc.COLOR_BGR2GRAY);

    MatToBufImg matToBufImage = new MatToBufImg();

    //        matToBufImage.setMatrix(gray1, ".jpg");
    //        g.drawImage(matToBufImage.getBufferedImage(), 0, 0, null);

    //        matToBufImage.setMatrix(gray2, ".jpg");
    //        g1.drawImage(matToBufImage.getBufferedImage(), 0, 0, null);

    Core.absdiff(gray1, gray2, sonuc);
    Imgproc.blur(sonuc, sonuc, new Size(10, 10));
    Imgproc.threshold(sonuc, sonuc, 10, 255, Imgproc.THRESH_BINARY);
    Imgproc.erode(sonuc, sonuc, element);
    Imgproc.dilate(sonuc, sonuc, element);

    farkli_pixel_say = 0;
    float boyut = sonuc.cols() * sonuc.rows();
    for (int i = 0; i < sonuc.cols(); i++) {
        for (int j = 0; j < sonuc.rows(); j++) {
            double a[] = sonuc.get(j, i);
            if (a[0] == 255) {
                farkli_pixel_say++;
                jLabelPixelCount.setText(String.valueOf(farkli_pixel_say));
            }
        }
    }

    float hata = (farkli_pixel_say / boyut) * 100;

    if (hata == 0) {
        jLabelYuzdeHata.setText("Hata Yok !");
    } else {
        jLabelYuzdeHata.setText(String.valueOf(hata).substring(0, 5));
    }

    Imgproc.cvtColor(sonuc, sonuc, Imgproc.COLOR_GRAY2BGR);

    Core.bitwise_or(resim2, sonuc, sonuc);

    matToBufImage.setMatrix(sonuc, ".png");
    g1.drawImage(matToBufImage.getBufferedImage(), 0, 0, null);

}

From source file:org.lasarobotics.vision.detection.ColorBlobDetector.java

License:Open Source License

/**
 * Process an RGBA image. The results can be drawn or retrieved later.
 * This method does not modify the image.
 *
 * @param rgbaImage An RGBA image matrix
 */
public void process(Mat rgbaImage) {
    Imgproc.pyrDown(rgbaImage, mPyrDownMat);
    Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

    Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);

    //Test whether we need two inRange operations (only if the hue crosses over 255)
    if (upperBound.getScalar().val[0] <= 255) {
        Core.inRange(mHsvMat, lowerBound.getScalar(), upperBound.getScalar(), mMask);
    } else {
        //We need two operations - we're going to OR the masks together
        Scalar lower = lowerBound.getScalar().clone();
        Scalar upper = upperBound.getScalar().clone();
        while (upper.val[0] > 255)
            upper.val[0] -= 255;
        double tmp = lower.val[0];
        lower.val[0] = 0;
        //Mask 1 - from 0 to n
        Core.inRange(mHsvMat, lower, upper, mMaskOne);
        //Mask 2 - from 255-n to 255
        lower.val[0] = tmp;
        upper.val[0] = 255;

        Core.inRange(mHsvMat, lower, upper, mMask);
        //OR the two masks
        Core.bitwise_or(mMaskOne, mMask, mMask);
    }

    //Dilate (blur) the mask to decrease processing power
    Imgproc.dilate(mMask, mDilatedMask, new Mat());

    List<MatOfPoint> contourListTemp = new ArrayList<>();

    Imgproc.findContours(mDilatedMask, contourListTemp, mHierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter contours by area and resize to fit the original image size
    contours.clear();
    for (MatOfPoint c : contourListTemp) {
        Core.multiply(c, new Scalar(4, 4), c);
        contours.add(new Contour(c));
    }
}

From source file:servershootingstar.BallDetector.java

public static String getAngleFromRobot(int input) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    System.out.println("before");
    int point;
    try {
        Mat frame = new Mat();
        System.out.println("AAAAAA");
        Mat originalFrame = new Mat();
        System.out.println("BBBBBB");
        VideoCapture videoCapture = new VideoCapture(0);
        System.out.println("CCCCCCCC");
        videoCapture.read(originalFrame);
        //                System.out.println("original" + originalFrame.dump());
        //                initSwing(originalFrame);
        int workaround = 20;
        while (workaround > 0) {
            System.out.println("workaround " + workaround);
            videoCapture.read(originalFrame);
            //                    System.out.println(originalFrame.dump() + originalFrame.dump().length());
            workaround--;
        }
        //                Imgcodecs.imwrite("C:\\Users\\Goran\\Desktop\\Goran.jpg", originalFrame);
        Mat cropped = originalFrame.submat(originalFrame.rows() / 4, originalFrame.rows() / 4 * 3, 0,
                originalFrame.cols());
        initSwing(cropped);
        Imgproc.cvtColor(cropped, frame, Imgproc.COLOR_BGR2HSV);

        // insert lower and upper bounds for colors
        Scalar greenLowerB = new Scalar(20, 55, 55);
        Scalar greenUpperB = new Scalar(40, 255, 255);

        Scalar redLowerB = new Scalar(160, 100, 35);
        Scalar red1LowerB = new Scalar(0, 100, 35);

        Scalar redUpperB = new Scalar(180, 255, 255);
        Scalar red1UpperB = new Scalar(20, 255, 255);

        Scalar blueLowerB = new Scalar(100, 100, 35);
        Scalar blueUpperB = new Scalar(120, 255, 155);

        Mat mask = new Mat();

        if (input == 1) {
            Mat otherMask = new Mat();
            Core.inRange(frame, redLowerB, redUpperB, mask);
            Core.inRange(frame, red1LowerB, red1UpperB, otherMask);
            Core.bitwise_or(mask, otherMask, mask);
        } else if (input == 2) {
            Core.inRange(frame, greenLowerB, greenUpperB, mask);
        } else {
            Core.inRange(frame, blueLowerB, blueUpperB, mask);
        }
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));

        int minX = Integer.MAX_VALUE, maxX = Integer.MIN_VALUE, minY = Integer.MAX_VALUE,
                maxY = Integer.MIN_VALUE;
        for (int i = 0; i < mask.rows(); ++i) {
            for (int j = 0; j < mask.cols(); ++j) {
                double value = mask.get(i, j)[0];
                //System.out.println(value);
                if (value > 1) {
                    minX = Math.min(minX, i);
                    maxX = Math.max(maxX, i);
                    minY = Math.min(minY, j);
                    maxY = Math.max(maxY, j);
                }
            }
        }

        Imgproc.circle(mask, new Point((maxY + minY) / 2, (minX + maxX) / 2), 3, new Scalar(0, 0, 0));
        initSwing(mask);

        point = (minY + maxY) / 2;

        point = point - 320;

        cos = point / 320.0;
        System.out.println("OK");
    } catch (Exception ex) {
        point = (new Random()).nextInt(640);
        cos = -1;
        System.out.println("error imase, davam random brojka: " + point);
        ex.printStackTrace();

    }

    //            System.out.println();
    //            System.out.println("tockata u granica od [-320, 320]");
    //            System.out.println(point);
    //            System.out.println("cosinus vrednost");
    //            System.out.println(cos);
    //            System.out.println();
    System.out.println("cos = " + cos);
    if (cos == -1) {
        return "-1";
    }
    int res = (int) (2 * Math.toDegrees(Math.acos(cos)) / 3);
    System.out.println("Res: " + res);
    return String.valueOf(res);
}

From source file:video.PictureAnalyser.java

public List<MatOfPoint> getConturs(Scalar low, Scalar high, Mat img) {

    Mat imgThresholded = new Mat();
    Mat imgThresholded2 = new Mat();
    Core.inRange(img, low, high, imgThresholded);
    if (low.val[0] < 0) {
        low.val[0] = 180 + low.val[0];
        high.val[0] = 179;

        Core.inRange(img, low, high, imgThresholded2);
        Core.bitwise_or(imgThresholded, imgThresholded2, imgThresholded);

    }
    if (high.val[0] > 179) {
        low.val[0] = 0;
        high.val[0] = high.val[0] - 180;

        Core.inRange(img, low, high, imgThresholded2);
        Core.bitwise_or(imgThresholded, imgThresholded2, imgThresholded);
    }

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    int dilation_size = 3;
    Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
            new Size(2 * dilation_size + 1, 2 * dilation_size + 1));
    Imgproc.dilate(imgThresholded, imgThresholded, element1);
    Imgproc.findContours(imgThresholded, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    MatOfPoint2f approxCurve = new MatOfPoint2f();

    for (int i = 0; i < contours.size(); i++) {
        MatOfPoint2f contour2f = new MatOfPoint2f(contours.get(i).toArray());
        double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
        Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);
        MatOfPoint points = new MatOfPoint(approxCurve.toArray());
        Rect rect = Imgproc.boundingRect(points);
        int area = (rect.width) * (rect.height);
        // remove the contour if its bounding-box area is too small
        if (area <= 500) {
            contours.remove(i);
            i--;
        }
    }

    return contours;
}