Example usage for org.opencv.core Core absdiff

List of usage examples for org.opencv.core Core absdiff

Introduction

In this page you can find the example usage for org.opencv.core Core absdiff.

Prototype

public static void absdiff(Mat src1, Scalar src2, Mat dst) 

Source Link

Usage

From source file:at.ac.tuwien.caa.docscan.camera.CameraPreview.java

License:Open Source License

/**
 * Decides whether two RGB frames are (nearly) identical.
 * Both frames are converted to grayscale, diffed per pixel, and pixels
 * differing by more than 50 are counted; the frames count as "same" when
 * fewer than 5% of the pixels differ.
 *
 * @param frame1 first RGB frame (assumed same size as frame2 — TODO confirm)
 * @param frame2 second RGB frame
 * @return true if less than 5% of pixels changed significantly
 */
public boolean isFrameSame(Mat frame1, Mat frame2) {

    Mat tmp1 = new Mat(frame1.rows(), frame1.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(frame1, tmp1, Imgproc.COLOR_RGB2GRAY);

    Mat tmp2 = new Mat(frame2.rows(), frame2.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(frame2, tmp2, Imgproc.COLOR_RGB2GRAY);

    Mat subtractResult = new Mat(frame2.rows(), frame2.cols(), CvType.CV_8UC1);
    // FIX: diff the grayscale copies — the original diffed the color frames,
    // leaving tmp1/tmp2 computed but unused (and the channel counts mismatched
    // the single-channel result Mat).
    Core.absdiff(tmp1, tmp2, subtractResult);
    Imgproc.threshold(subtractResult, subtractResult, 50, 1, Imgproc.THRESH_BINARY);
    Scalar sumDiff = Core.sumElems(subtractResult);
    // FIX: use one frame's dimensions consistently for the pixel count
    // (was frame1.cols() * frame2.rows()).
    double diffRatio = sumDiff.val[0] / (frame1.cols() * frame1.rows());

    // Release native Mat memory; the Java GC does not reclaim it promptly.
    tmp1.release();
    tmp2.release();
    subtractResult.release();

    return diffRatio < .05;
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.Contours.DifferenceOfGaussian.java

License:Open Source License

/**
 * Applies a Difference-of-Gaussians filter to every image of the
 * pre-processor: each image is blurred with two different Gaussian kernels
 * (size1/sigma1 and size2/sigma2, configured elsewhere) and replaced by the
 * per-pixel absolute difference of the two blurred versions.
 *
 * @param preProcessor holder of the images to process; its image list is
 *                     replaced in place (the Mats themselves are reused)
 * @return the same {@code preProcessor}, for chaining
 */
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        Mat gauss1 = new Mat();
        Mat gauss2 = new Mat();
        Imgproc.GaussianBlur(img, gauss1, size1, sigma1);
        Imgproc.GaussianBlur(img, gauss2, size2, sigma2);
        // Result is written back into img (in-place, as before).
        Core.absdiff(gauss1, gauss2, img);
        // FIX: release the temporaries — OpenCV Mats hold native memory that
        // leaks if not explicitly released.
        gauss1.release();
        gauss2.release();
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}

From source file:com.example.root.dipproj.MainActivity.java

/**
 * Handles results from the camera capture (requestCode 1) and the gallery
 * picker (requestCode 2).
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK) {
        return;
    }
    if (requestCode == 1) {
        handleCameraCapture();
    } else if (requestCode == 2) {
        handleGalleryPick(data);
    }
}

/**
 * Finds "temp.jpg" in external storage, shows it in the image view, and
 * re-saves it under Phoenix/default with a timestamped name.
 */
private void handleCameraCapture() {
    File f = new File(Environment.getExternalStorageDirectory().toString());
    // FIX: listFiles() returns null when the path is unreadable or not a
    // directory; the original would NPE here.
    File[] files = f.listFiles();
    if (files == null) {
        return;
    }
    for (File temp : files) {
        if (temp.getName().equals("temp.jpg")) {
            f = temp;
            break;
        }
    }
    try {
        BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
        Bitmap bitmap = BitmapFactory.decodeFile(f.getAbsolutePath(), bitmapOptions);
        viewImage.setImageBitmap(bitmap);
        String path = android.os.Environment.getExternalStorageDirectory() + File.separator + "Phoenix"
                + File.separator + "default";
        f.delete();
        File file = new File(path, String.valueOf(System.currentTimeMillis()) + ".jpg");
        // FIX: try-with-resources — the original leaked the stream when
        // compress()/flush() threw, because close() was inside the try body.
        try (OutputStream outFile = new FileOutputStream(file)) {
            bitmap.compress(Bitmap.CompressFormat.JPEG, 85, outFile);
            outFile.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

/**
 * Resolves the picked gallery image to a file path, runs the edge/difference
 * pipeline (gray → blur → erode/dilate → Sobel → absdiff → Otsu threshold),
 * displays the result and saves it to the SD card.
 */
private void handleGalleryPick(Intent data) {
    Uri selectedImage = data.getData();
    String[] filePath = { MediaStore.Images.Media.DATA };
    Cursor c = getContentResolver().query(selectedImage, filePath, null, null, null);
    // FIX: query() may return null; the original would NPE.
    if (c == null) {
        return;
    }
    String picturePath;
    // FIX: close the cursor even if reading it throws.
    try {
        c.moveToFirst();
        int columnIndex = c.getColumnIndex(filePath[0]);
        picturePath = c.getString(columnIndex);
    } finally {
        c.close();
    }
    Bitmap thumbnail = BitmapFactory.decodeFile(picturePath);
    Log.w("path of image", picturePath + "");
    Mat imgMat = new Mat();
    Mat imgMat2 = new Mat();
    Mat imgMat3 = new Mat();
    Utils.bitmapToMat(thumbnail, imgMat);
    Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_RGB2GRAY);
    org.opencv.core.Size s = new Size(3, 3);
    // NOTE(review): the original called Imgproc.createCLAHE() and discarded
    // the result — the call had no effect and has been removed.
    Imgproc.GaussianBlur(imgMat, imgMat, s, 2);
    Imgproc.erode(imgMat, imgMat2, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
    Imgproc.dilate(imgMat2, imgMat3, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
    Imgproc.Sobel(imgMat, imgMat, CvType.CV_8UC1, 1, 0);
    // Difference of the Sobel edges against the morphologically smoothed image.
    Core.absdiff(imgMat, imgMat3, imgMat);
    Imgproc.threshold(imgMat, imgMat, 123, 255, Imgproc.THRESH_OTSU);
    Utils.matToBitmap(imgMat, thumbnail);
    // Release native Mat memory before handing the bitmap on.
    imgMat.release();
    imgMat2.release();
    imgMat3.release();
    viewImage.setImageBitmap(thumbnail);
    saveBitmaptoSDCard(thumbnail);
}

From source file:com.example.thibautg.libreaudioview.VideoProcessing.java

License:Open Source License

/**
 * Processes one raw camera frame and sonifies the resulting motion mask.
 *
 * @param data raw preview frame bytes from the camera
 */
public void processFrame(byte[] data) {

    // Pixels whose inter-frame difference exceeds this value count as motion.
    int detectionThreshold = 50;
    mInputMat320240.put(0, 0, data);

    // Keep only the top 2/3 of the buffer's rows.
    // NOTE(review): for NV21 camera data this would select the luma plane —
    // confirm the preview format against the caller.
    mInputGray320240 = mInputMat320240.submat(0, mInputMat320240.height() * 2 / 3, 0, mInputMat320240.width());
    // Downsample to the working resolution.
    Imgproc.resize(mInputGray320240, mInputGray, new Size(160, 120));

    Imgproc.GaussianBlur(mInputGray, mInputGray, windowSize, 0.6, 0.6);
    // Per-pixel absolute difference against the previous frame.
    Core.absdiff(mInputGray, mPreviousMat, mDiffMat2);
    // Remember the current frame for the next call (must happen after absdiff).
    mInputGray.copyTo(mPreviousMat);
    // Binary motion mask: 255 where the difference exceeds the threshold
    // (last arg 0 == Imgproc.THRESH_BINARY).
    Imgproc.threshold(mDiffMat2, mOutputGrayMat, detectionThreshold, 255, 0);

    // The very first frame has no valid predecessor; blank the mask to avoid
    // a spurious full-frame motion event.
    if (mBoolFirstImage) {
        mOutputGrayMat.setTo(new Scalar(0));
        mBoolFirstImage = false;
    }

    mSonifier.sonifyFrame(mOutputGrayMat);
}

From source file:com.sikulix.core.Finder.java

License:Open Source License

/**
 * Detects whether {@code current} differs visibly from {@code base}.
 * A pixel counts as changed when its grayscale difference exceeds
 * PIXEL_DIFF_THRESHOLD; the images count as changed when more than
 * IMAGE_DIFF_THRESHOLD such pixels exist. On change, the contours of the
 * changed regions are traced to the log.
 *
 * @param base    previous BGR frame
 * @param current new BGR frame
 * @return true if the frames differ beyond the thresholds
 */
public boolean hasChanges(Mat base, Mat current) {
    int PIXEL_DIFF_THRESHOLD = 5;
    int IMAGE_DIFF_THRESHOLD = 5;
    Mat bg = new Mat();
    Mat cg = new Mat();
    Mat diff = new Mat();
    Mat tdiff = new Mat();

    Imgproc.cvtColor(base, bg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(current, cg, Imgproc.COLOR_BGR2GRAY);
    Core.absdiff(bg, cg, diff);
    // THRESH_TOZERO ignores the maxval argument (0.0): values <= threshold
    // become 0, the rest are kept.
    Imgproc.threshold(diff, tdiff, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);
    if (Core.countNonZero(tdiff) <= IMAGE_DIFF_THRESHOLD) {
        // FIX: release native Mat memory on the early-exit path too.
        bg.release();
        cg.release();
        diff.release();
        tdiff.release();
        return false;
    }

    // Binarize the diff and close small gaps so changed regions form blobs.
    Imgproc.threshold(diff, diff, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
    Imgproc.dilate(diff, diff, new Mat());
    Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(diff, diff, Imgproc.MORPH_CLOSE, se);

    List<MatOfPoint> points = new ArrayList<MatOfPoint>();
    // FIX: this Mat receives the contour hierarchy — the original named it
    // "contours", shadowing the meaning of the points list.
    Mat hierarchy = new Mat();
    Imgproc.findContours(diff, points, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    int n = 0;
    for (Mat pm : points) {
        log.trace("(%d) %s", n++, pm);
        printMatI(pm);
    }
    log.trace("contours: %s", hierarchy);
    printMatI(hierarchy);

    // FIX: release native Mat memory before returning.
    bg.release();
    cg.release();
    diff.release();
    tdiff.release();
    se.release();
    hierarchy.release();
    return true;
}

From source file:cx.uni.jk.mms.iaip.filter.LogOfOnePlusAbs.java

License:Open Source License

@Override
public Mat convert(Mat mat) {

    /** make absolute values and log */
    Mat tempMat = mat.clone();//  w ww  .  j a  v a 2  s  . c o m
    Core.absdiff(tempMat, new Scalar(0.0d), tempMat);
    Core.add(tempMat, new Scalar(1.0d), tempMat);
    Core.log(tempMat, tempMat);

    /** find contrast and brightness to fit into 8 bit */
    MinMaxLocResult mmlr = Core.minMaxLoc(tempMat);
    double min = Math.min(mmlr.minVal, 0);
    double max = mmlr.maxVal;
    double alpha = 256.0d / (max - min);
    double beta = -min * alpha;

    /** conversion to 8 bit Mat applying contrast alpha and brightness beta */
    Mat byteMat = new MatOfByte();
    tempMat.convertTo(byteMat, CvType.CV_8U, alpha, beta);

    return byteMat;
}

From source file:edu.ucue.tfc.Modelo.VideoProcessor.java

/**
 * Processes {@code firstFrame} together with the next frame read from the
 * video: diffs them, finds moving contours, and updates the
 * vehicle-counting state from the control points.
 *
 * @param firstFrame the first frame of a cycle
 */
private void processFrame(Mat firstFrame) {
    double contourArea = 0;
    int position = 0;
    try {
        /**
         * Resize the current frame to the working size.
         */
        Imgproc.resize(firstFrame, firstFrame, frameSize);

        /**
         * Convert the frame to grayscale.
         */
        Imgproc.cvtColor(firstFrame, firstGrayImage, Imgproc.COLOR_BGR2GRAY);

        /**
         * Read the next frame, resize it and convert it to grayscale.
         */
        video.read(secondFrame);

        Imgproc.resize(secondFrame, secondFrame, frameSize);

        Imgproc.cvtColor(secondFrame, secondGrayImage, Imgproc.COLOR_BGR2GRAY);

        /**
         * Per-pixel absolute difference of the two grayscale frames,
         * binarized, blurred to merge nearby blobs, then binarized again.
         */
        Core.absdiff(firstGrayImage, secondGrayImage, differenceOfImages);
        Imgproc.threshold(differenceOfImages, thresholdImage, 25, 255, Imgproc.THRESH_BINARY);
        Imgproc.blur(thresholdImage, thresholdImage, new Size(12, 12));
        Imgproc.threshold(thresholdImage, thresholdImage, 20, 255, Imgproc.THRESH_BINARY);
        // Release last iteration's contour Mats before refilling the list.
        for (int i = 0; i < contours.size(); ++i) {
            contours.get(i).release();
        }
        contours.clear();

        /**
         * Draw the horizontal control line, then extract motion contours.
         */
        Imgproc.line(firstFrame, controlPoints.get(6), controlPoints.get(7), new Scalar(255, 0, 0),
                Imgproc.LINE_4);
        Imgproc.findContours(thresholdImage, contours, hierarchy, Imgproc.RETR_TREE,
                Imgproc.CHAIN_APPROX_SIMPLE);

        // Release last iteration's hull Mats before recomputing.
        for (int i = 0; i < hullPoints.size(); ++i) {
            hullPoints.get(i).release();
        }
        hullPoints.clear();

        // Convex hull (as index lists) for every contour.
        for (int i = 0; i < contours.size(); i++) {
            MatOfInt tmp = new MatOfInt();
            Imgproc.convexHull(contours.get(i), tmp, false);
            hullPoints.add(tmp);
        }

        /**
         * Find the contour with the largest area; remember its index and
         * bounding rectangle.
         */
        if (contours.size() > 0) {
            for (int i = 0; i < contours.size(); i++) {
                if (Imgproc.contourArea(contours.get(i)) > contourArea) {
                    contourArea = Imgproc.contourArea(contours.get(i));
                    position = i;
                    boundingRectangle = Imgproc.boundingRect(contours.get(i));
                }

            }
        }
        // Free per-frame native Mat memory.
        secondFrame.release();
        hierarchy.release();
        secondGrayImage.release();
        firstGrayImage.release();
        thresholdImage.release();
        differenceOfImages.release();
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }

    // Control point 6 (left): red line while the object covers it, green otherwise.
    if (controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 0, 255), 2);
        wasAtLeftPoint = true;
    } else if (!controlPoints.get(6).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(0), controlPoints.get(1), new Scalar(0, 255, 0), 2);
    }

    // Control point 8 (center).
    if (controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 0, 255), 2);
        wasAtCenterPoint = true;
    } else if (!controlPoints.get(8).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(2), controlPoints.get(3), new Scalar(0, 255, 0), 2);
    }

    // Control point 7 (right).
    if (controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 0, 255), 2);
        wasAtRightPoint = true;
    } else if (!controlPoints.get(7).inside(boundingRectangle)) {
        Imgproc.line(frame, controlPoints.get(4), controlPoints.get(5), new Scalar(0, 255, 0), 2);
    }

    // An object that touched left, center and right counts as one vehicle.
    if (wasAtCenterPoint && wasAtLeftPoint && wasAtRightPoint) {
        detectedCarsCount++;
        wasDetected = true;
        wasAtCenterPoint = false;
        wasAtLeftPoint = false;
        wasAtRightPoint = false;
    }

    // Outline the dominant contour when it is large enough to matter.
    if (contourArea > 3000) {
        Imgproc.drawContours(frame, contours, position, new Scalar(255, 255, 255));
    }
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

/**
 * Calculates the difference between the current image loaded into OpenCV
 * and a second image. The result is stored in the loaded image in OpenCV.
 * Works on both color and grayscale images.
 *
 * @param img a PImage to diff against
 */
public void diff(PImage img) {
    // Bring the incoming PImage into an OpenCV Mat shaped like the color buffer.
    Mat other = imitate(getColor());
    toCv(img, other);

    Mat result = imitate(getCurrentMat());

    // Pick the second operand according to the pipeline's color mode.
    Mat second;
    if (useColor) {
        ARGBtoBGRA(other, other);
        second = other;
    } else {
        second = OpenCV.gray(other);
    }
    Core.absdiff(getCurrentMat(), second, result);

    // Replace the current image with the per-pixel absolute difference.
    result.assignTo(getCurrentMat());
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

/**
 * A helper function that diffs two Mats using absdiff.
 * Places the result back into mat1.
 *
 * @param mat1 the destination Mat
 * @param mat2 the Mat to diff against
 */
public static void diff(Mat mat1, Mat mat2) {
    // Compute |mat1 - mat2| into a scratch Mat of matching type/size,
    // then copy the result back over mat1.
    Mat scratch = imitate(mat1);
    Core.absdiff(mat1, mat2, scratch);
    scratch.assignTo(mat1);
}

From source file:gab.opencv.OpenCVProcessingUtils.java

License:Open Source License

/**
 * Diffs the currently loaded image against {@code img}; the per-pixel
 * absolute difference replaces the current image. Works for both the color
 * and the grayscale pipeline.
 */
public void diff(PImage img) {
    Mat other = imitate(getColor());
    toCv(img, other);

    Mat result = imitate(getCurrentMat());

    // Choose the comparison operand based on the color mode.
    Mat second;
    if (useColor) {
        ARGBtoBGRA(other, other);
        second = other;
    } else {
        second = OpenCVProcessingUtils.gray(other);
    }
    Core.absdiff(getCurrentMat(), second, result);

    result.assignTo(getCurrentMat());
}