Example usage for org.opencv.android Utils matToBitmap

List of usage examples for org.opencv.android Utils matToBitmap

Introduction

On this page you can find example usages of org.opencv.android Utils matToBitmap.

Prototype

public static void matToBitmap(Mat mat, Bitmap bmp) 

Document

Short form of the matToBitmap(mat, bmp, premultiplyAlpha=false)
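
A minimal sketch of both forms (variable names are illustrative; the Mat is assumed to hold 8-bit image data of a type matToBitmap accepts, e.g. CV_8UC4 RGBA):

Bitmap bmp = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(mat, bmp);       // short form, premultiplyAlpha = false
Utils.matToBitmap(mat, bmp, true); // full form, with alpha premultiplication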

Usage

From source file:com.wallerlab.processing.utilities.ImageUtils.java

License:BSD License

public static Bitmap toBitmap(Mat mat) {
    Bitmap bmp = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mat, bmp);
    return bmp;
}
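
For the opposite direction the same class could expose a bitmapToMat wrapper; a minimal companion sketch (the method name toMat is ours, not from this source file):

public static Mat toMat(Bitmap bmp) {
    // Utils.bitmapToMat (re)allocates the destination as a CV_8UC4 Mat with RGBA channel order.
    Mat mat = new Mat();
    Utils.bitmapToMat(bmp, mat);
    return mat;
}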

From source file:edu.sfsu.cs.orange.ocr.OcrRecognizeAsyncTask.java

License:Apache License

@Override
protected Boolean doInBackground(Void... arg0) {
    long start = System.currentTimeMillis();
    Bitmap bitmap = activity.getCameraManager().buildLuminanceSource(data, width, height)
            .renderCroppedGreyscaleBitmap();

    String textResult;
    Mat image = new Mat();
    Utils.bitmapToMat(bitmap, image);
    Mat gray = new Mat();
    Utils.bitmapToMat(bitmap, gray);

    Mat background = new Mat();
    Utils.bitmapToMat(bitmap, background); //to test with BinarizeBG
    Mat finalimage = new Mat();
    Utils.bitmapToMat(bitmap, finalimage);

    //image.convertTo( gray,CvType.CV_8UC1);
    //image.convertTo(image,CvType.CV_64F);
    try {
        Imgcodecs.imwrite("/storage/emulated/0/DCIM/orig.jpg", image);
        OpencvNativeClass.BinarizeShafait(gray.getNativeObjAddr(), image.getNativeObjAddr());

        Imgcodecs.imwrite("/storage/emulated/0/DCIM/binarized.jpg", image);
        Utils.matToBitmap(image, bitmap);

        //Pix fimage = ReadFile.readBitmap(bitmap);
        //fimage = Binarize.otsuAdaptiveThreshold(fimage);

        //float angle = Skew.findSkew(fimage);
        //Log.i("Skew: ", Float.toString(angle));
        //double deg2rad = 3.14159265 / 180.;

        //fimage = Rotate.rotate(fimage, angle);

        //bitmap = WriteFile.writeBitmap(fimage);

        Mat skewed = new Mat();

        //Utils.bitmapToMat(bitmap,skewed);
        //Imgcodecs.imwrite("/storage/emulated/0/DCIM/deskewed.jpg", skewed);

        baseApi.setImage(ReadFile.readBitmap(bitmap));

        textResult = baseApi.getUTF8Text();
        timeRequired = System.currentTimeMillis() - start;

        // Check for failure to recognize text
        if (textResult == null || textResult.equals("")) {
            return false;
        }

        ocrResult = new OcrResult();
        ocrResult.setWordConfidences(baseApi.wordConfidences());
        ocrResult.setMeanConfidence(baseApi.meanConfidence());
        ocrResult.setRegionBoundingBoxes(baseApi.getRegions().getBoxRects());
        ocrResult.setTextlineBoundingBoxes(baseApi.getTextlines().getBoxRects());
        ocrResult.setWordBoundingBoxes(baseApi.getWords().getBoxRects());
        ocrResult.setStripBoundingBoxes(baseApi.getStrips().getBoxRects());

        // Iterate through the results.
        final ResultIterator iterator = baseApi.getResultIterator();
        int[] lastBoundingBox;
        ArrayList<Rect> charBoxes = new ArrayList<Rect>();
        iterator.begin();
        do {
            lastBoundingBox = iterator.getBoundingBox(PageIteratorLevel.RIL_SYMBOL);
            Rect lastRectBox = new Rect(lastBoundingBox[0], lastBoundingBox[1], lastBoundingBox[2],
                    lastBoundingBox[3]);
            charBoxes.add(lastRectBox);
        } while (iterator.next(PageIteratorLevel.RIL_SYMBOL));
        iterator.delete();
        ocrResult.setCharacterBoundingBoxes(charBoxes);

    } catch (RuntimeException e) {
        Log.e("OcrRecognizeAsyncTask",
                "Caught RuntimeException in request to Tesseract. Setting state to CONTINUOUS_STOPPED.");
        e.printStackTrace();
        try {
            baseApi.clear();
            activity.stopHandler();
        } catch (NullPointerException e1) {
            // Continue
        }
        return false;
    }
    timeRequired = System.currentTimeMillis() - start;
    ocrResult.setBitmap(bitmap);
    String[] temp = textResult.split("\n");
    if (temp.length != 0)
        textResult = "";
    for (int i = 0; i < temp.length; i++) {
        if (temp[i].length() != 0) {
            if (i < temp.length - 1) {
                textResult = textResult + temp[i] + "\n";
            } else
                textResult = textResult + temp[i];
        }
    }
    String textResult2 = ParsingNativeClass.ParseAddress(textResult);
    Log.d("Return parsing", textResult2);
    ocrResult.setViewtext(textResult);
    ocrResult.setText(textResult2);
    ocrResult.setRecognitionTimeRequired(timeRequired);
    return true;
}

From source file:i2r.snap2inspect.SamplePresentation.java

License:Apache License

public void setImageDynamic(Mat m) {
    // convert to bitmap:
    Bitmap bm = Bitmap.createBitmap(m.cols(), m.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(m, bm);
    mImageView.setImageBitmap(bm);
}
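
setImageBitmap must be called on the UI thread. If the Mat is produced on a worker thread (for example an OpenCV camera callback), one way to hand it over is via a main-looper Handler; a sketch, assuming android.os.Handler/Looper imports and such a background producer:

final Mat frame = m; // captured by the Runnable below
new Handler(Looper.getMainLooper()).post(new Runnable() {
    @Override
    public void run() {
        setImageDynamic(frame);
    }
});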

From source file:it.baywaylabs.jumpersumo.FrameDisplayCV.java

License:Open Source License

@Override
protected Bitmap doInBackground(Void... params) {

    if (bitmapOriginal != null) {
        this.imgMAT = new Mat(bitmapOriginal.getHeight(), bitmapOriginal.getWidth(), CvType.CV_8UC4); // Mat(rows, cols, type): rows = height, cols = width
        try {
            zxing();
        } catch (ChecksumException e) {
            e.printStackTrace();
        } catch (FormatException e) {
            e.printStackTrace();
        }
        Bitmap bitmapTranform = Bitmap.createBitmap(this.imgMAT.width(), this.imgMAT.height(),
                Bitmap.Config.ARGB_8888);
        //bitmapOriginal = Bitmap.createBitmap(imgMAT.width(), imgMAT.height(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(this.imgMAT, bitmapTranform);

        // return bitmapTranform;
    }

    return bitmapOriginal;
}

From source file:it.baywaylabs.jumpersumo.MainActivity.java

License:Open Source License

public void zxing(Mat mRgba) throws ChecksumException, FormatException {

    Bitmap bMap = Bitmap.createBitmap(mRgba.width(), mRgba.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(mRgba, bMap);
    int[] intArray = new int[bMap.getWidth() * bMap.getHeight()];
    //copy pixel data from the Bitmap into the 'intArray' array
    bMap.getPixels(intArray, 0, bMap.getWidth(), 0, 0, bMap.getWidth(), bMap.getHeight());

    LuminanceSource source = new RGBLuminanceSource(bMap.getWidth(), bMap.getHeight(), intArray);

    BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
    Reader reader = new QRCodeMultiReader();

    String sResult = "";
    Double AREA_RIFERIMENTO = 11500.0; // reference area ("area di riferimento") used as the distance threshold

    try {

        Result result = reader.decode(bitmap);
        sResult = result.getText();
        if (result.getBarcodeFormat().compareTo(BarcodeFormat.QR_CODE) == 0)
            Log.d(TAG, "SI! E' Un QRCode"); // "Yes! It's a QR code"
        ResultPoint[] points = result.getResultPoints();
        Log.d(TAG, "PUNTI: " + Arrays.toString(points)); // "PUNTI" = "points"; Arrays.toString (java.util.Arrays) logs the contents instead of the array's identity hash
        //for (ResultPoint point : result.getResultPoints()) {
        Point a = new Point(points[0].getX(), points[0].getY());
        Point b = new Point(points[2].getX(), points[2].getY());
        Rect rect = new Rect(a, b);
        Log.d(TAG, "Area del rettangolo: " + rect.area());
        if (rect.area() < AREA_RIFERIMENTO)
            Log.w(TAG, "Mi devo avvicinare!");
        else
            Log.w(TAG, "Mi devo allontanare!");
        Imgproc.rectangle(this.mRgba, new Point(points[0].getX(), points[0].getY()),
                new Point(points[2].getX(), points[2].getY()), new Scalar(0, 255, 0), 3);
        Log.d(TAG, sResult);
        Point center = new Point(0, 0);

        Imgproc.circle(this.mRgba, center, 10, new Scalar(0, 0, 255), 2);
        //if (!"".equals(sResult))
        //Toast.makeText(MainActivity.this, "QRCode Scanned: " + sResult, Toast.LENGTH_LONG).show();
    } catch (Resources.NotFoundException e) {
        Log.e(TAG, "Code Not Found");
        e.printStackTrace();
    } catch (NotFoundException e) {
        e.printStackTrace();
    }

}

From source file:karthiknr.TextID.ProcessAsyncActivity.java

License:Apache License

@Override
protected Bitmap doInBackground(Object... params) {

    try {

        if (params.length < 2) {
            Log.e(TAG, "Error passing parameter to execute - missing params");
            return null;
        }

        if (!(params[0] instanceof Context) || !(params[1] instanceof Bitmap)) {
            Log.e(TAG, "Error passing parameter to execute(context, bitmap)");
            return null;
        }

        context = (Context) params[0];

        bmp = (Bitmap) params[1];

        if (context == null || bmp == null) {
            Log.e(TAG, "Error passed null parameter to execute(context, bitmap)");
            return null;
        }

        Log.v(TAG, "Saving original bitmap");
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(DATA_PATH + "/oocr.png");
            bmp.compress(Bitmap.CompressFormat.PNG, 100, out);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (out != null) {
                    out.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        Log.v(TAG, "Starting Processing");

        //OpenCV Warping
        Bitmap mutableBitmap = bmp.copy(Bitmap.Config.ARGB_8888, true);

        Mat imgSource = new Mat(mutableBitmap.getHeight(), mutableBitmap.getWidth(), CvType.CV_8UC1);
        Utils.bitmapToMat(mutableBitmap, imgSource);
        Mat startM = findWarpedMat(imgSource);

        Mat sourceImage = new Mat(mutableBitmap.getHeight(), mutableBitmap.getWidth(), CvType.CV_8UC1);
        Utils.bitmapToMat(mutableBitmap, sourceImage);
        Mat warpedMat = warpImage(sourceImage, startM);

        Bitmap resultBitmap = Bitmap.createBitmap(warpedMat.cols(), warpedMat.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(warpedMat, resultBitmap);

        Log.v(TAG, "Got warped bitmap");
        Log.v(TAG, "Saving warped bitmap");

        out = null;
        try {
            out = new FileOutputStream(DATA_PATH + "/wocr.png");
            resultBitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (out != null) {
                    out.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        return resultBitmap;

    } catch (Exception ex) {
        Log.d(TAG, "Error: " + ex + "\n" + ex.getMessage());
    }

    return null;
}
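
findWarpedMat and warpImage are helpers defined elsewhere in this project and are not shown here. Assuming startM is a 3x3 perspective transform, warpImage might look roughly like the following sketch (illustrative only, not the project's actual code):

private static Mat warpImage(Mat src, Mat perspectiveTransform) {
    // Apply the 3x3 transform, keeping the source dimensions for the output.
    Mat dst = new Mat();
    Imgproc.warpPerspective(src, dst, perspectiveTransform, new Size(src.cols(), src.rows()));
    return dst;
}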

From source file:net.hydex11.opencvinteropexample.MainActivity.java

License:Open Source License

private void example() {
    RenderScript mRS = RenderScript.create(this);

    // Loads input image
    Bitmap inputBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.houseimage);

    // Puts input image inside an OpenCV mat
    Mat inputMat = new Mat();
    Utils.bitmapToMat(inputBitmap, inputMat);

    Mat outputMat = new Mat(inputMat.size(), inputMat.type());

    // Testing bitmap, used to test that the OpenCV mat actually has bitmap data inside
    Bitmap initialBitmap = Bitmap.createBitmap(inputMat.width(), inputMat.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(inputMat, initialBitmap);

    // Retrieve OpenCV mat data address
    long inputMatDataAddress = inputMat.dataAddr();
    long outputMatDataAddress = outputMat.dataAddr();

    // Creates a RS type that matches the input mat one.
    Element element = Element.RGBA_8888(mRS);
    Type.Builder tb = new Type.Builder(mRS, element);
    tb.setX(inputMat.width());
    tb.setY(inputMat.height());

    Type inputMatType = tb.create();

    // Creates a RenderScript allocation that uses directly the OpenCV input mat address
    Allocation inputAllocation = createTypedAllocationWithDataPointer(mRS, inputMatType, inputMatDataAddress);
    Allocation outputAllocation = createTypedAllocationWithDataPointer(mRS, inputMatType, outputMatDataAddress);

    // Define a simple convolve script
    // Note: here, ANY kernel can be applied!
    ScriptIntrinsicConvolve3x3 convolve3x3 = ScriptIntrinsicConvolve3x3.create(mRS, element);

    float convolveCoefficients[] = new float[9];
    convolveCoefficients[0] = 1;
    convolveCoefficients[2] = 1;
    convolveCoefficients[5] = 1;
    convolveCoefficients[6] = 1;
    convolveCoefficients[8] = 1;
    convolve3x3.setCoefficients(convolveCoefficients);

    convolve3x3.setInput(inputAllocation);
    convolve3x3.forEach(outputAllocation);

    mRS.finish();

    // Converts the result to a bitmap
    Bitmap cvOutputBitmap = Bitmap.createBitmap(outputMat.width(), outputMat.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(outputMat, cvOutputBitmap);

    // Testing bitmap, used to test the RenderScript ouput allocation contents
    // Note: it is placed here because the copyTo function clears the input buffer
    Bitmap rsOutputBitmap = Bitmap.createBitmap(outputMat.width(), outputMat.height(), Bitmap.Config.ARGB_8888);
    outputAllocation.copyTo(rsOutputBitmap);

    // Testing bitmap, used to test that RenderScript input allocation pointed to the OpenCV mat
    // Note: it is placed here because the copyTo function clears the input buffer
    Bitmap rsInitialBitmap = Bitmap.createBitmap(inputMat.width(), inputMat.height(), Bitmap.Config.ARGB_8888);
    inputAllocation.copyTo(rsInitialBitmap);

    // Display input and output
    ImageView originalImageIV = (ImageView) findViewById(R.id.imageView);
    ImageView inputRSImageIV = (ImageView) findViewById(R.id.imageView2);
    ImageView outputRSImageIV = (ImageView) findViewById(R.id.imageView3);
    ImageView outputCVIV = (ImageView) findViewById(R.id.imageView4);

    originalImageIV.setImageBitmap(initialBitmap);
    inputRSImageIV.setImageBitmap(rsInitialBitmap);
    outputRSImageIV.setImageBitmap(rsOutputBitmap);
    outputCVIV.setImageBitmap(cvOutputBitmap);

}

From source file:nz.ac.auckland.lablet.vision.CamShiftTracker.java

License:Open Source License

/**
 * Saves a Mat based image to /sdcard/ for debugging.
 *
 * @param frame The frame to save.
 * @param name The name of the file (without a file type).
 */
public void saveFrame(Mat frame, String name) {
    Bitmap bmp = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(frame, bmp);
    this.saveFrame(bmp, name);
}
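
The Bitmap overload it delegates to is not shown. A minimal sketch of what saving to /sdcard/ could look like (file location, PNG format, and the java.io/android.os.Environment imports are assumptions, not the project's actual code):

public void saveFrame(Bitmap bmp, String name) {
    File file = new File(Environment.getExternalStorageDirectory(), name + ".png");
    try (FileOutputStream out = new FileOutputStream(file)) {
        bmp.compress(Bitmap.CompressFormat.PNG, 100, out);
    } catch (IOException e) {
        e.printStackTrace();
    }
}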

From source file:org.akvo.caddisfly.helper.ImageHelper.java

License:Open Source License

/**
 * Gets the center of the backdrop in the test chamber
 *
 * @param bitmap the photo to analyse
 * @return the center point of the found circle
 */
public static Point getCenter(@NonNull Bitmap bitmap) {

    // convert bitmap to mat
    Mat mat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC1); // Mat(rows, cols, type): rows = height, cols = width
    Mat grayMat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC1);

    Utils.bitmapToMat(bitmap, mat);

    // convert to grayScale
    int colorChannels = (mat.channels() == 3) ? Imgproc.COLOR_BGR2GRAY
            : ((mat.channels() == 4) ? Imgproc.COLOR_BGRA2GRAY : 1);

    Imgproc.cvtColor(mat, grayMat, colorChannels);

    // reduce the noise so we avoid false circle detection
    //Imgproc.GaussianBlur(grayMat, grayMat, new Size(9, 9), 2, 2);

    // param1 = gradient value used to handle edge detection
    // param2 = Accumulator threshold value for the
    // cv2.CV_HOUGH_GRADIENT method.
    // The smaller the threshold is, the more circles will be
    // detected (including false circles).
    // The larger the threshold is, the more circles will
    // potentially be returned.
    double param1 = 10, param2 = 100;

    // create a Mat object to store the circles detected
    Mat circles = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC1);

    // find the circle in the image
    Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, RESOLUTION_INVERSE_RATIO,
            (double) MIN_CIRCLE_CENTER_DISTANCE, param1, param2, MIN_RADIUS, MAX_RADIUS);

    int numberOfCircles = (circles.rows() == 0) ? 0 : circles.cols();

    // draw the circles found on the image
    if (numberOfCircles > 0) {

        double[] circleCoordinates = circles.get(0, 0);

        int x = (int) circleCoordinates[0], y = (int) circleCoordinates[1];

        org.opencv.core.Point center = new org.opencv.core.Point(x, y);
        int foundRadius = (int) circleCoordinates[2];

        // circle outline
        Imgproc.circle(mat, center, foundRadius, COLOR_GREEN, 4);

        Utils.matToBitmap(mat, bitmap);

        return new Point((int) center.x, (int) center.y);
    }

    return null;
}
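
A hedged usage sketch, assuming the chamber photo is already available as a Bitmap and the call site has a Context so getResources() is available (the resource name is illustrative):

Bitmap photo = BitmapFactory.decodeResource(getResources(), R.drawable.test_chamber);
Point center = ImageHelper.getCenter(photo);
if (center != null) {
    Log.d("ImageHelper", "Backdrop center at " + center.x + "," + center.y);
} else {
    Log.d("ImageHelper", "No circle found");
}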

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

public static Bitmap makeBitmap(@NonNull Mat mat) {
    try {
        Bitmap bitmap = Bitmap.createBitmap(mat.width(), mat.height(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mat, bitmap);

        //double max = bitmap.getHeight() > bitmap.getWidth() ? bitmap.getHeight() : bitmap.getWidth();
        //double min = bitmap.getHeight() < bitmap.getWidth() ? bitmap.getHeight() : bitmap.getWidth();
        //double ratio = min / max;
        //int width = (int) Math.max(600, max);
        //int height = (int) Math.round(ratio * width);

        return Bitmap.createScaledBitmap(bitmap, mat.width(), mat.height(), false);

    } catch (Exception e) {
        Timber.e(e);
    }
    return null;
}