Example usage for org.opencv.android Utils matToBitmap

List of usage examples for org.opencv.android Utils matToBitmap

Introduction

On this page you can find example usage for org.opencv.android Utils matToBitmap.

Prototype

public static void matToBitmap(Mat mat, Bitmap bmp) 

Source Link

Document

Short form of matToBitmap(mat, bmp, premultiplyAlpha = false).

Usage

From source file:com.example.colordetector.CamMainActivity.java

License:Apache License

public void takePicture(View view) {
    // Convert the current RGB frame and its filtered mask into bitmaps so they
    // can be shown to the user (and, if chosen, written to storage later).
    bitmap = Bitmap.createBitmap(camWidth, camHeight, Bitmap.Config.ARGB_8888);
    bitmapMask = Bitmap.createBitmap(camWidth, camHeight, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgbFrame, bitmap);
    Utils.matToBitmap(filteredFrame, bitmapMask);

    // Hand off to the preview screen; the user's save/discard decision comes
    // back through onActivityResult with request code 1.
    startActivityForResult(new Intent(this, CapturedFrameActivity.class), 1);
}

From source file:com.example.root.dipproj.MainActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK) {
        return;
    }
    if (requestCode == 1) {
        // Camera-capture flow: locate the temporary "temp.jpg" written to
        // external storage, display it, and re-save it under a timestamped name.
        File f = new File(Environment.getExternalStorageDirectory().toString());
        File[] entries = f.listFiles();
        if (entries != null) { // listFiles() returns null when the dir is unreadable
            for (File temp : entries) {
                if (temp.getName().equals("temp.jpg")) {
                    f = temp;
                    break;
                }
            }
        }
        try {
            BitmapFactory.Options bitmapOptions = new BitmapFactory.Options();
            Bitmap bitmap = BitmapFactory.decodeFile(f.getAbsolutePath(), bitmapOptions);
            viewImage.setImageBitmap(bitmap);
            String path = android.os.Environment.getExternalStorageDirectory() + File.separator + "Phoenix"
                    + File.separator + "default";
            f.delete();
            File file = new File(path, String.valueOf(System.currentTimeMillis()) + ".jpg");
            // try-with-resources guarantees the stream is closed even if
            // compress()/flush() throws (the original leaked it on failure).
            try (OutputStream outFile = new FileOutputStream(file)) {
                bitmap.compress(Bitmap.CompressFormat.JPEG, 85, outFile);
                outFile.flush();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    } else if (requestCode == 2) {
        // Gallery-pick flow: resolve the content URI to a file path, run an
        // edge-enhancement pipeline on the image, then display and save it.
        if (data == null || data.getData() == null) {
            return; // nothing was picked
        }
        Uri selectedImage = data.getData();
        String[] filePath = { MediaStore.Images.Media.DATA };
        Cursor c = getContentResolver().query(selectedImage, filePath, null, null, null);
        String picturePath = null;
        if (c != null) {
            try { // always close the cursor, even if reading it fails
                if (c.moveToFirst()) {
                    picturePath = c.getString(c.getColumnIndex(filePath[0]));
                }
            } finally {
                c.close();
            }
        }
        if (picturePath == null) {
            return; // URI could not be resolved to a local file path
        }
        Bitmap thumbnail = (BitmapFactory.decodeFile(picturePath));
        Log.w("path of image", picturePath + "");
        Mat imgMat = new Mat();
        Mat imgMat2 = new Mat();
        Mat imgMat3 = new Mat();
        Utils.bitmapToMat(thumbnail, imgMat);
        Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_RGB2GRAY);
        org.opencv.core.Size s = new Size(3, 3);
        // Blur, then take the difference between a Sobel edge image and a
        // morphologically smoothed copy, and binarize with Otsu's threshold.
        Imgproc.GaussianBlur(imgMat, imgMat, s, 2);
        Imgproc.erode(imgMat, imgMat2, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
        Imgproc.dilate(imgMat2, imgMat3, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2)));
        Imgproc.Sobel(imgMat, imgMat, CvType.CV_8UC1, 1, 0);
        Core.absdiff(imgMat, imgMat3, imgMat);
        Imgproc.threshold(imgMat, imgMat, 123, 255, Imgproc.THRESH_OTSU);
        Utils.matToBitmap(imgMat, thumbnail);
        // Mats wrap native memory; release promptly instead of waiting for GC.
        imgMat.release();
        imgMat2.release();
        imgMat3.release();
        viewImage.setImageBitmap(thumbnail);
        saveBitmaptoSDCard(thumbnail);
    }
}

From source file:com.example.thibautg.libreaudioview.VideoProcessing.java

License:Open Source License

/**
 *
 * @param data/*ww  w.  java 2 s  . c om*/
 * @param width
 * @param height
 * @param bmp
 */
public void convertByteArrayToBitmap(byte[] data, int width, int height, Bitmap bmp) {
    //Log.i(TAG, "convertByteArrayToBitmap");
    Mat usedMat = mOutputGrayMat;
    //Mat usedMat = mInputGray
    try {
        Imgproc.cvtColor(usedMat, mRgba, Imgproc.COLOR_GRAY2RGBA);
        Utils.matToBitmap(mRgba, bmp);
    } catch (CvException e) {
        Log.d("Exception", e.getMessage());

    }
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

/**
 * Converts an OpenCV Mat (1- or 3-channel) to an ARGB_8888 Bitmap, rotating
 * it 90 degrees clockwise (transpose + horizontal flip) on the way.
 *
 * @param input source Mat; may be null
 * @return a new Bitmap, or an empty 0x0 Bitmap when input is null
 */
Bitmap matToBitmap(Mat input) {
    if (input == null) {
        return Bitmap.createBitmap(0, 0, Bitmap.Config.ARGB_8888);
    }
    Mat tmp = new Mat();
    try {
        if (input.channels() == 1) {
            Imgproc.cvtColor(input, tmp, Imgproc.COLOR_GRAY2RGB);
        } else {
            Imgproc.cvtColor(input, tmp, Imgproc.COLOR_BGR2RGB);
        }
        // Rotate 90° clockwise: transpose then flip around the y-axis.
        Core.transpose(tmp, tmp);
        Core.flip(tmp, tmp, 1);

        Bitmap bm = Bitmap.createBitmap(tmp.cols(), tmp.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(tmp, bm);
        return bm;
    } finally {
        // Mats hold native memory that the Java GC does not reclaim promptly;
        // the original leaked tmp on every call.
        tmp.release();
    }
}

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

@Override
public void onCreate(Bundle savedInstanceState) {
    // Builds the Eigenfaces screen: the PCA recognizer's average face image
    // followed by one ImageView per eigenface.
    Log.i("eigenFacesActivity::", "OnCreate");
    super.onCreate(savedInstanceState);

    setContentView(R.layout.eigenfaces);
    setTitle("Eigenfaces");
    Mat aver = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getAverage();
    Mat faces = ((PCAfaceRecog) KeyFaceActivity.recogAlgorithm).getEigenFaces();

    // Side length (px) of the square face images, from preferences.
    // Integer.parseInt replaces the deprecated new Integer(String) boxing ctor.
    int size = Integer.parseInt(KeyFaceActivity.prefs.getString("savedFaceSize", "200"));
    Mat aux = new Mat();

    // The average face arrives flattened; reshape it into a size x size image.
    aver = aver.reshape(1, size);
    aver = toGrayscale(aver);
    average = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
    Imgproc.cvtColor(aver, aux, Imgproc.COLOR_GRAY2RGBA, 4);
    Utils.matToBitmap(aux, average);
    LinearLayout layout = (LinearLayout) findViewById(id.eigenFacesHorizontalLayout);

    TextView avrgImgTV = new TextView(getApplicationContext());
    avrgImgTV.setText("Average image:");
    avrgImgTV.setPadding(5, 10, 10, 20);
    avrgImgTV.setGravity(Gravity.CENTER);

    TextView eigenfacesImgsTV = new TextView(getApplicationContext());
    eigenfacesImgsTV.setText("Eigenfaces:");
    eigenfacesImgsTV.setPadding(5, 10, 10, 20);
    eigenfacesImgsTV.setGravity(Gravity.CENTER);

    ImageView imgV = new ImageView(getApplicationContext());

    imgV.setClickable(false);
    imgV.setVisibility(0); // 0 == View.VISIBLE
    imgV.setPadding(0, 10, 10, 20);
    imgV.setImageBitmap(average);

    layout.addView(avrgImgTV);
    layout.addView(imgV);
    layout.addView(eigenfacesImgsTV);

    // One ImageView + Bitmap per eigenface row.
    LinkedList<ImageView> variables = new LinkedList<ImageView>();
    eigenfacesList = new LinkedList<Bitmap>();
    for (int i = 0; i < faces.rows(); i++) {
        variables.add(new ImageView(getApplicationContext()));
        eigenfacesList.add(Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888));

        // Each eigenface is stored as one row of 'faces'; reshape to square.
        // (The original also did a dead 'aux = new Mat()' immediately before
        // overwriting aux — removed.)
        aux = faces.row(i).reshape(1, size);
        aux = toGrayscale(aux);
        Mat auxGreyC4 = new Mat();
        Imgproc.cvtColor(aux, auxGreyC4, Imgproc.COLOR_GRAY2RGBA, 4);
        Utils.matToBitmap(auxGreyC4, eigenfacesList.get(i));
        auxGreyC4.release(); // free native memory once copied into the Bitmap

        variables.get(i).setClickable(false);
        variables.get(i).setVisibility(0); // 0 == View.VISIBLE
        variables.get(i).setPadding(0, 10, 10, 20);
        variables.get(i).setImageBitmap(eigenfacesList.get(i));
        layout.addView(variables.get(i));
    }

    Button save = (Button) findViewById(id.saveEigenfacesB);
    save.setOnClickListener(this);
}

From source file:com.joravasal.keyface.FindFacesView.java

License:Open Source License

@Override
protected Bitmap processFrame(VideoCapture camera) {
    // Per-frame pipeline: grab RGBA + grayscale frames, detect faces, try to
    // recognize each one (drawing an ellipse + label), optionally save a new
    // face, optionally overlay PCA debug data, and return the annotated frame
    // as a Bitmap (null when the Mat->Bitmap conversion fails).

    Mat mRgbaAux = new Mat();
    Mat mGrayAux = new Mat();
    camera.retrieve(mRgbaAux, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    camera.retrieve(mGrayAux, Highgui.CV_CAP_ANDROID_GREY_FRAME);
    // Correct the direction of the image (rotation depends on device camera).
    mRgba = correctCameraImage(mRgbaAux);
    mGray = correctCameraImage(mGrayAux);

    AlgorithmReturnValue resExample = null;
    // We look for faces in the captured images.
    if (cascade != null) {
        // Minimum face size is a fraction of the frame height (minFaceSize).
        int faceSize = Math.round(mGray.rows() * KeyFaceActivity.minFaceSize);
        List<Rect> faces = new LinkedList<Rect>();
        try {
            cascade.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(faceSize, faceSize));
        } catch (CvException e) {
            System.err.println(e.getMessage());
        }
        for (Rect r : faces) { //For each face

            //The Rectangle commented is the area that will be used to check the face,
            //but an ellipse is shown instead, I think it looks better.
            //Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0,0,255,100), 3);

            String nombre = null; // recognized person's name, null = unknown

            // We try to recognize it.
            AlgorithmReturnValue res = KeyFaceActivity.recogAlgorithm.recognizeFace(mGray.submat(r));
            resExample = res; // kept for the debug overlay below (last face wins)
            if (res.getResult() != -1) {
                //if it worked, we find the name
                nombre = findName(res.getResult());
            }
            Point center = new Point(r.x + (r.width / 2), r.y + (r.height / 2));
            //If nombre is null we have no name, thus is unrecognized and draw a red circle, together with the text "Unknown"
            if (nombre == null) {
                Core.ellipse(mRgba, center, new Size(r.width / 2 - 5, r.height / 2 + 20), 0, 0, 360,
                        new Scalar(255, 0, 0, 30), 3);
                Core.rectangle(mRgba, new Point(r.x + 45, r.y + r.height + 20),
                        new Point(r.x + 200, r.y + r.height + 60), new Scalar(70, 50, 50, 255), Core.FILLED);
                Core.putText(mRgba, "Unknown", new Point(r.x + 50, r.y + r.height + 50),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(200, 200, 200, 100));

                // Check if the user is trying to save a new face.
                if (KeyFaceActivity.addingFaces && faces.size() == 1) {
                    //All is in order, we save a new image and update our account of faces. We update the recognizer data as well.
                    addFaceToDB(mGray, r, savedFaces);

                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            KeyFaceActivity.prefs.edit()
                                    .putInt("savedFaces", KeyFaceActivity.prefs.getInt("savedFaces", 0) + 1)
                                    .apply();
                        }
                    });

                    /*KeyFaceActivity.lock.lock();
                    try {
                       KeyFaceActivity.faceAdded = true;
                       KeyFaceActivity.addingFaces = false;
                       KeyFaceActivity.condition.signalAll();
                    }
                    finally {
                       KeyFaceActivity.lock.unlock();
                    }
                    */

                    if (!KeyFaceActivity.recogAlgorithm.updateData(false)) {
                        System.err.println("Couldn't update the recognition algorithm with the new picture.");
                    }
                    KeyFaceActivity.addingFaces = false;

                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext, "Face saved successfully!",
                                    Toast.LENGTH_SHORT).show();
                        }
                    });
                }
                //The user tried to save a face when there was more than one; it fails and sends a message to the user.
                else if (KeyFaceActivity.addingFaces && faces.size() > 1) {
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext,
                                    "Make sure there is only one face!", Toast.LENGTH_SHORT).show();
                        }
                    });
                    KeyFaceActivity.addingFaces = false;
                }
            }

            else { //We know this face!
                Core.ellipse(mRgba, center, new Size(r.width / 2 - 5, r.height / 2 + 20), 0, 0, 360,
                        new Scalar(0, 255, 0, 100), 3);
                Core.rectangle(mRgba, new Point(r.x + 45, r.y + r.height + 20),
                        new Point(r.x + 200, r.y + r.height + 60), new Scalar(50, 70, 50, 255), Core.FILLED);
                Core.putText(mRgba, nombre, new Point(r.x + 50, r.y + r.height + 50), Core.FONT_HERSHEY_PLAIN,
                        2, new Scalar(0, 255, 0, 100));
                if (KeyFaceActivity.addingFaces && faces.size() == 1) {
                    // If the user tries to save a face that is already known, we don't let them.
                    KeyFaceActivity.toastHandler.post(new Runnable() {
                        public void run() {
                            Toast.makeText(KeyFaceActivity.globalappcontext, "This face is already known!",
                                    Toast.LENGTH_SHORT).show();
                        }
                    });
                    KeyFaceActivity.addingFaces = false;
                }
            }
        }
        //If there is no face we tell the user there was a mistake
        if (KeyFaceActivity.addingFaces && faces.size() <= 0) {
            KeyFaceActivity.toastHandler.post(new Runnable() {
                public void run() {
                    Toast.makeText(KeyFaceActivity.globalappcontext, "No face found!", Toast.LENGTH_SHORT)
                            .show();
                }
            });
            KeyFaceActivity.addingFaces = false;
        }
    }

    savedFaces = KeyFaceActivity.prefs.getInt("savedFaces", savedFaces);

    // Optional debug overlay: PCA distances for the last face processed.
    if (KeyFaceActivity.prefs.getBoolean("showData", false)) {
        try {
            if (resExample != null) {
                //background rectangle for extra info on PCA
                Core.rectangle(mRgba, new Point(0, mRgba.height() - 100),
                        new Point(mRgba.width(), mRgba.height()), new Scalar(50, 50, 50, 50), Core.FILLED);
                //Data for closest image
                Core.putText(mRgba, "1st", new Point(5, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getClosestImage()),
                        new Point(5, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                // NOTE(review): substring(0, 6) assumes the formatted distance has
                // at least 6 characters — shorter strings would throw. Confirm.
                Core.putText(mRgba, Double.toString(resExample.getDistClosestImage() / 100000).substring(0, 6),
                        new Point(5, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Data for second closest image
                Core.putText(mRgba, "2nd", new Point(180, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getSecondClosestImage()),
                        new Point(180, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba,
                        Double.toString(resExample.getDistSecondClosestImage() / 100000).substring(0, 6),
                        new Point(180, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Data for farthest image
                Core.putText(mRgba, "Last", new Point(355, mRgba.height() - 80), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Integer.toString(resExample.getFarthestImage()),
                        new Point(355, mRgba.height() - 55), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                Core.putText(mRgba, Double.toString(resExample.getDistFarthestImage() / 100000).substring(0, 6),
                        new Point(355, mRgba.height() - 30), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
                //Num images and threshold
                Core.putText(mRgba, "Images:" + savedFaces, new Point(15, mRgba.height() - 5),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(250, 250, 250, 200));
                Core.putText(mRgba,
                        "Th:" + Double.toString(resExample.getThreshold() / 100000).substring(0,
                                Math.min(6, Double.toString(resExample.getThreshold() / 100000).length())),
                        new Point(240, mRgba.height() - 5), Core.FONT_HERSHEY_PLAIN, 2,
                        new Scalar(250, 250, 250, 200));
            } else {
                Core.rectangle(mRgba, new Point(0, mRgba.height() - 30), new Point(200, mRgba.height()),
                        new Scalar(50, 50, 50, 50), Core.FILLED);
                Core.putText(mRgba, "Images:" + savedFaces, new Point(15, mRgba.height() - 5),
                        Core.FONT_HERSHEY_PLAIN, 2, new Scalar(250, 250, 250, 200));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    // matToBitmap returns false on failure; recycle the bitmap and signal
    // the caller with null rather than returning a blank frame.
    if (Utils.matToBitmap(mRgba, bmp))
        return bmp;

    bmp.recycle();
    return null;
}

From source file:com.joravasal.keyface.FindFacesView.java

License:Open Source License

/**
 * Crops the detected face region, scales it to the configured save size,
 * converts it to RGBA, and writes it as Face{numFaces}.png in the app's
 * directory on external storage.
 *
 * NOTE(review): the cvtColor uses COLOR_GRAY2RGBA, so despite the parameter
 * name this appears to expect a single-channel Mat — confirm with callers.
 *
 * @param mRGBA    source frame the face was detected in
 * @param r        face bounding box within mRGBA
 * @param numFaces index used to name the saved file
 */
private void addFaceToDB(Mat mRGBA, Rect r, int numFaces) {
    Mat aux = mRGBA.submat(r);
    // Integer.parseInt replaces the deprecated new Integer(String) boxing ctor.
    int saveSize = Integer.parseInt(KeyFaceActivity.prefs.getString("savedFaceSize", "200"));
    Imgproc.resize(aux, aux, new Size(saveSize, saveSize));
    final Bitmap bm = Bitmap.createBitmap(saveSize, saveSize, Bitmap.Config.ARGB_8888);
    Imgproc.cvtColor(aux, aux, Imgproc.COLOR_GRAY2RGBA, 4);
    Utils.matToBitmap(aux, bm);
    aux.release(); // free native memory once the pixels are in the Bitmap

    try {
        // Check that our folder exists (where all the photos are).
        File directory = new File(Environment.getExternalStorageDirectory(),
                KeyFaceActivity.globalappcontext.getString(R.string.app_dir));
        if (!directory.exists() && !directory.mkdirs()) {
            throw new IOException("Path to app directory could not be opened or created.");
        }
        //save image
        String lfile = Environment.getExternalStorageDirectory().getPath() + "/"
                + KeyFaceActivity.globalappcontext.getString(R.string.app_dir) + "/Face" + numFaces + ".png";
        // try-with-resources closes the stream even if compress()/flush()
        // throws (the original leaked it on failure).
        try (OutputStream out = new FileOutputStream(lfile)) {
            bm.compress(Bitmap.CompressFormat.PNG, 100, out);
            out.flush();
        }
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
}

From source file:com.minio.io.alice.MatVideoWriter.java

License:Open Source License

/**
 * Renders the given Mat into an ARGB bitmap, JPEG-encodes it at quality 100,
 * stores the bytes in the matByteArray field, and returns them.
 * Returns null if any step throws.
 */
private byte[] captureBitmap(Mat mat) {
    try {
        final Bitmap frame = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mat, frame);

        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        frame.compress(Bitmap.CompressFormat.JPEG, 100, buffer);

        // Snapshot the encoded bytes; closing a ByteArrayOutputStream is a no-op
        // but kept for symmetry with the original.
        matByteArray = buffer.toByteArray();
        buffer.close();
        return matByteArray;
    } catch (Exception ex) {
        System.out.println(ex.getMessage());
    }
    return null;
}

From source file:com.nekomeshi312.whiteboardcorrection.CameraViewFragment.java

License:Open Source License

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // Camera preview callback: run line detection and whiteboard-corner
    // detection on the YUV frame, draw the result into mMatRgba/mBmp, and
    // re-queue the preview buffer.
    // Drop frames whose buffer size does not match the expected YUV Mat size.
    if (data == null || mMatYuv.cols() * mMatYuv.rows() != data.length)
        return;

    mMatYuv.put(0, 0, data);
    // Alternative pure-Java path kept for reference (the original comments
    // here were garbled non-ASCII; they appear to describe a Java
    // Canny -> HoughLinesP pipeline):
    //      decodeYUV420SP(mRgb, data, mViewWidth, mViewHeight);
    //      mBmp.setPixels(mRgb, 0, mViewWidth, 0, 0, mViewWidth, mViewHeight);
    //      Utils.bitmapToMat(mBmp, mMatRgba);
    //      Mat gray = new Mat(mViewHeight, mViewWidth, CvType.CV_8UC1);
    //        Imgproc.cvtColor(mMatRgba, gray, Imgproc.COLOR_BGRA2GRAY, 0);
    //        Imgproc.Canny(gray, gray, 80, 100);
    //        Mat lines = new Mat();
    //        Imgproc.HoughLinesP(gray, lines, 1, Math.PI/180.0, (int) 50, 100.0, 20.0);        
    //      gray.release();
    //

    int lineNum = mLineDetector.lineDetect(mMatYuv, true, mMatRgba, mLines);
    if (MyDebug.DEBUG)
        Log.d(LOG_TAG, "line Num = " + lineNum);

    // Try to find the whiteboard's four corners among the detected lines.
    ArrayList<Point> points = new ArrayList<Point>();
    LineDetectCallback callback = (LineDetectCallback) mParentActivity;

    if (mWbDetect.detectWhiteBoard(mLines, lineNum, points, mMatRgba)) {
        mWhiteBoardView.setWhiteBoardCorners(points);
        if (callback == null)
            return;
        callback.onLineDetected(points);
        // Detection succeeded: hide the error message and reset the timer.
        if (mLastUnDetectTime > 0) {
            mTextViewLineDetectErrorMsg.setVisibility(View.INVISIBLE);
            mLastUnDetectTime = -1;
        }
    } else {
        callback.onLineDetected(null);
        long currentSec = System.currentTimeMillis();
        if (mLastUnDetectTime < 0) { // first miss: start the grace-period timer
            mLastUnDetectTime = currentSec;
        } else if ((currentSec - mLastUnDetectTime) < CHECK_UNDETECT_TIME) { // still within grace period: stay quiet
        } else if (mTextViewLineDetectErrorMsg.getVisibility() != View.VISIBLE) { // grace period elapsed: show the error
            mTextViewLineDetectErrorMsg.setVisibility(View.VISIBLE);
        }
    }
    if (mMatRgba != null) {
        Utils.matToBitmap(mMatRgba, mBmp);
        mCameraSurfaceView.invalidate();
    }
    // Re-queue the preview buffer only if the camera is still available.
    if (mCameraSetting == null)
        return;
    if (mCameraSetting.getCamera() == null)
        return;
    if (!mCameraSetting.isCameraOpen())
        return;
    mCameraSetting.getCamera().addCallbackBuffer(mBuffer);
}

From source file:com.serenegiant.usbcameratest.MainActivity.java

License:Apache License

@Override
public boolean onTouchEvent(MotionEvent e) {
    // While template creation is active, touches define a selection rectangle
    // over the search image and the preview is redrawn with it outlined.
    if (!createTemplateFlag) {
        return true;
    }
    final int touchX = (int) e.getX();
    final int touchY = (int) e.getY();
    // The first touch pins the top-left corner; later touches set the extent
    // relative to that corner.
    if (templateRect.x == -1 || templateRect.y == -1) {
        templateRect.x = touchX;
        templateRect.y = touchY;
    } else {
        templateRect.width = touchX - templateRect.x;
        templateRect.height = touchY - templateRect.y;
    }
    textView1.setText("tl:" + templateRect.tl());

    // Redraw the search image with the current selection outlined.
    showImg = searchImg.clone();
    Imgproc.rectangle(showImg, templateRect.tl(), templateRect.br(), new Scalar(0, 0, 255), 5);
    Bitmap preview = Bitmap.createBitmap(showImg.width(), showImg.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(showImg, preview);
    imageView1.setImageBitmap(preview);

    return true;
}