Example usage for android.media.Image.close()

Introduction

This page lists usage examples for android.media.Image.close().

Prototype

@Override
public abstract void close();

Document

Free up this frame for reuse.
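
Because Image implements AutoCloseable, try-with-resources is the simplest way to guarantee that a frame is returned to its ImageReader on every path. Below is a minimal sketch, not taken from the examples on this page; the FrameConsumer class and consumeLatestFrame name are assumptions for illustration.

import android.media.Image;
import android.media.ImageReader;
import java.nio.ByteBuffer;

public class FrameConsumer {
    // Drain the newest frame from an already-configured reader.
    void consumeLatestFrame(ImageReader reader) {
        // acquireLatestImage may return null when no frame is queued;
        // try-with-resources simply skips close() on a null resource.
        try (Image image = reader.acquireLatestImage()) {
            if (image == null) {
                return;
            }
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            // Consume the pixel data here: plane buffers become invalid
            // the moment the Image is closed.
        } // image.close() runs automatically, freeing the frame for reuse
    }
}

Holding images without closing them eventually stalls the reader once all of its maxImages buffers are outstanding, which is why the examples below close the Image on every path.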

Usage

From source file:com.sien.cpshoot.screencapture.ScreenCaptureFragment.java

private void saveImage(ImageReader mImageReader, int mWidth, int mHeight) {
    if (mImageReader == null)
        return;

    Image image = mImageReader.acquireLatestImage();
    if (image == null)
        return;
    final Image.Plane[] planes = image.getPlanes();
    if (planes.length <= 0) {
        image.close(); // release the frame even on this early-exit path
        return;
    }

    final ByteBuffer buffer = planes[0].getBuffer();
    int offset = 0;
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * mWidth;
    // The screen-capture buffer is RGBA_8888 (four bytes per pixel), so the
    // bitmap must be ARGB_8888 for copyPixelsFromBuffer to accept the buffer.
    Bitmap bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    image.close();

    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss");
    String strDate = dateFormat.format(new java.util.Date());
    String pathImage = Environment.getExternalStorageDirectory().getPath() + "/Pictures/";
    String nameImage = pathImage + strDate + ".png";
    if (bitmap != null) {
        try {
            File fileImage = new File(nameImage);
            fileImage.getParentFile().mkdirs(); // make sure the Pictures directory exists
            if (!fileImage.exists()) {
                fileImage.createNewFile();
            }
            FileOutputStream out = new FileOutputStream(fileImage);
            if (out != null) {
                bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
                out.flush();
                out.close();
                Toast.makeText(getActivity(), "Screen capture saved", Toast.LENGTH_SHORT).show();
                Intent media = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                Uri contentUri = Uri.fromFile(fileImage);
                media.setData(contentUri);
                getActivity().sendBroadcast(media);

                beginCrop(contentUri);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

From source file:MainActivity.java

protected void takePicture(View view) {
    if (null == mCameraDevice) {
        return;
    }
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
        StreamConfigurationMap configurationMap = characteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (configurationMap == null)
            return;
        Size largest = Collections.max(Arrays.asList(configurationMap.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());
        ImageReader reader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    if (image == null) {
                        return; // no frame was ready; nothing to save or close
                    }
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.capacity()];
                    buffer.get(bytes);
                    OutputStream output = new FileOutputStream(getPictureFile());
                    output.write(bytes);
                    output.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }
        };
        HandlerThread thread = new HandlerThread("CameraPicture");
        thread.start();
        final Handler backgroundHandler = new Handler(thread.getLooper());
        reader.setOnImageAvailableListener(readerListener, backgroundHandler);
        final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                Toast.makeText(MainActivity.this, "Picture Saved", Toast.LENGTH_SHORT).show();
                startPreview(session);
            }
        };
        mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureCallback, backgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java

private void process_jpeg(@NonNull Image image, File file) {

    Log.d(TAG, "Create JPEG");
    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);
    saveJpeg(file, bytes);
    // Close only after the buffer has been fully drained: the plane buffers
    // become invalid once the Image is closed, so `buffer` must not be
    // touched afterwards.
    image.close();
}

From source file:com.android.camera2.its.ItsService.java

private ImageReader.OnImageAvailableListener createAvailableListenerDropper(final CaptureCallback listener) {
    return new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            // Acquire and immediately close: this listener only drains frames
            // so the reader's buffer queue never fills up.
            Image i = reader.acquireNextImage();
            i.close();
        }
    };
}

From source file:org.tensorflow.demo.Camera2BasicFragment.java

private void classifyImage(Image image) {
    try {
        Log.d("predict_class", "1");

        if (image == null) {
            return;
        }

        if (computing) {
            image.close();
            return;
        }
        Log.d("predict_class", "2");
        computing = true;

        Trace.beginSection("imageAvailable");
        Log.d("predict_class", image.getHeight() + "");
        Log.d("predict_class", image.getWidth() + "");

        final Image.Plane[] planes = image.getPlanes();

        fillBytes(planes, yuvBytes);

        final int yRowStride = planes[0].getRowStride();

        final int uvRowStride = planes[1].getRowStride();

        final int uvPixelStride = planes[1].getPixelStride();

        ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], rgbBytes, previewWidth,
                previewHeight, yRowStride, uvRowStride, uvPixelStride, false);

        image.close();

    } catch (final Exception e) {
        Log.d("predict_class", "error: " + e.getMessage());

        if (image != null) {
            image.close();
        }
        LOGGER.e(e, "Exception!");
        Trace.endSection();
        return;
    }

    rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);

    final Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

    // For examining the actual TF input.
    if (SAVE_PREVIEW_BITMAP) {
        ImageUtils.saveBitmap(croppedBitmap);
    }

    runInBackground(new Runnable() {
        @Override
        public void run() {
            final long startTime = SystemClock.uptimeMillis();
            final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
            lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
            String toastString = "";
            Log.d("predict_class", results.toString());
            for (Classifier.Recognition cr : results) {
                toastString = toastString + " " + cr.getTitle() + ": " + cr.getConfidence() + ";";
            }
            Log.d("predict_class", toastString);

            //showToast(toastString);
            Intent intent = new Intent(getActivity(), ClassifierResultActivity.class);
            Gson gs = new Gson();
            String resultString = gs.toJson(results);
            intent.putExtra("result", resultString);
            startActivity(intent);

            cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
            computing = false;
        }
    });

    Trace.endSection();
}

From source file:com.android.camera2.its.ItsService.java

public ImageReader.OnImageAvailableListener createAvailableListener(final CaptureCallback listener) {
    return new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image i = null;
            try {
                i = reader.acquireNextImage();
                listener.onCaptureAvailable(i);
            } finally {
                if (i != null) {
                    i.close();
                }
            }
        }
    };
}

From source file:org.tensorflow.demo.Camera2BasicFragment.java

private void ocrImage(Image image) {
    Log.d("predict_class", "ocrImage");

    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);

    ocrBitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
    //Log.d("predict_class", ocrBitmap.toString());
    /*
    FileOutputStream output = null;
    try {
        output = new FileOutputStream(mFile);
        output.write(bytes);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        image.close();
        if (null != output) {
            try {
                output.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    */

    runInBackground(new Runnable() {
        @Override
        public void run() {
            Log.d("predict_class", "run...");
            TessBaseAPI tessBaseAPI = new TessBaseAPI();
            tessBaseAPI.init("/mnt/sdcard/", "eng", 0);
            tessBaseAPI.setImage(ocrBitmap);
            ImageUtils.saveBitmap(ocrBitmap);
            String recognisedText = tessBaseAPI.getUTF8Text();

            Log.d("predict_class", "recognisedText: " + recognisedText);
        }
    });

    Log.d("predict_class", "Saved image");
    image.close();
}