Example usage for android.graphics ImageFormat YUV_420_888

Introduction

This page collects example usages of android.graphics ImageFormat.YUV_420_888 from real source files.

Prototype

public static final int YUV_420_888

Document

Multi-plane Android YUV 420 format

This format is a generic YCbCr format, capable of describing any 4:2:0 chroma-subsampled planar or semiplanar buffer (but not fully interleaved), with 8 bits per color sample.

Images in this format are always represented by three separate buffers of data, one for each color plane.
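
Each plane carries its own row stride and pixel stride, which readers must honor when copying the data out. Below is a minimal sketch, using only the public Image API, of inspecting the three planes of a YUV_420_888 image; the class and method names are illustrative, not taken from the examples that follow.

import android.graphics.ImageFormat;
import android.media.Image;
import java.nio.ByteBuffer;

public class YuvPlaneInspector {
    // Plane 0 is Y; planes 1 and 2 are U and V.
    public static void describe(Image image) {
        if (image.getFormat() != ImageFormat.YUV_420_888) {
            throw new IllegalArgumentException("expected YUV_420_888");
        }
        for (Image.Plane plane : image.getPlanes()) {
            ByteBuffer buffer = plane.getBuffer();
            // rowStride may exceed the plane width (row padding); pixelStride is
            // 1 for planar chroma and 2 for semiplanar (interleaved) chroma.
            System.out.println("bytes=" + buffer.remaining() + " rowStride="
                    + plane.getRowStride() + " pixelStride=" + plane.getPixelStride());
        }
    }
}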

Usage

From source file:Main.java

public static String translatePreviewFormat(int supportedPreviewFormat) {
    switch (supportedPreviewFormat) {
    case ImageFormat.JPEG:
        return "ImageFormat.JPEG";
    case ImageFormat.NV16:
        return "ImageFormat.NV16";
    case ImageFormat.NV21:
        return "ImageFormat.NV21";
    case ImageFormat.RAW10:
        return "ImageFormat.RAW10";
    case ImageFormat.RAW_SENSOR:
        return "ImageFormat.RAW_SENSOR";
    case ImageFormat.RGB_565:
        return "ImageFormat.RGB_565";
    case ImageFormat.UNKNOWN:
        return "ImageFormat.UNKNOWN";
    case ImageFormat.YUV_420_888:
        return "ImageFormat.YUV_420_888";
    case ImageFormat.YUY2:
        return "ImageFormat.YUY2";
    case ImageFormat.YV12:
        return "ImageFormat.YV12";
    default:
        return "Unknown ImageFormat: " + supportedPreviewFormat;
    }
}
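
A plausible call site, sketched here as an assumption rather than taken from the project, logs every output format a camera advertises (characteristics and TAG are presumed to be in scope):

    StreamConfigurationMap map = characteristics
            .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (map != null) {
        for (int format : map.getOutputFormats()) {
            Log.d(TAG, translatePreviewFormat(format));
        }
    }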

From source file:Main.java

private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        return 3 == planes.length;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.RAW12:
    case ImageFormat.JPEG:
        return 1 == planes.length;
    default:
        return false;
    }
}
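
The check encodes a simple rule: multi-planar YUV formats always report three planes, while compressed and raw formats report exactly one. ItsUtils.getDataFromImage below uses the same check as its validity gate.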

From source file:com.android.camera2.its.ItsUtils.java

public static Size[] getYuvOutputSizes(CameraCharacteristics ccs) throws ItsException {
    return getOutputSizes(ccs, ImageFormat.YUV_420_888);
}
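
getOutputSizes here is another ItsUtils helper that is not shown in this excerpt; the following is a sketch of what it presumably does, using only the public StreamConfigurationMap API (imports of Size and StreamConfigurationMap are assumed, as in the original file):

public static Size[] getOutputSizes(CameraCharacteristics ccs, int format) throws ItsException {
    StreamConfigurationMap configMap = ccs.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    if (configMap == null) {
        throw new ItsException("Failed to get stream configuration map");
    }
    return configMap.getOutputSizes(format);
}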

From source file:com.android.camera2.its.ItsUtils.java

public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;

    // Read image data
    Plane[] planes = image.getPlanes();

    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }

    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have pixel stride and row stride; treat it as a 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format("Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance buffer the remainder of the row stride
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride, but slower.
                    // Use an intermediate buffer to avoid byte-by-byte reads from the
                    // DirectByteBuffer, which are very bad for performance.
                    // Also avoid out-of-bounds access by only reading the bytes still
                    // available in the buffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // PixelStride of 0 can mean pixel isn't a multiple of 8 bits, for
                        // example with RAW10. Just copy the buffer, dropping any padding at
                        // the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
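
For YUV_420_888 the returned array is tightly packed: ImageFormat.getBitsPerPixel reports 12 bits per pixel for this format, so the buffer holds width * height luma bytes followed by two quarter-resolution chroma planes. A 640x480 frame, for example, yields 640 * 480 * 12 / 8 = 460800 bytes.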

From source file:com.andrasta.dashi.MainActivity.java

private void createCameraSizesAdapter() {
    Pair<String, List<Size>> pair = CameraUtils.getMainCameraImageSizes(this, ImageFormat.YUV_420_888);
    if (pair == null) {
        throw new RuntimeException("No camera sizes");
    }
    cameraRecSize = pair.second.get(0);
    updatePolygonView();
    ArrayAdapter<Size> adapter = new ArrayAdapter<>(this, R.layout.simple_spinner_item, pair.second);
    adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    spinner.setAdapter(adapter);
}

From source file:com.andrasta.dashi.MainActivity.java

@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
    try {
        configBuilder = CameraUtils.initCameraConfig(this, display, width, height);
        onCameraOrientationSet(configBuilder.getCameraOrientation());

        int cameraWidth = configBuilder.getSize().getWidth();
        int cameraHeight = configBuilder.getSize().getHeight();

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = this.getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            textureView.setAspectRatio(cameraWidth, cameraHeight);
            laneView.setAspectRatio(cameraWidth, cameraHeight);
        } else {
            textureView.setAspectRatio(cameraHeight, cameraWidth);
            laneView.setAspectRatio(cameraHeight, cameraWidth);
        }
        Matrix matrix = CameraUtils.configureTransform(display.getRotation(), width, height, cameraWidth,
                cameraHeight);
        textureView.setTransform(matrix);
        SurfaceTexture texture = textureView.getSurfaceTexture();
        if (texture == null) {
            Log.d(TAG, "No SurfaceTexture");
            return;
        }
        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(cameraWidth, cameraHeight);

        CameraConfig.Request request = new CameraConfig.Request(CameraDevice.TEMPLATE_PREVIEW,
                new Surface(texture));
        request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        configBuilder.addRequest(request);
        Log.d(TAG, "Display camera resolution " + cameraWidth + 'x' + cameraHeight);

        imageReader = ImageReader.newInstance(cameraRecSize.getWidth(), cameraRecSize.getHeight(),
                ImageFormat.YUV_420_888, alprHandler.getThreadsNum() + 1);
        imageReader.setOnImageAvailableListener(this, null);
        request = new CameraConfig.Request(CameraDevice.TEMPLATE_PREVIEW, imageReader.getSurface());
        configBuilder.addRequest(request);
        Log.d(TAG,
                "Recognition camera resolution " + cameraRecSize.getWidth() + 'x' + cameraRecSize.getHeight());

        camera.open(configBuilder.build());
        Log.d(TAG, "Camera opened: " + configBuilder.getCameraId());
    } catch (CameraAccessException e) {
        onError(false, e);
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the device
        onError(true, e);
    }
}
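
The design point in this example is that one camera feeds two TEMPLATE_PREVIEW targets: the TextureView surface for display and a YUV_420_888 ImageReader for recognition. Sizing the reader as alprHandler.getThreadsNum() + 1 lets each recognition thread hold one image while a fresh frame is still being delivered; with too few maxImages, the camera stalls once all buffers are checked out.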

From source file:com.tzutalin.dlibtest.CameraConnectionFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
@DebugLog
@SuppressLint("LongLogTag")
private void setUpCameraOutputs(final int width, final int height) {
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        SparseArray<Integer> cameraFaceTypeMap = new SparseArray<>();
        // Check the facing types of camera devices
        for (final String cameraId : manager.getCameraIdList()) {
            final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                if (cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_FRONT) != null) {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_FRONT,
                            cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_FRONT) + 1);
                } else {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_FRONT, 1);
                }
            }

            if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                if (cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_BACK) != null) {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_BACK,
                            cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_BACK) + 1);
                } else {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_BACK, 1);
                }
            }
        }

        Integer num_facing_back_camera = cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_BACK);
        for (final String cameraId : manager.getCameraIdList()) {
            final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            // If a back-facing or external camera exists, we won't use a front-facing camera
            if (num_facing_back_camera != null && num_facing_back_camera > 0) {
                // Skip front-facing cameras when other facing types are available
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }
            }

            final StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            final Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            final int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
            } else {
                textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
            }

            CameraConnectionFragment.this.cameraId = cameraId;
            return;
        }
    } catch (final CameraAccessException e) {
        Timber.tag(TAG).e(e, "Exception!");
    } catch (final NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
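
chooseOptimalSize and CompareSizesByArea come from Google's Camera2Basic sample and are not reproduced in this excerpt. The following sketch is assumed to match what this fragment calls (the team254 example further down uses a variant with extra max-size bounds); imports of Size, List, ArrayList, Collections, and Comparator are presumed:

private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
    // Collect sizes that are at least as large as the surface and keep its aspect ratio.
    List<Size> bigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getHeight() == option.getWidth() * h / w
                && option.getWidth() >= width && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }
    // Prefer the smallest sufficient size; otherwise fall back to the first choice.
    return bigEnough.isEmpty() ? choices[0] : Collections.min(bigEnough, new CompareSizesByArea());
}

static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Multiply as long to avoid int overflow on large sensor sizes.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}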

From source file:com.microblink.barcode.customcamera.camera2.Camera2Fragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (sensorOrientation == 90 || sensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (sensorOrientation == 0 || sensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), maxPreviewWidth,
                    maxPreviewHeight, largest);

            Log.i(TAG, "Preview size is " + mPreviewSize.toString());

            mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                    ImageFormat.YUV_420_888, /*maxImages*/1);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance("Camera error").show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
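
None of these fragments show the listener that actually consumes the YUV_420_888 frames. A minimal sketch of the pattern, assuming nothing beyond the public ImageReader API:

private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
        new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = reader.acquireLatestImage();
                if (image == null) {
                    return; // no frame ready yet
                }
                try {
                    // image.getFormat() is ImageFormat.YUV_420_888 here;
                    // hand image.getPlanes() to the processing pipeline.
                } finally {
                    // Always close, or the reader stops delivering once
                    // maxImages buffers are outstanding.
                    image.close();
                }
            }
        };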

From source file:com.team254.cheezdroid.SelfieModeFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            Log.i("CameraId", cameraId);
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // This is a selfie fragment, so we skip back-facing cameras.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                Log.w(TAG, "Skipped for facing " + cameraId);
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                Log.w(TAG, "Skipped for null map " + cameraId);
                continue;
            }

            // For still image captures, we use the largest available size.
            Size[] imgSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
            Size selected = imgSizes[imgSizes.length - 1];
            for (Size s : imgSizes) {
                if (s.getWidth() <= 352) {
                    selected = s;
                    break;
                }
            }
            mImageReader = ImageReader.newInstance(selected.getWidth(), selected.getHeight(),
                    ImageFormat.YUV_420_888, /*maxImages*/3);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (sensorOrientation == 90 || sensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (sensorOrientation == 0 || sensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, selected);

            Log.i(TAG, "Size : " + mPreviewSize);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        e.printStackTrace();
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance("Camera error").show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
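
Note the size-selection heuristic in this last example: getOutputSizes typically returns sizes in descending order, so walking the array and stopping at the first width of 352 or less picks the largest YUV_420_888 size that fits that budget, trading resolution for vision-pipeline throughput.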