Example usage for android.graphics ImageFormat JPEG

List of usage examples for android.graphics ImageFormat JPEG

Introduction

This page collects example usages of the android.graphics.ImageFormat.JPEG constant.

Prototype

public static final int JPEG

Document

Compressed JPEG format.
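
ImageFormat.JPEG identifies compressed JPEG output from the camera pipeline; it is most often passed to ImageReader.newInstance or used to query a StreamConfigurationMap, as the examples below show. As a minimal, hedged sketch (the JpegReaderFactory class and method name are illustrative, not part of the Android framework):

import android.graphics.ImageFormat;
import android.media.ImageReader;

final class JpegReaderFactory {
    // Minimal sketch: create an ImageReader that delivers compressed JPEG frames.
    // Production code should pick a size reported by the camera's
    // StreamConfigurationMap, as the examples in the Usage section below do.
    static ImageReader createJpegReader(int width, int height) {
        return ImageReader.newInstance(width, height, ImageFormat.JPEG, /*maxImages*/ 2);
    }
}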

Usage

From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java

private void saveImage(ImageHolder image) {
    int burstcount = parameterHandler.Burst.GetValue() + 1;
    String f;
    if (burstcount > 1)
        f = cameraUiWrapper.getActivityInterface().getStorageHandler()
                .getNewFilePath(appSettingsManager.GetWriteExternal(), "_" + imagecount);
    else
        f = cameraUiWrapper.getActivityInterface().getStorageHandler()
                .getNewFilePath(appSettingsManager.GetWriteExternal(), "");
    File file = null;
    imagecount++;
    switch (image.getImage().getFormat()) {
    case ImageFormat.JPEG:
        file = new File(f + ".jpg");
        process_jpeg(image.getImage(), file);
        break;
    case ImageFormat.RAW10:
        file = new File(f + ".dng");
        process_rawWithDngConverter(image, DngProfile.Mipi, file);
        break;
    case ImageFormat.RAW12:
        file = new File(f + ".dng");
        process_rawWithDngConverter(image, DngProfile.Mipi12, file);
        break;
    case ImageFormat.RAW_SENSOR:
        file = new File(f + ".dng");
        if (appSettingsManager.getDevice() == Devices.Moto_X2k14
                || appSettingsManager.getDevice() == Devices.OnePlusTwo)
            process_rawWithDngConverter(image, DngProfile.Mipi16, file);
        else
            process_rawSensor(image, file);
        break;
    }
    internalFireOnWorkDone(file);
    isWorking = false;
    changeCaptureState(CaptureStates.image_capture_stop);
    if (burstcount == imagecount) {
        finishCapture(captureBuilder);
    }
}
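
The process_jpeg helper called above is not reproduced on this page. As a rough, hedged sketch of what such a helper typically does (the JpegWriter class and writeJpeg name are hypothetical): a JPEG Image carries the whole compressed stream in the buffer of its single plane, which can be written to the file directly.

import android.media.Image;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

final class JpegWriter {
    // Copy the compressed JPEG bytes from the Image's single plane into the file.
    static void writeJpeg(Image image, File file) throws IOException {
        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        try (FileOutputStream output = new FileOutputStream(file)) {
            output.write(bytes);
        } finally {
            image.close(); // Release the slot in the ImageReader queue.
        }
    }
}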

From source file:com.askjeffreyliu.camera2barcode.camera.CameraSource.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    try {
        if (ContextCompat.checkSelfPermission(mContext,
                Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            return;
        }
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        if (manager == null)
            manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        mCameraId = manager.getCameraIdList()[mFacing];
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return;
        }

        // For still image captures, we use the largest available size.
        Size largest = getBestAspectPictureSize(map.getOutputSizes(ImageFormat.JPEG));

        // Find out if we need to swap dimension to get the preview size relative to sensor
        // coordinate.
        int displayRotation = mDisplayOrientation;
        //noinspection ConstantConditions
        int mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        boolean swappedDimensions = false;
        switch (displayRotation) {
        case Surface.ROTATION_0:
        case Surface.ROTATION_180:
            if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                swappedDimensions = true;
            }
            break;
        case Surface.ROTATION_90:
        case Surface.ROTATION_270:
            if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                swappedDimensions = true;
            }
            break;
        default:
            Log.e(TAG, "Display rotation is invalid: " + displayRotation);
        }

        Point displaySize = new Point(Utils.getScreenWidth(mContext), Utils.getScreenHeight(mContext));
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;

        if (swappedDimensions) {
            rotatedPreviewWidth = height;
            rotatedPreviewHeight = width;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }

        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }

        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }

        // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        Size[] outputSizes = Utils.sizeToSize(map.getOutputSizes(SurfaceTexture.class));
        mPreviewSize = chooseOptimalSize(outputSizes, rotatedPreviewWidth, rotatedPreviewHeight,
                maxPreviewWidth, maxPreviewHeight, largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = mDisplayOrientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }

        // Check if the flash is supported.
        Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        mFlashSupported = available == null ? false : available;

        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange = characteristics
                .get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);

        configureTransform(width, height);

        manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        Log.d(TAG, "Camera Error: " + e.getMessage());
    }
}
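
The chooseOptimalSize helper used above is not shown on this page. A hedged sketch in the spirit of the Camera2 samples (the PreviewSizeChooser class name is illustrative): prefer the smallest output size that covers the requested preview dimensions, matches the aspect ratio of the still-capture size, and stays within the maximum preview bounds.

import android.util.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

final class PreviewSizeChooser {
    static Size chooseOptimalSize(Size[] choices, int textureWidth, int textureHeight,
                                  int maxWidth, int maxHeight, Size aspectRatio) {
        List<Size> bigEnough = new ArrayList<>();    // At least as large as the texture.
        List<Size> notBigEnough = new ArrayList<>(); // Smaller than the texture.
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                    && option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= textureWidth && option.getHeight() >= textureHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }
        Comparator<Size> byArea = (lhs, rhs) -> Long.signum(
                (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
        if (!bigEnough.isEmpty()) {
            return Collections.min(bigEnough, byArea);
        } else if (!notBigEnough.isEmpty()) {
            return Collections.max(notBigEnough, byArea);
        }
        return choices[0]; // Fall back to the first reported size.
    }
}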

From source file:com.example.camera2apidemo.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            checkCamera2Support(characteristics);
            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(LENS_FACING);
            if (facing != null && facing == LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }
            // For still image captures, we use the largest available size.
            // maxImages of the ImageReader created below is set to 2.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                    /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }
            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            // Note: map.getOutputSizes(SurfaceTexture.class) lists the SurfaceTexture output sizes supported by the camera.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
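
Several of these examples pick the largest JPEG size with Collections.max(..., new CompareSizesByArea()). The comparator's source is not reproduced here; a hedged sketch of the standard Camera2-sample version:

import android.util.Size;
import java.util.Comparator;

final class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long to avoid overflow when multiplying large dimensions.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}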

From source file:com.example.android.camera2raw.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}
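
The contains(int[], int) helper that gates the RAW path above is not shown on this page. A hedged sketch (the CapabilityUtils class name is illustrative): a plain linear scan over the reported REQUEST_AVAILABLE_CAPABILITIES values.

final class CapabilityUtils {
    static boolean contains(int[] modes, int mode) {
        if (modes == null) {
            return false;
        }
        for (int m : modes) {
            if (m == mode) {
                return true;
            }
        }
        return false;
    }
}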

From source file:com.ape.camera2raw.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());
            Log.d("WAY", "largestRaw = " + largestRaw);
            //largestRaw = new Size(4208, 3120);//9051

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:com.quectel.camera2test.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            Log.d(TAG, "---characteristics = " + characteristics);
            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                Log.d(TAG, "-1--characteristics continue");
                continue;
            }

            Log.d(TAG, "-1--characteristics = " + characteristics);
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:net.ddns.mlsoftlaberge.trycorder.TryviscamFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                    /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        //ErrorDialog.newInstance(getString(R.string.camera_error))
        //        .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        e.printStackTrace();
    }
}

From source file:com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Request a single image.
 *
 * @return true if successful, false if there was an error submitting the
 *         capture request.
 */
private boolean sendSingleRequest(FocusCamera.PhotoCaptureParameters params) {
    Timber.v("sendSingleRequest()");
    try {
        CaptureRequest.Builder builder;
        builder = device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

        builder.addTarget(previewSurface);

        // Always add this surface for single image capture requests.
        builder.addTarget(captureImageReader.getSurface());

        builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

        addFlashToCaptureRequestBuilder(builder, params.flashMode);
        addRegionsToCaptureRequestBuilder(builder);

        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

        // Tag this as a special request which should be saved.
        builder.setTag(RequestTag.EXPLICIT_CAPTURE);

        if (CAPTURE_IMAGE_FORMAT == ImageFormat.JPEG) {
            builder.set(CaptureRequest.JPEG_QUALITY, (byte) (JPEG_QUALITY));
            builder.set(CaptureRequest.JPEG_ORIENTATION,
                    Utils.getJpegRotation(params.orientation, characteristics));
        }

        captureSession.capture(builder.build(), captureManager, cameraHandler);
        return true;
    } catch (CameraAccessException e) {
        Timber.w(e, "Could not execute single still capture request.");
        return false;
    }
}
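
The Utils.getJpegRotation call above (and CameraUtil.getJpegRotation in the later example) is not reproduced on this page. A hedged sketch of the usual computation (the JpegRotation class name is illustrative): combine the device orientation with the sensor orientation, flipping the sign for front-facing cameras, so that CaptureRequest.JPEG_ORIENTATION yields an upright JPEG.

import android.hardware.camera2.CameraCharacteristics;
import android.view.OrientationEventListener;

final class JpegRotation {
    static int getJpegRotation(int deviceOrientationDegrees, CameraCharacteristics characteristics) {
        if (deviceOrientationDegrees == OrientationEventListener.ORIENTATION_UNKNOWN) {
            return 0; // Orientation not known; leave the JPEG unrotated.
        }
        Integer sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        int sensor = sensorOrientation == null ? 0 : sensorOrientation;
        boolean frontFacing = facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT;
        if (frontFacing) {
            return (sensor - deviceOrientationDegrees + 360) % 360;
        }
        return (sensor + deviceOrientationDegrees) % 360;
    }
}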

From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java

/**
 * PREVIEW STUFF
 */

@Override
public void startPreview() {

    picSize = appSettingsManager.pictureSize.get();
    Log.d(TAG, "Start Preview");
    largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.JPEG)),
            new CompareSizesByArea());
    picFormat = appSettingsManager.pictureFormat.get();
    if (picFormat.equals("")) {
        picFormat = KEYS.JPEG;
        appSettingsManager.pictureFormat.set(KEYS.JPEG);
        parameterHandler.PictureFormat.onValueHasChanged(KEYS.JPEG);

    }

    if (picFormat.equals(KEYS.JPEG)) {
        String[] split = picSize.split("x");
        int width, height;
        if (split.length < 2) {
            mImageWidth = largestImageSize.getWidth();
            mImageHeight = largestImageSize.getHeight();
        } else {
            mImageWidth = Integer.parseInt(split[0]);
            mImageHeight = Integer.parseInt(split[1]);
        }
        //create new ImageReader with the size and format for the image
        Log.d(TAG, "ImageReader JPEG");
    } else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR)) {
        Log.d(TAG, "ImageReader RAW_SENSOR");
        largestImageSize = Collections.max(
                Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    } else if (picFormat.equals(CameraHolderApi2.RAW10)) {
        Log.d(TAG, "ImageReader RAW10");
        largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW10)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    }

    int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation() + sensorOrientation)
            % 360;
    if (appSettingsManager.getApiString(AppSettingsManager.SETTING_OrientationHack).equals(KEYS.ON))
        orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation() + sensorOrientation
                + 180) % 360;
    cameraHolder.SetParameter(CaptureRequest.JPEG_ORIENTATION, orientationToSet);

    // Here, we create a CameraCaptureSession for camera preview
    if (parameterHandler.Burst == null)
        SetBurst(1);
    else
        SetBurst(parameterHandler.Burst.GetValue());

}
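
The ImageReader creation that the "create new ImageReader" comment refers to happens elsewhere in PictureModuleApi2 and is not reproduced here. A hedged sketch of that step (the PictureReaderSetup class and method names are hypothetical):

import android.graphics.ImageFormat;
import android.media.ImageReader;
import android.os.Handler;

final class PictureReaderSetup {
    // Create a still-capture reader for either JPEG or RAW_SENSOR output and
    // deliver frames to the listener on the background handler.
    static ImageReader createStillReader(int width, int height, boolean rawSensor,
                                         ImageReader.OnImageAvailableListener listener,
                                         Handler backgroundHandler) {
        int format = rawSensor ? ImageFormat.RAW_SENSOR : ImageFormat.JPEG;
        ImageReader reader = ImageReader.newInstance(width, height, format, /*maxImages*/ 2);
        reader.setOnImageAvailableListener(listener, backgroundHandler);
        return reader;
    }
}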

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Request a single image.
 *
 * @return true if successful, false if there was an error submitting the
 *         capture request.
 */
private boolean sendSingleRequest(OneCamera.PhotoCaptureParameters params) {
    Log.v(TAG, "sendSingleRequest()");
    try {
        CaptureRequest.Builder builder;
        builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

        builder.addTarget(mPreviewSurface);

        // Always add this surface for single image capture requests.
        builder.addTarget(mCaptureImageReader.getSurface());

        builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

        Flash flashMode = Flash.OFF;
        addFlashToCaptureRequestBuilder(builder, flashMode);
        addRegionsToCaptureRequestBuilder(builder);

        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

        // Tag this as a special request which should be saved.
        builder.setTag(RequestTag.EXPLICIT_CAPTURE);

        if (sCaptureImageFormat == ImageFormat.JPEG) {
            builder.set(CaptureRequest.JPEG_QUALITY, (byte) (JPEG_QUALITY));
            builder.set(CaptureRequest.JPEG_ORIENTATION,
                    CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
        }

        mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
        return true;
    } catch (CameraAccessException e) {
        Log.v(TAG, "Could not execute single still capture request.", e);
        return false;
    }
}