Example usage for android.hardware.camera2 CaptureResult get

List of usage examples for android.hardware.camera2 CaptureResult get

Introduction

On this page you can find example usages of android.hardware.camera2 CaptureResult.get.

Prototype

@Nullable
public <T> T get(Key<T> key) 

Source Link

Document

Get a capture result field value.

Usage

From source file:com.example.camera2apidemo.Camera2Fragment.java

/**
 * Triggers the auto-exposure precapture metering sequence and installs a
 * capture callback that advances {@code mState} to
 * {@code STATE_WAITING_NON_PRECAPTURE} once metering starts, then fires
 * {@link #captureStillPicture()} when the sequence completes.
 */
private void runPrecaptureSequence() {
    try {
        // Ask the camera to run the AE precapture metering sequence.
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);

        CameraCaptureSession.CaptureCallback mPrecaptureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
                Integer aeState = partialResult.get(CaptureResult.CONTROL_AE_STATE);
                // CONTROL_AE_STATE may be absent (null) on some devices; guard
                // before comparing, or the Integer auto-unboxing throws NPE.
                if (aeState != null
                        && (aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
                                || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)) {
                    mState = STATE_WAITING_NON_PRECAPTURE;
                }
            }

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                if (mState == STATE_WAITING_NON_PRECAPTURE) {
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    // Treat a missing AE state as "precapture finished" so the
                    // still capture is not blocked on devices without AE state.
                    if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                        mState = STATE_PICTURE_TAKEN;
                        captureStillPicture();
                    }
                }
            }
        };
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mPrecaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Registers a listener that is notified of lens focus-distance changes.
 * The underlying metadata listener is installed only once (on the first
 * non-null registration); subsequent calls just swap the target listener.
 *
 * @param focusDistanceListener receiver of focus-distance updates; may be
 *            null to stop forwarding without unregistering the metadata hook.
 */
@Override
public void setFocusDistanceListener(FocusDistanceListener focusDistanceListener) {
    if (mFocusDistanceListener == null) {
        mCaptureManager.addMetadataChangeListener(CaptureResult.LENS_FOCUS_DISTANCE,
                new ImageCaptureManager.MetadataChangeListener() {
                    @Override
                    public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                            CaptureResult result) {
                        // Snapshot the field: it can be reset to null from
                        // another thread while this callback runs (same pattern
                        // as the CONTROL_AF_STATE listener in this class).
                        FocusDistanceListener listener = mFocusDistanceListener;
                        if (listener == null) {
                            return;
                        }
                        Integer state = result.get(CaptureResult.LENS_STATE);

                        // Forward changes if we have a new value and the camera
                        // A) Doesn't support lens state or B) lens state is
                        // reported and it is reported as moving.
                        if (newValue != null && (state == null || state == CameraMetadata.LENS_STATE_MOVING)) {
                            listener.onFocusDistance((float) newValue, mLensRange);
                        }
                    }
                });
    }
    mFocusDistanceListener = focusDistanceListener;
}

From source file:com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Instantiates a new camera based on the Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken; may be null,
 *            in which case a default size is chosen below.
 */
ZslFocusCamera(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Timber.v("Creating new ZslFocusCamera");

    this.device = device;
    this.characteristics = characteristics;
    fullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    // Dedicated thread for camera operations; must be started before
    // getLooper() below can return a usable looper.
    cameraThread = new HandlerThread("FocusCamera");
    // If this thread stalls, it will delay viewfinder frames.
    cameraThread.setPriority(Thread.MAX_PRIORITY);
    cameraThread.start();
    cameraHandler = new Handler(cameraThread.getLooper());

    // Separate thread for capture/metadata listeners so callbacks cannot
    // block the high-priority camera thread above.
    cameraListenerThread = new HandlerThread("FocusCamera-Listener");
    cameraListenerThread.start();
    cameraListenerHandler = new Handler(cameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = Utils.getNumCpuCores();
    // Fixed-size pool (one worker per core) used to save captured images
    // off the listener thread.
    imageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    captureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, cameraListenerHandler, imageSaverThreadPool);
    // Propagate the capture manager's readiness into the ready-state machine.
    captureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            readyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // focusStateListener.
    captureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    // NOTE(review): focusStateListener could be cleared by
                    // another thread between this check and the call below;
                    // snapshotting it into a local would be safer — confirm
                    // the threading model before relying on this.
                    if (focusStateListener == null) {
                        return;
                    }
                    focusStateListener.onFocusStatusUpdate(
                            AutoFocusHelper.stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE)),
                            result.getFrameNumber());
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    captureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            CAPTURE_IMAGE_FORMAT, MAX_CAPTURE_IMAGES);

    // The capture manager consumes images as they arrive on the camera thread.
    captureImageReader.setOnImageAvailableListener(captureManager, cameraHandler);
    mediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Instantiates a new camera based on the Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken; may be null,
 *            in which case a default size is chosen below.
 */
OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Log.v(TAG, "Creating new OneCameraZslImpl");

    mDevice = device;
    mCharacteristics = characteristics;
    mLensRange = LensRangeCalculator.getDiopterToRatioCalculator(characteristics);
    mDirection = new CameraDirectionProvider(mCharacteristics);
    mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    // Dedicated thread for camera operations; must be started before
    // getLooper() below can return a usable looper.
    mCameraThread = new HandlerThread("OneCamera2");
    // If this thread stalls, it will delay viewfinder frames.
    mCameraThread.setPriority(Thread.MAX_PRIORITY);
    mCameraThread.start();
    mCameraHandler = new Handler(mCameraThread.getLooper());

    // Separate thread for capture/metadata listeners so callbacks cannot
    // block the high-priority camera thread above.
    mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
    mCameraListenerThread.start();
    mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = CameraUtil.getNumCpuCores();
    // Fixed-size pool (one worker per core) used to save captured images
    // off the listener thread.
    mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
            mImageSaverThreadPool);
    // Propagate the capture manager's readiness into the ready-state machine.
    mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // mFocusStateListener.
    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    // Snapshot the field so a concurrent reset to null between
                    // the check and the call cannot cause an NPE.
                    FocusStateListener listener = mFocusStateListener;
                    if (listener != null) {
                        listener.onFocusStatusUpdate(AutoFocusHelper.stateFromCamera2State(
                                result.get(CaptureResult.CONTROL_AF_STATE)), result.getFrameNumber());
                    }
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            sCaptureImageFormat, MAX_CAPTURE_IMAGES);

    // The capture manager consumes images as they arrive on the camera thread.
    mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
    mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}

From source file:com.almalence.opencam.PluginManagerBase.java

/**
 * Copies EXIF-related metadata (exposure time, ISO sensitivity, aperture,
 * focal length, flash mode and white-balance mode) from a Camera2
 * {@link CaptureResult} into shared memory, keyed by the capture session id.
 *
 * @param result the capture result to read metadata from.
 * @param SessionID id of the capture session the tags belong to.
 * @param num frame index within the session; pass -1 when the exposure-time
 *            entry should not be indexed by frame.
 * @return always {@code true}.
 */
@TargetApi(21)
public boolean addToSharedMemExifTagsFromCaptureResult(final CaptureResult result, final long SessionID,
        final int num) {
    // Each value is the field rendered as a String, or null when the device
    // did not report that field.
    String exposure_time = captureResultFieldToString(result, CaptureResult.SENSOR_EXPOSURE_TIME);
    String sensitivity = captureResultFieldToString(result, CaptureResult.SENSOR_SENSITIVITY);
    String aperture = captureResultFieldToString(result, CaptureResult.LENS_APERTURE);
    String focal_lenght = captureResultFieldToString(result, CaptureResult.LENS_FOCAL_LENGTH);
    String flash_mode = captureResultFieldToString(result, CaptureResult.FLASH_MODE);
    String awb_mode = captureResultFieldToString(result, CaptureResult.CONTROL_AWB_MODE);

    // Exposure time is stored per-frame when num != -1, per-session otherwise.
    if (num != -1 && exposure_time != null) {
        addToSharedMem("exiftag_exposure_time" + num + SessionID, exposure_time);
    } else if (exposure_time != null) {
        addToSharedMem("exiftag_exposure_time" + SessionID, exposure_time);
    }
    if (sensitivity != null) {
        addToSharedMem("exiftag_iso" + SessionID, sensitivity);
    }
    if (aperture != null) {
        addToSharedMem("exiftag_aperture" + SessionID, aperture);
    }
    if (focal_lenght != null) {
        // NOTE: the misspelled key "focal_lenght" is kept as-is; consumers of
        // the shared memory look the value up under this historical key.
        addToSharedMem("exiftag_focal_lenght" + SessionID, focal_lenght);
    }
    if (flash_mode != null) {
        addToSharedMem("exiftag_flash" + SessionID, flash_mode);
    }
    if (awb_mode != null) {
        addToSharedMem("exiftag_white_balance" + SessionID, awb_mode);
    }

    return true;
}

/**
 * Returns the value of {@code key} in {@code result} as a String, or null
 * when the field is absent. Replaces the previous
 * {@code String.valueOf(...)} + {@code equals("null")} idiom, whose
 * null-checks were dead code ({@code String.valueOf} never returns null)
 * and which could not distinguish an absent field from one whose textual
 * form happens to be the word "null".
 */
private static String captureResultFieldToString(CaptureResult result, CaptureResult.Key<?> key) {
    Object value = result.get(key);
    return value == null ? null : String.valueOf(value);
}