Example usage for android.hardware.camera2 CaptureResult CONTROL_AF_STATE

List of usage examples for android.hardware.camera2 CaptureResult CONTROL_AF_STATE

Introduction

On this page you can find example usages of android.hardware.camera2 CaptureResult CONTROL_AF_STATE.

Prototype

public static final CaptureResult.Key<Integer> CONTROL_AF_STATE

Document

Current state of auto-focus (AF) algorithm.

Switching between or enabling AF modes (CaptureRequest#CONTROL_AF_MODE, i.e. android.control.afMode) always resets the AF state to INACTIVE.
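
As a quick illustration of how this key is typically read, the sketch below checks the AF state inside a CameraCaptureSession.CaptureCallback. This is a minimal, hypothetical snippet (the callback variable and comments are not taken from the examples below); it only assumes a repeating preview request is already running.

CameraCaptureSession.CaptureCallback afStateCallback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
            TotalCaptureResult result) {
        // CONTROL_AF_STATE may be absent on devices that do not report AF state.
        Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
        if (afState == null) {
            return;
        }
        if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
            // AF has stopped scanning; it is now reasonable to capture a still image.
        }
    }
};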

Usage

From source file:com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Instantiates a new camera based on Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken.
 */
ZslFocusCamera(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Timber.v("Creating new ZslFocusCamera");

    this.device = device;
    this.characteristics = characteristics;
    fullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    cameraThread = new HandlerThread("FocusCamera");
    // If this thread stalls, it will delay viewfinder frames.
    cameraThread.setPriority(Thread.MAX_PRIORITY);
    cameraThread.start();
    cameraHandler = new Handler(cameraThread.getLooper());

    cameraListenerThread = new HandlerThread("FocusCamera-Listener");
    cameraListenerThread.start();
    cameraListenerHandler = new Handler(cameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = Utils.getNumCpuCores();
    imageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    captureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, cameraListenerHandler, imageSaverThreadPool);
    captureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            readyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // focusStateListener.
    captureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    if (focusStateListener == null) {
                        return;
                    }
                    focusStateListener.onFocusStatusUpdate(
                            AutoFocusHelper.stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE)),
                            result.getFrameNumber());
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    captureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            CAPTURE_IMAGE_FORMAT, MAX_CAPTURE_IMAGES);

    captureImageReader.setOnImageAvailableListener(captureManager, cameraHandler);
    mediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Instantiates a new camera based on Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken.
 */
OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Log.v(TAG, "Creating new OneCameraZslImpl");

    mDevice = device;
    mCharacteristics = characteristics;
    mLensRange = LensRangeCalculator.getDiopterToRatioCalculator(characteristics);
    mDirection = new CameraDirectionProvider(mCharacteristics);
    mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    mCameraThread = new HandlerThread("OneCamera2");
    // If this thread stalls, it will delay viewfinder frames.
    mCameraThread.setPriority(Thread.MAX_PRIORITY);
    mCameraThread.start();
    mCameraHandler = new Handler(mCameraThread.getLooper());

    mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
    mCameraListenerThread.start();
    mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = CameraUtil.getNumCpuCores();
    mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
            mImageSaverThreadPool);
    mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // mFocusStateListener.
    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    FocusStateListener listener = mFocusStateListener;
                    if (listener != null) {
                        listener.onFocusStatusUpdate(AutoFocusHelper.stateFromCamera2State(
                                result.get(CaptureResult.CONTROL_AF_STATE)), result.getFrameNumber());
                    }
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            sCaptureImageFormat, MAX_CAPTURE_IMAGES);

    mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
    mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}

From source file:com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Take a picture.
 */
@Override
public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
    params.checkSanity();

    readyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);

    boolean useZSL = ZSL_ENABLED;

    // We will only capture images from the zsl ring-buffer which satisfy
    // this constraint.
    ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints = new ArrayList<>();
    zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
        @Override
        public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
            Long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
            Integer lensState = captureResult.get(CaptureResult.LENS_STATE);
            Integer flashState = captureResult.get(CaptureResult.FLASH_STATE);
            Integer flashMode = captureResult.get(CaptureResult.FLASH_MODE);
            Integer aeState = captureResult.get(CaptureResult.CONTROL_AE_STATE);
            Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
            Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);

            if (timestamp <= lastCapturedImageTimestamp.get()) {
                // Don't save frames older than the most
                // recently-captured frame.
                // TODO This technically has a race condition in which
                // duplicate frames may be saved, but if a user is
                // tapping at >30Hz, duplicate images may be what they
                // expect.
                return false;
            }

            if (lensState == CaptureResult.LENS_STATE_MOVING) {
                // If we know the lens was moving, don't use this image.
                return false;
            }

            if (aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING
                    || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                return false;
            }
            switch (params.flashMode) {
            case OFF:
                break;
            case ON:
                if (flashState != CaptureResult.FLASH_STATE_FIRED
                        || flashMode != CaptureResult.FLASH_MODE_SINGLE) {
                    return false;
                }
                break;
            case AUTO:
                if (aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
                        && flashState != CaptureResult.FLASH_STATE_FIRED) {
                    return false;
                }
                break;
            }

            if (afState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
                    || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
                return false;
            }

            if (awbState == CaptureResult.CONTROL_AWB_STATE_SEARCHING) {
                return false;
            }

            return true;
        }
    });
    // This constraint lets us capture images which have been explicitly
    // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
    ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint = new ArrayList<>();
    singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
        @Override
        public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
            Object tag = captureResult.getRequest().getTag();
            return tag == RequestTag.EXPLICIT_CAPTURE;
        }
    });

    // If we can use ZSL, try to save a previously-captured frame, if an
    // acceptable one exists in the buffer.
    if (useZSL) {
        boolean capturedPreviousFrame = captureManager
                .tryCaptureExistingImage(new ImageCaptureTask(params, session), zslConstraints);
        if (capturedPreviousFrame) {
            Timber.v("Saving previous frame");
            onShutterInvokeUI(params);
        } else {
            Timber.v("No good image Available.  Capturing next available good image.");
            // If there was no good frame available in the ring buffer
            // already, capture the next good image.
            // TODO Disable the shutter button until this image is captured.

            if (params.flashMode == Flash.ON || params.flashMode == Flash.AUTO) {
                // We must issue a request for a single capture using the
                // flash, including an AE precapture trigger.

                // The following sets up a sequence of events which will
                // occur in reverse order to the associated method
                // calls:
                // 1. Send a request to trigger the Auto Exposure Precapture
                // 2. Wait for the AE_STATE to leave the PRECAPTURE state,
                // and then send a request for a single image, with the
                // appropriate flash settings.
                // 3. Capture the next appropriate image, which should be
                // the one we requested in (2).

                captureManager.captureNextImage(new ImageCaptureTask(params, session), singleCaptureConstraint);

                captureManager.addMetadataChangeListener(CaptureResult.CONTROL_AE_STATE,
                        new ImageCaptureManager.MetadataChangeListener() {
                            @Override
                            public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                                    CaptureResult result) {
                                Timber.v("AE State Changed");
                                if (oldValue
                                        .equals(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                    captureManager.removeMetadataChangeListener(key, this);
                                    sendSingleRequest(params);
                                    // TODO: Delay this until onCaptureStarted().
                                    onShutterInvokeUI(params);
                                }
                            }
                        });

                sendAutoExposureTriggerRequest(params.flashMode);
            } else {
                // We may get here if, for example, the auto focus is in the
                // middle of a scan.
                // If the flash is off, we should just wait for the next
                // image that arrives. This will have minimal delay since we
                // do not need to send a new capture request.
                captureManager.captureNextImage(new ImageCaptureTask(params, session), zslConstraints);
            }
        }
    } else {
        // TODO If we can't save a previous frame, create a new capture
        // request to do what we need (e.g. flash) and call
        // captureNextImage().
        throw new UnsupportedOperationException("Non-ZSL capture not yet supported");
    }
}

From source file:com.example.camera2apidemo.Camera2Fragment.java

private void lockFocus() {
    try {
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);

        CameraCaptureSession.CaptureCallback mLockFocusCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {

                Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                if (afState == null) {
                    // Some devices never report an AF state; capture directly.
                    captureStillPicture();
                } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState
                        || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                    // Avoid unboxing a null CONTROL_AE_STATE on devices that omit it.
                    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                    if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                        captureStillPicture();
                    } else {
                        runPrecaptureSequence();
                    }
                }
            }
        };

        mState = STATE_WAITING_LOCK;
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mLockFocusCallback, mBackgroundHandler);

    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
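
The runPrecaptureSequence() and captureStillPicture() methods called above are not shown in this example. For context, a Camera2Basic-style precapture step usually looks roughly like the following sketch; mState, STATE_WAITING_PRECAPTURE and mCaptureCallback are assumed names chosen to match the example's conventions, not code from the source file.

private void runPrecaptureSequence() {
    try {
        // Ask the camera to run the auto-exposure precapture metering sequence.
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
        // The capture callback then waits for CONTROL_AE_STATE to leave the
        // PRECAPTURE state before issuing the still capture.
        mState = STATE_WAITING_PRECAPTURE;
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}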

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Take a picture.
 */
@Override
public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
    mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);

    boolean useZSL = ZSL_ENABLED;

    // We will only capture images from the zsl ring-buffer which satisfy
    // this constraint.
    ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints = new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
    zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
        @Override
        public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
            Long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
            Integer lensState = captureResult.get(CaptureResult.LENS_STATE);
            Integer flashState = captureResult.get(CaptureResult.FLASH_STATE);
            Integer flashMode = captureResult.get(CaptureResult.FLASH_MODE);
            Integer aeState = captureResult.get(CaptureResult.CONTROL_AE_STATE);
            Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
            Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);

            if (lensState == null) {
                lensState = CaptureResult.LENS_STATE_STATIONARY;
            }
            if (flashState == null) {
                flashState = CaptureResult.FLASH_STATE_UNAVAILABLE;
            }
            if (flashMode == null) {
                flashMode = CaptureResult.FLASH_MODE_OFF;
            }
            if (aeState == null) {
                aeState = CaptureResult.CONTROL_AE_STATE_INACTIVE;
            }
            if (afState == null) {
                afState = CaptureResult.CONTROL_AF_STATE_INACTIVE;
            }
            if (awbState == null) {
                awbState = CaptureResult.CONTROL_AWB_STATE_INACTIVE;
            }

            synchronized (mCapturedImageTimestamps) {
                if (mCapturedImageTimestamps.contains(timestamp)) {
                    // Don't save frames which we've already saved.
                    return false;
                }
            }

            if (lensState == CaptureResult.LENS_STATE_MOVING) {
                // If we know the lens was moving, don't use this image.
                return false;
            }

            if (aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING
                    || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                return false;
            }

            if (afState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
                    || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
                return false;
            }

            if (awbState == CaptureResult.CONTROL_AWB_STATE_SEARCHING) {
                return false;
            }

            return true;
        }
    });
    // This constraint lets us capture images which have been explicitly
    // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
    ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint = new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
    singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
        @Override
        public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
            Object tag = captureResult.getRequest().getTag();
            return tag == RequestTag.EXPLICIT_CAPTURE;
        }
    });

    // If we can use ZSL, try to save a previously-captured frame, if an
    // acceptable one exists in the buffer.
    if (useZSL) {
        boolean capturedPreviousFrame = mCaptureManager
                .tryCaptureExistingImage(new ImageCaptureTask(params, session), zslConstraints);
        if (capturedPreviousFrame) {
            Log.v(TAG, "Saving previous frame");
            onShutterInvokeUI(params);
        } else {
            Log.v(TAG, "No good image Available.  Capturing next available good image.");
            // If there was no good frame available in the ring buffer
            // already, capture the next good image.
            // TODO Disable the shutter button until this image is captured.

            Flash flashMode = Flash.OFF;

            if (flashMode == Flash.ON || flashMode == Flash.AUTO) {
                // We must issue a request for a single capture using the
                // flash, including an AE precapture trigger.

                // The following sets up a sequence of events which will
                // occur in reverse order to the associated method
                // calls:
                // 1. Send a request to trigger the Auto Exposure Precapture
                // 2. Wait for the AE_STATE to leave the PRECAPTURE state,
                // and then send a request for a single image, with the
                // appropriate flash settings.
                // 3. Capture the next appropriate image, which should be
                // the one we requested in (2).

                mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                        singleCaptureConstraint);

                mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AE_STATE,
                        new MetadataChangeListener() {
                            @Override
                            public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                                    CaptureResult result) {
                                Log.v(TAG, "AE State Changed");
                                if (oldValue
                                        .equals(Integer.valueOf(CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                    mCaptureManager.removeMetadataChangeListener(key, this);
                                    sendSingleRequest(params);
                                    // TODO: Delay this until
                                    // onCaptureStarted().
                                    onShutterInvokeUI(params);
                                }
                            }
                        });

                sendAutoExposureTriggerRequest(flashMode);
            } else {
                // We may get here if, for example, the auto focus is in the
                // middle of a scan.
                // If the flash is off, we should just wait for the next
                // image that arrives. This will have minimal delay since we
                // do not need to send a new capture request.
                mCaptureManager.captureNextImage(new ImageCaptureTask(params, session), zslConstraints);
            }
        }
    } else {
        // TODO If we can't save a previous frame, create a new capture
        // request to do what we need (e.g. flash) and call
        // captureNextImage().
        throw new UnsupportedOperationException("Non-ZSL capture not yet supported");
    }
}
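
Note the main difference between the two takePicture() variants above: the second one substitutes default values (for example CONTROL_AF_STATE_INACTIVE) whenever a key is missing from the result, so the later comparisons never unbox a null Integer. A small hypothetical helper capturing that pattern for the AF state key:

private static int afStateOrInactive(TotalCaptureResult result) {
    // Treat a missing android.control.afState value as INACTIVE.
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    return afState != null ? afState : CaptureResult.CONTROL_AF_STATE_INACTIVE;
}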