Example usage for the android.hardware.camera2.params.MeteringRectangle constructor

Introduction

On this page you can find example usages of the android.hardware.camera2.params.MeteringRectangle constructor, collected from open-source projects.

Prototype

public MeteringRectangle(int x, int y, int width, int height, int meteringWeight) 

Document

Create a new metering rectangle.
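
The constructor takes the top-left corner (x, y) and the size (width, height) of the region, both in the coordinate system of the sensor's active pixel array, plus a weight between MeteringRectangle.METERING_WEIGHT_MIN and MeteringRectangle.METERING_WEIGHT_MAX. A minimal sketch of typical use (not taken from the examples below; the coordinates and previewBuilder are placeholders):

// Sketch only: build one AF metering region and attach it to a preview request.
// previewBuilder is assumed to be a CaptureRequest.Builder for the preview session;
// real code would derive the coordinates from
// CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE.
MeteringRectangle afRegion = new MeteringRectangle(
        100, 100,                               // x, y: top-left corner in sensor coordinates
        400, 400,                               // width and height in pixels
        MeteringRectangle.METERING_WEIGHT_MAX); // relative weight of this region
previewBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] { afRegion });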

Usage

From source file: com.android.camera2.its.ItsUtils.java

public static MeteringRectangle[] getJsonWeightedRectsFromArray(JSONArray a, boolean normalized, int width,
        int height) throws ItsException {
    try {
        // Returns [x0,y0,x1,y1,wgt,  x0,y0,x1,y1,wgt,  x0,y0,x1,y1,wgt,  ...]
        assert (a.length() % 5 == 0);
        MeteringRectangle[] ma = new MeteringRectangle[a.length() / 5];
        for (int i = 0; i < a.length(); i += 5) {
            int x, y, w, h;
            if (normalized) {
                x = (int) Math.floor(a.getDouble(i + 0) * width + 0.5f);
                y = (int) Math.floor(a.getDouble(i + 1) * height + 0.5f);
                w = (int) Math.floor(a.getDouble(i + 2) * width + 0.5f);
                h = (int) Math.floor(a.getDouble(i + 3) * height + 0.5f);
            } else {
                x = a.getInt(i + 0);
                y = a.getInt(i + 1);
                w = a.getInt(i + 2);
                h = a.getInt(i + 3);
            }
            x = Math.max(x, 0);
            y = Math.max(y, 0);
            w = Math.min(w, width - x);
            h = Math.min(h, height - y);
            int wgt = a.getInt(i + 4);
            ma[i / 5] = new MeteringRectangle(x, y, w, h, wgt);
        }
        return ma;
    } catch (org.json.JSONException e) {
        throw new ItsException("JSON error: ", e);
    }
}
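
A hypothetical call to this helper (the array literal and the 4000x3000 active-array size are illustrative only): each rectangle is five consecutive values, [x, y, w, h, weight], here in normalized coordinates.

// Hypothetical usage of getJsonWeightedRectsFromArray; assumes a caller that can
// propagate JSONException and ItsException.
JSONArray a = new JSONArray("[0.0, 0.0, 0.5, 0.5, 1,  0.5, 0.5, 0.5, 0.5, 2]");
MeteringRectangle[] regions = ItsUtils.getJsonWeightedRectsFromArray(a, true, 4000, 3000);
// regions[0]: origin (0, 0), size 2000x1500, weight 1
// regions[1]: origin (2000, 1500), size 2000x1500, weight 2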

From source file: com.dastanapps.camera2.view.Cam2AutoFitTextureView.java

@Nullable
private Boolean touchTofocus2(MotionEvent event) {
    MotionEvent motionEvent = event;
    final int actionMasked = motionEvent.getActionMasked();
    if (actionMasked != MotionEvent.ACTION_DOWN) {
        return false;
    }
    if (mManualFocusEngaged) {
        Log.d(TAG, "Manual focus already engaged");
        return true;
    }

    final Rect sensorArraySize = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    //TODO: here I just flip x,y, but this needs to correspond with the sensor orientation (via SENSOR_ORIENTATION)
    final int y = (int) ((motionEvent.getX() / (float) getWidth()) * (float) sensorArraySize.height());
    final int x = (int) ((motionEvent.getY() / (float) getHeight()) * (float) sensorArraySize.width());
    final int halfTouchWidth = 150; //(int)motionEvent.getTouchMajor(); //TODO: this doesn't represent actual touch size in pixel. Values range in [3, 10]...
    final int halfTouchHeight = 150; //(int)motionEvent.getTouchMinor();
    MeteringRectangle focusAreaTouch = new MeteringRectangle(Math.max(x - halfTouchWidth, 0),
            Math.max(y - halfTouchHeight, 0), halfTouchWidth * 2, halfTouchHeight * 2,
            MeteringRectangle.METERING_WEIGHT_MAX - 1);

    CameraCaptureSession.CaptureCallback captureCallbackHandler = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            super.onCaptureCompleted(session, request, result);
            mManualFocusEngaged = false;

            if ("FOCUS_TAG".equals(request.getTag())) {
                //the focus trigger is complete -
                //resume repeating (preview surface will get frames), clear AF trigger
                mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, null);
                try {
                    mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, null);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            super.onCaptureFailed(session, request, failure);
            Log.e(TAG, "Manual AF failure: " + failure);
            mManualFocusEngaged = false;
        }
    };

    //first stop the existing repeating request
    try {
        mPreviewSession.stopRepeating();
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    //cancel any existing AF trigger (repeated touches, etc.)
    mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
    mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
    try {
        mPreviewSession.capture(mPreviewBuilder.build(), captureCallbackHandler, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    //Now add a new AF trigger with focus region
    if (isMeteringAreaAFSupported()) {
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] { focusAreaTouch });
    }
    mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
    mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
    mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
    mPreviewBuilder.setTag("FOCUS_TAG"); //we'll capture this later for resuming the preview

    //            //then we ask for a single request (not repeating!)
    //            mPreviewSession.capture(mPreviewBuilder.build(), captureCallbackHandler, mBackgroundHandler);
    return null;
}
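
The isMeteringAreaAFSupported() helper is not part of this excerpt; a plausible implementation (an assumption, not the original author's code) checks that the device advertises at least one AF metering region:

// Assumed helper: CONTROL_MAX_REGIONS_AF reports how many AF metering regions the
// device supports; CONTROL_AF_REGIONS is only honored when this is at least 1.
private boolean isMeteringAreaAFSupported() {
    Integer maxAfRegions = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
    return maxAfRegions != null && maxAfRegions >= 1;
}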

From source file: com.android.camera2.its.ItsSerializer.java

@SuppressWarnings("unchecked")
public static CaptureRequest.Builder deserialize(CaptureRequest.Builder mdDefault, JSONObject jsonReq)
        throws ItsException {
    try {
        Logt.i(TAG, "Parsing JSON capture request ...");

        // Iterate over the CaptureRequest reflected fields.
        CaptureRequest.Builder md = mdDefault;
        Field[] allFields = CaptureRequest.class.getDeclaredFields();
        for (Field field : allFields) {
            if (Modifier.isPublic(field.getModifiers()) && Modifier.isStatic(field.getModifiers())
                    && field.getType() == CaptureRequest.Key.class
                    && field.getGenericType() instanceof ParameterizedType) {
                ParameterizedType paramType = (ParameterizedType) field.getGenericType();
                Type[] argTypes = paramType.getActualTypeArguments();
                if (argTypes.length > 0) {
                    CaptureRequest.Key key = (CaptureRequest.Key) field.get(md);
                    String keyName = key.getName();
                    Type keyType = argTypes[0];

                    // For each reflected CaptureRequest entry, look inside the JSON object
                    // to see if it is being set. If it is found, remove the key from the
                    // JSON object. After this process, there should be no keys left in the
                    // JSON (otherwise an invalid key was specified).

                    if (jsonReq.has(keyName) && !jsonReq.isNull(keyName)) {
                        if (keyType instanceof GenericArrayType) {
                            Type elmtType = ((GenericArrayType) keyType).getGenericComponentType();
                            JSONArray ja = jsonReq.getJSONArray(keyName);
                            Object val[] = new Object[ja.length()];
                            for (int i = 0; i < ja.length(); i++) {
                                if (elmtType == int.class) {
                                    Array.set(val, i, ja.getInt(i));
                                } else if (elmtType == byte.class) {
                                    Array.set(val, i, (byte) ja.getInt(i));
                                } else if (elmtType == float.class) {
                                    Array.set(val, i, (float) ja.getDouble(i));
                                } else if (elmtType == long.class) {
                                    Array.set(val, i, ja.getLong(i));
                                } else if (elmtType == double.class) {
                                    Array.set(val, i, ja.getDouble(i));
                                } else if (elmtType == boolean.class) {
                                    Array.set(val, i, ja.getBoolean(i));
                                } else if (elmtType == String.class) {
                                    Array.set(val, i, ja.getString(i));
                                } else if (elmtType == Size.class) {
                                    JSONObject obj = ja.getJSONObject(i);
                                    Array.set(val, i, new Size(obj.getInt("width"), obj.getInt("height")));
                                } else if (elmtType == Rect.class) {
                                    JSONObject obj = ja.getJSONObject(i);
                                    Array.set(val, i, new Rect(obj.getInt("left"), obj.getInt("top"),
                                            obj.getInt("bottom"), obj.getInt("right")));
                                } else if (elmtType == Rational.class) {
                                    JSONObject obj = ja.getJSONObject(i);
                                    Array.set(val, i,
                                            new Rational(obj.getInt("numerator"), obj.getInt("denominator")));
                                } else if (elmtType == RggbChannelVector.class) {
                                    JSONArray arr = ja.getJSONArray(i);
                                    Array.set(val, i,
                                            new RggbChannelVector((float) arr.getDouble(0),
                                                    (float) arr.getDouble(1), (float) arr.getDouble(2),
                                                    (float) arr.getDouble(3)));
                                } else if (elmtType == ColorSpaceTransform.class) {
                                    JSONArray arr = ja.getJSONArray(i);
                                    Rational xform[] = new Rational[9];
                                    for (int j = 0; j < 9; j++) {
                                        xform[j] = new Rational(arr.getJSONObject(j).getInt("numerator"),
                                                arr.getJSONObject(j).getInt("denominator"));
                                    }
                                    Array.set(val, i, new ColorSpaceTransform(xform));
                                } else if (elmtType == MeteringRectangle.class) {
                                    JSONObject obj = ja.getJSONObject(i);
                                    Array.set(val, i, new MeteringRectangle(obj.getInt("x"), obj.getInt("y"),
                                            obj.getInt("width"), obj.getInt("height"), obj.getInt("weight")));
                                } else {
                                    throw new ItsException("Failed to parse key from JSON: " + keyName);
                                }
                            }
                            if (val != null) {
                                Logt.i(TAG, "Set: " + keyName + " -> " + Arrays.toString(val));
                                md.set(key, val);
                                jsonReq.remove(keyName);
                            }
                        } else {
                            Object val = null;
                            if (keyType == Integer.class) {
                                val = jsonReq.getInt(keyName);
                            } else if (keyType == Byte.class) {
                                val = (byte) jsonReq.getInt(keyName);
                            } else if (keyType == Double.class) {
                                val = jsonReq.getDouble(keyName);
                            } else if (keyType == Long.class) {
                                val = jsonReq.getLong(keyName);
                            } else if (keyType == Float.class) {
                                val = (float) jsonReq.getDouble(keyName);
                            } else if (keyType == Boolean.class) {
                                val = jsonReq.getBoolean(keyName);
                            } else if (keyType == String.class) {
                                val = jsonReq.getString(keyName);
                            } else if (keyType == Size.class) {
                                JSONObject obj = jsonReq.getJSONObject(keyName);
                                val = new Size(obj.getInt("width"), obj.getInt("height"));
                            } else if (keyType == Rect.class) {
                                JSONObject obj = jsonReq.getJSONObject(keyName);
                                val = new Rect(obj.getInt("left"), obj.getInt("top"), obj.getInt("right"),
                                        obj.getInt("bottom"));
                            } else if (keyType == Rational.class) {
                                JSONObject obj = jsonReq.getJSONObject(keyName);
                                val = new Rational(obj.getInt("numerator"), obj.getInt("denominator"));
                            } else if (keyType == RggbChannelVector.class) {
                                JSONObject obj = jsonReq.optJSONObject(keyName);
                                JSONArray arr = jsonReq.optJSONArray(keyName);
                                if (arr != null) {
                                    val = new RggbChannelVector((float) arr.getDouble(0),
                                            (float) arr.getDouble(1), (float) arr.getDouble(2),
                                            (float) arr.getDouble(3));
                                } else if (obj != null) {
                                    val = new RggbChannelVector((float) obj.getDouble("red"),
                                            (float) obj.getDouble("greenEven"),
                                            (float) obj.getDouble("greenOdd"), (float) obj.getDouble("blue"));
                                } else {
                                    throw new ItsException("Invalid RggbChannelVector object");
                                }
                            } else if (keyType == ColorSpaceTransform.class) {
                                JSONArray arr = jsonReq.getJSONArray(keyName);
                                Rational a[] = new Rational[9];
                                for (int i = 0; i < 9; i++) {
                                    a[i] = new Rational(arr.getJSONObject(i).getInt("numerator"),
                                            arr.getJSONObject(i).getInt("denominator"));
                                }
                                val = new ColorSpaceTransform(a);
                            } else if (keyType instanceof ParameterizedType
                                    && ((ParameterizedType) keyType).getRawType() == Range.class
                                    && ((ParameterizedType) keyType).getActualTypeArguments().length == 1
                                    && ((ParameterizedType) keyType)
                                            .getActualTypeArguments()[0] == Integer.class) {
                                JSONArray arr = jsonReq.getJSONArray(keyName);
                                val = new Range<Integer>(arr.getInt(0), arr.getInt(1));
                            } else {
                                throw new ItsException(
                                        "Failed to parse key from JSON: " + keyName + ", " + keyType);
                            }
                            if (val != null) {
                                Logt.i(TAG, "Set: " + keyName + " -> " + val);
                                md.set(key, val);
                                jsonReq.remove(keyName);
                            }
                        }
                    }
                }
            }
        }

        // Ensure that there were no invalid keys in the JSON request object.
        if (jsonReq.length() != 0) {
            throw new ItsException("Invalid JSON key(s): " + jsonReq.toString());
        }

        Logt.i(TAG, "Parsing JSON capture request completed");
        return md;
    } catch (java.lang.IllegalAccessException e) {
        throw new ItsException("Access error: ", e);
    } catch (org.json.JSONException e) {
        throw new ItsException("JSON error: ", e);
    }
}
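
In the MeteringRectangle branch above, each element of the JSON array is an object with x, y, width, height and weight fields. An illustrative request fragment (the key name and values are examples, not from the ITS sources; defaultBuilder is a placeholder):

// Illustrative JSON for a MeteringRectangle[] key such as android.control.afRegions.
JSONObject jsonReq = new JSONObject(
        "{\"android.control.afRegions\": "
                + "[{\"x\": 0, \"y\": 0, \"width\": 640, \"height\": 480, \"weight\": 1}]}");
CaptureRequest.Builder req = ItsSerializer.deserialize(defaultBuilder, jsonReq);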

From source file: com.android.camera2.its.ItsService.java

private void do3A(JSONObject params) throws ItsException {
    try {
        // Start a 3A action, and wait for it to converge.
        // Get the converged values for each "A", and package into JSON result for caller.

        // 3A happens on full-res frames.
        Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
        int widths[] = new int[1];
        int heights[] = new int[1];
        int formats[] = new int[1];
        widths[0] = sizes[0].getWidth();
        heights[0] = sizes[0].getHeight();
        formats[0] = ImageFormat.YUV_420_888;
        int width = widths[0];
        int height = heights[0];

        prepareCaptureReader(widths, heights, formats, 1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(1);
        outputSurfaces.add(mCaptureReaders[0].getSurface());
        BlockingSessionCallback sessionListener = new BlockingSessionCallback();
        mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
        mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

        // Add a listener that just recycles buffers; they aren't saved anywhere.
        ImageReader.OnImageAvailableListener readerListener = createAvailableListenerDropper(mCaptureCallback);
        mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

        // Get the user-specified regions for AE, AWB, AF.
        // Note that the user specifies normalized [x,y,w,h], which is converted below
        // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
        // also has a fifth "weight" element: [x0,y0,x1,y1,w].
        MeteringRectangle[] regionAE = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        MeteringRectangle[] regionAF = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        MeteringRectangle[] regionAWB = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        if (params.has(REGION_KEY)) {
            JSONObject regions = params.getJSONObject(REGION_KEY);
            if (regions.has(REGION_AE_KEY)) {
                regionAE = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AE_KEY), true,
                        width, height);
            }
            if (regions.has(REGION_AF_KEY)) {
                regionAF = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AF_KEY), true,
                        width, height);
            }
            if (regions.has(REGION_AWB_KEY)) {
                regionAWB = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AWB_KEY), true,
                        width, height);
            }
        }

        // If AE or AWB lock is specified, then the 3A will converge first and then lock these
        // values, waiting until the HAL has reported that the lock was successful.
        mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
        mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);

        // By default, AE and AF both get triggered, but the user can optionally override this.
        // Also, AF won't get triggered if the lens is fixed-focus.
        boolean doAE = true;
        boolean doAF = true;
        if (params.has(TRIGGER_KEY)) {
            JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
            if (triggers.has(TRIGGER_AE_KEY)) {
                doAE = triggers.getBoolean(TRIGGER_AE_KEY);
            }
            if (triggers.has(TRIGGER_AF_KEY)) {
                doAF = triggers.getBoolean(TRIGGER_AF_KEY);
            }
        }
        if (doAF && mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) == 0) {
            // Send a dummy result back for the code that is waiting for this message to see
            // that AF has converged.
            Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
            mSocketRunnableObj.sendResponse("afResult", "0.0");
            doAF = false;
        }

        mInterlock3A.open();
        mIssuedRequest3A = false;
        mConvergedAE = false;
        mConvergedAWB = false;
        mConvergedAF = false;
        mLockedAE = false;
        mLockedAWB = false;
        long tstart = System.currentTimeMillis();
        boolean triggeredAE = false;
        boolean triggeredAF = false;

        Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", doAE ? 1 : 0,
                doAF ? 1 : 0, mNeedsLockedAE ? 1 : 0, mNeedsLockedAWB ? 1 : 0));

        // Keep issuing capture requests until 3A has converged.
        while (true) {

            // Block until can take the next 3A frame. Only want one outstanding frame
            // at a time, to simplify the logic here.
            if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                    || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                throw new ItsException("3A failed to converge (timeout)");
            }
            mInterlock3A.close();

            // If not converged yet, issue another capture request.
            if ((doAE && (!triggeredAE || !mConvergedAE)) || !mConvergedAWB
                    || (doAF && (!triggeredAF || !mConvergedAF)) || (doAE && mNeedsLockedAE && !mLockedAE)
                    || (mNeedsLockedAWB && !mLockedAWB)) {

                // Baseline capture request for 3A.
                CaptureRequest.Builder req = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                req.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                req.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                req.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                if (mConvergedAE && mNeedsLockedAE) {
                    req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                }
                if (mConvergedAWB && mNeedsLockedAWB) {
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                }

                // Trigger AE first.
                if (doAE && !triggeredAE) {
                    Logt.i(TAG, "Triggering AE");
                    req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                            CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                    triggeredAE = true;
                }

                // After AE has converged, trigger AF.
                if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                    Logt.i(TAG, "Triggering AF");
                    req.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
                    triggeredAF = true;
                }

                req.addTarget(mCaptureReaders[0].getSurface());

                mIssuedRequest3A = true;
                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            } else {
                mSocketRunnableObj.sendResponse("3aConverged", "");
                Logt.i(TAG, "3A converged");
                break;
            }
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    } catch (org.json.JSONException e) {
        throw new ItsException("JSON error: ", e);
    } finally {
        mSocketRunnableObj.sendResponse("3aDone", "");
    }
}
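
The mCaptureResultListener that drives the convergence flags is not included in this excerpt; a simplified sketch of what it might look like (an assumption about the omitted code, reusing the field names from the loop above):

// Assumed shape of the result listener: read the 3A state from each completed capture,
// update the flags the loop polls, then release the interlock so the next request can go out.
private final CameraCaptureSession.CaptureCallback mCaptureResultListener =
        new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
                mConvergedAE = aeState != null
                        && (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                                || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
                                || aeState == CaptureResult.CONTROL_AE_STATE_LOCKED);
                mLockedAE = aeState != null && aeState == CaptureResult.CONTROL_AE_STATE_LOCKED;
                mConvergedAF = afState != null
                        && (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                                || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                mConvergedAWB = awbState != null
                        && (awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED
                                || awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED);
                mLockedAWB = awbState != null && awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED;
                mInterlock3A.open();
            }
        };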