--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2016 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
+
+ <tags>
+ <tag id="BC">
+ Needed for backwards compatibility with the old Java API
+ </tag>
+ <tag id="V1">
+ New features for the first camera2 API release (Android API level 21)
+ </tag>
+ <tag id="RAW">
+ Needed for useful RAW image processing and DNG file support
+ </tag>
+ <tag id="HAL2">
+ Entry is only used by camera device HAL 2.x
+ </tag>
+ <tag id="FULL">
+ Entry is required for full hardware level devices, and optional for other hardware levels
+ </tag>
+ <tag id="DEPTH">
+ Entry is required for the depth capability.
+ </tag>
+ <tag id="REPROC">
+ Entry is required for the YUV or PRIVATE reprocessing capability.
+ </tag>
+ <tag id="FUTURE">
+ Entry is under-specified and is not required for now. This is for book-keeping
+ purposes only; do not implement or use it, as it may be revised in the future.
+ </tag>
+ </tags>
+
+ <types>
+ <typedef name="pairFloatFloat">
+ <language name="java">android.util.Pair<Float,Float></language>
+ </typedef>
+ <typedef name="pairDoubleDouble">
+ <language name="java">android.util.Pair<Double,Double></language>
+ </typedef>
+ <typedef name="rectangle">
+ <language name="java">android.graphics.Rect</language>
+ </typedef>
+ <typedef name="size">
+ <language name="java">android.util.Size</language>
+ </typedef>
+ <typedef name="string">
+ <language name="java">String</language>
+ </typedef>
+ <typedef name="boolean">
+ <language name="java">boolean</language>
+ </typedef>
+ <typedef name="imageFormat">
+ <language name="java">int</language>
+ </typedef>
+ <typedef name="streamConfigurationMap">
+ <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
+ </typedef>
+ <typedef name="streamConfiguration">
+ <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
+ </typedef>
+ <typedef name="streamConfigurationDuration">
+ <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
+ </typedef>
+ <typedef name="face">
+ <language name="java">android.hardware.camera2.params.Face</language>
+ </typedef>
+ <typedef name="meteringRectangle">
+ <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
+ </typedef>
+ <typedef name="rangeFloat">
+ <language name="java">android.util.Range<Float></language>
+ </typedef>
+ <typedef name="rangeInt">
+ <language name="java">android.util.Range<Integer></language>
+ </typedef>
+ <typedef name="rangeLong">
+ <language name="java">android.util.Range<Long></language>
+ </typedef>
+ <typedef name="colorSpaceTransform">
+ <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
+ </typedef>
+ <typedef name="rggbChannelVector">
+ <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
+ </typedef>
+ <typedef name="blackLevelPattern">
+ <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
+ </typedef>
+ <typedef name="enumList">
+ <language name="java">int</language>
+ </typedef>
+ <typedef name="sizeF">
+ <language name="java">android.util.SizeF</language>
+ </typedef>
+ <typedef name="point">
+ <language name="java">android.graphics.Point</language>
+ </typedef>
+ <typedef name="tonemapCurve">
+ <language name="java">android.hardware.camera2.params.TonemapCurve</language>
+ </typedef>
+ <typedef name="lensShadingMap">
+ <language name="java">android.hardware.camera2.params.LensShadingMap</language>
+ </typedef>
+ <typedef name="location">
+ <language name="java">android.location.Location</language>
+ </typedef>
+ <typedef name="highSpeedVideoConfiguration">
+ <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
+ </typedef>
+ <typedef name="reprocessFormatsMap">
+ <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
+ </typedef>
+ </types>
+
+ <namespace name="android">
+ <section name="colorCorrection">
+ <controls>
+ <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+ <enum>
+ <value>TRANSFORM_MATRIX
+ <notes>Use the android.colorCorrection.transform matrix
+ and android.colorCorrection.gains to do color conversion.
+
+ All advanced white balance adjustments (not specified
+ by our white balance pipeline) must be disabled.
+
+ If AWB is enabled with `android.control.awbMode != OFF`, then
+ TRANSFORM_MATRIX is ignored. The camera device will override
+ this value to either FAST or HIGH_QUALITY.
+ </notes>
+ </value>
+ <value>FAST
+ <notes>Color correction processing must not slow down
+ capture rate relative to sensor raw output.
+
+ Advanced white balance adjustments above and beyond
+ the specified white balance pipeline may be applied.
+
+ If AWB is enabled with `android.control.awbMode != OFF`, then
+ the camera device uses the last frame's AWB values
+ (or defaults if AWB has never been run).
+ </notes>
+ </value>
+ <value>HIGH_QUALITY
+ <notes>Color correction processing operates at improved
+ quality but the capture rate might be reduced (relative to sensor
+ raw output rate).
+
+ Advanced white balance adjustments above and beyond
+ the specified white balance pipeline may be applied.
+
+ If AWB is enabled with `android.control.awbMode != OFF`, then
+ the camera device uses the last frame's AWB values
+ (or defaults if AWB has never been run).
+ </notes>
+ </value>
+ </enum>
+
+ <description>
+ The mode control selects how the image data is converted from the
+ sensor's native color into linear sRGB color.
+ </description>
+ <details>
+ When auto-white balance (AWB) is enabled with android.control.awbMode, this
+ control is overridden by the AWB routine. When AWB is disabled, the
+ application controls how the color mapping is performed.
+
+ We define the expected processing pipeline below. For consistency
+ across devices, this is always the case with TRANSFORM_MATRIX.
+
+ When either FAST or HIGH_QUALITY is used, the camera device may
+ do additional processing but android.colorCorrection.gains and
+ android.colorCorrection.transform will still be provided by the
+ camera device (in the results) and be roughly correct.
+
+ Switching to TRANSFORM_MATRIX and using the data provided from
+ FAST or HIGH_QUALITY will yield a picture with the same white point
+ as what was produced by the camera device in the earlier frame.
+
+ The expected processing pipeline is as follows:
+
+ ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
+
+ The white balance is encoded by two values, a 4-channel white-balance
+ gain vector (applied in the Bayer domain), and a 3x3 color transform
+ matrix (applied after demosaic).
+
+ The 4-channel white-balance gains are defined as:
+
+ android.colorCorrection.gains = [ R G_even G_odd B ]
+
+ where `G_even` is the gain for green pixels on even rows of the
+ output, and `G_odd` is the gain for green pixels on the odd rows.
+ These may be identical for a given camera device implementation; if
+ the camera device does not support a separate gain for even/odd green
+ channels, it will use the `G_even` value, and write `G_odd` equal to
+ `G_even` in the output result metadata.
+
+ The matrices for color transforms are defined as a 9-entry vector:
+
+ android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+
+ which define a transform from input sensor colors, `P_in = [ r g b ]`,
+ to output linear sRGB, `P_out = [ r' g' b' ]`,
+
+ with colors as follows:
+
+ r' = I0r + I1g + I2b
+ g' = I3r + I4g + I5b
+ b' = I6r + I7g + I8b
+
+ Both the input and output value ranges must match. Overflow/underflow
+ values are clipped to fit within the range.
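+
+ As a hedged illustration only (not part of this specification), the
+ following Java sketch drives this pipeline manually through the
+ camera2 API; `builder` is assumed to be an existing
+ CaptureRequest.Builder:
+
+     // Disable AWB so TRANSFORM_MATRIX is honored.
+     builder.set(CaptureRequest.CONTROL_AWB_MODE,
+             CameraMetadata.CONTROL_AWB_MODE_OFF);
+     builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
+             CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
+     // White-balance gains, in [R, G_even, G_odd, B] order.
+     builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+             new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));
+     // 3x3 transform as nine rationals (numerator/denominator
+     // pairs) in row-major order; identity shown here.
+     builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
+             new ColorSpaceTransform(new int[] {
+                     1, 1,  0, 1,  0, 1,
+                     0, 1,  1, 1,  0, 1,
+                     0, 1,  0, 1,  1, 1 }));
+
+ The gain values above are illustrative only; real values would come
+ from a prior capture result or the application's own calibration.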
+ </details>
+ <hal_details>
+ HAL must support both FAST and HIGH_QUALITY if color correction control is available
+ on the camera device, but the underlying implementation can be the same for both modes.
+ That is, if the highest quality implementation on the camera device does not slow down
+ capture rate, then FAST and HIGH_QUALITY should generate the same output.
+ </hal_details>
+ </entry>
+ <entry name="transform" type="rational" visibility="public"
+ type_notes="3x3 rational matrix in row-major order"
+ container="array" typedef="colorSpaceTransform" hwlevel="full">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>A color transform matrix to use to transform
+ from sensor RGB color space to output linear sRGB color space.
+ </description>
+ <units>Unitless scale factors</units>
+ <details>This matrix is either set by the camera device when the request
+ android.colorCorrection.mode is not TRANSFORM_MATRIX, or
+ directly by the application in the request when the
+ android.colorCorrection.mode is TRANSFORM_MATRIX.
+
+ In the latter case, the camera device may round the matrix to account
+ for precision issues; the final rounded matrix should be reported back
+ in this matrix result metadata. The transform should keep the magnitude
+ of the output color values within `[0, 1.0]` (assuming input color
+ values are within the normalized range `[0, 1.0]`), or clipping may occur.
+
+ The valid range of each matrix element varies on different devices, but
+ values within [-1.5, 3.0] are guaranteed not to be clipped.
+ </details>
+ </entry>
+ <entry name="gains" type="float" visibility="public"
+ type_notes="A 1D array of floats for 4 color channel gains"
+ container="array" typedef="rggbChannelVector" hwlevel="full">
+ <array>
+ <size>4</size>
+ </array>
+ <description>Gains applying to Bayer raw color channels for
+ white-balance.</description>
+ <units>Unitless gain factors</units>
+ <details>
+ These per-channel gains are either set by the camera device
+ when the request android.colorCorrection.mode is not
+ TRANSFORM_MATRIX, or directly by the application in the
+ request when the android.colorCorrection.mode is
+ TRANSFORM_MATRIX.
+
+ The gains in the result metadata are the gains actually
+ applied by the camera device to the current frame.
+
+ The valid range of gains varies on different devices, but gains
+ between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
+ device allows gains below 1.0, this is usually not recommended because
+ this can create color artifacts.
+ </details>
+ <hal_details>
+ The 4-channel white-balance gains are defined in
+ the order of `[R G_even G_odd B]`, where `G_even` is the gain
+ for green pixels on even rows of the output, and `G_odd`
+ is the gain for green pixels on the odd rows.
+
+ If a HAL does not support a separate gain for even/odd green
+ channels, it must use the `G_even` value, and write
+ `G_odd` equal to `G_even` in the output result metadata.
+ </hal_details>
+ </entry>
+ <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ No aberration correction is applied.
+ </notes>
+ </value>
+ <value>FAST
+ <notes>
+ Aberration correction will not slow down capture rate
+ relative to sensor raw output.
+ </notes>
+ </value>
+ <value>HIGH_QUALITY
+ <notes>
+ Aberration correction operates at improved quality but the capture rate might be
+ reduced (relative to sensor raw output rate).
+ </notes>
+ </value>
+ </enum>
+ <description>
+ Mode of operation for the chromatic aberration correction algorithm.
+ </description>
+ <range>android.colorCorrection.availableAberrationModes</range>
+ <details>
+ Chromatic (color) aberration is caused by the fact that different wavelengths of light
+ can not focus on the same point after exiting from the lens. This metadata defines
+ the high level control of chromatic aberration correction algorithm, which aims to
+ minimize the chromatic artifacts that may occur along the object boundaries in an
+ image.
+
+ FAST/HIGH_QUALITY both mean that camera-device-determined aberration
+ correction will be applied. HIGH_QUALITY mode indicates that the camera device will
+ use the highest-quality aberration correction algorithms, even if it slows down
+ capture rate. FAST means the camera device will not slow down capture rate when
+ applying aberration correction.
+
+ LEGACY devices will always be in FAST mode.
+ </details>
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.colorCorrection.mode" kind="controls">
+ </clone>
+ <clone entry="android.colorCorrection.transform" kind="controls">
+ </clone>
+ <clone entry="android.colorCorrection.gains" kind="controls">
+ </clone>
+ <clone entry="android.colorCorrection.aberrationMode" kind="controls">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="availableAberrationModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of aberration correction modes for android.colorCorrection.aberrationMode that are
+ supported by this camera device.
+ </description>
+ <range>Any value listed in android.colorCorrection.aberrationMode</range>
+ <details>
+ This key lists the valid modes for android.colorCorrection.aberrationMode. If no
+ aberration correction modes are available for a device, this list will solely include
+ OFF mode. All camera devices will support either OFF or FAST mode.
+
+ Camera devices that support the MANUAL_POST_PROCESSING capability will always list
+ OFF mode. This includes all FULL level devices.
+
+ LEGACY devices will always only support FAST mode.
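+
+ For illustration only (assuming an existing `characteristics` and
+ `builder`), an application might prefer HIGH_QUALITY when the device
+ lists it:
+
+     int[] modes = characteristics.get(
+             CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES);
+     int chosen = modes[0];  // the list is never empty
+     for (int mode : modes) {
+         if (mode == CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
+             chosen = mode;  // take the highest quality when offered
+         }
+     }
+     builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, chosen);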
+ </details>
+ <hal_details>
+ HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
+ on the camera device, but the underlying implementation can be the same for both modes.
+ That is, if the highest quality implementation on the camera device does not slow down
+ capture rate, then FAST and HIGH_QUALITY will generate the same output.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ </static>
+ </section>
+ <section name="control">
+ <controls>
+ <entry name="aeAntibandingMode" type="byte" visibility="public"
+ enum="true" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ The camera device will not adjust exposure duration to
+ avoid banding problems.
+ </notes>
+ </value>
+ <value>50HZ
+ <notes>
+ The camera device will adjust exposure duration to
+ avoid banding problems with 50Hz illumination sources.
+ </notes>
+ </value>
+ <value>60HZ
+ <notes>
+ The camera device will adjust exposure duration to
+ avoid banding problems with 60Hz illumination
+ sources.
+ </notes>
+ </value>
+ <value>AUTO
+ <notes>
+ The camera device will automatically adapt its
+ antibanding routine to the current illumination
+ condition. This is the default mode if AUTO is
+ available on the given camera device.
+ </notes>
+ </value>
+ </enum>
+ <description>
+ The desired setting for the camera device's auto-exposure
+ algorithm's antibanding compensation.
+ </description>
+ <range>
+ android.control.aeAvailableAntibandingModes
+ </range>
+ <details>
+ Some kinds of lighting fixtures, such as some fluorescent
+ lights, flicker at the rate of the power supply frequency
+ (60Hz or 50Hz, depending on country). While this is
+ typically not noticeable to a person, it can be visible to
+ a camera device. If the camera's exposure time is set to the
+ wrong value, the flicker may become visible as flickering in the
+ viewfinder, or as a set of variable-brightness bands across a
+ final captured image.
+
+ Therefore, the auto-exposure routines of camera devices
+ include antibanding routines that ensure that the chosen
+ exposure value will not cause such banding. The choice of
+ exposure time depends on the rate of flicker, which the
+ camera device can detect automatically, or the expected
+ rate can be selected by the application using this
+ control.
+
+ A given camera device may not support all of the possible
+ options for the antibanding mode. The
+ android.control.aeAvailableAntibandingModes key contains
+ the available modes for a given camera device.
+
+ AUTO mode is the default if it is available on the given
+ camera device. When AUTO mode is not available, the
+ default will be either 50HZ or 60HZ, and both 50HZ
+ and 60HZ will be available.
+
+ If manual exposure control is enabled (by setting
+ android.control.aeMode or android.control.mode to OFF),
+ then this setting has no effect, and the application must
+ ensure it selects exposure times that do not cause banding
+ issues. The android.statistics.sceneFlicker key can assist
+ the application in this.
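+
+ As an illustrative sketch (assuming an existing `characteristics`
+ and `builder`), an application can prefer AUTO and fall back to a
+ fixed frequency otherwise:
+
+     int[] modes = characteristics.get(
+             CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
+     // 50HZ is an arbitrary fallback here; per the guarantee above,
+     // both 50HZ and 60HZ are available whenever AUTO is not.
+     int chosen = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_50HZ;
+     for (int mode : modes) {
+         if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
+             chosen = mode;
+         }
+     }
+     builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, chosen);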
+ </details>
+ <hal_details>
+ For all capture request templates, this field must be set
+ to AUTO if AUTO mode is available. If AUTO is not available,
+ the default must be either 50HZ or 60HZ, and both 50HZ and
+ 60HZ must be available.
+
+ If manual exposure control is enabled (by setting
+ android.control.aeMode or android.control.mode to OFF),
+ then the exposure values provided by the application must not be
+ adjusted for antibanding.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
+ <description>Adjustment to auto-exposure (AE) target image
+ brightness.</description>
+ <units>Compensation steps</units>
+ <range>android.control.aeCompensationRange</range>
+ <details>
+ The adjustment is measured as a count of steps, with the
+ step size defined by android.control.aeCompensationStep and the
+ allowed range by android.control.aeCompensationRange.
+
+ For example, if the exposure value (EV) step is 0.333, '6'
+ will mean an exposure compensation of +2 EV; '-3' will mean an
+ exposure compensation of -1 EV. One EV represents a doubling
+ of image brightness. Note that this control will only be
+ effective if android.control.aeMode `!=` OFF. This control
+ will take effect even when android.control.aeLock `== true`.
+
+ When the exposure compensation value changes, the camera device
+ may take several frames to reach the newly requested exposure target.
+ During that time, android.control.aeState field will be in the SEARCHING
+ state. Once the new exposure target is reached, android.control.aeState will
+ change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
+ FLASH_REQUIRED (if the scene is too dark for still capture).
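+
+ For example (a sketch; `characteristics` and `builder` are assumed
+ to exist), converting a desired +2 EV shift into compensation steps:
+
+     Rational step = characteristics.get(
+             CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+     int steps = Math.round(2.0f / step.floatValue());
+     // Clamp to the supported range before applying.
+     steps = characteristics.get(
+             CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE).clamp(steps);
+     builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);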
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeLock" type="byte" visibility="public" enum="true"
+ typedef="boolean" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>Auto-exposure lock is disabled; the AE algorithm
+ is free to update its parameters.</notes></value>
+ <value>ON
+ <notes>Auto-exposure lock is enabled; the AE algorithm
+ must not update the exposure and sensitivity parameters
+ while the lock is active.
+
+ android.control.aeExposureCompensation setting changes
+ will still take effect while auto-exposure is locked.
+
+ Some rare LEGACY devices may not support
+ this, in which case the value will always be overridden to OFF.
+ </notes></value>
+ </enum>
+ <description>Whether auto-exposure (AE) is currently locked to its latest
+ calculated values.</description>
+ <details>
+ When set to `true` (ON), the AE algorithm is locked to its latest parameters,
+ and will not change exposure settings until the lock is set to `false` (OFF).
+
+ Note that even when AE is locked, the flash may be fired if
+ the android.control.aeMode is ON_AUTO_FLASH /
+ ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
+
+ When android.control.aeExposureCompensation is changed, even if the AE lock
+ is ON, the camera device will still adjust its exposure value.
+
+ If AE precapture is triggered (see android.control.aePrecaptureTrigger)
+ when AE is already locked, the camera device will not change the exposure time
+ (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
+ parameters. The flash may be fired if the android.control.aeMode
+ is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
+ android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
+ Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
+
+ When an AE precapture sequence is triggered, AE unlock will not be able to unlock
+ the AE if AE is locked by the camera device internally during the precapture metering
+ sequence. In other words, submitting requests with AE unlock has no effect for an
+ ongoing precapture metering sequence. Otherwise, the precapture metering sequence
+ will never succeed in a sequence of preview requests where AE lock is always set
+ to `false`.
+
+ Since the camera device has a pipeline of in-flight requests, the settings that
+ get locked do not necessarily correspond to the settings that were present in the
+ latest capture result received from the camera device, since additional captures
+ and AE updates may have occurred even before the result was sent out. If an
+ application is switching between automatic and manual control and wishes to eliminate
+ any flicker during the switch, the following procedure is recommended:
+
+ 1. Starting in auto-AE mode:
+ 2. Lock AE
+ 3. Wait for the first result to be output that has the AE locked
+ 4. Copy exposure settings from that result into a request, set the request to manual AE
+ 5. Submit the capture request, proceed to run manual AE as desired.
+
+ See android.control.aeState for AE lock related state transition details.
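+
+ A minimal sketch of the lock-then-copy procedure above (`builder`,
+ `session`, and `handler` are assumed to exist, `builder` must be
+ effectively final, and exception handling is omitted):
+
+     // Step 2: lock AE while keeping the preview running.
+     builder.set(CaptureRequest.CONTROL_AE_LOCK, true);
+     session.setRepeatingRequest(builder.build(),
+             new CameraCaptureSession.CaptureCallback() {
+         @Override
+         public void onCaptureCompleted(CameraCaptureSession s,
+                 CaptureRequest request, TotalCaptureResult result) {
+             Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+             // Step 3: wait for a result with AE locked.
+             if (aeState != null) {
+                 if (aeState == CameraMetadata.CONTROL_AE_STATE_LOCKED) {
+                     // Step 4: copy the locked values into a manual request.
+                     builder.set(CaptureRequest.CONTROL_AE_MODE,
+                             CameraMetadata.CONTROL_AE_MODE_OFF);
+                     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
+                             result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
+                     builder.set(CaptureRequest.SENSOR_SENSITIVITY,
+                             result.get(CaptureResult.SENSOR_SENSITIVITY));
+                     // Step 5: submit builder.build() as the manual request.
+                 }
+             }
+         }
+     }, handler);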
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ The camera device's autoexposure routine is disabled.
+
+ The application-selected android.sensor.exposureTime,
+ android.sensor.sensitivity and
+ android.sensor.frameDuration are used by the camera
+ device, along with android.flash.* fields, if there's
+ a flash unit for this camera device.
+
+ Note that auto-white balance (AWB) and auto-focus (AF)
+ behavior is device dependent when AE is in OFF mode.
+ To have consistent behavior across different devices,
+ it is recommended to either set AWB and AF to OFF mode
+ or lock AWB and AF before setting AE to OFF.
+ See android.control.awbMode, android.control.afMode,
+ android.control.awbLock, and android.control.afTrigger
+ for more details.
+
+ LEGACY devices do not support the OFF mode and will
+ override attempts to use this value to ON.
+ </notes>
+ </value>
+ <value>ON
+ <notes>
+ The camera device's autoexposure routine is active,
+ with no flash control.
+
+ The application's values for
+ android.sensor.exposureTime,
+ android.sensor.sensitivity, and
+ android.sensor.frameDuration are ignored. The
+ application has control over the various
+ android.flash.* fields.
+ </notes>
+ </value>
+ <value>ON_AUTO_FLASH
+ <notes>
+ Like ON, except that the camera device also controls
+ the camera's flash unit, firing it in low-light
+ conditions.
+
+ The flash may be fired during a precapture sequence
+ (triggered by android.control.aePrecaptureTrigger) and
+ may be fired for captures for which the
+ android.control.captureIntent field is set to
+ STILL_CAPTURE.
+ </notes>
+ </value>
+ <value>ON_ALWAYS_FLASH
+ <notes>
+ Like ON, except that the camera device also controls
+ the camera's flash unit, always firing it for still
+ captures.
+
+ The flash may be fired during a precapture sequence
+ (triggered by android.control.aePrecaptureTrigger) and
+ will always be fired for captures for which the
+ android.control.captureIntent field is set to
+ STILL_CAPTURE.
+ </notes>
+ </value>
+ <value>ON_AUTO_FLASH_REDEYE
+ <notes>
+ Like ON_AUTO_FLASH, but with automatic red eye
+ reduction.
+
+ If deemed necessary by the camera device, a red eye
+ reduction flash will fire during the precapture
+ sequence.
+ </notes>
+ </value>
+ </enum>
+ <description>The desired mode for the camera device's
+ auto-exposure routine.</description>
+ <range>android.control.aeAvailableModes</range>
+ <details>
+ This control is only effective if android.control.mode is
+ AUTO.
+
+ When set to any of the ON modes, the camera device's
+ auto-exposure routine is enabled, overriding the
+ application's selected exposure time, sensor sensitivity,
+ and frame duration (android.sensor.exposureTime,
+ android.sensor.sensitivity, and
+ android.sensor.frameDuration). If one of the FLASH modes
+ is selected, the camera device's flash unit controls are
+ also overridden.
+
+ The FLASH modes are only available if the camera device
+ has a flash unit (android.flash.info.available is `true`).
+
+ If flash TORCH mode is desired, this field must be set to
+ ON or OFF, and android.flash.mode set to TORCH.
+
+ When set to any of the ON modes, the values chosen by the
+ camera device auto-exposure routine for the overridden
+ fields for a given capture will be available in its
+ CaptureResult.
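+
+ For instance, to run AE normally but force the torch on (a sketch;
+ `builder` is assumed to exist):
+
+     builder.set(CaptureRequest.CONTROL_AE_MODE,
+             CameraMetadata.CONTROL_AE_MODE_ON);
+     builder.set(CaptureRequest.FLASH_MODE,
+             CameraMetadata.FLASH_MODE_TORCH);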
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeRegions" type="int32" visibility="public"
+ optional="true" container="array" typedef="meteringRectangle">
+ <array>
+ <size>5</size>
+ <size>area_count</size>
+ </array>
+ <description>List of metering areas to use for auto-exposure adjustment.</description>
+ <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
+ <range>Coordinates must be between `[(0,0), (width, height))` of
+ android.sensor.info.activeArraySize</range>
+ <details>
+ Not available if android.control.maxRegionsAe is 0.
+ Otherwise will always be present.
+
+ The maximum number of regions supported by the device is determined by the value
+ of android.control.maxRegionsAe.
+
+ The data representation is int[5 * area_count].
+ Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ ymax.
+
+ The coordinate system is based on the active pixel array,
+ with (0,0) being the top-left pixel in the active pixel array, and
+ (android.sensor.info.activeArraySize.width - 1,
+ android.sensor.info.activeArraySize.height - 1) being the
+ bottom-right pixel in the active pixel array.
+
+ The weight must be within `[0, 1000]`, and represents a weight
+ for every pixel in the area. This means that a large metering area
+ with the same weight as a smaller area will have more effect in
+ the metering result. Metering areas can partially overlap and the
+ camera device will add the weights in the overlap region.
+
+ The weights are relative to weights of other exposure metering regions, so if only one
+ region is used, all non-zero weights will have the same effect. A region with 0
+ weight is ignored.
+
+ If all regions have 0 weight, then no specific metering area needs to be used by the
+ camera device.
+
+ If the metering region is outside the used android.scaler.cropRegion returned in
+ capture result metadata, the camera device will ignore the sections outside the crop
+ region and output only the intersection rectangle as the metering region in the result
+ metadata. If the region is entirely outside the crop region, it will be ignored and
+ not reported in the result metadata.
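+
+ For illustration (assuming an existing `characteristics` and
+ `builder`), metering on the center quarter of the active array at
+ full weight:
+
+     Rect active = characteristics.get(
+             CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+     // MeteringRectangle takes (x, y, width, height, weight); the
+     // framework maps it to the (xmin, ymin, xmax, ymax, weight)
+     // representation described above.
+     MeteringRectangle center = new MeteringRectangle(
+             active.width() / 4, active.height() / 4,
+             active.width() / 2, active.height() / 2,
+             MeteringRectangle.METERING_WEIGHT_MAX);
+     builder.set(CaptureRequest.CONTROL_AE_REGIONS,
+             new MeteringRectangle[] { center });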
+ </details>
+ <hal_details>
+ The HAL level representation of MeteringRectangle[] is a
+ int[5 * area_count].
+ Every five elements represent a metering region of
+ (xmin, ymin, xmax, ymax, weight).
+ The rectangle is defined to be inclusive on xmin and ymin, but
+ exclusive on xmax and ymax.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeTargetFpsRange" type="int32" visibility="public"
+ container="array" typedef="rangeInt" hwlevel="legacy">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Range over which the auto-exposure routine can
+ adjust the capture frame rate to maintain good
+ exposure.</description>
+ <units>Frames per second (FPS)</units>
+ <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
+ <details>Only constrains auto-exposure (AE) algorithm, not
+ manual control of android.sensor.exposureTime and
+ android.sensor.frameDuration.</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aePrecaptureTrigger" type="byte" visibility="public"
+ enum="true" hwlevel="limited">
+ <enum>
+ <value>IDLE
+ <notes>The trigger is idle.</notes>
+ </value>
+ <value>START
+ <notes>The precapture metering sequence will be started
+ by the camera device.
+
+ The exact effect of the precapture trigger depends on
+ the current AE mode and state.</notes>
+ </value>
+ <value>CANCEL
+ <notes>The camera device will cancel any currently active or completed
+ precapture metering sequence, the auto-exposure routine will return to its
+ initial state.</notes>
+ </value>
+ </enum>
+ <description>Whether the camera device will trigger a precapture
+ metering sequence when it processes this request.</description>
+ <details>This entry is normally set to IDLE, or is not
+ included at all in the request settings. When included and
+ set to START, the camera device will trigger the auto-exposure (AE)
+ precapture metering sequence.
+
+ When set to CANCEL, the camera device will cancel any active
+ precapture metering trigger, and return to its initial AE state.
+ If a precapture metering sequence is already completed, and the camera
+ device has implicitly locked the AE for subsequent still capture, the
+ CANCEL trigger will unlock the AE and return to its initial AE state.
+
+ The precapture sequence should be triggered before starting a
+ high-quality still capture for final metering decisions to
+ be made, and for firing pre-capture flash pulses to estimate
+ scene brightness and required final capture flash power, when
+ the flash is enabled.
+
+ Normally, this entry should be set to START for only a
+ single request, and the application should wait until the
+ sequence completes before starting a new one.
+
+ When a precapture metering sequence is finished, the camera device
+ may lock the auto-exposure routine internally to be able to accurately expose the
+ subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
+ For this case, the AE may not resume normal scan if no subsequent still capture is
+ submitted. To ensure that the AE routine restarts normal scan, the application should
+ submit a request with `android.control.aeLock == true`, followed by a request
+ with `android.control.aeLock == false`, if the application decides not to submit a
+ still capture request after the precapture sequence completes. Alternatively, for
+ API level 23 or newer devices, the CANCEL can be used to unlock the camera device
+ internally locked AE if the application doesn't submit a still capture request after
+ the AE precapture trigger. Note that CANCEL was added in API level 23, and must not
+ be used on devices with earlier API levels.
+
+ The exact effect of auto-exposure (AE) precapture trigger
+ depends on the current AE mode and state; see
+ android.control.aeState for AE precapture state transition
+ details.
+
+ On LEGACY-level devices, the precapture trigger is not supported;
+ capturing a high-resolution JPEG image will automatically trigger a
+ precapture sequence before the high-resolution capture, including
+ potentially firing a pre-capture flash.
+
+ Using the precapture trigger and the auto-focus trigger android.control.afTrigger
+ simultaneously is allowed. However, since these triggers often require cooperation between
+ the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ focus sweep), the camera device may delay acting on a later trigger until the previous
+ trigger has been fully handled. This may lead to longer intervals between the trigger and
+ changes to android.control.aeState indicating the start of the precapture sequence, for
+ example.
+
+ If both the precapture and the auto-focus trigger are activated on the same request, then
+ the camera device will complete them in the optimal order for that device.
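+
+ A minimal sketch of firing the trigger ahead of a still capture
+ (`builder`, `session`, `callback`, and `handler` are assumed to
+ exist; exception handling omitted):
+
+     // Fire the trigger in a single capture; do not leave START set
+     // in the repeating request.
+     builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+             CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+     session.capture(builder.build(), callback, handler);
+     builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+             CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+     // The callback then watches android.control.aeState and issues
+     // the STILL_CAPTURE request once the sequence finishes.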
+ </details>
+ <hal_details>
+ The HAL must support triggering the AE precapture trigger while an AF trigger is active
+ (and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to
+ treat these as two consecutive triggers, for example handling the AF trigger and then the
+ AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once,
+ to minimize the latency for converging both focus and exposure/flash usage.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="afMode" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>The auto-focus routine does not control the lens;
+ android.lens.focusDistance is controlled by the
+ application.</notes></value>
+ <value>AUTO
+ <notes>Basic automatic focus mode.
+
+ In this mode, the lens does not move unless
+ the autofocus trigger action is called. When that trigger
+ is activated, AF will transition to ACTIVE_SCAN, then to
+ the outcome of the scan (FOCUSED_LOCKED or NOT_FOCUSED_LOCKED).
+
+ Always supported if lens is not fixed focus.
+
+ Use android.lens.info.minimumFocusDistance to determine if lens
+ is fixed-focus.
+
+ Triggering AF_CANCEL resets the lens position to default,
+ and sets the AF state to INACTIVE.</notes></value>
+ <value>MACRO
+ <notes>Close-up focusing mode.
+
+ In this mode, the lens does not move unless the
+ autofocus trigger action is called. When that trigger is
+ activated, AF will transition to ACTIVE_SCAN, then to
+ the outcome of the scan (FOCUSED_LOCKED or NOT_FOCUSED_LOCKED). This
+ mode is optimized for focusing on objects very close to
+ the camera.
+
+ Triggering cancel AF resets the lens
+ position to default, and sets the AF state to
+ INACTIVE.</notes></value>
+ <value>CONTINUOUS_VIDEO
+ <notes>In this mode, the AF algorithm modifies the lens
+ position continually to attempt to provide a
+ constantly-in-focus image stream.
+
+ The focusing behavior should be suitable for good quality
+ video recording; typically this means slower focus
+ movement and no overshoots. When the AF trigger is not
+ involved, the AF algorithm should start in INACTIVE state,
+ and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
+ states as appropriate. When the AF trigger is activated,
+ the algorithm should immediately transition into
+ FOCUSED_LOCKED or NOT_FOCUSED_LOCKED as appropriate, and lock the
+ lens position until a cancel AF trigger is received.
+
+ Once cancel is received, the algorithm should transition
+ back to INACTIVE and resume passive scan. Note that this
+ behavior is not identical to CONTINUOUS_PICTURE, since an
+ ongoing PASSIVE_SCAN must immediately be
+ canceled.</notes></value>
+ <value>CONTINUOUS_PICTURE
+ <notes>In this mode, the AF algorithm modifies the lens
+ position continually to attempt to provide a
+ constantly-in-focus image stream.
+
+ The focusing behavior should be suitable for still image
+ capture; typically this means focusing as fast as
+ possible. When the AF trigger is not involved, the AF
+ algorithm should start in INACTIVE state, and then
+ transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
+ appropriate as it attempts to maintain focus. When the AF
+ trigger is activated, the algorithm should finish its
+ PASSIVE_SCAN if active, and then transition into
+ FOCUSED_LOCKED or NOT_FOCUSED_LOCKED as appropriate, and lock the
+ lens position until a cancel AF trigger is received.
+
+ When the AF cancel trigger is activated, the algorithm
+ should transition back to INACTIVE and then act as if it
+ has just been started.</notes></value>
+ <value>EDOF
+ <notes>Extended depth of field (digital focus) mode.
+
+ The camera device will produce images with an extended
+ depth of field automatically; no special focusing
+ operations need to be done before taking a picture.
+
+ AF triggers are ignored, and the AF state will always be
+ INACTIVE.</notes></value>
+ </enum>
+ <description>Whether auto-focus (AF) is currently enabled, and what
+ mode it is set to.</description>
+ <range>android.control.afAvailableModes</range>
+ <details>Only effective if android.control.mode is AUTO and the lens is not fixed focus
+ (i.e. `android.lens.info.minimumFocusDistance > 0`). Also note that
+ when android.control.aeMode is OFF, the behavior of AF is device
+ dependent. It is recommended to lock AF by using android.control.afTrigger before
+ setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
+
+ If the lens is controlled by the camera device auto-focus algorithm,
+ the camera device will report the current AF status in android.control.afState
+ in result metadata.</details>
+ <hal_details>
+ When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
+ request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
+ up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
+ android.control.afState for detailed state transitions), which indicates that the lens is
+ locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
+ after the lens is locked, the HAL must compensate this movement appropriately such that
+ the same focal plane remains in focus.
+
+ When afMode is one of the continuous auto focus modes, the HAL is free to start an AF
+ scan whenever it's not locked. When the lens is locked after an AF trigger
+ (see android.control.afState for detailed state transitions), the HAL should maintain the
+ same lock behavior as above.
+
+ When afMode is OFF, the application controls focus manually. The accuracy of the
+ focus distance control depends on the android.lens.info.focusDistanceCalibration.
+ However, the lens must not move regardless of the camera movement for any focus distance
+ manual control.
+
+ To put this in concrete terms, if the camera has lens elements which may move based on
+ camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
+ remain in a fixed position invariant to the camera's orientation or motion, for example,
+ by using accelerometer measurements in the lens control logic. This is a typical issue
+ that will arise on camera modules with open-loop VCMs.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="afRegions" type="int32" visibility="public"
+ optional="true" container="array" typedef="meteringRectangle">
+ <array>
+ <size>5</size>
+ <size>area_count</size>
+ </array>
+ <description>List of metering areas to use for auto-focus.</description>
+ <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
+ <range>Coordinates must be between `[(0,0), (width, height))` of
+ android.sensor.info.activeArraySize</range>
+ <details>
+ Not available if android.control.maxRegionsAf is 0.
+ Otherwise will always be present.
+
+ The maximum number of focus areas supported by the device is determined by the value
+ of android.control.maxRegionsAf.
+
+ The data representation is int[5 * area_count].
+ Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ ymax.
+
+ The coordinate system is based on the active pixel array,
+ with (0,0) being the top-left pixel in the active pixel array, and
+ (android.sensor.info.activeArraySize.width - 1,
+ android.sensor.info.activeArraySize.height - 1) being the
+ bottom-right pixel in the active pixel array.
+
+ The weight must be within `[0, 1000]`, and represents a weight
+ for every pixel in the area. This means that a large metering area
+ with the same weight as a smaller area will have more effect in
+ the metering result. Metering areas can partially overlap and the
+ camera device will add the weights in the overlap region.
+
+ The weights are relative to weights of other metering regions, so if only one region
+ is used, all non-zero weights will have the same effect. A region with 0 weight is
+ ignored.
+
+ If all regions have 0 weight, then no specific metering area needs to be used by the
+ camera device.
+
+ If the metering region is outside the used android.scaler.cropRegion returned in
+ capture result metadata, the camera device will ignore the sections outside the crop
+ region and output only the intersection rectangle as the metering region in the result
+ metadata. If the region is entirely outside the crop region, it will be ignored and
+ not reported in the result metadata.
+ </details>
+ <hal_details>
+ The HAL level representation of MeteringRectangle[] is a
+ int[5 * area_count].
+ Every five elements represent a metering region of
+ (xmin, ymin, xmax, ymax, weight).
+ The rectangle is defined to be inclusive on xmin and ymin, but
+ exclusive on xmax and ymax.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="afTrigger" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>IDLE
+ <notes>The trigger is idle.</notes>
+ </value>
+ <value>START
+ <notes>Autofocus will trigger now.</notes>
+ </value>
+ <value>CANCEL
+ <notes>Autofocus will return to its initial
+ state, and cancel any currently active trigger.</notes>
+ </value>
+ </enum>
+ <description>
+ Whether the camera device will trigger autofocus for this request.
+ </description>
+ <details>This entry is normally set to IDLE, or is not
+ included at all in the request settings.
+
+ When included and set to START, the camera device will trigger the
+ autofocus algorithm. If autofocus is disabled, this trigger has no effect.
+
+ When set to CANCEL, the camera device will cancel any active trigger,
+ and return to its initial AF state.
+
+ Generally, applications should set this entry to START or CANCEL for only a
+ single capture, and then return it to IDLE (or not set at all). Specifying
+ START for multiple captures in a row means restarting the AF operation over
+ and over again.
+
+ See android.control.afState for what the trigger means for each AF mode.
+
+ Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
+ simultaneously is allowed. However, since these triggers often require cooperation between
+ the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ focus sweep), the camera device may delay acting on a later trigger until the previous
+ trigger has been fully handled. This may lead to longer intervals between the trigger and
+ changes to android.control.afState, for example.
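+
+ For example (a sketch; `builder`, `session`, `callback`, and
+ `handler` are assumed to exist; exception handling omitted),
+ triggering one scan in AUTO mode and later cancelling it:
+
+     builder.set(CaptureRequest.CONTROL_AF_MODE,
+             CameraMetadata.CONTROL_AF_MODE_AUTO);
+     builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+             CameraMetadata.CONTROL_AF_TRIGGER_START);
+     session.capture(builder.build(), callback, handler);  // one-shot
+     builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+             CameraMetadata.CONTROL_AF_TRIGGER_IDLE);       // back to IDLE
+
+     // Later, to abandon the scan and unlock the lens:
+     builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+             CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+     session.capture(builder.build(), callback, handler);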
+ </details>
+ <hal_details>
+ The HAL must support triggering the AF trigger while an AE precapture trigger is active
+ (and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to
+ treat these as two consecutive triggers, for example handling the AF trigger and then the
+ AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once,
+ to minimize the latency for converging both focus and exposure/flash usage.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="awbLock" type="byte" visibility="public" enum="true"
+ typedef="boolean" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>Auto-white balance lock is disabled; the AWB
+ algorithm is free to update its parameters if in AUTO
+ mode.</notes></value>
+ <value>ON
+ <notes>Auto-white balance lock is enabled; the AWB
+ algorithm will not update its parameters while the lock
+ is active.</notes></value>
+ </enum>
+ <description>Whether auto-white balance (AWB) is currently locked to its
+ latest calculated values.</description>
+ <details>
+ When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
+ and will not change color balance settings until the lock is set to `false` (OFF).
+
+ Since the camera device has a pipeline of in-flight requests, the settings that
+ get locked do not necessarily correspond to the settings that were present in the
+ latest capture result received from the camera device, since additional captures
+ and AWB updates may have occurred even before the result was sent out. If an
+ application is switching between automatic and manual control and wishes to eliminate
+ any flicker during the switch, the following procedure is recommended:
+
+ 1. Starting in auto-AWB mode:
+ 2. Lock AWB
+ 3. Wait for the first result to be output that has the AWB locked
+ 4. Copy AWB settings from that result into a request, set the request to manual AWB
+ 5. Submit the capture request, proceed to run manual AWB as desired.
+
+ Note that AWB lock is only meaningful when
+ android.control.awbMode is in the AUTO mode; in other modes,
+ AWB is already fixed to a specific setting.
+
+ Some LEGACY devices may not support ON; the value is then overridden to OFF.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="awbMode" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ The camera device's auto-white balance routine is disabled.
+
+ The application-selected color transform matrix
+ (android.colorCorrection.transform) and gains
+ (android.colorCorrection.gains) are used by the camera
+ device for manual white balance control.
+ </notes>
+ </value>
+ <value>AUTO
+ <notes>
+ The camera device's auto-white balance routine is active.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>INCANDESCENT
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses incandescent light as the assumed scene
+ illumination for white balance.
+
+ While the exact white balance transforms are up to the
+ camera device, they will approximately match the CIE
+ standard illuminant A.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>FLUORESCENT
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses fluorescent light as the assumed scene
+ illumination for white balance.
+
+ While the exact white balance transforms are up to the
+ camera device, they will approximately match the CIE
+ standard illuminant F2.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>WARM_FLUORESCENT
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses warm fluorescent light as the assumed scene
+ illumination for white balance.
+
+ While the exact white balance transforms are up to the
+ camera device, they will approximately match the CIE
+ standard illuminant F4.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>DAYLIGHT
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses daylight light as the assumed scene
+ illumination for white balance.
+
+ While the exact white balance transforms are up to the
+ camera device, they will approximately match the CIE
+ standard illuminant D65.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>CLOUDY_DAYLIGHT
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses cloudy daylight light as the assumed scene
+ illumination for white balance.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>TWILIGHT
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses twilight light as the assumed scene
+ illumination for white balance.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ <value>SHADE
+ <notes>
+ The camera device's auto-white balance routine is disabled;
+ the camera device uses shade light as the assumed scene
+ illumination for white balance.
+
+ The application's values for android.colorCorrection.transform
+ and android.colorCorrection.gains are ignored.
+ For devices that support the MANUAL_POST_PROCESSING capability, the
+ values used by the camera device for the transform and gains
+ will be available in the capture result for this request.
+ </notes>
+ </value>
+ </enum>
+ <description>Whether auto-white balance (AWB) is currently setting the color
+ transform fields, and what its illumination target
+ is.</description>
+ <range>android.control.awbAvailableModes</range>
+ <details>
+ This control is only effective if android.control.mode is AUTO.
+
+ When set to the AUTO mode, the camera device's auto-white balance
+ routine is enabled, overriding the application's selected
+ android.colorCorrection.transform, android.colorCorrection.gains and
+ android.colorCorrection.mode. Note that when android.control.aeMode
+ is OFF, the behavior of AWB is device dependent. It is recommended to
+ also set AWB mode to OFF or lock AWB by using android.control.awbLock before
+ setting AE mode to OFF.
+
+ When set to the OFF mode, the camera device's auto-white balance
+ routine is disabled. The application manually controls the white
+ balance by android.colorCorrection.transform, android.colorCorrection.gains
+ and android.colorCorrection.mode.
+
+ When set to any other modes, the camera device's auto-white
+ balance routine is disabled. The camera device uses each
+ particular illumination target for white balance
+ adjustment. The application's values for
+ android.colorCorrection.transform,
+ android.colorCorrection.gains and
+ android.colorCorrection.mode are ignored.
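+
+ As a short illustrative sketch (assuming an existing `builder`),
+ pinning white balance to the daylight illuminant:
+
+     // The transform and gains actually used will appear in the
+     // CaptureResult on devices with MANUAL_POST_PROCESSING.
+     builder.set(CaptureRequest.CONTROL_AWB_MODE,
+             CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT);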
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="awbRegions" type="int32" visibility="public"
+ optional="true" container="array" typedef="meteringRectangle">
+ <array>
+ <size>5</size>
+ <size>area_count</size>
+ </array>
+ <description>List of metering areas to use for auto-white-balance illuminant
+ estimation.</description>
+ <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
+ <range>Coordinates must be between `[(0,0), (width, height))` of
+ android.sensor.info.activeArraySize</range>
+ <details>
+ Not available if android.control.maxRegionsAwb is 0.
+ Otherwise will always be present.
+
+ The maximum number of regions supported by the device is determined by the value
+ of android.control.maxRegionsAwb.
+
+ The data representation is int[5 * area_count].
+ Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ ymax.
+
+ The coordinate system is based on the active pixel array,
+ with (0,0) being the top-left pixel in the active pixel array, and
+ (android.sensor.info.activeArraySize.width - 1,
+ android.sensor.info.activeArraySize.height - 1) being the
+ bottom-right pixel in the active pixel array.
+
+ The weight must be within `[0, 1000]`, and represents a weight
+ for every pixel in the area. This means that a large metering area
+ with the same weight as a smaller area will have more effect in
+ the metering result. Metering areas can partially overlap and the
+ camera device will add the weights in the overlap region.
+
+ The weights are relative to weights of other white balance metering regions, so if
+ only one region is used, all non-zero weights will have the same effect. A region with
+ 0 weight is ignored.
+
+ If all regions have 0 weight, then no specific metering area needs to be used by the
+ camera device.
+
+ If the metering region is outside the used android.scaler.cropRegion returned in
+ capture result metadata, the camera device will ignore the sections outside the crop
+ region and output only the intersection rectangle as the metering region in the result
+ metadata. If the region is entirely outside the crop region, it will be ignored and
+ not reported in the result metadata.
+ </details>
+ <hal_details>
+ The HAL level representation of MeteringRectangle[] is a
+ int[5 * area_count].
+ Every five elements represent a metering region of
+ (xmin, ymin, xmax, ymax, weight).
+ The rectangle is defined to be inclusive on xmin and ymin, but
+ exclusive on xmax and ymax.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="captureIntent" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>CUSTOM
+ <notes>The goal of this request doesn't fall into the other
+ categories. The camera device will default to preview-like
+ behavior.</notes></value>
+ <value>PREVIEW
+ <notes>This request is for a preview-like use case.
+
+ The precapture trigger may be used to start off a metering-with-flash
+ sequence.
+ </notes></value>
+ <value>STILL_CAPTURE
+ <notes>This request is for a still capture-type
+ use case.
+
+ If the flash unit is under automatic control, it may fire as needed.
+ </notes></value>
+ <value>VIDEO_RECORD
+ <notes>This request is for a video recording
+ use case.</notes></value>
+ <value>VIDEO_SNAPSHOT
+ <notes>This request is for a video snapshot (still
+ image while recording video) use case.
+
+ The camera device should take the highest-quality image
+ possible (given the other settings) without disrupting the
+ frame rate of video recording. </notes></value>
+ <value>ZERO_SHUTTER_LAG
+ <notes>This request is for a ZSL use case; the
+ application will stream full-resolution images and
+ reprocess one or several later for a final
+ capture.
+ </notes></value>
+ <value>MANUAL
+ <notes>This request is for a manual capture use case where
+ the application wants to directly control the capture parameters.
+
+ For example, the application may wish to manually control
+ android.sensor.exposureTime, android.sensor.sensitivity, etc.
+ </notes></value>
+ </enum>
+ <description>Information to the camera device 3A (auto-exposure,
+ auto-focus, auto-white balance) routines about the purpose
+ of this capture, to help the camera device to decide optimal 3A
+ strategy.</description>
+ <details>This control (except for MANUAL) is only effective if
+ `android.control.mode != OFF` and any 3A routine is active.
+
+ ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
+ contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
+ android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
+ always supported.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="effectMode" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ No color effect will be applied.
+ </notes>
+ </value>
+ <value optional="true">MONO
+ <notes>
+ A "monocolor" effect where the image is mapped into
+ a single color.
+
+ This will typically be grayscale.
+ </notes>
+ </value>
+ <value optional="true">NEGATIVE
+ <notes>
+ A "photo-negative" effect where the image's colors
+ are inverted.
+ </notes>
+ </value>
+ <value optional="true">SOLARIZE
+ <notes>
+ A "solarisation" effect (Sabattier effect) where the
+ image is wholly or partially reversed in
+ tone.
+ </notes>
+ </value>
+ <value optional="true">SEPIA
+ <notes>
+ A "sepia" effect where the image is mapped into warm
+ gray, red, and brown tones.
+ </notes>
+ </value>
+ <value optional="true">POSTERIZE
+ <notes>
+ A "posterization" effect where the image uses
+ discrete regions of tone rather than a continuous
+ gradient of tones.
+ </notes>
+ </value>
+ <value optional="true">WHITEBOARD
+ <notes>
+ A "whiteboard" effect where the image is typically displayed
+ as regions of white, with black or grey details.
+ </notes>
+ </value>
+ <value optional="true">BLACKBOARD
+ <notes>
+ A "blackboard" effect where the image is typically displayed
+ as regions of black, with white or grey details.
+ </notes>
+ </value>
+ <value optional="true">AQUA
+ <notes>
+ An "aqua" effect where a blue hue is added to the image.
+ </notes>
+ </value>
+ </enum>
+ <description>A special color effect to apply.</description>
+ <range>android.control.availableEffects</range>
+ <details>
+ When this mode is set, a color effect will be applied
+ to images produced by the camera device. The interpretation
+ and implementation of these color effects is left to the
+ implementor of the camera device, and should not be
+ depended on to be consistent (or present) across all
+ devices.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="mode" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>Full application control of pipeline.
+
+ All control by the device's metering and focusing (3A)
+ routines is disabled, and no other settings in
+ android.control.* have any effect, except that
+ android.control.captureIntent may be used by the camera
+ device to select post-processing values for processing
+ blocks that do not allow for manual control, or are not
+ exposed by the camera API.
+
+ However, the camera device's 3A routines may continue to
+ collect statistics and update their internal state so that
+ when control is switched to AUTO mode, good control values
+ can be immediately applied.
+ </notes></value>
+ <value>AUTO
+ <notes>Use settings for each individual 3A routine.
+
+ Manual control of capture parameters is disabled. All
+ controls in android.control.* besides sceneMode take
+ effect.</notes></value>
+ <value optional="true">USE_SCENE_MODE
+ <notes>Use a specific scene mode.
+
+ Enabling this disables control.aeMode, control.awbMode and
+ control.afMode controls; the camera device will ignore
+ those settings while USE_SCENE_MODE is active (except for
+ FACE_PRIORITY scene mode). Other control entries are still active.
+ This setting can only be used if scene mode is supported (i.e.
+ android.control.availableSceneModes
+ contains some modes other than DISABLED).</notes></value>
+ <value optional="true">OFF_KEEP_STATE
+ <notes>Same as OFF mode, except that this capture will not be
+ used by camera device background auto-exposure, auto-white balance and
+ auto-focus algorithms (3A) to update their statistics.
+
+ Specifically, the 3A routines are locked to the last
+ values set from a request with AUTO, OFF, or
+ USE_SCENE_MODE, and any statistics or state updates
+ collected from manual captures with OFF_KEEP_STATE will be
+ discarded by the camera device.
+ </notes></value>
+ </enum>
+ <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
+ routines.</description>
+ <range>android.control.availableModes</range>
+ <details>
+ This is a top-level 3A control switch. When set to OFF, all 3A control
+ by the camera device is disabled. The application must set the fields for
+ capture parameters itself.
+
+ When set to AUTO, the individual algorithm controls in
+ android.control.* are in effect, such as android.control.afMode.
+
+ When set to USE_SCENE_MODE, the individual controls in
+ android.control.* are mostly disabled, and the camera device implements
+ one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
+ as it wishes. The camera device scene mode 3A settings are provided by
+ capture results {@link ACameraMetadata} from
+ {@link ACameraCaptureSession_captureCallback_result}.
+
+ When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference
+ is that this frame will not be used by the camera device's background 3A statistics
+ update, as if this frame were never captured. This mode can be used in scenarios
+ where the application doesn't want a 3A manual control capture to affect
+ the subsequent auto 3A capture results.
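+
+ As an illustrative sketch only (assuming a device with the MANUAL_SENSOR
+ capability and an application-side `builder`), full manual exposure control
+ looks roughly like:
+
+     // Sketch: disable all 3A routines and supply exposure values directly.
+     builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF);
+     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms in ns
+     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);         // ISO 400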
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="sceneMode" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value id="0">DISABLED
+ <notes>
+ Indicates that no scene modes are set for a given capture request.
+ </notes>
+ </value>
+ <value>FACE_PRIORITY
+ <notes>If face detection support exists, use face
+ detection data for auto-focus, auto-white balance, and
+ auto-exposure routines.
+
+ If face detection statistics are disabled
+ (i.e. android.statistics.faceDetectMode is set to OFF),
+ this should still operate correctly (but will not return
+ face detection statistics to the framework).
+
+ Unlike the other scene modes, android.control.aeMode,
+ android.control.awbMode, and android.control.afMode
+ remain active when FACE_PRIORITY is set.
+ </notes>
+ </value>
+ <value optional="true">ACTION
+ <notes>
+ Optimized for photos of quickly moving objects.
+
+ Similar to SPORTS.
+ </notes>
+ </value>
+ <value optional="true">PORTRAIT
+ <notes>
+ Optimized for still photos of people.
+ </notes>
+ </value>
+ <value optional="true">LANDSCAPE
+ <notes>
+ Optimized for photos of distant macroscopic objects.
+ </notes>
+ </value>
+ <value optional="true">NIGHT
+ <notes>
+ Optimized for low-light settings.
+ </notes>
+ </value>
+ <value optional="true">NIGHT_PORTRAIT
+ <notes>
+ Optimized for still photos of people in low-light
+ settings.
+ </notes>
+ </value>
+ <value optional="true">THEATRE
+ <notes>
+ Optimized for dim, indoor settings where flash must
+ remain off.
+ </notes>
+ </value>
+ <value optional="true">BEACH
+ <notes>
+ Optimized for bright, outdoor beach settings.
+ </notes>
+ </value>
+ <value optional="true">SNOW
+ <notes>
+ Optimized for bright, outdoor settings containing snow.
+ </notes>
+ </value>
+ <value optional="true">SUNSET
+ <notes>
+ Optimized for scenes of the setting sun.
+ </notes>
+ </value>
+ <value optional="true">STEADYPHOTO
+ <notes>
+ Optimized to avoid blurry photos due to small amounts of
+ device motion (for example: due to hand shake).
+ </notes>
+ </value>
+ <value optional="true">FIREWORKS
+ <notes>
+ Optimized for nighttime photos of fireworks.
+ </notes>
+ </value>
+ <value optional="true">SPORTS
+ <notes>
+ Optimized for photos of quickly moving people.
+
+ Similar to ACTION.
+ </notes>
+ </value>
+ <value optional="true">PARTY
+ <notes>
+ Optimized for dim, indoor settings with multiple moving
+ people.
+ </notes>
+ </value>
+ <value optional="true">CANDLELIGHT
+ <notes>
+ Optimized for dim settings where the main light source
+ is a flame.
+ </notes>
+ </value>
+ <value optional="true">BARCODE
+ <notes>
+ Optimized for accurately capturing a photo of a barcode,
+ for use by camera applications that wish to read the
+ barcode value.
+ </notes>
+ </value>
+ <value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO
+ <notes>
+ This is deprecated, please use {@link
+ android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
+ and {@link
+ android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
+ for high speed video recording.
+
+ Optimized for high speed video recording (frame rate >=60fps) use case.
+
+ The supported high speed video sizes and fps ranges are specified in
+ android.control.availableHighSpeedVideoConfigurations. To get desired
+ output frame rates, the application is only allowed to select video size
+ and fps range combinations listed in this static metadata. The fps range
+ can be controlled via android.control.aeTargetFpsRange.
+
+ In this mode, the camera device will override aeMode, awbMode, and afMode to
+ ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+ controls will be overridden to be FAST. Therefore, no manual control of capture
+ and post-processing parameters is possible. All other controls operate the
+ same as when android.control.mode == AUTO. This means that all other
+ android.control.* fields continue to work, such as
+
+ * android.control.aeTargetFpsRange
+ * android.control.aeExposureCompensation
+ * android.control.aeLock
+ * android.control.awbLock
+ * android.control.effectMode
+ * android.control.aeRegions
+ * android.control.afRegions
+ * android.control.awbRegions
+ * android.control.afTrigger
+ * android.control.aePrecaptureTrigger
+
+ Outside of android.control.*, the following controls will work:
+
+ * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
+ * android.lens.opticalStabilizationMode (if it is supported)
+ * android.scaler.cropRegion
+ * android.statistics.faceDetectMode
+
+ For the high speed recording use case, the actual maximum supported frame rate may
+ be lower than what the camera can output, depending on the destination Surfaces for
+ the image data. For example, if the destination surface is from a video encoder,
+ the application needs to check if the video encoder is capable of supporting the
+ high frame rate for a given video size, or it will end up with a lower recording
+ frame rate. If the destination surface is from a preview window, the preview frame
+ rate will be bounded by the screen refresh rate.
+
+ The camera device will only support up to 2 output high speed streams
+ (processed non-stalling format defined in android.request.maxNumOutputStreams)
+ in this mode. This control will be effective only if all of the below conditions are true:
+
+ * The application created no more than maxNumHighSpeedStreams processed non-stalling
+ format output streams, where maxNumHighSpeedStreams is calculated as
+ min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
+ * The stream sizes are selected from the sizes reported by
+ android.control.availableHighSpeedVideoConfigurations.
+ * No processed non-stalling or raw streams are configured.
+
+ When the above conditions are NOT satisfied, the controls of this mode and
+ android.control.aeTargetFpsRange will be ignored by the camera device,
+ the camera device will fall back to android.control.mode `==` AUTO,
+ and the returned capture result metadata will give the fps range chosen
+ by the camera device.
+
+ Switching into or out of this mode may trigger some camera ISP/sensor
+ reconfigurations, which may introduce extra latency. It is recommended that
+ the application avoid unnecessary scene mode switches as much as possible.
+ </notes>
+ </value>
+ <value optional="true">HDR
+ <notes>
+ Turn on a device-specific high dynamic range (HDR) mode.
+
+ In this scene mode, the camera device captures images
+ that keep a larger range of scene illumination levels
+ visible in the final image. For example, when taking a
+ picture of an object in front of a bright window, both
+ the object and the scene through the window may be
+ visible when using HDR mode, while in normal AUTO mode,
+ one or the other may be poorly exposed. As a tradeoff,
+ HDR mode generally takes much longer to capture a single
+ image, has no user control, and may have other artifacts
+ depending on the HDR method used.
+
+ Therefore, HDR captures operate at a much slower rate
+ than regular captures.
+
+ In this mode, on LIMITED or FULL devices, when a request
+ is made with an android.control.captureIntent of
+ STILL_CAPTURE, the camera device will capture an image
+ using a high dynamic range capture technique. On LEGACY
+ devices, captures that target a JPEG-format output will
+ be captured with HDR, and the capture intent is not
+ relevant.
+
+ The HDR capture may involve the device capturing a burst
+ of images internally and combining them into one, or it
+ may involve the device using specialized high dynamic
+ range capture hardware. In all cases, a single image is
+ produced in response to a capture request submitted
+ while in HDR mode.
+
+ Since substantial post-processing is generally needed to
+ produce an HDR image, only YUV, PRIVATE, and JPEG
+ outputs are supported for LIMITED/FULL device HDR
+ captures, and only JPEG outputs are supported for LEGACY
+ HDR captures. Using a RAW output for HDR capture is not
+ supported.
+
+ Some devices may also support always-on HDR, which
+ applies HDR processing at full frame rate. For these
+ devices, intents other than STILL_CAPTURE will also
+ produce an HDR output with no frame rate impact compared
+ to normal operation, though the quality may be lower
+ than for STILL_CAPTURE intents.
+
+ If SCENE_MODE_HDR is used with unsupported output types
+ or capture intents, the images captured will be as if
+ the SCENE_MODE was not enabled at all.
+ </notes>
+ </value>
+ <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
+ <notes>Same as FACE_PRIORITY scene mode, except that the camera
+ device will choose higher sensitivity values (android.sensor.sensitivity)
+ under low light conditions.
+
+ The camera device may be tuned to expose the images in a reduced
+ sensitivity range to produce the best quality images. For example,
+ if the android.sensor.info.sensitivityRange gives a range of [100, 1600],
+ the camera device auto-exposure routine tuning process may limit the actual
+ exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
+ excessive, in order to preserve the image quality. Under this situation, the image under
+ low light may be under-exposed when the sensor max exposure time (bounded by the
+ android.control.aeTargetFpsRange when android.control.aeMode is one of the
+ ON_* modes) and effective max sensitivity are reached. This scene mode allows the
+ camera device auto-exposure routine to increase the sensitivity up to the max
+ sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
+ dark and the max exposure time is reached. The captured images may be noisier
+ compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
+ recommended that the application only use this scene mode when it is capable of
+ reducing the noise level of the captured images.
+
+ Unlike the other scene modes, android.control.aeMode,
+ android.control.awbMode, and android.control.afMode
+ remain active when FACE_PRIORITY_LOW_LIGHT is set.
+ </notes>
+ </value>
+ <value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START
+ <notes>
+ Scene mode values within the range of
+ `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
+ customized scene modes.
+ </notes>
+ </value>
+ <value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
+ <notes>
+ Scene mode values within the range of
+ `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
+ customized scene modes.
+ </notes>
+ </value>
+ </enum>
+ <description>
+ Control for which scene mode is currently active.
+ </description>
+ <range>android.control.availableSceneModes</range>
+ <details>
+ Scene modes are custom camera modes optimized for a certain set of conditions and
+ capture settings.
+
+ This is the mode that is active when
+ `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
+ disable android.control.aeMode, android.control.awbMode, and android.control.afMode
+ while in use.
+
+ The interpretation and implementation of these scene modes is left
+ to the implementor of the camera device. Their behavior will not be
+ consistent across all devices, and any given device may only implement
+ a subset of these modes.
+ </details>
+ <hal_details>
+ HAL implementations that include scene modes are expected to provide
+ the per-scene settings to use for android.control.aeMode,
+ android.control.awbMode, and android.control.afMode in
+ android.control.sceneModeOverrides.
+
+ For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
+ the HAL must list supported video size and fps range in
+ android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
+ 1280x720, if the HAL has two different sensor configurations for normal streaming
+ mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
+ requests, the HAL may have to switch between different sensor modes.
+ This mode is deprecated in HAL3.3; to support high speed video recording, please implement
+ android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO
+ capability defined in android.request.availableCapabilities.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="videoStabilizationMode" type="byte" visibility="public"
+ enum="true" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ Video stabilization is disabled.
+ </notes></value>
+ <value>ON
+ <notes>
+ Video stabilization is enabled.
+ </notes></value>
+ </enum>
+ <description>Whether video stabilization is
+ active.</description>
+ <details>
+ Video stabilization automatically warps images from
+ the camera in order to stabilize motion between consecutive frames.
+
+ If enabled, video stabilization can modify the
+ android.scaler.cropRegion to keep the video stream stabilized.
+
+ Switching between different video stabilization modes may take several
+ frames to initialize; the camera device will report the current mode
+ in capture result metadata. For example, when "ON" mode is requested,
+ the video stabilization modes in the first several capture results may
+ still be "OFF", and it will become "ON" when the initialization is
+ done.
+
+ In addition, not all recording sizes or frame rates may be supported for
+ stabilization by a device that reports stabilization support. It is guaranteed
+ that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+ the recording resolution is less than or equal to 1920 x 1080 (width less than
+ or equal to 1920, height less than or equal to 1080), and the recording
+ frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
+ android.control.videoStabilizationMode field will return
+ OFF if the recording output is not stabilized, or if there are no output
+ Surface types that can be stabilized.
+
+ If a camera device supports both this mode and OIS
+ (android.lens.opticalStabilizationMode), turning both modes on may
+ produce undesirable interaction, so it is recommended not to enable
+ both at the same time.
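+
+ As a non-normative sketch (`builder` and `result` are assumed application
+ objects), a request for stabilization and the corresponding result check
+ could look like:
+
+     // Sketch: request stabilization, then confirm in the capture result,
+     // since the first few results may still report OFF while initializing.
+     builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+         CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+     // Later, in a CaptureCallback:
+     Integer mode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
+     boolean stabilized = mode != null
+         && mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;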
+ </details>
+ <tag id="BC" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList"
+ hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
+ supported by this camera device.
+ </description>
+ <range>Any value listed in android.control.aeAntibandingMode</range>
+ <details>
+ Not all of the auto-exposure anti-banding modes may be
+ supported by a given camera device. This field lists the
+ valid anti-banding modes that the application may request
+ for this camera device with the
+ android.control.aeAntibandingMode control.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeAvailableModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList"
+ hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of auto-exposure modes for android.control.aeMode that are supported by this camera
+ device.
+ </description>
+ <range>Any value listed in android.control.aeMode</range>
+ <details>
+ Not all the auto-exposure modes may be supported by a
+ given camera device, especially if no flash unit is
+ available. This entry lists the valid modes for
+ android.control.aeMode for this camera device.
+
+ All camera devices support ON, and all camera devices with flash
+ units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
+
+ FULL mode camera devices always support OFF mode,
+ which enables application control of camera exposure time,
+ sensitivity, and frame duration.
+
+ LEGACY mode camera devices never support OFF mode.
+ LIMITED mode devices support OFF if they support the MANUAL_SENSOR
+ capability.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
+ type_notes="list of pairs of frame rates"
+ container="array" typedef="rangeInt"
+ hwlevel="legacy">
+ <array>
+ <size>2</size>
+ <size>n</size>
+ </array>
+ <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
+ this camera device.</description>
+ <units>Frames per second (FPS)</units>
+ <details>
+ For devices at the LEGACY level or above:
+
+ * For constant-framerate recording, for each normal
+ [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html), that is, a
+ [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html) that has
+ [quality](https://developer.android.com/reference/android/media/CamcorderProfile.html#quality)
+ in the range [
+ [QUALITY_LOW](https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW),
+ [QUALITY_2160P](https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P)],
+ if the profile is supported by the device and has
+ [videoFrameRate](https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate)
+ `x`, this list will always include (`x`,`x`).
+
+ * Also, a camera device must either not support any
+ [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html),
+ or support at least one
+ normal [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html)
+ that has
+ [videoFrameRate](https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate) `x` >= 24.
+
+ For devices at the LIMITED level or above:
+
+ * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
+ and (`max`, `max`) where `min` <= 15 and `max` = the maximum output frame rate of the
+ maximum YUV_420_888 output size.
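+
+ As an illustrative sketch (assuming application objects `characteristics`
+ and `builder`, and an assumed target rate of 30fps), selecting a fixed
+ range for constant-framerate recording:
+
+     // Sketch: prefer an (x, x) range for constant-framerate recording.
+     Range<Integer>[] ranges = characteristics.get(
+         CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+     for (Range<Integer> r : ranges) {
+         if (r.getLower() == 30 && r.getUpper() == 30) {
+             builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, r);
+             break;
+         }
+     }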
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeCompensationRange" type="int32" visibility="public"
+ container="array" typedef="rangeInt"
+ hwlevel="legacy">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Maximum and minimum exposure compensation values for
+ android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
+ that are supported by this camera device.</description>
+ <range>
+ Range [0,0] indicates that exposure compensation is not supported.
+
+ For LIMITED and FULL devices, the range must follow the below requirements if exposure
+ compensation is supported (`range != [0, 0]`):
+
+ `Min.exposure compensation * android.control.aeCompensationStep <= -2 EV`
+
+ `Max.exposure compensation * android.control.aeCompensationStep >= 2 EV`
+
+ LEGACY devices may support a smaller range than this.
+ </range>
+ <tag id="BC" />
+ </entry>
+ <entry name="aeCompensationStep" type="rational" visibility="public"
+ hwlevel="legacy">
+ <description>Smallest step by which the exposure compensation
+ can be changed.</description>
+ <units>Exposure Value (EV)</units>
+ <details>
+ This is the unit for android.control.aeExposureCompensation. For example, if this key has
+ a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
+ that the target EV offset for the auto-exposure routine is -1 EV.
+
+ One unit of EV compensation changes the brightness of the captured image by a factor
+ of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
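+
+ As a small worked sketch (assuming application objects `characteristics`
+ and `builder`), converting a desired EV offset into compensation counts:
+
+     // Sketch: with a step of 1/2 EV, a -1 EV offset maps to a setting of -2.
+     Rational step = characteristics.get(
+         CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+     float desiredEv = -1.0f;                                 // assumed target
+     int setting = Math.round(desiredEv / step.floatValue()); // -2 for 1/2 EV
+     builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, setting);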
+ </details>
+ <hal_details>
+ This must be less than or equal to 1/2.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="afAvailableModes" type="byte" visibility="public"
+ type_notes="List of enums" container="array" typedef="enumList"
+ hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of auto-focus (AF) modes for android.control.afMode that are
+ supported by this camera device.
+ </description>
+ <range>Any value listed in android.control.afMode</range>
+ <details>
+ Not all the auto-focus modes may be supported by a
+ given camera device. This entry lists the valid modes for
+ android.control.afMode for this camera device.
+
+ All LIMITED and FULL mode camera devices will support OFF mode, and all
+ camera devices with adjustable focuser units
+ (`android.lens.info.minimumFocusDistance > 0`) will support AUTO mode.
+
+ LEGACY devices will support OFF mode only if they support
+ focusing to infinity (by also setting android.lens.focusDistance to
+ `0.0f`).
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableEffects" type="byte" visibility="public"
+ type_notes="List of enums (android.control.effectMode)." container="array"
+ typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of color effects for android.control.effectMode that are supported by this camera
+ device.
+ </description>
+ <range>Any value listed in android.control.effectMode</range>
+ <details>
+ This list contains the color effect modes that can be applied to
+ images produced by the camera device.
+ Implementations are not expected to be consistent across all devices.
+ If no color effect modes are available for a device, this will only list
+ OFF.
+
+ A color effect will only be applied if
+ android.control.mode != OFF. OFF is always included in this list.
+
+ This control has no effect on the operation of other control routines such
+ as auto-exposure, white balance, or focus.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableSceneModes" type="byte" visibility="public"
+ type_notes="List of enums (android.control.sceneMode)."
+ container="array" typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of scene modes for android.control.sceneMode that are supported by this camera
+ device.
+ </description>
+ <range>Any value listed in android.control.sceneMode</range>
+ <details>
+ This list contains scene modes that can be set for the camera device.
+ Only scene modes that have been fully implemented for the
+ camera device may be included here. Implementations are not expected
+ to be consistent across all devices.
+
+ If no scene modes are supported by the camera device, this
+ will be set to DISABLED. Otherwise DISABLED will not be listed.
+
+ FACE_PRIORITY is always listed if face detection is
+ supported (i.e. `android.statistics.info.maxFaceCount > 0`).
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableVideoStabilizationModes" type="byte"
+ visibility="public" type_notes="List of enums." container="array"
+ typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of video stabilization modes for android.control.videoStabilizationMode
+ that are supported by this camera device.
+ </description>
+ <range>Any value listed in android.control.videoStabilizationMode</range>
+ <details>
+ OFF will always be listed.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="awbAvailableModes" type="byte" visibility="public"
+ type_notes="List of enums"
+ container="array" typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of auto-white-balance modes for android.control.awbMode that are supported by this
+ camera device.
+ </description>
+ <range>Any value listed in android.control.awbMode</range>
+ <details>
+ Not all the auto-white-balance modes may be supported by a
+ given camera device. This entry lists the valid modes for
+ android.control.awbMode for this camera device.
+
+ All camera devices will support ON mode.
+
+ Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
+ mode, which enables application control of white balance, by using
+ android.colorCorrection.transform and android.colorCorrection.gains
+ (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
+ mode camera devices.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="maxRegions" type="int32" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>3</size>
+ </array>
+ <description>
+ List of the maximum number of regions that can be used for metering in
+ auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
+ this corresponds to the maximum number of elements in
+ android.control.aeRegions, android.control.awbRegions,
+ and android.control.afRegions.
+ </description>
+ <range>
+ Value must be &gt;= 0 for each element. For full-capability devices
+ this value must be &gt;= 1 for AE and AF. The order of the elements is:
+ `(AE, AWB, AF)`.</range>
+ <tag id="BC" />
+ </entry>
+ <entry name="maxRegionsAe" type="int32" visibility="java_public"
+ synthetic="true" hwlevel="legacy">
+ <description>
+ The maximum number of metering regions that can be used by the auto-exposure (AE)
+ routine.
+ </description>
+ <range>Value will be &gt;= 0. For FULL-capability devices, this
+ value will be &gt;= 1.
+ </range>
+ <details>
+ This corresponds to the maximum allowed number of elements in
+ android.control.aeRegions.
+ </details>
+ <hal_details>This entry is private to the framework. Fill in
+ maxRegions to have this entry be automatically populated.
+ </hal_details>
+ </entry>
+ <entry name="maxRegionsAwb" type="int32" visibility="java_public"
+ synthetic="true" hwlevel="legacy">
+ <description>
+ The maximum number of metering regions that can be used by the auto-white balance (AWB)
+ routine.
+ </description>
+ <range>Value will be &gt;= 0.
+ </range>
+ <details>
+ This corresponds to the maximum allowed number of elements in
+ android.control.awbRegions.
+ </details>
+ <hal_details>This entry is private to the framework. Fill in
+ maxRegions to have this entry be automatically populated.
+ </hal_details>
+ </entry>
+ <entry name="maxRegionsAf" type="int32" visibility="java_public"
+ synthetic="true" hwlevel="legacy">
+ <description>
+ The maximum number of metering regions that can be used by the auto-focus (AF) routine.
+ </description>
+ <range>Value will be &gt;= 0. For FULL-capability devices, this
+ value will be &gt;= 1.
+ </range>
+ <details>
+ This corresponds to the maximum allowed number of elements in
+ android.control.afRegions.
+ </details>
+ <hal_details>This entry is private to the framework. Fill in
+ maxRegions to have this entry be automatically populated.
+ </hal_details>
+ </entry>
+ <entry name="sceneModeOverrides" type="byte" visibility="system"
+ container="array" hwlevel="limited">
+ <array>
+ <size>3</size>
+ <size>length(availableSceneModes)</size>
+ </array>
+ <description>
+ Ordered list of auto-exposure, auto-white balance, and auto-focus
+ settings to use with each available scene mode.
+ </description>
+ <range>
+ For each available scene mode, the list must contain three
+ entries containing the android.control.aeMode,
+ android.control.awbMode, and android.control.afMode values used
+ by the camera device. The entry order is `(aeMode, awbMode, afMode)`
+ where aeMode has the lowest index position.
+ </range>
+ <details>
+ When a scene mode is enabled, the camera device is expected
+ to override android.control.aeMode, android.control.awbMode,
+ and android.control.afMode with its preferred settings for
+ that scene mode.
+
+ The order of this list matches that of availableSceneModes,
+ with 3 entries for each mode. The overrides listed
+ for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
+ since for that mode the application-set android.control.aeMode,
+ android.control.awbMode, and android.control.afMode values are
+ used instead, matching the behavior when android.control.mode
+ is set to AUTO. It is recommended that the FACE_PRIORITY and
+ FACE_PRIORITY_LOW_LIGHT (if supported) overrides be set to 0.
+
+ For example, if availableSceneModes contains
+ `(FACE_PRIORITY, ACTION, NIGHT)`, then the camera framework
+ expects sceneModeOverrides to have 9 entries formatted like:
+ `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
+ ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
+ </details>
+ <hal_details>
+ To maintain backward compatibility, this list will be made available
+ in the static metadata of the camera service. The camera service will
+ use these values to set android.control.aeMode,
+ android.control.awbMode, and android.control.afMode when using a scene
+ mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ </static>
+ <dynamic>
+ <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
+ <description>The ID sent with the latest
+ CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
+ <details>Must be 0 if no
+ CAMERA2_TRIGGER_PRECAPTURE_METERING trigger has been received yet
+ by the HAL. Always updated even if the AE algorithm ignores the
+ trigger.</details>
+ </entry>
+ <clone entry="android.control.aeAntibandingMode" kind="controls">
+ </clone>
+ <clone entry="android.control.aeExposureCompensation" kind="controls">
+ </clone>
+ <clone entry="android.control.aeLock" kind="controls">
+ </clone>
+ <clone entry="android.control.aeMode" kind="controls">
+ </clone>
+ <clone entry="android.control.aeRegions" kind="controls">
+ </clone>
+ <clone entry="android.control.aeTargetFpsRange" kind="controls">
+ </clone>
+ <clone entry="android.control.aePrecaptureTrigger" kind="controls">
+ </clone>
+ <entry name="aeState" type="byte" visibility="public" enum="true"
+ hwlevel="limited">
+ <enum>
+ <value>INACTIVE
+ <notes>AE is off or recently reset.
+
+ When a camera device is opened, it starts in
+ this state. This is a transient state, the camera device may skip reporting
+ this state in capture result.</notes></value>
+ <value>SEARCHING
+ <notes>AE doesn't yet have a good set of control values
+ for the current scene.
+
+ This is a transient state, the camera device may skip
+ reporting this state in capture result.</notes></value>
+ <value>CONVERGED
+ <notes>AE has a good set of control values for the
+ current scene.</notes></value>
+ <value>LOCKED
+ <notes>AE has been locked.</notes></value>
+ <value>FLASH_REQUIRED
+ <notes>AE has a good set of control values, but flash
+ needs to be fired for good quality still
+ capture.</notes></value>
+ <value>PRECAPTURE
+ <notes>AE has been asked to do a precapture sequence
+ and is currently executing it.
+
+ Precapture can be triggered through setting
+ android.control.aePrecaptureTrigger to START. The currently
+ active precapture metering sequence, or a completed one (if it caused a
+ camera device internal AE lock), can be canceled through setting
+ android.control.aePrecaptureTrigger to CANCEL.
+
+ Once PRECAPTURE completes, AE will transition to CONVERGED
+ or FLASH_REQUIRED as appropriate. This is a transient
+ state, the camera device may skip reporting this state in
+ capture result.</notes></value>
+ </enum>
+ <description>Current state of the auto-exposure (AE) algorithm.</description>
+ <details>Switching between or enabling AE modes (android.control.aeMode) always
+ resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
+ or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
+ the algorithm states to INACTIVE.
+
+ The camera device can do several state transitions between two results, if it is
+ allowed by the state transition table. For example: INACTIVE may never actually be
+ seen in a result.
+
+ The state in the result is the state for this image (in sync with this image): if
+ AE state becomes CONVERGED, then the image data associated with this result should
+ be good to use.
+
+ Below are state transition tables for different AE modes.
+
+ State | Transition Cause | New State | Notes
+ :------------:|:----------------:|:---------:|:-----------------------:
+ INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled
+
+ When android.control.aeMode is AE_MODE_ON_*:
+
+ State | Transition Cause | New State | Notes
+ :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
+ INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing
+ INACTIVE | android.control.aeLock is ON | LOCKED | Values locked
+ SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing
+ SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash
+ SEARCHING | android.control.aeLock is ON | LOCKED | Values locked
+ CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing
+ CONVERGED | android.control.aeLock is ON | LOCKED | Values locked
+ FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing
+ FLASH_REQUIRED | android.control.aeLock is ON | LOCKED | Values locked
+ LOCKED | android.control.aeLock is OFF | SEARCHING | Values not good after unlock
+ LOCKED | android.control.aeLock is OFF | CONVERGED | Values good after unlock
+ LOCKED | android.control.aeLock is OFF | FLASH_REQUIRED | Exposure good, but too dark
+ PRECAPTURE | Sequence done. android.control.aeLock is OFF | CONVERGED | Ready for high-quality capture
+ PRECAPTURE | Sequence done. android.control.aeLock is ON | LOCKED | Ready for high-quality capture
+ LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked
+ LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED | Precapture trigger is ignored when AE is already locked
+ Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE | Start AE precapture metering sequence
+ Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE | Currently active precapture metering sequence is canceled
+
+ For the above table, the camera device may skip reporting any state changes that happen
+ without application intervention (i.e. mode switch, trigger, locking). Any state that
+ can be skipped in that manner is called a transient state.
+
+ For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
+ listed in above table, it is also legal for the camera device to skip one or more
+ transient states between two results. See below table for examples:
+
+ State | Transition Cause | New State | Notes
+ :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
+ INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device.
+ Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
+ Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device.
+ Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
+ Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | CONVERGED | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
+ CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
+ FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
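+
+ As a hedged sketch (`result` is an assumed CaptureResult received in a
+ CaptureCallback), an application waiting for AE to settle before a still
+ capture might check:
+
+     // Sketch: AE is settled when it reports CONVERGED or FLASH_REQUIRED;
+     // a null state (possible on LEGACY devices) is treated as ready.
+     Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+     boolean readyForStill = aeState == null
+         || aeState == CameraMetadata.CONTROL_AE_STATE_CONVERGED
+         || aeState == CameraMetadata.CONTROL_AE_STATE_FLASH_REQUIRED;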
+ </details>
+ </entry>
+ <clone entry="android.control.afMode" kind="controls">
+ </clone>
+ <clone entry="android.control.afRegions" kind="controls">
+ </clone>
+ <clone entry="android.control.afTrigger" kind="controls">
+ </clone>
+ <entry name="afState" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>INACTIVE
+ <notes>AF is off or has not yet tried to scan/been asked
+ to scan.
+
+ When a camera device is opened, it starts in this
+ state. This is a transient state, the camera device may
+ skip reporting this state in capture
+ result.</notes></value>
+ <value>PASSIVE_SCAN
+ <notes>AF is currently performing an AF scan initiated by the
+ camera device in a continuous autofocus mode.
+
+ Only used by CONTINUOUS_* AF modes. This is a transient
+ state, the camera device may skip reporting this state in
+ capture result.</notes></value>
+ <value>PASSIVE_FOCUSED
+ <notes>AF currently believes it is in focus, but may
+ restart scanning at any time.
+
+ Only used by CONTINUOUS_* AF modes. This is a transient
+ state, the camera device may skip reporting this state in
+ capture result.</notes></value>
+ <value>ACTIVE_SCAN
+ <notes>AF is performing an AF scan because it was
+ triggered by AF trigger.
+
+ Only used by AUTO or MACRO AF modes. This is a transient
+ state, the camera device may skip reporting this state in
+ capture result.</notes></value>
+ <value>FOCUSED_LOCKED
+ <notes>AF believes it is focused correctly and has locked
+ focus.
+
+ This state is reached only after an explicit START AF trigger has been
+ sent (android.control.afTrigger), when good focus has been obtained.
+
+ The lens will remain stationary until the AF mode (android.control.afMode) is changed or
+ a new AF trigger is sent to the camera device (android.control.afTrigger).
+ </notes></value>
+ <value>NOT_FOCUSED_LOCKED
+ <notes>AF has failed to focus successfully and has locked
+ focus.
+
+ This state is reached only after an explicit START AF trigger has been
+ sent (android.control.afTrigger), when good focus cannot be obtained.
+
+ The lens will remain stationary until the AF mode (android.control.afMode) is changed or
+ a new AF trigger is sent to the camera device (android.control.afTrigger).
+ </notes></value>
+ <value>PASSIVE_UNFOCUSED
+ <notes>AF finished a passive scan without finding focus,
+ and may restart scanning at any time.
+
+ Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
+ device may skip reporting this state in capture result.
+
+ LEGACY camera devices do not support this state. When a passive
+ scan has finished, it will always go to PASSIVE_FOCUSED.
+ </notes></value>
+ </enum>
+ <description>Current state of auto-focus (AF) algorithm.</description>
+ <details>
+ Switching between or enabling AF modes (android.control.afMode) always
+ resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
+ or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
+ the algorithm states to INACTIVE.
+
+ The camera device can do several state transitions between two results, if it is
+ allowed by the state transition table. For example: INACTIVE may never actually be
+ seen in a result.
+
+ The state in the result is the state for this image (in sync with this image): if
+ AF state becomes FOCUSED, then the image data associated with this result should
+ be sharp.
+
+ Below are state transition tables for different AF modes.
+
+ When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
+
+ State | Transition Cause | New State | Notes
+ :------------:|:----------------:|:---------:|:-----------:
+ INACTIVE | | INACTIVE | Never changes
+
+ When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
+
+ State | Transition Cause | New State | Notes
+ :-----------------:|:----------------:|:------------------:|:--------------:
+ INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving
+ ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked
+ ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
+ ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked
+ FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
+ FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
+ NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
+ NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
+ Any state | Mode change | INACTIVE |
+
+ For the above table, the camera device may skip reporting any state changes that happen
+ without application intervention (i.e. mode switch, trigger, locking). Any state that
+ can be skipped in that manner is called a transient state.
+
+ For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
+ state transitions listed in above table, it is also legal for the camera device to skip
+ one or more transient states between two results. See below table for examples:
+
+ State | Transition Cause | New State | Notes
+ :-----------------:|:----------------:|:------------------:|:--------------:
+ INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
+ INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
+ FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
+ NOT_FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is good after a scan, lens is now locked.
+
+
+ When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
+
+ State | Transition Cause | New State | Notes
+ :-----------------:|:-----------------------------------:|:------------------:|:--------------:
+ INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+ PASSIVE_SCAN | Camera device completes current scan| PASSIVE_FOCUSED | End AF scan, Lens now locked
+ PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
+ PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, if focus is good. Lens now locked
+ PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
+ PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
+ PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, lens now locked
+ PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
+ FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
+ FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+ NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
+ NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+
+ When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
+
+ State | Transition Cause | New State | Notes
+ :-----------------:|:------------------------------------:|:------------------:|:--------------:
+ INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+ PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked
+ PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
+ PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual transition once the focus is good. Lens now locked
+ PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
+ PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
+ PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. Lens now locked
+ PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
+ FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
+ FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+ NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
+ NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+
+ When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
+ (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
+ camera device. When a trigger is included in a mode switch request, the trigger
+ will be evaluated in the context of the new mode in the request.
+ See below table for examples:
+
+ State | Transition Cause | New State | Notes
+ :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
+ any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE
+ any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped
+ any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
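+
+ As a hedged sketch (`result` is an assumed CaptureResult from a
+ CaptureCallback), an application that issued an AF trigger can treat either
+ locked state as the end of the sweep:
+
+     // Sketch: after AF_TRIGGER START, the sweep is over once the AF state
+     // reports focused-and-locked or not-focused-and-locked.
+     Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
+     boolean afDone = afState != null
+         && (afState == CameraMetadata.CONTROL_AF_STATE_FOCUSED_LOCKED
+             || afState == CameraMetadata.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);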
+ </details>
+ </entry>
+ <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
+ <description>The ID sent with the latest
+ CAMERA2_TRIGGER_AUTOFOCUS call</description>
+ <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
+ has been received yet by the HAL. Always updated even if the AF algorithm
+ ignores the trigger.</details>
+ </entry>
+ <clone entry="android.control.awbLock" kind="controls">
+ </clone>
+ <clone entry="android.control.awbMode" kind="controls">
+ </clone>
+ <clone entry="android.control.awbRegions" kind="controls">
+ </clone>
+ <clone entry="android.control.captureIntent" kind="controls">
+ </clone>
+ <entry name="awbState" type="byte" visibility="public" enum="true"
+ hwlevel="limited">
+ <enum>
+ <value>INACTIVE
+ <notes>AWB is not in auto mode, or has not yet started metering.
+
+ When a camera device is opened, it starts in this
+ state. This is a transient state, the camera device may
+ skip reporting this state in capture
+ result.</notes></value>
+ <value>SEARCHING
+ <notes>AWB doesn't yet have a good set of control
+ values for the current scene.
+
+ This is a transient state, the camera device
+ may skip reporting this state in capture result.</notes></value>
+ <value>CONVERGED
+ <notes>AWB has a good set of control values for the
+ current scene.</notes></value>
+ <value>LOCKED
+ <notes>AWB has been locked.
+ </notes></value>
+ </enum>
+ <description>Current state of auto-white balance (AWB) algorithm.</description>
+ <details>Switching between or enabling AWB modes (android.control.awbMode) always
+ resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
+ or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
+ the algorithm states to INACTIVE.
+
+ The camera device can do several state transitions between two results, if it is
+ allowed by the state transition table. So INACTIVE may never actually be seen in
+ a result.
+
+ The state in the result is the state for this image (in sync with this image): if
+ AWB state becomes CONVERGED, then the image data associated with this result should
+ be good to use.
+
+ Below are state transition tables for different AWB modes.
+
+ When `android.control.awbMode != AWB_MODE_AUTO`:
+
+ State | Transition Cause | New State | Notes
+ :------------:|:----------------:|:---------:|:-----------------------:
+ INACTIVE | |INACTIVE |Camera device auto white balance algorithm is disabled
+
+ When android.control.awbMode is AWB_MODE_AUTO:
+
+ State | Transition Cause | New State | Notes
+ :-------------:|:--------------------------------:|:-------------:|:-----------------:
+ INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing
+ INACTIVE | android.control.awbLock is ON | LOCKED | Values locked
+ SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing
+ SEARCHING | android.control.awbLock is ON | LOCKED | Values locked
+ CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing
+ CONVERGED | android.control.awbLock is ON | LOCKED | Values locked
+ LOCKED | android.control.awbLock is OFF | SEARCHING | Values not good after unlock
+
+ For the above table, the camera device may skip reporting any state changes that happen
+ without application intervention (i.e. mode switch, trigger, locking). Any state that
+ can be skipped in that manner is called a transient state.
+
+ For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
+ listed in above table, it is also legal for the camera device to skip one or more
+ transient states between two results. See below table for examples:
+
+ State | Transition Cause | New State | Notes
+ :-------------:|:--------------------------------:|:-------------:|:-----------------:
+ INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device.
+ LOCKED | android.control.awbLock is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device.
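+
+ As a hedged sketch (`builder` and `result` are assumed application
+ objects), requesting an AWB lock and waiting for it to take effect:
+
+     // Sketch: lock AWB, then wait for the LOCKED state in the results
+     // before relying on the white balance values staying fixed.
+     builder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
+     // Later, in a CaptureCallback:
+     Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
+     boolean locked = awbState != null
+         && awbState == CameraMetadata.CONTROL_AWB_STATE_LOCKED;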
+ </details>
+ </entry>
+ <clone entry="android.control.effectMode" kind="controls">
+ </clone>
+ <clone entry="android.control.mode" kind="controls">
+ </clone>
+ <clone entry="android.control.sceneMode" kind="controls">
+ </clone>
+ <clone entry="android.control.videoStabilizationMode" kind="controls">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
+ container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
+ <array>
+ <size>5</size>
+ <size>n</size>
+ </array>
+ <description>
+ List of available high speed video size, fps range and max batch size configurations
+ supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
+ </description>
+ <range>
+ For each configuration, the fps_max &gt;= 120fps.
+ </range>
+ <details>
+ When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
+ this metadata will list the supported high speed video size, fps range and max batch size
+ configurations. All the sizes listed in this configuration will be a subset of the sizes
+ reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
+ for processed non-stalling formats.
+
+ For the high speed video use case, the application must
+ select the video size and fps range from this metadata to configure the recording and
+ preview streams and set up the recording requests. For example, if the application intends
+ to do high speed recording, it can select the maximum size reported by this metadata to
+ configure the output streams. Once the size is selected, the application can filter this
+ metadata by the selected size and get the supported fps ranges, and use these fps ranges
+ to set up the recording requests. Note that for the use case of multiple output streams, the
+ application must select one unique size from this metadata to use (e.g., preview and
+ recording streams must have the same size). Otherwise, the high speed capture session
+ creation will fail.
+
+ The min and max fps will be multiples of 30fps.
+
+ High speed video streaming places significant performance pressure on the camera
+ hardware; to achieve efficient high speed streaming, the camera device may have to
+ aggregate multiple frames together and process them as a single batch in which the
+ request controls are the same for all the frames. Max batch size indicates
+ the max possible number of frames the camera device will group together for this high
+ speed stream configuration. This max batch size will be used to generate a high speed
+ recording request list by
+ {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+ The max batch size for each configuration will satisfy below conditions:
+
+ * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
+ if fps_max is 300, max batch size will only be 1, 2, 5, or 10.
+ * The camera device may choose smaller internal batch size for each configuration, but
+ the actual batch size will be a divisor of max batch size. For example, if the max batch
+ size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
+ * The max batch size in each configuration entry must be no larger than 32.
+
+ The camera device doesn't have to support batch mode to achieve high speed video
+ recording; in that case, batch_size_max will be reported as 1 in each configuration entry.
+
+ The fps ranges in this configuration list can only be used to create requests
+ that are submitted to a high speed camera capture session created by
+ {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
+ The fps ranges reported in this metadata must not be used to set up capture requests for
+ a normal capture session, or they will cause a request error.
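+
+ For illustration, a minimal Java sketch of this flow using the public camera2
+ equivalents of this metadata (assuming `chars` is this camera's
+ CameraCharacteristics and `highSpeedSession` is an already-configured
+ CameraConstrainedHighSpeedCaptureSession; stream setup and error handling omitted):
+
+     StreamConfigurationMap map =
+         chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+     Size videoSize = map.getHighSpeedVideoSizes()[0]; // one size for all streams
+     Range<Integer> fpsRange = map.getHighSpeedVideoFpsRangesFor(videoSize)[0];
+     CaptureRequest.Builder builder = highSpeedSession.getDevice()
+         .createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+     builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
+     // One request expands into a batch of up to batch_size_max requests.
+     List<CaptureRequest> burst =
+         highSpeedSession.createHighSpeedRequestList(builder.build());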
+ </details>
+ <hal_details>
+ All the sizes listed in this configuration will be a subset of the sizes reported by
+ android.scaler.availableStreamConfigurations for processed non-stalling output formats.
+ Note that for all high speed video configurations, HAL must be able to support a minimum
+ of two streams, though the application might choose to configure just one stream.
+
+ The HAL may support multiple sensor modes for high speed outputs, for example, 120fps
+ sensor mode and 120fps recording, 240fps sensor mode for 240fps recording. The application
+ usually starts preview first, then starts recording. To minimize the stutter caused by
+ sensor mode switches when recording starts, the application may want to ensure
+ the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
+ the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
+ For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
+ [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
+ do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
+ recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
+ values that are suitable for smooth preview streaming. If the HAL sees different max_fps
+ values that fall into different sensor modes in a sequence of requests, the HAL must
+ switch the sensor mode as quickly as possible to minimize the stutter caused by the mode switch.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
+ typedef="boolean" hwlevel="legacy">
+ <enum>
+ <value>FALSE</value>
+ <value>TRUE</value>
+ </enum>
+ <description>Whether the camera device supports android.control.aeLock</description>
+ <details>
+ Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
+ list `true`. This includes FULL devices.
+ </details>
+ <tag id="BC"/>
+ </entry>
+ <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
+ typedef="boolean" hwlevel="legacy">
+ <enum>
+ <value>FALSE</value>
+ <value>TRUE</value>
+ </enum>
+ <description>Whether the camera device supports android.control.awbLock</description>
+ <details>
+ Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
+ always list `true`. This includes FULL devices.
+ </details>
+ <tag id="BC"/>
+ </entry>
+ <entry name="availableModes" type="byte" visibility="public"
+ type_notes="List of enums (android.control.mode)." container="array"
+ typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of control modes for android.control.mode that are supported by this camera
+ device.
+ </description>
+ <range>Any value listed in android.control.mode</range>
+ <details>
+ This list contains control modes that can be set for the camera device.
+ LEGACY mode devices will always support AUTO mode. LIMITED and FULL
+ devices will always support OFF, AUTO modes.
+ </details>
+ </entry>
+ <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
+ type_notes="Range of supported post RAW sensitivitiy boosts"
+ container="array" typedef="rangeInt">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Range of boosts for android.control.postRawSensitivityBoost supported
+ by this camera device.
+ </description>
+ <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
+ <details>
+ Devices that support post RAW sensitivity boost will advertise the
+ android.control.postRawSensitivityBoost key for controlling
+ post RAW sensitivity boost.
+
+ This key will be `null` for devices that do not support any RAW format
+ outputs. For devices that do support RAW format outputs, this key will always be
+ present, and if a device does not support post RAW sensitivity boost, it will
+ list `(100, 100)` in this key.
+ </details>
+ <hal_details>
+ This key is added in HAL3.4. For HAL3.3 or earlier devices, the camera framework will
+ generate this key as `(100, 100)` if the device supports any RAW output formats.
+ All HAL3.4 and above devices should list this key if the device supports any RAW
+ output formats.
+ </hal_details>
+ </entry>
+ </static>
+ <controls>
+ <entry name="postRawSensitivityBoost" type="int32" visibility="public">
+ <description>The amount of additional sensitivity boost applied to output images
+ after RAW sensor data is captured.
+ </description>
+ <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
+ <range>android.control.postRawSensitivityBoostRange</range>
+ <details>
+ Some camera devices support additional digital sensitivity boosting in the
+ camera processing pipeline after the sensor RAW image is captured.
+ Such a boost will be applied to YUV/JPEG format output images but will not
+ have an effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.
+
+ This key will be `null` for devices that do not support any RAW format
+ outputs. For devices that do support RAW format outputs, this key will always be
+ present, and if a device does not support post RAW sensitivity boost, it will
+ list `100` in this key.
+
+ If the camera device cannot apply the exact boost requested, it will reduce the
+ boost to the nearest supported value.
+ The final boost value used will be available in the output capture result.
+
+ For devices that support post RAW sensitivity boost, the YUV/JPEG output images
+ of such a device will have the total sensitivity of
+ `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`.
+ The sensitivity of RAW format images will always be `android.sensor.sensitivity`.
+
+ This control is only effective if android.control.aeMode or android.control.mode is set to
+ OFF; otherwise the auto-exposure algorithm will override this value.
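+
+ For example, a minimal sketch of requesting a 2x boost under manual exposure
+ (assuming `builder` is a CaptureRequest.Builder and the device lists this key):
+
+     builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
+     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
+     // YUV/JPEG outputs get total sensitivity 100 * 200 / 100 = 200; RAW stays at 100.
+     builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 200);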
+ </details>
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.control.postRawSensitivityBoost" kind="controls">
+ </clone>
+ </dynamic>
+ </section>
+ <section name="demosaic">
+ <controls>
+ <entry name="mode" type="byte" enum="true">
+ <enum>
+ <value>FAST
+ <notes>Minimal or no slowdown of frame rate compared to
+ Bayer RAW output.</notes></value>
+ <value>HIGH_QUALITY
+ <notes>Improved processing quality but the frame rate might be slowed down
+ relative to raw output.</notes></value>
+ </enum>
+ <description>Controls the quality of the demosaicing
+ processing.</description>
+ <tag id="FUTURE" />
+ </entry>
+ </controls>
+ </section>
+ <section name="edge">
+ <controls>
+ <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+ <enum>
+ <value>OFF
+ <notes>No edge enhancement is applied.</notes></value>
+ <value>FAST
+ <notes>Apply edge enhancement at a quality level that does not slow down frame rate
+ relative to sensor output. It may be the same as OFF if edge enhancement will
+ slow down frame rate relative to sensor.</notes></value>
+ <value>HIGH_QUALITY
+ <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
+ </notes></value>
+ <value optional="true">ZERO_SHUTTER_LAG
+ <notes>Edge enhancement is applied at different levels for different output streams,
+ based on resolution. Streams at maximum recording resolution (see {@link
+ ACameraDevice_createCaptureSession}) or below have
+ edge enhancement applied, while higher-resolution streams have no edge enhancement
+ applied. The level of edge enhancement for low-resolution streams is tuned so that
+ frame rate is not impacted, and the quality is equal to or better than FAST (since it
+ is only applied to lower-resolution outputs, quality may improve from FAST).
+
+ This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ with YUV or PRIVATE reprocessing, where the application continuously captures
+ high-resolution intermediate buffers into a circular buffer, from which a final image is
+ produced via reprocessing when a user takes a picture. For such a use case, the
+ high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+ preview and to avoid double-applying enhancement when reprocessed, while low-resolution
+ buffers (used for recording or preview, generally) need edge enhancement applied for
+ reasonable preview quality.
+
+ This mode is guaranteed to be supported by devices that support either the
+ YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ (android.request.availableCapabilities lists either of those capabilities) and it will
+ be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
+ </notes></value>
+ </enum>
+ <description>Operation mode for edge
+ enhancement.</description>
+ <range>android.edge.availableEdgeModes</range>
+ <details>Edge enhancement improves sharpness and details in the captured image. OFF means
+ no enhancement will be applied by the camera device.
+
+ FAST/HIGH_QUALITY both mean camera device determined enhancement
+ will be applied. HIGH_QUALITY mode indicates that the
+ camera device will use the highest-quality enhancement algorithms,
+ even if it slows down capture rate. FAST means the camera device will
+ not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
+ edge enhancement will slow down capture rate. Every output stream will have a similar
+ amount of enhancement applied.
+
+ ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ into a final capture when triggered by the user. In this mode, the camera device applies
+ edge enhancement to low-resolution streams (below maximum recording resolution) to
+ maximize preview quality, but does not apply edge enhancement to high-resolution streams,
+ since those will be reprocessed later if necessary.
+
+ For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
+ device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
+ The camera device may adjust its internal edge enhancement parameters for best
+ image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
+ </details>
+ <hal_details>
+ For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
+ adjust the internal edge enhancement reduction parameters appropriately to get the best
+ quality images.
+ </hal_details>
+ <tag id="V1" />
+ <tag id="REPROC" />
+ </entry>
+ <entry name="strength" type="byte">
+ <description>Control the amount of edge enhancement
+ applied to the images</description>
+ <units>1-10; 10 is maximum sharpening</units>
+ <tag id="FUTURE" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="availableEdgeModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList"
+ hwlevel="full">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of edge enhancement modes for android.edge.mode that are supported by this camera
+ device.
+ </description>
+ <range>Any value listed in android.edge.mode</range>
+ <details>
+ Full-capability camera devices must always support OFF; camera devices that support
+ YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
+ list FAST.
+ </details>
+ <hal_details>
+ HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
+ on the camera device, but the underlying implementation can be the same for both modes.
+ That is, if the highest quality implementation on the camera device does not slow down
+ capture rate, then FAST and HIGH_QUALITY will generate the same output.
+ </hal_details>
+ <tag id="V1" />
+ <tag id="REPROC" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.edge.mode" kind="controls">
+ <tag id="V1" />
+ <tag id="REPROC" />
+ </clone>
+ </dynamic>
+ </section>
+ <section name="flash">
+ <controls>
+ <entry name="firingPower" type="byte">
+ <description>Power for flash firing/torch</description>
+ <units>10 is max power; 0 is no flash. Linear</units>
+ <range>0 - 10</range>
+ <details>Power for snapshot may use a different scale than
+ for torch mode. Only one entry for torch mode will be
+ used</details>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="firingTime" type="int64">
+ <description>Firing time of flash relative to start of
+ exposure</description>
+ <units>nanoseconds</units>
+ <range>0-(exposure time-flash duration)</range>
+ <details>Clamped to (0, exposure time - flash
+ duration).</details>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>
+ Do not fire the flash for this capture.
+ </notes>
+ </value>
+ <value>SINGLE
+ <notes>
+ If the flash is available and charged, fire flash
+ for this capture.
+ </notes>
+ </value>
+ <value>TORCH
+ <notes>
+ Transition flash to continuously on.
+ </notes>
+ </value>
+ </enum>
+ <description>The desired mode for the camera device's flash control.</description>
+ <details>
+ This control is only effective when the flash unit is available
+ (`android.flash.info.available == true`).
+
+ When this control is used, the android.control.aeMode must be set to ON or OFF.
+ Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
+ ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
+
+ When set to OFF, the camera device will not fire flash for this capture.
+
+ When set to SINGLE, the camera device will fire flash regardless of the camera
+ device's auto-exposure routine's result. When used in a still capture case, this
+ control should be used along with the auto-exposure (AE) precapture metering sequence
+ (android.control.aePrecaptureTrigger); otherwise, the image may be incorrectly exposed.
+
+ When set to TORCH, the flash will be on continuously. This mode can be used
+ for use cases such as preview, auto-focus assist, still capture, or video recording.
+
+ The flash status will be reported by android.flash.state in the capture result metadata.
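+
+ For example, a minimal sketch of a flash still capture with precapture metering
+ (assuming `builder` targets the still capture output and `session` is an active
+ CameraCaptureSession; result tracking and sequencing omitted):
+
+     builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
+     builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_SINGLE);
+     builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+     session.capture(builder.build(), /*callback*/ null, /*handler*/ null);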
+ </details>
+ <tag id="BC" />
+ </entry>
+ </controls>
+ <static>
+ <namespace name="info">
+ <entry name="available" type="byte" visibility="public" enum="true"
+ typedef="boolean" hwlevel="legacy">
+ <enum>
+ <value>FALSE</value>
+ <value>TRUE</value>
+ </enum>
+ <description>Whether this camera device has a
+ flash unit.</description>
+ <details>
+ Will be `false` if no flash is available.
+
+ If there is no flash unit, none of the flash controls do
+ anything.</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="chargeDuration" type="int64">
+ <description>Time taken before flash can fire
+ again</description>
+ <units>nanoseconds</units>
+ <range>0-1e9</range>
+ <details>1 second too long/too short for recharge? Should
+ this be power-dependent?</details>
+ <tag id="FUTURE" />
+ </entry>
+ </namespace>
+ <entry name="colorTemperature" type="byte">
+ <description>The x,y whitepoint of the
+ flash</description>
+ <units>pair of floats</units>
+ <range>0-1 for both</range>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="maxEnergy" type="byte">
+ <description>Max energy output of the flash for a full
+ power single flash</description>
+ <units>lumen-seconds</units>
+ <range>&gt;= 0</range>
+ <tag id="FUTURE" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.flash.firingPower" kind="controls">
+ </clone>
+ <clone entry="android.flash.firingTime" kind="controls">
+ </clone>
+ <clone entry="android.flash.mode" kind="controls"></clone>
+ <entry name="state" type="byte" visibility="public" enum="true"
+ hwlevel="limited">
+ <enum>
+ <value>UNAVAILABLE
+ <notes>No flash on camera.</notes></value>
+ <value>CHARGING
+ <notes>Flash is charging and cannot be fired.</notes></value>
+ <value>READY
+ <notes>Flash is ready to fire.</notes></value>
+ <value>FIRED
+ <notes>Flash fired for this capture.</notes></value>
+ <value>PARTIAL
+ <notes>Flash partially illuminated this frame.
+
+ This is usually due to the next or previous frame having
+ the flash fire, and the flash spilling into this capture
+ due to hardware limitations.</notes></value>
+ </enum>
+ <description>Current state of the flash
+ unit.</description>
+ <details>
+ When the camera device doesn't have a flash unit
+ (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
+ Other states indicate the current flash status.
+
+ In certain conditions, this will be available on LEGACY devices:
+
+ * Flash-less cameras always return UNAVAILABLE.
+ * Using android.control.aeMode `==` ON_ALWAYS_FLASH
+ will always return FIRED.
+ * Using android.flash.mode `==` TORCH
+ will always return FIRED.
+
+ In all other conditions the state will not be available on
+ LEGACY devices (i.e. it will be `null`).
+ </details>
+ </entry>
+ </dynamic>
+ </section>
+ <section name="hotPixel">
+ <controls>
+ <entry name="mode" type="byte" visibility="public" enum="true">
+ <enum>
+ <value>OFF
+ <notes>
+ No hot pixel correction is applied.
+
+ The frame rate must not be reduced relative to sensor raw output
+ for this option.
+
+ The hotpixel map may be returned in android.statistics.hotPixelMap.
+ </notes>
+ </value>
+ <value>FAST
+ <notes>
+ Hot pixel correction is applied, without reducing frame
+ rate relative to sensor raw output.
+
+ The hotpixel map may be returned in android.statistics.hotPixelMap.
+ </notes>
+ </value>
+ <value>HIGH_QUALITY
+ <notes>
+ High-quality hot pixel correction is applied, at a cost
+ of possibly reduced frame rate relative to sensor raw output.
+
+ The hotpixel map may be returned in android.statistics.hotPixelMap.
+ </notes>
+ </value>
+ </enum>
+ <description>
+ Operational mode for hot pixel correction.
+ </description>
+ <range>android.hotPixel.availableHotPixelModes</range>
+ <details>
+ Hotpixel correction interpolates out, or otherwise removes, pixels
+ that do not accurately measure the incoming light (i.e. pixels that
+ are stuck at an arbitrary value or are oversensitive).
+ </details>
+ <tag id="V1" />
+ <tag id="RAW" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="availableHotPixelModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of hot pixel correction modes for android.hotPixel.mode that are supported by this
+ camera device.
+ </description>
+ <range>Any value listed in android.hotPixel.mode</range>
+ <details>
+ FULL mode camera devices will always support FAST.
+ </details>
+ <hal_details>
+ To avoid performance issues, there will be significantly fewer hot
+ pixels than actual pixels on the camera sensor.
+ HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
+ on the camera device, but the underlying implementation can be the same for both modes.
+ That is, if the highest quality implementation on the camera device does not slow down
+ capture rate, then FAST and HIGH_QUALITY will generate the same output.
+ </hal_details>
+ <tag id="V1" />
+ <tag id="RAW" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.hotPixel.mode" kind="controls">
+ <tag id="V1" />
+ <tag id="RAW" />
+ </clone>
+ </dynamic>
+ </section>
+ <section name="jpeg">
+ <controls>
+ <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true"
+ typedef="location" hwlevel="legacy">
+ <description>
+ A location object to use when generating image GPS metadata.
+ </description>
+ <details>
+ Setting a location object in a request will include the GPS coordinates of the location
+ into any JPEG images captured based on the request. These coordinates can then be
+ viewed by anyone who receives the JPEG image.
+ </details>
+ </entry>
+ <entry name="gpsCoordinates" type="double" visibility="ndk_public"
+ type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>3</size>
+ </array>
+ <description>GPS coordinates to include in output JPEG
+ EXIF.</description>
+ <range>(-180 - 180], [-90,90], [-inf, inf]</range>
+ <tag id="BC" />
+ </entry>
+ <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public"
+ typedef="string" hwlevel="legacy">
+ <description>32 characters describing GPS algorithm to
+ include in EXIF.</description>
+ <units>UTF-8 null-terminated string</units>
+ <tag id="BC" />
+ </entry>
+ <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy">
+ <description>Time GPS fix was made to include in
+ EXIF.</description>
+ <units>UTC in seconds since January 1, 1970</units>
+ <tag id="BC" />
+ </entry>
+ <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
+ <description>The orientation for a JPEG image.</description>
+ <units>Degrees in multiples of 90</units>
+ <range>0, 90, 180, 270</range>
+ <details>
+ The clockwise rotation angle in degrees, relative to the orientation
+ of the camera, that the JPEG picture needs to be rotated by, to be viewed
+ upright.
+
+ Camera devices may either encode this value into the JPEG EXIF header, or
+ rotate the image data to match this orientation. When the image data is rotated,
+ the thumbnail data will also be rotated.
+
+ Note that this orientation is relative to the orientation of the camera sensor, given
+ by android.sensor.orientation.
+
+ To translate from the device orientation given by the Android sensor APIs, the following
+ sample code may be used:
+
+     private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
+         if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
+         int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+
+         // Round device orientation to a multiple of 90
+         deviceOrientation = (deviceOrientation + 45) / 90 * 90;
+
+         // Reverse device orientation for front-facing cameras
+         boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
+         if (facingFront) deviceOrientation = -deviceOrientation;
+
+         // Calculate desired JPEG orientation relative to camera orientation to make
+         // the image upright relative to the device orientation
+         int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+
+         return jpegOrientation;
+     }
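+
+ For example, a minimal usage sketch (assuming `builder` is a CaptureRequest.Builder
+ for a JPEG capture and `deviceOrientation` comes from an
+ android.view.OrientationEventListener):
+
+     builder.set(CaptureRequest.JPEG_ORIENTATION,
+         getJpegOrientation(characteristics, deviceOrientation));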
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
+ <description>Compression quality of the final JPEG
+ image.</description>
+ <range>1-100; larger is higher quality</range>
+ <details>85-95 is typical usage range.</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
+ <description>Compression quality of JPEG
+ thumbnail.</description>
+ <range>1-100; larger is higher quality</range>
+ <tag id="BC" />
+ </entry>
+ <entry name="thumbnailSize" type="int32" visibility="public"
+ container="array" typedef="size" hwlevel="legacy">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Resolution of embedded JPEG thumbnail.</description>
+ <range>android.jpeg.availableThumbnailSizes</range>
+ <details>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
+ but the captured JPEG will still be a valid image.
+
+ For best results, when issuing a request for a JPEG image, the thumbnail size selected
+ should have the same aspect ratio as the main JPEG output.
+
+ If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ ratio, the camera device creates the thumbnail by cropping it from the primary image.
+ For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has
+ a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
+ generate the thumbnail image. The thumbnail image will always have a smaller Field
+ Of View (FOV) than the primary image when aspect ratios differ.
+
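+ For example, a minimal sketch of picking a matching thumbnail size (assuming
+ `sizes` holds android.jpeg.availableThumbnailSizes and `jpegSize` is the
+ configured JPEG output size):
+
+     Size thumb = new Size(0, 0); // fall back to no thumbnail
+     for (Size s : sizes) {
+         // Compare aspect ratios without floating point: w1/h1 == w2/h2.
+         if (s.getWidth() * jpegSize.getHeight() == s.getHeight() * jpegSize.getWidth()) {
+             thumb = s; // list is sorted ascending, so this keeps the largest match
+         }
+     }
+     builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumb);
+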
+ When an android.jpeg.orientation of non-zero degree is requested,
+ the camera device will handle thumbnail rotation in one of the following ways:
+
+ * Set the
+ [EXIF orientation flag](https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION)
+ and keep jpeg and thumbnail image data unrotated.
+ * Rotate the jpeg and thumbnail image data and not set
+ [EXIF orientation flag](https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION).
+ In this case, LIMITED or FULL hardware level devices will report the rotated thumbnail size
+ in the capture result, so the width and height will be interchanged if 90 or 270 degree
+ orientation is requested. LEGACY devices will always report the unrotated thumbnail size.
+ </details>
+ <hal_details>
+ The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.
+ The cropping must be done on the primary jpeg image rather than the sensor active array.
+ The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
+ thumbnail image cropping.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="availableThumbnailSizes" type="int32" visibility="public"
+ container="array" typedef="size" hwlevel="legacy">
+ <array>
+ <size>2</size>
+ <size>n</size>
+ </array>
+ <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
+ camera device.</description>
+ <details>
+ This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
+ thumbnail should be generated.
+
+ The following conditions will be satisfied for this size list:
+
+ * The sizes will be sorted by increasing pixel area (width x height).
+ If several resolutions have the same area, they will be sorted by increasing width.
+ * The aspect ratio of the largest thumbnail size will be the same as the
+ aspect ratio of the largest JPEG output size in android.scaler.availableStreamConfigurations.
+ The largest size is defined as the size that has the largest pixel area
+ in a given size list.
+ * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
+ one corresponding size that has the same aspect ratio in availableThumbnailSizes,
+ and vice versa.
+ * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="maxSize" type="int32" visibility="system">
+ <description>Maximum size in bytes for the compressed
+ JPEG buffer</description>
+ <range>Must be large enough to fit any JPEG produced by
+ the camera</range>
+ <details>This is used for sizing the gralloc buffers for
+ JPEG</details>
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.jpeg.gpsLocation" kind="controls">
+ </clone>
+ <clone entry="android.jpeg.gpsCoordinates" kind="controls">
+ </clone>
+ <clone entry="android.jpeg.gpsProcessingMethod"
+ kind="controls"></clone>
+ <clone entry="android.jpeg.gpsTimestamp" kind="controls">
+ </clone>
+ <clone entry="android.jpeg.orientation" kind="controls">
+ </clone>
+ <clone entry="android.jpeg.quality" kind="controls">
+ </clone>
+ <entry name="size" type="int32">
+ <description>The size of the compressed JPEG image, in
+ bytes</description>
+ <range>&gt;= 0</range>
+ <details>If no JPEG output is produced for the request,
+ this must be 0.
+
+ Otherwise, this describes the real size of the compressed
+ JPEG image placed in the output stream. More specifically,
+ if android.jpeg.maxSize = 1000000, and a specific capture
+ has android.jpeg.size = 500000, then the output buffer from
+ the JPEG stream will be 1000000 bytes, of which the first
+ 500000 make up the real data.</details>
+ <tag id="FUTURE" />
+ </entry>
+ <clone entry="android.jpeg.thumbnailQuality"
+ kind="controls"></clone>
+ <clone entry="android.jpeg.thumbnailSize" kind="controls">
+ </clone>
+ </dynamic>
+ </section>
+ <section name="lens">
+ <controls>
+ <entry name="aperture" type="float" visibility="public" hwlevel="full">
+ <description>The desired lens aperture size, as a ratio of lens focal length to the
+ effective aperture diameter.</description>
+ <units>The f-number (f/N)</units>
+ <range>android.lens.info.availableApertures</range>
+ <details>Setting this value is only supported on the camera devices that have a variable
+ aperture lens.
+
+ When this is supported and android.control.aeMode is OFF,
+ this can be set along with android.sensor.exposureTime,
+ android.sensor.sensitivity, and android.sensor.frameDuration
+ to achieve manual exposure control.
+
+ The requested aperture value may take several frames to reach the
+ requested value; the camera device will report the current (intermediate)
+ aperture size in capture result metadata while the aperture is changing.
+ While the aperture is still changing, android.lens.state will be set to MOVING.
+
+ When this is supported and android.control.aeMode is one of
+ the ON modes, this will be overridden by the camera device
+ auto-exposure algorithm, the overridden values are then provided
+ back to the user in the corresponding result.</details>
+ <tag id="V1" />
+ </entry>
+ <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
+ <description>
+ The desired setting for the lens neutral density filter(s).
+ </description>
+ <units>Exposure Value (EV)</units>
+ <range>android.lens.info.availableFilterDensities</range>
+ <details>
+ This control will not be supported on most camera devices.
+
+ Lens filters are typically used to lower the amount of light the
+ sensor is exposed to (measured in steps of EV). As used here, an EV
+ step is the standard logarithmic representation, which is
+ non-negative and inversely proportional to the amount of light
+ hitting the sensor. For example, setting this to 0 would result
+ in no reduction of the incoming light, and setting this to 2 would
+ mean that the filter is set to reduce incoming light by two stops
+ (allowing 1/4 of the prior amount of light to the sensor).
+
+ It may take several frames before the lens filter density changes
+ to the requested value. While the filter density is still changing,
+ android.lens.state will be set to MOVING.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
+ <description>
+ The desired lens focal length; used for optical zoom.
+ </description>
+ <units>Millimeters</units>
+ <range>android.lens.info.availableFocalLengths</range>
+ <details>
+ This setting controls the physical focal length of the camera
+ device's lens. Changing the focal length changes the field of
+ view of the camera device, and is usually used for optical zoom.
+
+ Like android.lens.focusDistance and android.lens.aperture, this
+ setting won't be applied instantaneously, and it may take several
+ frames before the lens can change to the requested focal length.
+ While the focal length is still changing, android.lens.state will
+ be set to MOVING.
+
+ Optical zoom will not be supported on most devices.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
+ <description>Desired distance to plane of sharpest focus,
+ measured from frontmost surface of the lens.</description>
+ <units>See android.lens.info.focusDistanceCalibration for details</units>
+ <range>&gt;= 0</range>
+ <details>
+ This control can be used for setting manual focus, on devices that support
+ the MANUAL_SENSOR capability and have a variable-focus lens (see
+ android.lens.info.minimumFocusDistance).
+
+ A value of `0.0f` means infinity focus. The value set will be clamped to
+ `[0.0f, android.lens.info.minimumFocusDistance]`.
+
+ Like android.lens.focalLength, this setting won't be applied
+ instantaneously, and it may take several frames before the lens
+ can move to the requested focus distance. While the lens is still moving,
+ android.lens.state will be set to MOVING.
+
+ LEGACY devices support at most setting this to `0.0f`
+ for infinity focus.
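+
+ A minimal sketch of manual focus (assuming the device supports MANUAL_SENSOR,
+ `builder` is a CaptureRequest.Builder, and `0.5f` is an arbitrary example
+ distance within the valid range):
+
+     builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF);
+     builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 0.5f); // units per focusDistanceCalibration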
+ </details>
+ <tag id="BC" />
+ <tag id="V1" />
+ </entry>
+ <entry name="opticalStabilizationMode" type="byte" visibility="public"
+ enum="true" hwlevel="limited">
+ <enum>
+ <value>OFF
+ <notes>Optical stabilization is unavailable.</notes>
+ </value>
+ <value optional="true">ON
+ <notes>Optical stabilization is enabled.</notes>
+ </value>
+ </enum>
+ <description>
+ Sets whether the camera device uses optical image stabilization (OIS)
+ when capturing images.
+ </description>
+ <range>android.lens.info.availableOpticalStabilization</range>
+ <details>
+ OIS is used to compensate for motion blur due to small
+ movements of the camera during capture. Unlike digital image
+ stabilization (android.control.videoStabilizationMode), OIS
+ makes use of mechanical elements to stabilize the camera
+ sensor, and thus allows for longer exposure times before
+ camera shake becomes apparent.
+
+ Switching between different optical stabilization modes may take several
+ frames to initialize; the camera device will report the current mode in
+ capture result metadata. For example, when "ON" mode is requested, the
+ optical stabilization modes in the first several capture results may still
+ be "OFF", and they will become "ON" when the initialization is done.
+
+ If a camera device supports both OIS and digital image stabilization
+ (android.control.videoStabilizationMode), turning both modes on may produce undesirable
+ interaction, so it is recommended not to enable both at the same time.
+
+ Not all devices will support OIS; see
+ android.lens.info.availableOpticalStabilization for
+ available controls.
+ </details>
+ <tag id="V1" />
+ </entry>
+ </controls>
+ <static>
+ <namespace name="info">
+ <entry name="availableApertures" type="float" visibility="public"
+ container="array" hwlevel="full">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List of aperture size values for android.lens.aperture that are
+ supported by this camera device.</description>
+ <units>The aperture f-number</units>
+ <details>If the camera device doesn't support a variable lens aperture,
+ this list will contain only one value, which is the fixed aperture size.
+
+ If the camera device supports a variable aperture, the aperture values
+ in this list will be sorted in ascending order.</details>
+ <tag id="V1" />
+ </entry>
+ <entry name="availableFilterDensities" type="float" visibility="public"
+ container="array" hwlevel="full">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of neutral density filter values for
+ android.lens.filterDensity that are supported by this camera device.
+ </description>
+ <units>Exposure value (EV)</units>
+ <range>
+ Values are &gt;= 0
+ </range>
+ <details>
+ If a neutral density filter is not supported by this camera device,
+ this list will contain only 0. Otherwise, this list will include every
+ filter density supported by the camera device, in ascending order.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="availableFocalLengths" type="float" visibility="public"
+ type_notes="The list of available focal lengths"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of focal lengths for android.lens.focalLength that are supported by this camera
+ device.
+ </description>
+ <units>Millimeters</units>
+ <range>
+ Values are &gt; 0
+ </range>
+ <details>
+ If optical zoom is not supported, this list will only contain
+ a single value corresponding to the fixed focal length of the
+ device. Otherwise, this list will include every focal length supported
+ by the camera device, in ascending order.
+ </details>
+ <tag id="BC" />
+ <tag id="V1" />
+ </entry>
+ <entry name="availableOpticalStabilization" type="byte"
+ visibility="public" type_notes="list of enums" container="array"
+ typedef="enumList" hwlevel="limited">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of optical image stabilization (OIS) modes for
+ android.lens.opticalStabilizationMode that are supported by this camera device.
+ </description>
+ <range>Any value listed in android.lens.opticalStabilizationMode</range>
+ <details>
+ If OIS is not supported by a given camera device, this list will
+ contain only OFF.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
+ hwlevel="limited">
+ <description>Hyperfocal distance for this lens.</description>
+ <units>See android.lens.info.focusDistanceCalibration for details</units>
+ <range>If lens is fixed focus, &gt;= 0. If lens has focuser unit, the value is
+ within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
+ <details>
+ If the lens is not fixed focus, the camera device will report this
+ field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
+ </details>
+ </entry>
+ <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
+ hwlevel="limited">
+ <description>Shortest distance from frontmost surface
+ of the lens that can be brought into sharp focus.</description>
+ <units>See android.lens.info.focusDistanceCalibration for details</units>
+ <range>&gt;= 0</range>
+ <details>If the lens is fixed-focus, this will be
+ 0.</details>
+ <hal_details>Mandatory for FULL devices. LIMITED devices
+ must always set this value to 0 for fixed-focus lenses, and may omit
+ the minimum focus distance otherwise.
+
+ This field is also mandatory for all devices advertising
+ the MANUAL_SENSOR capability.</hal_details>
+ <tag id="V1" />
+ </entry>
+ <entry name="shadingMapSize" type="int32" visibility="ndk_public"
+ type_notes="width and height (N, M) of lens shading map provided by the camera device."
+ container="array" typedef="size" hwlevel="full">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Dimensions of lens shading map.</description>
+ <range>Both values &gt;= 1</range>
+ <details>
+ The map should be on the order of 30-40 rows and columns, and
+ must be smaller than 64x64.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="focusDistanceCalibration" type="byte" visibility="public"
+ enum="true" hwlevel="limited">
+ <enum>
+ <value>UNCALIBRATED
+ <notes>
+ The lens focus distance is not accurate, and the units used for
+ android.lens.focusDistance do not correspond to any physical units.
+
+ Setting the lens to the same focus distance on separate occasions may
+ result in a different real focus distance, depending on factors such
+ as the orientation of the device, the age of the focusing mechanism,
+ and the device temperature. The focus distance value will still be
+ in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
+ represents the farthest focus.
+ </notes>
+ </value>
+ <value>APPROXIMATE
+ <notes>
+ The lens focus distance is measured in diopters.
+
+ However, setting the lens to the same focus distance
+ on separate occasions may result in a different real
+ focus distance, depending on factors such as the
+ orientation of the device, the age of the focusing
+ mechanism, and the device temperature.
+ </notes>
+ </value>
+ <value>CALIBRATED
+ <notes>
+ The lens focus distance is measured in diopters, and
+ is calibrated.
+
+ The lens mechanism is calibrated so that setting the
+ same focus distance is repeatable on multiple
+ occasions with good accuracy, and the focus distance
+ corresponds to the real physical distance to the plane
+ of best focus.
+ </notes>
+ </value>
+ </enum>
+ <description>The lens focus distance calibration quality.</description>
+ <details>
+ The lens focus distance calibration quality determines the reliability of
+ focus related metadata entries, i.e. android.lens.focusDistance,
+ android.lens.focusRange, android.lens.info.hyperfocalDistance, and
+ android.lens.info.minimumFocusDistance.
+
+ APPROXIMATE and CALIBRATED devices report the focus metadata in
+ units of diopters (1/meter), so `0.0f` represents focusing at infinity,
+ and increasing positive numbers represent focusing closer and closer
+ to the camera device. The focus distance control also uses diopters
+ on these devices.
+
+ UNCALIBRATED devices do not use units that are directly comparable
+ to any real physical measurement, but `0.0f` still represents farthest
+ focus, and android.lens.info.minimumFocusDistance represents the
+ nearest focus the device can achieve.
+ </details>
+ <hal_details>
+ For devices that advertise APPROXIMATE quality or higher, a focus distance of 0 diopters
+ (infinity focus) must work. When autofocus is disabled (android.control.afMode == OFF)
+ and the lens focus distance is set to 0 diopters
+ (android.lens.focusDistance == 0), the lens will move to focus at infinity
+ and is stably focused at infinity even if the device tilts. It may take the
+ lens some time to move; during the move the lens state should be MOVING and
+ the output diopter value should be changing toward 0.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ </namespace>
+ <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
+ <enum>
+ <value>FRONT
+ <notes>
+ The camera device faces the same direction as the device's screen.
+ </notes></value>
+ <value>BACK
+ <notes>
+ The camera device faces the opposite direction as the device's screen.
+ </notes></value>
+ <value>EXTERNAL
+ <notes>
+ The camera device is an external camera, and has no fixed facing relative to the
+ device's screen.
+ </notes></value>
+ </enum>
+ <description>Direction the camera faces relative to
+ device screen.</description>
+ </entry>
+ <entry name="poseRotation" type="float" visibility="public"
+ container="array">
+ <array>
+ <size>4</size>
+ </array>
+ <description>
+ The orientation of the camera relative to the sensor
+ coordinate system.
+ </description>
+ <units>
+ Quaternion coefficients
+ </units>
+ <details>
+ The four coefficients that describe the quaternion
+ rotation from the Android sensor coordinate system to a
+ camera-aligned coordinate system where the X-axis is
+ aligned with the long side of the image sensor, the Y-axis
+ is aligned with the short side of the image sensor, and
+ the Z-axis is aligned with the optical axis of the sensor.
+
+ To convert from the quaternion coefficients `(x,y,z,w)`
+ to the axis of rotation `(a_x, a_y, a_z)` and rotation
+ amount `theta`, the following formulas can be used:
+
+ theta = 2 * acos(w)
+ a_x = x / sin(theta/2)
+ a_y = y / sin(theta/2)
+ a_z = z / sin(theta/2)
+
+ To create a 3x3 rotation matrix that applies the rotation
+ defined by this quaternion, the following matrix can be
+ used:
+
+ R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw,
+ 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw,
+ 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ]
+
+ This matrix can then be used to apply the rotation to a
+ column vector point with
+
+ `p' = Rp`
+
+ where `p` is in the device sensor coordinate system, and
+ `p'` is in the camera-oriented coordinate system.
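+
+ A minimal Java sketch of the matrix construction above (row-major output;
+ the coefficient order `(x, y, z, w)` matches this key):
+
+     // Build the 3x3 rotation matrix R from the quaternion coefficients.
+     static float[] quaternionToMatrix(float x, float y, float z, float w) {
+         return new float[] {
+             1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w,
+             2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w,
+             2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y
+         };
+     }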
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ <entry name="poseTranslation" type="float" visibility="public"
+ container="array">
+ <array>
+ <size>3</size>
+ </array>
+ <description>Position of the camera optical center.</description>
+ <units>Meters</units>
+ <details>
+ The position of the camera device's lens optical center,
+ as a three-dimensional vector `(x,y,z)`, relative to the
+ optical center of the largest camera device facing in the
+ same direction as this camera, in the
+ [Android sensor coordinate axes](https://developer.android.com/reference/android/hardware/SensorEvent.html).
+ Note that only the axis definitions are shared with
+ the sensor coordinate system, but not the origin.
+
+ If this device is the largest or only camera device with a
+ given facing, then this position will be `(0, 0, 0)`; a
+ camera device with a lens optical center located 3 cm from
+ the main sensor along the +X axis (to the right from the
+ user's perspective) will report `(0.03, 0, 0)`.
+
+ To transform pixel coordinates between two cameras
+ facing the same direction, first the source camera's
+ android.lens.radialDistortion must be corrected for. Then
+ the source camera android.lens.intrinsicCalibration needs
+ to be applied, followed by the android.lens.poseRotation
+ of the source camera, the translation of the source camera
+ relative to the destination camera, the
+ android.lens.poseRotation of the destination camera, and
+ finally the inverse of android.lens.intrinsicCalibration
+ of the destination camera. This obtains a
+ radial-distortion-free coordinate in the destination
+ camera pixel coordinates.
+
+ To compare this against a real image from the destination
+ camera, the destination camera image then needs to be
+ corrected for radial distortion before comparison or
+ sampling.
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.lens.aperture" kind="controls">
+ <tag id="V1" />
+ </clone>
+ <clone entry="android.lens.filterDensity" kind="controls">
+ <tag id="V1" />
+ </clone>
+ <clone entry="android.lens.focalLength" kind="controls">
+ <tag id="BC" />
+ </clone>
+ <clone entry="android.lens.focusDistance" kind="controls">
+ <details>Should be zero for fixed-focus cameras</details>
+ <tag id="BC" />
+ </clone>
+ <entry name="focusRange" type="float" visibility="public"
+ type_notes="Range of scene distances that are in focus"
+ container="array" typedef="pairFloatFloat" hwlevel="limited">
+ <array>
+ <size>2</size>
+ </array>
+ <description>The range of scene distances that are in
+ sharp focus (depth of field).</description>
+ <units>A pair of focus distances in diopters: (near,
+ far); see android.lens.info.focusDistanceCalibration for details.</units>
+ <range>&gt;=0</range>
+ <details>If variable focus is not supported, the camera device can still report a
+ fixed depth of field range.</details>
+ <tag id="BC" />
+ </entry>
+ <clone entry="android.lens.opticalStabilizationMode"
+ kind="controls">
+ <tag id="V1" />
+ </clone>
+ <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
+ <enum>
+ <value>STATIONARY
+ <notes>
+ The lens parameters (android.lens.focalLength, android.lens.focusDistance,
+ android.lens.filterDensity and android.lens.aperture) are not changing.
+ </notes>
+ </value>
+ <value>MOVING
+ <notes>
+ One or several of the lens parameters
+ (android.lens.focalLength, android.lens.focusDistance,
+ android.lens.filterDensity or android.lens.aperture) is
+ currently changing.
+ </notes>
+ </value>
+ </enum>
+ <description>Current lens status.</description>
+ <details>
+ For lens parameters android.lens.focalLength, android.lens.focusDistance,
+ android.lens.filterDensity and android.lens.aperture, when changes are requested,
+ they may take several frames to reach the requested values. This state indicates
+ the current status of the lens parameters.
+
+ When the state is STATIONARY, the lens parameters are not changing. This could be
+ either because the parameters are all fixed, or because the lens has had enough
+ time to reach the most recently-requested values.
+ If all of these lens parameters are not changeable for a camera device, as listed below:
+
+ * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
+ android.lens.focusDistance parameter will always be 0.
+ * Fixed focal length (android.lens.info.availableFocalLengths contains single value),
+ which means the optical zoom is not supported.
+ * No ND filter (android.lens.info.availableFilterDensities contains only 0).
+ * Fixed aperture (android.lens.info.availableApertures contains single value).
+
+ Then this state will always be STATIONARY.
+
+ When the state is MOVING, it indicates that at least one of the lens parameters
+ is changing.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <clone entry="android.lens.poseRotation" kind="static">
+ </clone>
+ <clone entry="android.lens.poseTranslation" kind="static">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="intrinsicCalibration" type="float" visibility="public"
+ container="array">
+ <array>
+ <size>5</size>
+ </array>
+ <description>
+ The parameters for this camera device's intrinsic
+ calibration.
+ </description>
+ <units>
+ Pixels in the
+ android.sensor.info.preCorrectionActiveArraySize
+ coordinate system.
+ </units>
+ <details>
+ The five calibration parameters that describe the
+ transform from camera-centric 3D coordinates to sensor
+ pixel coordinates:
+
+ [f_x, f_y, c_x, c_y, s]
+
+ Where `f_x` and `f_y` are the horizontal and vertical
+ focal lengths, `[c_x, c_y]` is the position of the optical
+ axis, and `s` is a skew parameter for the sensor plane not
+ being aligned with the lens plane.
+
+ These are typically used within a transformation matrix K:
+
+     K = [ f_x,  s,  c_x,
+             0, f_y,  c_y,
+             0,   0,    1 ]
+
+ which can then be combined with the camera pose rotation
+ `R` and translation `t` (android.lens.poseRotation and
+ android.lens.poseTranslation, respectively) to calculate the
+ complete transform from world coordinates to pixel
+ coordinates:
+
+     P = [ K 0 ] * [ R t ]
+         [ 0 1 ]   [ 0 1 ]
+
+ and with `p_w` being a point in the world coordinate system
+ and `p_s` being a point in the camera active pixel array
+ coordinate system, and with the mapping including the
+ homogeneous division by z:
+
+ p_h = (x_h, y_h, z_h) = P p_w
+ p_s = p_h / z_h
+
+ so `[x_s, y_s]` is the pixel coordinates of the world
+ point, `z_s = 1`, and `w_s` is a measurement of disparity
+ (depth) in pixel coordinates.
+
+ Note that the coordinate system for this transform is the
+ android.sensor.info.preCorrectionActiveArraySize system,
+ where `(0,0)` is the top-left of the
+ preCorrectionActiveArraySize rectangle. Once the pose and
+ intrinsic calibration transforms have been applied to a
+ world point, then the android.lens.radialDistortion
+ transform needs to be applied, and the result adjusted to
+ be in the android.sensor.info.activeArraySize coordinate
+ system (where `(0, 0)` is the top-left of the
+ activeArraySize rectangle), to determine the final pixel
+ coordinate of the world point for processed (non-RAW)
+ output buffers.
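+
+ A hedged sketch of the projection above (assuming `k` is the 5-element array
+ from this key and `(x, y, z)` is a point in the camera-aligned 3D coordinate
+ system with z > 0, before lens distortion is applied):
+
+     float[] k = characteristics.get(CameraCharacteristics.LENS_INTRINSIC_CALIBRATION);
+     // k = [f_x, f_y, c_x, c_y, s]; the homogeneous division by z is folded in.
+     float xS = (k[0] * x + k[4] * y) / z + k[2]; // column in pre-correction pixels
+     float yS = (k[1] * y) / z + k[3];            // row in pre-correction pixels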
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ <entry name="radialDistortion" type="float" visibility="public"
+ container="array">
+ <array>
+ <size>6</size>
+ </array>
+ <description>
+ The correction coefficients to correct for this camera device's
+ radial and tangential lens distortion.
+ </description>
+ <units>
+ Unitless coefficients.
+ </units>
+ <details>
+ Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
+ kappa_3]` and two tangential distortion coefficients
+ `[kappa_4, kappa_5]` that can be used to correct the
+ lens's geometric distortion with the mapping equations:
+
+ x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+ y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+
+ Here, `[x_c, y_c]` are the coordinates to sample in the
+ input image that correspond to the pixel values in the
+ corrected image at the coordinate `[x_i, y_i]`:
+
+ correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
+
+ The pixel coordinates are defined in a normalized
+ coordinate system related to the
+ android.lens.intrinsicCalibration calibration fields.
+ Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
+ lens optical center `[c_x, c_y]`. The maximum magnitudes
+ of both x and y coordinates are normalized to be 1 at the
+ edge further from the optical center, so the range
+ for both dimensions is `-1 <= x <= 1`.
+
+ Finally, `r` represents the radial distance from the
+ optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
+ is therefore no larger than `|r| <= sqrt(2)`.
+
+ The distortion model used is the Brown-Conrady model.
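+
+ A direct transcription of the mapping equations above into Java (assuming `d`
+ holds the six coefficients `[kappa_0 .. kappa_5]` from this key, and `(xi, yi)`
+ is a corrected coordinate in the normalized system described above):
+
+     float r2 = xi * xi + yi * yi;
+     float radial = d[0] + d[1] * r2 + d[2] * r2 * r2 + d[3] * r2 * r2 * r2;
+     float xc = xi * radial + d[4] * (2 * xi * yi) + d[5] * (r2 + 2 * xi * xi);
+     float yc = yi * radial + d[5] * (2 * xi * yi) + d[4] * (r2 + 2 * yi * yi);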
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.lens.intrinsicCalibration" kind="static">
+ </clone>
+ <clone entry="android.lens.radialDistortion" kind="static">
+ </clone>
+ </dynamic>
+ </section>
+ <section name="noiseReduction">
+ <controls>
+ <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+ <enum>
+ <value>OFF
+ <notes>No noise reduction is applied.</notes></value>
+ <value>FAST
+ <notes>Noise reduction is applied without reducing frame rate relative to sensor
+ output. It may be the same as OFF if noise reduction will reduce frame rate
+ relative to sensor.</notes></value>
+ <value>HIGH_QUALITY
+ <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
+ rate relative to sensor output.</notes></value>
+ <value optional="true">MINIMAL
+ <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
+ sensor output. </notes></value>
+ <value optional="true">ZERO_SHUTTER_LAG
+ <notes>Noise reduction is applied at different levels for different output streams,
+ based on resolution. Streams at maximum recording resolution (see {@link
+ ACameraDevice_createCaptureSession}) or below have noise
+ reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
+ noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
+ for low-resolution streams is tuned so that frame rate is not impacted, and the quality
+ is equal to or better than FAST (since it is only applied to lower-resolution outputs,
+ quality may improve from FAST).
+
+ This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ with YUV or PRIVATE reprocessing, where the application continuously captures
+ high-resolution intermediate buffers into a circular buffer, from which a final image is
+ produced via reprocessing when a user takes a picture. For such a use case, the
+ high-resolution buffers must not have noise reduction applied to maximize efficiency of
+ preview and to avoid over-applying noise filtering when reprocessing, while
+ low-resolution buffers (used for recording or preview, generally) need noise reduction
+ applied for reasonable preview quality.
+
+ This mode is guaranteed to be supported by devices that support either the
+ YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ (android.request.availableCapabilities lists either of those capabilities) and it will
+ be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
+ </notes></value>
+ </enum>
+ <description>Mode of operation for the noise reduction algorithm.</description>
+ <range>android.noiseReduction.availableNoiseReductionModes</range>
+ <details>The noise reduction algorithm attempts to improve image quality by removing
+ excessive noise added by the capture process, especially in dark conditions.
+
+ OFF means no noise reduction will be applied by the camera device, for both raw and
+ YUV domain.
+
+ MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
+ demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
+ This mode is optional and may not be supported by all devices. The application should check
+ android.noiseReduction.availableNoiseReductionModes before using it.
+
+ FAST/HIGH_QUALITY both mean camera device determined noise filtering
+ will be applied. HIGH_QUALITY mode indicates that the camera device
+ will use the highest-quality noise filtering algorithms,
+ even if it slows down capture rate. FAST means the camera device will not
+ slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
+ MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
+ Every output stream will have a similar amount of enhancement applied.
+
+ ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ into a final capture when triggered by the user. In this mode, the camera device applies
+ noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+ preview quality, but does not apply noise reduction to high-resolution streams, since
+ those will be reprocessed later if necessary.
+
+ For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
+ will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
+ may adjust the noise reduction parameters for best image quality based on the
+ android.reprocess.effectiveExposureFactor if it is set.
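+
+ As a rough Java sketch (the `characteristics` and `builder` names are placeholders
+ for objects obtained from an open camera device), an application might select
+ ZERO_SHUTTER_LAG only when it is advertised:
+
+     // Fall back to FAST when ZERO_SHUTTER_LAG is not listed.
+     int[] nrModes = characteristics.get(
+         CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
+     int chosen = CameraMetadata.NOISE_REDUCTION_MODE_FAST;
+     for (int mode : nrModes) {
+         if (mode == CameraMetadata.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG) {
+             chosen = mode;
+         }
+     }
+     builder.set(CaptureRequest.NOISE_REDUCTION_MODE, chosen);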
+ </details>
+ <hal_details>
+ For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
+ adjust the internal noise reduction parameters appropriately to get the best quality
+ images.
+ </hal_details>
+ <tag id="V1" />
+ <tag id="REPROC" />
+ </entry>
+ <entry name="strength" type="byte">
+ <description>Control the amount of noise reduction
+ applied to the images</description>
+ <units>1-10; 10 is max noise reduction</units>
+ <range>1 - 10</range>
+ <tag id="FUTURE" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="availableNoiseReductionModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of noise reduction modes for android.noiseReduction.mode that are supported
+ by this camera device.
+ </description>
+ <range>Any value listed in android.noiseReduction.mode</range>
+ <details>
+ Full-capability camera devices will always support OFF and FAST.
+
+ Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
+ ZERO_SHUTTER_LAG.
+
+ Legacy-capability camera devices will only support FAST mode.
+ </details>
+ <hal_details>
+ HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
+ on the camera device, but the underlying implementation can be the same for both modes.
+ That is, if the highest quality implementation on the camera device does not slow down
+ capture rate, then FAST and HIGH_QUALITY will generate the same output.
+ </hal_details>
+ <tag id="V1" />
+ <tag id="REPROC" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.noiseReduction.mode" kind="controls">
+ <tag id="V1" />
+ <tag id="REPROC" />
+ </clone>
+ </dynamic>
+ </section>
+ <section name="quirks">
+ <static>
+ <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
+ <description>If set to 1, the camera service does not
+ scale 'normalized' coordinates with respect to the crop
+ region. This applies to metering input (a{e,f,wb}Regions)
+ and output (face rectangles).</description>
+ <details>Normalized coordinates refer to those in the
+ (-1000,1000) range mentioned in the
+ android.hardware.Camera API.
+
+ HAL implementations should instead always use and emit
+ sensor array-relative coordinates for all region data. Does
+ not need to be listed in static metadata. Support will be
+ removed in future versions of camera service.</details>
+ </entry>
+ <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
+ <description>If set to 1, then the camera service always
+ switches to FOCUS_MODE_AUTO before issuing a AF
+ trigger.</description>
+ <details>HAL implementations should implement AF trigger
+ modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
+ CONTINUOUS_PICTURE modes instead of using this flag. Does
+ not need to be listed in static metadata. Support will be
+ removed in future versions of camera service.</details>
+ </entry>
+ <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
+ <description>If set to 1, the camera service uses
+ CAMERA2_PIXEL_FORMAT_ZSL instead of
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
+ shutter lag stream</description>
+ <details>HAL implementations should use gralloc usage flags
+ to determine that a stream will be used for
+ zero-shutter-lag, instead of relying on an explicit
+ format setting. Does not need to be listed in static
+ metadata. Support will be removed in future versions of
+ camera service.</details>
+ </entry>
+ <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
+ <description>
+ If set to 1, the HAL will always split result
+ metadata for a single capture into multiple buffers,
+ returned using multiple process_capture_result calls.
+ </description>
+ <details>
+ Does not need to be listed in static
+ metadata. Support for partial results will be reworked in
+ future versions of camera service. This quirk will stop
+ working at that point; DO NOT USE without careful
+ consideration of future support.
+ </details>
+ <hal_details>
+ Refer to `camera3_capture_result::partial_result`
+ for information on how to implement partial results.
+ </hal_details>
+ </entry>
+ </static>
+ <dynamic>
+ <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
+ <enum>
+ <value>FINAL
+ <notes>The last or only metadata result buffer
+ for this capture.</notes>
+ </value>
+ <value>PARTIAL
+ <notes>A partial buffer of result metadata for this
+ capture. More result buffers for this capture will be sent
+ by the camera device, the last of which will be marked
+ FINAL.</notes>
+ </value>
+ </enum>
+ <description>
+ Whether a result given to the framework is the
+ final one for the capture, or only a partial that contains a
+ subset of the full set of dynamic metadata
+ values.</description>
+ <range>Optional. Default value is FINAL.</range>
+ <details>
+ The entries in the result metadata buffers for a
+ single capture may not overlap, except for this entry. The
+ FINAL buffers must retain FIFO ordering relative to the
+ requests that generate them, so the FINAL buffer for frame 3 must
+ always be sent to the framework after the FINAL buffer for frame 2, and
+ before the FINAL buffer for frame 4. PARTIAL buffers may be returned
+ in any order relative to other frames, but all PARTIAL buffers for a given
+ capture must arrive before the FINAL buffer for that capture. This entry may
+ only be used by the camera device if quirks.usePartialResult is set to 1.
+ </details>
+ <hal_details>
+ Refer to `camera3_capture_result::partial_result`
+ for information on how to implement partial results.
+ </hal_details>
+ </entry>
+ </dynamic>
+ </section>
+ <section name="request">
+ <controls>
+ <entry name="frameCount" type="int32" visibility="system" deprecated="true">
+ <description>A frame counter set by the framework. Must
+ be maintained unchanged in output frame. This value monotonically
+ increases with every new result (that is, each new result has a unique
+ frameCount value).
+ </description>
+ <units>incrementing integer</units>
+ <range>Any int.</range>
+ </entry>
+ <entry name="id" type="int32" visibility="hidden">
+ <description>An application-specified ID for the current
+ request. Must be maintained unchanged in output
+ frame</description>
+ <units>arbitrary integer assigned by application</units>
+ <range>Any int</range>
+ <tag id="V1" />
+ </entry>
+ <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
+ container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List which camera reprocess stream is used
+ for the source of reprocessing data.</description>
+ <units>List of camera reprocess stream IDs</units>
+ <range>
+ Typically, only one entry allowed, must be a valid reprocess stream ID.
+ </range>
+ <details>Only meaningful when android.request.type ==
+ REPROCESS. Ignored otherwise</details>
+ <tag id="HAL2" />
+ </entry>
+ <entry name="metadataMode" type="byte" visibility="system"
+ enum="true">
+ <enum>
+ <value>NONE
+ <notes>No metadata should be produced on output, except
+ for application-bound buffer data. If no
+ application-bound streams exist, no frame should be
+ placed in the output frame queue. If such streams
+ exist, a frame should be placed on the output queue
+ with null metadata but with the necessary output buffer
+ information. Timestamp information should still be
+ included with any output stream buffers</notes></value>
+ <value>FULL
+ <notes>All metadata should be produced. Statistics will
+ only be produced if they are separately
+ enabled</notes></value>
+ </enum>
+ <description>How much metadata to produce on
+ output</description>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
+ container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <description>Lists which camera output streams image data
+ from this capture must be sent to</description>
+ <units>List of camera stream IDs</units>
+ <range>List must only include streams that have been
+ created</range>
+ <details>If no output streams are listed, then the image
+ data should simply be discarded. The image data must
+ still be captured for metadata and statistics production,
+ and the lens and flash must operate as requested.</details>
+ <tag id="HAL2" />
+ </entry>
+ <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
+ <enum>
+ <value>CAPTURE
+ <notes>Capture a new image from the imaging hardware,
+ and process it according to the
+ settings</notes></value>
+ <value>REPROCESS
+ <notes>Process previously captured data; the
+ android.request.inputStreams parameter determines the
+ source reprocessing stream. TODO: Mark dynamic metadata
+ needed for reprocessing with [RP]</notes></value>
+ </enum>
+ <description>The type of the request; either CAPTURE or
+ REPROCESS. For HAL3, this tag is redundant.
+ </description>
+ <tag id="HAL2" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>3</size>
+ </array>
+ <description>The maximum numbers of different types of output streams
+ that can be configured and used simultaneously by a camera device.
+ </description>
+ <range>
+ For processed (and stalling) format streams, &gt;= 1.
+
+ For Raw format (either stalling or non-stalling) streams, &gt;= 0.
+
+ For processed (but not stalling) format streams, &gt;= 3
+ for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
+ &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
+ </range>
+ <details>
+ This is a 3 element tuple that contains the max number of output simultaneous
+ streams for raw sensor, processed (but not stalling), and processed (and stalling)
+ formats respectively. For example, assuming that JPEG is typically a processed and
+ stalling stream, if max raw sensor format output stream number is 1, max YUV streams
+ number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
+
+ This lists the upper bound of the number of output streams supported by
+ the camera device. Using more streams simultaneously may require more hardware and
+ CPU resources that will consume more power. The image format for an output stream can
+ be any supported format provided by android.scaler.availableStreamConfigurations.
+ The formats defined in android.scaler.availableStreamConfigurations can be categorized
+ into the 3 stream types as below (see the query sketch after the list):
+
+ * Processed (and stalling): any non-RAW format with a stallDurations &gt; 0.
+ Typically {@link AIMAGE_FORMAT_JPEG} format.
+ * Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
+ {@link AIMAGE_FORMAT_RAW12}.
+ * Processed (but not-stalling): any non-RAW format without a stall duration.
+ Typically {@link AIMAGE_FORMAT_YUV_420_888}.
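+
+ On the Java side, these three counts are exposed through synthetic keys rather than
+ the raw 3-element array; a query sketch (the `characteristics` name is a placeholder)
+ could look like:
+
+     int maxRaw = characteristics.get(
+         CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
+     int maxProc = characteristics.get(
+         CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
+     int maxProcStalling = characteristics.get(
+         CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);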
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true"
+ hwlevel="legacy">
+ <description>The maximum numbers of different types of output streams
+ that can be configured and used simultaneously by a camera device
+ for any `RAW` formats.
+ </description>
+ <range>
+ &gt;= 0
+ </range>
+ <details>
+ This value contains the max number of output simultaneous
+ streams from the raw sensor.
+
+ This lists the upper bound of the number of output streams supported by
+ the camera device. Using more streams simultaneously may require more hardware and
+ CPU resources that will consume more power. The image format for this kind of output stream can
+ be any `RAW` and supported format provided by android.scaler.streamConfigurationMap.
+
+ In particular, a `RAW` format is typically one of:
+
+ * {@link AIMAGE_FORMAT_RAW16}
+ * {@link AIMAGE_FORMAT_RAW10}
+ * {@link AIMAGE_FORMAT_RAW12}
+
+ LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
+ never support raw streams.
+ </details>
+ </entry>
+ <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true"
+ hwlevel="legacy">
+ <description>The maximum numbers of different types of output streams
+ that can be configured and used simultaneously by a camera device
+ for any processed (but not-stalling) formats.
+ </description>
+ <range>
+ &gt;= 3
+ for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
+ &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
+ </range>
+ <details>
+ This value contains the max number of output simultaneous
+ streams for any processed (but not-stalling) formats.
+
+ This lists the upper bound of the number of output streams supported by
+ the camera device. Using more streams simultaneously may require more hardware and
+ CPU resources that will consume more power. The image format for this kind of output stream can
+ be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
+
+ Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
+ Typically:
+
+ * {@link AIMAGE_FORMAT_YUV_420_888}
+ * Implementation-defined formats, i.e. {@link
+ android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
+
+ For full guarantees, query {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
+ processed format -- it will return 0 for a non-stalling stream.
+
+ LEGACY devices will support at least 2 processing/non-stalling streams.
+ </details>
+ </entry>
+ <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true"
+ hwlevel="legacy">
+ <description>The maximum numbers of different types of output streams
+ that can be configured and used simultaneously by a camera device
+ for any processed (and stalling) formats.
+ </description>
+ <range>
+ &gt;= 1
+ </range>
+ <details>
+ This value contains the max number of output simultaneous
+ streams for any processed (and stalling) formats.
+
+ This lists the upper bound of the number of output streams supported by
+ the camera device. Using more streams simultaneously may require more hardware and
+ CPU resources that will consume more power. The image format for this kind of output stream can
+ be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
+
+ A processed and stalling format is defined as any non-RAW format with a stallDurations
+ &gt; 0. Typically only the {@link AIMAGE_FORMAT_JPEG} format is a
+ stalling format.
+
+ For full guarantees, query {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
+ processed format -- it will return a non-0 value for a stalling stream.
+
+ LEGACY devices will support up to 1 processing/stalling stream.
+ </details>
+ </entry>
+ <entry name="maxNumReprocessStreams" type="int32" visibility="system"
+ deprecated="true" container="array">
+ <array>
+ <size>1</size>
+ </array>
+ <description>How many reprocessing streams of any type
+ can be allocated at the same time.</description>
+ <range>&gt;= 0</range>
+ <details>
+ Only used by HAL2.x.
+
+ When set to 0, it means no reprocess stream is supported.
+ </details>
+ <tag id="HAL2" />
+ </entry>
+ <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full">
+ <description>
+ The maximum numbers of any type of input streams
+ that can be configured and used simultaneously by a camera device.
+ </description>
+ <range>
+ 0 or 1.
+ </range>
+ <details>When set to 0, it means no input stream is supported.
+
+ The image format for an input stream can be any supported format returned by {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
+ input stream, there must be at least one output stream configured to receive the
+ reprocessed images.
+
+ When an input stream and some output streams are used in a reprocessing request,
+ only the input buffer will be used to produce these output stream buffers, and a
+ new sensor image will not be captured.
+
+ For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
+ stream image format will be PRIVATE, and the associated output stream image format
+ should be JPEG.
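+
+ For illustration, a Java sketch (placeholder `characteristics` name) that probes for
+ reprocessing support before attempting to configure an input stream:
+
+     Integer maxInputs = characteristics.get(
+         CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS);
+     if (maxInputs != null && maxInputs > 0) {
+         StreamConfigurationMap map = characteristics.get(
+             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+         int[] inputFormats = map.getInputFormats();
+         // Pick a supported input format/size and create a reprocessable session.
+     }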
+ </details>
+ <hal_details>
+ For the reprocessing flow and controls, see
+ hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
+ </hal_details>
+ <tag id="REPROC" />
+ </entry>
+ </static>
+ <dynamic>
+ <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
+ <description>A frame counter set by the framework. This value monotonically
+ increases with every new result (that is, each new result has a unique
+ frameCount value).</description>
+ <units>count of frames</units>
+ <range>&gt; 0</range>
+ <details>Reset on release()</details>
+ </entry>
+ <clone entry="android.request.id" kind="controls"></clone>
+ <clone entry="android.request.metadataMode"
+ kind="controls"></clone>
+ <clone entry="android.request.outputStreams"
+ kind="controls"></clone>
+ <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
+ <description>Specifies the number of pipeline stages the frame went
+ through from when it was exposed to when the final completed result
+ was available to the framework.</description>
+ <range>&lt;= android.request.pipelineMaxDepth</range>
+ <details>Depending on what settings are used in the request, and
+ what streams are configured, the data may undergo less processing,
+ and some pipeline stages skipped.
+
+ See android.request.pipelineMaxDepth for more details.
+ </details>
+ <hal_details>
+ This value must always represent the accurate count of how many
+ pipeline stages were actually used.
+ </hal_details>
+ </entry>
+ </dynamic>
+ <static>
+ <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
+ <description>Specifies the number of maximum pipeline stages a frame
+ has to go through from when it's exposed to when it's available
+ to the framework.</description>
+ <details>A typical minimum value for this is 2 (one stage to expose,
+ one stage to readout) from the sensor. The ISP then usually adds
+ its own stages to do custom HW processing. Further stages may be
+ added by SW processing.
+
+ Depending on what settings are used (e.g. YUV, JPEG) and what
+ processing is enabled (e.g. face detection), the actual pipeline
+ depth (specified by android.request.pipelineDepth) may be less than
+ the max pipeline depth.
+
+ A pipeline depth of X stages is equivalent to a pipeline latency of
+ X frame intervals.
+
+ This value will normally be 8 or less; however, for a high speed capture session,
+ the max pipeline depth will be up to 8 x the size of the high speed capture request list.
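+
+ As a small Java fragment (placeholder `characteristics` and `result` names) relating
+ the per-frame depth to this maximum:
+
+     Byte maxDepth = characteristics.get(
+         CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
+     // Per completed capture:
+     Byte depth = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
+     // depth <= maxDepth always holds; any gap reflects skipped stages.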
+ </details>
+ <hal_details>
+ This value should be 4 or less, except for high speed recording sessions, where the
+ max batch size may be larger than 1.
+ </hal_details>
+ </entry>
+ <entry name="partialResultCount" type="int32" visibility="public" optional="true">
+ <description>Defines how many sub-components
+ a result will be composed of.
+ </description>
+ <range>&gt;= 1</range>
+ <details>In order to combat the pipeline latency, partial results
+ may be delivered to the application layer from the camera device as
+ soon as they are available.
+
+ Optional; defaults to 1. A value of 1 means that partial
+ results are not supported, and only the final TotalCaptureResult will
+ be produced by the camera device.
+
+ A typical use case for this might be: after requesting an
+ auto-focus (AF) lock the new AF state might be available 50%
+ of the way through the pipeline. The camera device could
+ then immediately dispatch this state via a partial result to
+ the application, and the rest of the metadata via later
+ partial results.
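+
+ As an illustrative Java sketch (placeholder names), partial results arrive through
+ {@link android.hardware.camera2.CameraCaptureSession.CaptureCallback#onCaptureProgressed}:
+
+     CameraCaptureSession.CaptureCallback callback =
+             new CameraCaptureSession.CaptureCallback() {
+         @Override
+         public void onCaptureProgressed(CameraCaptureSession session,
+                 CaptureRequest request, CaptureResult partialResult) {
+             // Called up to (partialResultCount - 1) times per capture; each
+             // partial carries a subset of the final metadata.
+             Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
+             if (afState != null) {
+                 // React to AF state before the TotalCaptureResult arrives.
+             }
+         }
+     };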
+ </details>
+ </entry>
+ <entry name="availableCapabilities" type="byte" visibility="public"
+ enum="true" container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <enum>
+ <value>BACKWARD_COMPATIBLE
+ <notes>The minimal set of capabilities that every camera
+ device (regardless of android.info.supportedHardwareLevel)
+ supports.
+
+ This capability is listed by all normal devices, and
+ indicates that the camera device has a feature set
+ that's comparable to the baseline requirements for the
+ older android.hardware.Camera API.
+
+ Devices with the DEPTH_OUTPUT capability might not list this
+ capability, indicating that they support only depth measurement,
+ not standard color output.
+ </notes>
+ </value>
+ <value optional="true">MANUAL_SENSOR
+ <notes>
+ The camera device can be manually controlled (3A algorithms such
+ as auto-exposure, and auto-focus can be bypassed).
+ The camera device supports basic manual control of the sensor image
+ acquisition related stages. This means the following controls are
+ guaranteed to be supported:
+
+ * Manual frame duration control
+ * android.sensor.frameDuration
+ * android.sensor.info.maxFrameDuration
+ * Manual exposure control
+ * android.sensor.exposureTime
+ * android.sensor.info.exposureTimeRange
+ * Manual sensitivity control
+ * android.sensor.sensitivity
+ * android.sensor.info.sensitivityRange
+ * Manual lens control (if the lens is adjustable)
+ * android.lens.*
+ * Manual flash control (if a flash unit is present)
+ * android.flash.*
+ * Manual black level locking
+ * android.blackLevel.lock
+ * Auto exposure lock
+ * android.control.aeLock
+
+ If any of the above 3A algorithms are enabled, then the camera
+ device will accurately report the values applied by 3A in the
+ result.
+
+ A given camera device may also support additional manual sensor controls,
+ but this capability only covers the above list of controls.
+
+ If this is supported, android.scaler.streamConfigurationMap will
+ additionally return a min frame duration that is greater than
+ zero for each supported size-format combination.
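+
+ A non-normative Java sketch (placeholder `builder` name; the exposure values are
+ arbitrary examples) of the manual control this capability guarantees:
+
+     builder.set(CaptureRequest.CONTROL_AE_MODE,
+         CameraMetadata.CONTROL_AE_MODE_OFF);
+     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L); // 10 ms
+     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);
+     builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L); // ~30 fps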
+ </notes>
+ </value>
+ <value optional="true">MANUAL_POST_PROCESSING
+ <notes>
+ The camera device post-processing stages can be manually controlled.
+ The camera device supports basic manual control of the image post-processing
+ stages. This means the following controls are guaranteed to be supported:
+
+ * Manual tonemap control
+ * android.tonemap.curve
+ * android.tonemap.mode
+ * android.tonemap.maxCurvePoints
+ * android.tonemap.gamma
+ * android.tonemap.presetCurve
+
+ * Manual white balance control
+ * android.colorCorrection.transform
+ * android.colorCorrection.gains
+ * Manual lens shading map control
+ * android.shading.mode
+ * android.statistics.lensShadingMapMode
+ * android.statistics.lensShadingMap
+ * android.lens.info.shadingMapSize
+ * Manual aberration correction control (if aberration correction is supported)
+ * android.colorCorrection.aberrationMode
+ * android.colorCorrection.availableAberrationModes
+ * Auto white balance lock
+ * android.control.awbLock
+
+ If auto white balance is enabled, then the camera device
+ will accurately report the values applied by AWB in the result.
+
+ A given camera device may also support additional post-processing
+ controls, but this capability only covers the above list of controls.
+ </notes>
+ </value>
+ <value optional="true">RAW
+ <notes>
+ The camera device supports outputting RAW buffers and
+ metadata for interpreting them.
+
+ Devices supporting the RAW capability allow both for
+ saving DNG files, and for direct application processing of
+ raw sensor images.
+
+ * RAW_SENSOR is supported as an output format.
+ * The maximum available resolution for RAW_SENSOR streams
+ will match either the value in
+ android.sensor.info.pixelArraySize or
+ android.sensor.info.preCorrectionActiveArraySize.
+ * All DNG-related optional metadata entries are provided
+ by the camera device.
+ </notes>
+ </value>
+ <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING
+ <notes>
+ The camera device supports the Zero Shutter Lag reprocessing use case.
+
+ * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
+ * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
+ that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
+ formats returned by {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
+ returns a non-empty int[] for each supported input format returned by {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
+ * Each size returned by {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
+ getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
+ getOutputSizes(ImageFormat.PRIVATE)}
+ * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
+ relative to the sensor's maximum capture rate (at that resolution).
+ * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
+ {@link android.graphics.ImageFormat#YUV_420_888} and
+ {@link android.graphics.ImageFormat#JPEG} formats.
+ * The maximum available resolution for PRIVATE streams
+ (both input/output) will match the maximum available
+ resolution of JPEG streams.
+ * Static metadata android.reprocess.maxCaptureStall.
+ * Only below controls are effective for reprocessing requests and
+ will be present in capture results, other controls in reprocess
+ requests will be ignored by the camera device.
+ * android.jpeg.*
+ * android.noiseReduction.mode
+ * android.edge.mode
+ * android.noiseReduction.availableNoiseReductionModes and
+ android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
+ </notes>
+ </value>
+ <value optional="true">READ_SENSOR_SETTINGS
+ <notes>
+ The camera device supports accurately reporting the sensor settings for many of
+ the sensor controls while the built-in 3A algorithm is running. This allows
+ reporting of sensor settings even when these settings cannot be manually changed.
+
+ The values reported for the following controls are guaranteed to be available
+ in the CaptureResult, including when 3A is enabled:
+
+ * Exposure control
+ * android.sensor.exposureTime
+ * Sensitivity control
+ * android.sensor.sensitivity
+ * Lens controls (if the lens is adjustable)
+ * android.lens.focusDistance
+ * android.lens.aperture
+
+ This capability is a subset of the MANUAL_SENSOR control capability, and will
+ always be included if the MANUAL_SENSOR capability is available.
+ </notes>
+ </value>
+ <value optional="true">BURST_CAPTURE
+ <notes>
+ The camera device supports capturing high-resolution images at >= 20 frames per
+ second, in at least the uncompressed YUV format, when post-processing settings are set
+ to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames
+ per second. Here, 'high resolution' means at least 8 megapixels, or the maximum
+ resolution of the device, whichever is smaller.
+
+ More specifically, this means that at least one output {@link
+ AIMAGE_FORMAT_YUV_420_888} size listed in
+ {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the
+ 'high resolution' defined above, and can be captured at at least 20 fps.
+ For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
+ {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, the camera device can
+ capture this size at a rate of at least 10 frames per second.
+ Also the android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range
+ where the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
+
+ If the device supports {@link AIMAGE_FORMAT_RAW10} or {@link
+ AIMAGE_FORMAT_RAW12}, then those formats can also be captured at the same rate
+ as the maximum-size YUV_420_888 resolution.
+
+ In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
+ and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
+ are also guaranteed to be `true` so burst capture with these two locks ON yields
+ consistent image output.
+ </notes>
+ </value>
+ <value optional="true" ndk_hidden="true">YUV_REPROCESSING
+ <notes>
+ The camera device supports the YUV_420_888 reprocessing use case, similar to
+ PRIVATE_REPROCESSING. This capability requires the camera device to support the
+ following:
+
+ * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
+ * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
+ YUV_420_888 is included in the lists of formats returned by
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
+ * {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
+ returns non-empty int[] for each supported input format returned by {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
+ * Each size returned by {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
+ getInputSizes(YUV_420_888)} is also included in {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
+ getOutputSizes(YUV_420_888)}
+ * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
+ relative to the sensor's maximum capture rate (at that resolution).
+ * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
+ {@link android.graphics.ImageFormat#YUV_420_888} and {@link
+ android.graphics.ImageFormat#JPEG} formats.
+ * The maximum available resolution for {@link
+ android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
+ maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
+ * Static metadata android.reprocess.maxCaptureStall.
+ * Only the below controls are effective for reprocessing requests and will be present
+ in capture results. The reprocess requests are from the original capture results that
+ are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
+ output buffers. All other controls in the reprocess requests will be ignored by the
+ camera device.
+ * android.jpeg.*
+ * android.noiseReduction.mode
+ * android.edge.mode
+ * android.reprocess.effectiveExposureFactor
+ * android.noiseReduction.availableNoiseReductionModes and
+ android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
+ </notes>
+ </value>
+ <value optional="true">DEPTH_OUTPUT
+ <notes>
+ The camera device can produce depth measurements from its field of view.
+
+ This capability requires the camera device to support the following:
+
+ * {@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.
+ * {@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
+ output format.
+ * This camera device, and all camera devices with the same android.lens.facing,
+ will list the following calibration entries in {@link ACameraMetadata} from both
+ {@link ACameraManager_getCameraCharacteristics} and
+ {@link ACameraCaptureSession_captureCallback_result}:
+ - android.lens.poseTranslation
+ - android.lens.poseRotation
+ - android.lens.intrinsicCalibration
+ - android.lens.radialDistortion
+ * The android.depth.depthIsExclusive entry is listed by this device.
+ * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
+ normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
+ format.
+
+ Generally, depth output operates at a slower frame rate than standard color capture,
+ so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
+ should be accounted for (see
+ {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
+ On a device that supports both depth and color-based output, to enable smooth preview,
+ using a repeating burst is recommended, where a depth-output target is only included
+ once every N frames, where N is the ratio between preview output rate and depth output
+ rate, including depth stall time.
+ </notes>
+ </value>
+ <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
+ <notes>
+ The device supports the constrained high speed video recording (frame rate >= 120fps)
+ use case. The camera device will support high speed capture sessions created by
+ {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
+ only accepts high speed request lists created by
+ {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+
+ A camera device can still support high speed video streaming by advertising the high speed
+ FPS ranges in android.control.aeAvailableTargetFpsRanges. In that case, all the normal
+ per-frame capture request control and synchronization requirements will apply to
+ the high speed fps ranges, the same as to all other fps ranges. This capability describes
+ the capability of a specialized operating mode with many limitations (see below), which
+ is only targeted at high speed video recording.
+
+ The supported high speed video sizes and fps ranges are specified in
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
+ To get desired output frame rates, the application is only allowed to select video size
+ and FPS range combinations provided by
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
+ The fps range can be controlled via android.control.aeTargetFpsRange.
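+
+ A brief Java sketch (placeholder `characteristics` name; `Size` and `Range` are
+ android.util types) for enumerating the legal combinations:
+
+     StreamConfigurationMap map = characteristics.get(
+         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+     for (Size size : map.getHighSpeedVideoSizes()) {
+         for (Range<Integer> fps : map.getHighSpeedVideoFpsRangesFor(size)) {
+             // Each (size, fps) pair is a valid high speed configuration.
+         }
+     }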
+
+ In this capability, the camera device will override aeMode, awbMode, and afMode to
+ ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+ controls will be overridden to be FAST. Therefore, no manual control of capture
+ and post-processing parameters is possible. All other controls operate the
+ same as when android.control.mode == AUTO. This means that all other
+ android.control.* fields continue to work, such as
+
+ * android.control.aeTargetFpsRange
+ * android.control.aeExposureCompensation
+ * android.control.aeLock
+ * android.control.awbLock
+ * android.control.effectMode
+ * android.control.aeRegions
+ * android.control.afRegions
+ * android.control.awbRegions
+ * android.control.afTrigger
+ * android.control.aePrecaptureTrigger
+
+ Outside of android.control.*, the following controls will work:
+
+ * android.flash.mode (TORCH mode only, automatic flash for still capture will not
+ work since aeMode is ON)
+ * android.lens.opticalStabilizationMode (if it is supported)
+ * android.scaler.cropRegion
+ * android.statistics.faceDetectMode (if it is supported)
+
+ For the high speed recording use case, the actual maximum supported frame rate may
+ be lower than what the camera can output, depending on the destination Surfaces for
+ the image data. For example, if the destination surface is from a video encoder,
+ the application needs to check whether the video encoder is capable of supporting the
+ high frame rate for a given video size, or it will end up with a lower recording
+ frame rate. If the destination surface is from a preview window, the actual preview
+ frame rate will be bounded by the screen refresh rate.
+
+ The camera device will only support up to 2 simultaneous high speed output surfaces
+ (preview and recording surfaces) in this mode. The above controls will be effective
+ only if all of the below conditions are true:
+
+ * The application creates a camera capture session with no more than 2 surfaces via
+ {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
+ targeted surfaces must be preview surface (either from
+ {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
+ recording surface (either from {@link android.media.MediaRecorder#getSurface} or
+ {@link android.media.MediaCodec#createInputSurface}).
+ * The stream sizes are selected from the sizes reported by
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
+ * The FPS ranges are selected from
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
+
+ When the above conditions are NOT satisfied,
+ {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
+ will fail.
+
+ Switching to a FPS range that has different maximum FPS may trigger some camera device
+ reconfigurations, which may introduce extra latency. It is recommended that
+ the application avoids unnecessary maximum target FPS changes as much as possible
+ during high speed streaming.
+ </notes>
+ </value>
+ </enum>
+ <description>List of capabilities that this camera device
+ advertises as fully supporting.</description>
+ <details>
+ A capability is a contract that the camera device makes in order
+ to be able to satisfy one or more use cases.
+
+ Listing a capability guarantees that the whole set of features
+ required to support a common use will all be available.
+
+ Using a subset of the functionality provided by an unsupported
+ capability may be possible on a specific camera device implementation;
+ to do this query each of android.request.availableRequestKeys,
+ android.request.availableResultKeys,
+ android.request.availableCharacteristicsKeys.
+
+ The following capabilities are guaranteed to be available on
+ android.info.supportedHardwareLevel `==` FULL devices:
+
+ * MANUAL_SENSOR
+ * MANUAL_POST_PROCESSING
+
+ Other capabilities may be available on either FULL or LIMITED
+ devices, but the application should query this key to be sure.
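+
+ A minimal Java sketch (placeholder `characteristics` name) for testing a capability
+ before relying on its guarantees:
+
+     int[] caps = characteristics.get(
+         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+     boolean hasManualSensor = false;
+     for (int cap : caps) {
+         if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
+             hasManualSensor = true;
+         }
+     }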
+ </details>
+ <hal_details>
+ Additional constraint details per-capability will be available
+ in the Compatibility Test Suite.
+
+ Minimum baseline requirements required for the
+ BACKWARD_COMPATIBLE capability are not explicitly listed.
+ Instead refer to "BC" tags and the camera CTS tests in the
+ android.hardware.camera2.cts package.
+
+ Listed controls that can be either request or result (e.g.
+ android.sensor.exposureTime) must be available both in the
+ request and the result in order to be considered to be
+ capability-compliant.
+
+ For example, if the HAL claims to support MANUAL control,
+ then exposure time must be configurable via the request _and_
+ the actual exposure applied must be available via
+ the result.
+
+ If MANUAL_SENSOR is omitted, the HAL may choose to omit the
+ android.scaler.availableMinFrameDurations static property entirely.
+
+ For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
+ hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
+
+ Devices that support the MANUAL_SENSOR capability must support the
+ CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
+
+ Devices that support the PRIVATE_REPROCESSING capability or the
+ YUV_REPROCESSING capability must support the
+ CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
+
+ For DEPTH_OUTPUT, the depth-format keys
+ android.depth.availableDepthStreamConfigurations,
+ android.depth.availableDepthMinFrameDurations,
+ android.depth.availableDepthStallDurations must be available, in
+ addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
+ enum notes. The entry android.depth.maxDepthSamples must be available
+ if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
+ DEPTH).
+ </hal_details>
+ </entry>
+ <entry name="availableRequestKeys" type="int32" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>A list of all keys that the camera device has available
+ to use with {@link ACaptureRequest}.</description>
+
+ <details>Attempting to set a key into a CaptureRequest that is not
+ listed here will result in an invalid request and will be rejected
+ by the camera device.
+
+ This field can be used to query the feature set of a camera device
+ at a more granular level than capabilities. This is especially
+ important for optional keys that are not listed under any capability
+ in android.request.availableCapabilities.
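+
+ For example, a Java fragment (placeholder `characteristics` name; TONEMAP_GAMMA is
+ just one optional key used for illustration) that probes for a key before setting it:
+
+     List<CaptureRequest.Key<?>> requestKeys =
+         characteristics.getAvailableCaptureRequestKeys();
+     boolean hasGamma = requestKeys.contains(CaptureRequest.TONEMAP_GAMMA);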
+ </details>
+ <hal_details>
+ Vendor tags must not be listed here. Use the vendor tag metadata
+ extensions C api instead (refer to camera3.h for more details).
+
+ Setting/getting vendor tags will be checked against the metadata
+ vendor extensions API and not against this field.
+
+ The HAL must not consume any request tags that are not listed either
+ here or in the vendor tag list.
+
+ The public camera2 API will always make the vendor tags visible
+ via
+ {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
+ </hal_details>
+ </entry>
+ <entry name="availableResultKeys" type="int32" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>A list of all keys that the camera device has available
+ to query with {@link ACameraMetadata} from
+ {@link ACameraCaptureSession_captureCallback_result}.</description>
+
+ <details>Attempting to get a key from a CaptureResult that is not
+ listed here will always return a `null` value. Getting a key from
+ a CaptureResult that is listed here will generally never return a `null`
+ value.
+
+ The following keys may return `null` unless they are enabled:
+
+ * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
+
+ (Those sometimes-null keys will nevertheless be listed here
+ if they are available.)
+
+ This field can be used to query the feature set of a camera device
+ at a more granular level than capabilities. This is especially
+ important for optional keys that are not listed under any capability
+ in android.request.availableCapabilities.
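+
+ As an illustrative Java fragment (placeholder `builder` and `result` names) for one
+ such sometimes-null key:
+
+     builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
+         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
+     // In the matching result, the map is then guaranteed to be non-null:
+     LensShadingMap shadingMap =
+         result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);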
+ </details>
+ <hal_details>
+ Tags listed here must always have an entry in the result metadata,
+ even if that size is 0 elements. Only array-type tags (e.g. lists,
+ matrices, strings) are allowed to have 0 elements.
+
+ Vendor tags must not be listed here. Use the vendor tag metadata
+ extensions C api instead (refer to camera3.h for more details).
+
+ Setting/getting vendor tags will be checked against the metadata
+ vendor extensions API and not against this field.
+
+ The HAL must not produce any result tags that are not listed either
+ here or in the vendor tag list.
+
+ The public camera2 API will always make the vendor tags visible via {@link
+ android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
+ </hal_details>
+ </entry>
+ <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>A list of all keys that the camera device has available
+ to query with {@link ACameraMetadata} from
+ {@link ACameraManager_getCameraCharacteristics}.</description>
+ <details>This entry follows the same rules as
+ android.request.availableResultKeys (except that it applies for
+ CameraCharacteristics instead of CaptureResult). See above for more
+ details.
+ </details>
+ <hal_details>
+ Keys listed here must always have an entry in the static info metadata,
+ even if that size is 0 elements. Only array-type tags (e.g. lists,
+ matrices, strings) are allowed to have 0 elements.
+
+ Vendor tags must not be listed here. Use the vendor tag metadata
+ extensions C api instead (refer to camera3.h for more details).
+
+ Setting/getting vendor tags will be checked against the metadata
+ vendor extensions API and not against this field.
+
+ The HAL must not have any tags in its static info that are not listed
+ either here or in the vendor tag list.
+
+ The public camera2 API will always make the vendor tags visible
+ via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
+ </hal_details>
+ </entry>
+ </static>
+ </section>
+ <section name="scaler">
+ <controls>
+ <entry name="cropRegion" type="int32" visibility="public"
+ container="array" typedef="rectangle" hwlevel="legacy">
+ <array>
+ <size>4</size>
+ </array>
+ <description>The desired region of the sensor to read out for this capture.</description>
+ <units>Pixel coordinates relative to
+ android.sensor.info.activeArraySize</units>
+ <details>
+ This control can be used to implement digital zoom.
+
+ The data representation is int[4], which maps to (left, top, width, height).
+
+ The crop region coordinate system is based off
+ android.sensor.info.activeArraySize, with `(0, 0)` being the
+ top-left corner of the sensor active array.
+
+ Output streams use this rectangle to produce their output,
+ cropping to a smaller region if necessary to maintain the
+ stream's aspect ratio, then scaling the sensor input to
+ match the output's configured resolution.
+
+ The crop region is applied after the RAW to other color
+ space (e.g. YUV) conversion. Since raw streams
+ (e.g. RAW16) don't have the conversion stage, they are not
+ croppable. The crop region will be ignored by raw streams.
+
+ For non-raw streams, any additional per-stream cropping will
+ be done to maximize the final pixel area of the stream.
+
+ For example, if the crop region is set to a 4:3 aspect
+ ratio, then 4:3 streams will use the exact crop
+ region. 16:9 streams will further crop vertically
+ (letterbox).
+
+ Conversely, if the crop region is set to a 16:9, then 4:3
+ outputs will crop horizontally (pillarbox), and 16:9
+ streams will match exactly. These additional crops will
+ be centered within the crop region.
+
+ The width and height of the crop region cannot
+ be set to be smaller than
+ `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
+ `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
+
+ The camera device may adjust the crop region to account
+ for rounding and other hardware requirements; the final
+ crop region used will be included in the output capture
+ result.
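+
+ A rough Java sketch of a centered digital zoom (placeholder `characteristics` and
+ `builder` names; the 2x factor is an arbitrary example), clamped to the advertised
+ maximum:
+
+     Rect active = characteristics.get(
+         CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+     float zoom = Math.min(2.0f, characteristics.get(
+         CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM));
+     int cropW = (int) Math.floor(active.width() / zoom);
+     int cropH = (int) Math.floor(active.height() / zoom);
+     int left = (active.width() - cropW) / 2;
+     int top = (active.height() - cropH) / 2;
+     builder.set(CaptureRequest.SCALER_CROP_REGION,
+         new Rect(left, top, left + cropW, top + cropH));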
+ </details>
+ <hal_details>
+ The output streams must maintain square pixels at all
+ times, no matter what the relative aspect ratios of the
+ crop region and the stream are. Negative values for
+ corner are allowed for raw output if full pixel array is
+ larger than active pixel array. Width and height may be
+ rounded to nearest larger supportable width, especially
+ for raw output, where only a few fixed scales may be
+ possible.
+
+ For a set of output streams configured, if the sensor output is cropped to a smaller
+ size than the active array size, the HAL needs to follow the cropping rules below:
+
+ * The HAL needs to handle the cropRegion as if the sensor crop size is the effective
+ active array size. More specifically, the HAL must transform the request cropRegion from
+ android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:
+ 1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor
+ cropped pixel area by (tx, ty),
+ where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
+ and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
+ (sensorCrop.top, sensorCrop.left) coordinate is based off the
+ android.sensor.info.activeArraySize.
+ 2. Scale the width and height of the requested cropRegion with the scaling factors
+ sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height,
+ respectively.
+ Once this new cropRegion is calculated, the HAL must use this region to crop the image
+ with regard to the sensor crop size (effective active array size). The HAL still needs
+ to follow the general cropping rule for this new cropRegion and the effective active
+ array size.
+
+ * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
+ The HAL needs to convert the new cropRegion generated above w.r.t. the full active array
+ size. The reported cropRegion may be slightly different from the requested cropRegion,
+ since the HAL may adjust the crop region to account for rounding, conversion error, or
+ other hardware limitations.
+
+ HAL2.x uses only (x, y, width)
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ </controls>
+ <static>
+ <entry name="availableFormats" type="int32"
+ visibility="hidden" deprecated="true" enum="true"
+ container="array" typedef="imageFormat">
+ <array>
+ <size>n</size>
+ </array>
+ <enum>
+ <value optional="true" id="0x20">RAW16
+ <notes>
+ RAW16 is a standard, cross-platform format for raw image
+ buffers with 16-bit pixels.
+
+ Buffers of this format are typically expected to have a
+ Bayer Color Filter Array (CFA) layout, which is given in
+ android.sensor.info.colorFilterArrangement. Sensors with
+ CFAs that are not representable by a format in
+ android.sensor.info.colorFilterArrangement should not
+ use this format.
+
+ Buffers of this format will also follow the constraints given for
+ RAW_OPAQUE buffers, but with relaxed performance constraints.
+
+ This format is intended to give users access to the full contents
+ of the buffers coming directly from the image sensor prior to any
+ cropping or scaling operations, and all coordinate systems for
+ metadata used for this format are relative to the size of the
+ active region of the image sensor before any geometric distortion
+ correction has been applied (i.e.
+ android.sensor.info.preCorrectionActiveArraySize). Supported
+ dimensions for this format are limited to the full dimensions of
+ the sensor (e.g. either android.sensor.info.pixelArraySize or
+ android.sensor.info.preCorrectionActiveArraySize will be the
+ only supported output size).
+
+ See android.scaler.availableInputOutputFormatsMap for
+ the full set of performance guarantees.
+ </notes>
+ </value>
+ <value optional="true" id="0x24">RAW_OPAQUE
+ <notes>
+ RAW_OPAQUE (or
+ {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}
+ as referred in public API) is a format for raw image buffers
+ coming from an image sensor.
+
+ The actual structure of buffers of this format is
+ platform-specific, but must follow several constraints:
+
+ 1. No image post-processing operations may have been applied to
+ buffers of this type. These buffers contain raw image data coming
+ directly from the image sensor.
+ 1. If a buffer of this format is passed to the camera device for
+ reprocessing, the resulting images will be identical to the images
+ produced if the buffer had come directly from the sensor and was
+ processed with the same settings.
+
+ The intended use for this format is to allow access to the native
+ raw format buffers coming directly from the camera sensor without
+ any additional conversions or decrease in framerate.
+
+ See android.scaler.availableInputOutputFormatsMap for the full set of
+ performance guarantees.
+ </notes>
+ </value>
+ <value optional="true" id="0x32315659">YV12
+ <notes>YCrCb 4:2:0 Planar</notes>
+ </value>
+ <value optional="true" id="0x11">YCrCb_420_SP
+ <notes>NV21</notes>
+ </value>
+ <value id="0x22">IMPLEMENTATION_DEFINED
+ <notes>System internal format, not application-accessible</notes>
+ </value>
+ <value id="0x23">YCbCr_420_888
+ <notes>Flexible YUV420 Format</notes>
+ </value>
+ <value id="0x21">BLOB
+ <notes>JPEG format</notes>
+ </value>
+ </enum>
+ <description>The list of image formats that are supported by this
+ camera device for output streams.</description>
+ <details>
+ All camera devices will support JPEG and YUV_420_888 formats.
+
+ When set to YUV_420_888, application can access the YUV420 data directly.
+ </details>
+ <hal_details>
+ These format values are from HAL_PIXEL_FORMAT_* in
+ system/core/include/system/graphics.h.
+
+ When IMPLEMENTATION_DEFINED is used, the platform
+ gralloc module will select a format based on the usage flags provided
+ by the camera HAL device and the other endpoint of the stream. It is
+ usually used by preview and recording streams, where the application doesn't
+ need to access the image data.
+
+ YCbCr_420_888 format must be supported by the HAL. When an image stream
+ needs CPU/application direct access, this format will be used.
+
+ The BLOB format must be supported by the HAL. This is used for the JPEG stream.
+
+ A RAW_OPAQUE buffer should contain only pixel data. It is strongly
+ recommended that any information used by the camera device when
+ processing images is fully expressed by the result metadata
+ for that image buffer.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
+ container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <description>The minimum frame duration that is supported
+ for each resolution in android.scaler.availableJpegSizes.
+ </description>
+ <units>Nanoseconds</units>
+ <range>TODO: Remove property.</range>
+ <details>
+ This corresponds to the minimum steady-state frame duration when only
+ that JPEG stream is active and captured in a burst, with all
+ processing (typically in android.*.mode) set to FAST.
+
+ When multiple streams are configured, the minimum
+ frame duration will be &gt;= max(individual stream min
+ durations)</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableJpegSizes" type="int32" visibility="hidden"
+ deprecated="true" container="array" typedef="size">
+ <array>
+ <size>n</size>
+ <size>2</size>
+ </array>
+ <description>The JPEG resolutions that are supported by this camera device.</description>
+ <range>TODO: Remove property.</range>
+ <details>
+ The resolutions are listed as `(width, height)` pairs. All camera devices will support
+ sensor maximum resolution (defined by android.sensor.info.activeArraySize).
+ </details>
+ <hal_details>
+ The HAL must include sensor maximum resolution
+ (defined by android.sensor.info.activeArraySize),
+ and should include half/quarter of sensor maximum resolution.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableMaxDigitalZoom" type="float" visibility="public"
+ hwlevel="legacy">
+ <description>The maximum ratio between both active area width
+ and crop region width, and active area height and
+ crop region height, for android.scaler.cropRegion.
+ </description>
+ <units>Zoom scale factor</units>
+ <range>&gt;=1</range>
+ <details>
+ This represents the maximum amount of zooming possible by
+ the camera device, or equivalently, the minimum cropping
+ window size.
+
+ Crop regions that have a width or height that is smaller
+ than this ratio allows will be rounded up to the minimum
+ allowed size by the camera device.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
+ container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <description>For each available processed output size (defined in
+ android.scaler.availableProcessedSizes), this property lists the
+ minimum supportable frame duration for that size.
+ </description>
+ <units>Nanoseconds</units>
+ <details>
+ This should correspond to the frame duration when only that processed
+ stream is active, with all processing (typically in android.*.mode)
+ set to FAST.
+
+ When multiple streams are configured, the minimum frame duration will
+ be &gt;= max(individual stream min durations).
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableProcessedSizes" type="int32" visibility="hidden"
+ deprecated="true" container="array" typedef="size">
+ <array>
+ <size>n</size>
+ <size>2</size>
+ </array>
+ <description>The resolutions available for use with
+ processed output streams, such as YV12, NV12, and
+ platform opaque YUV/RGB streams to the GPU or video
+ encoders.</description>
+ <details>
+ The resolutions are listed as `(width, height)` pairs.
+
+ For a given use case, the actual maximum supported resolution
+ may be lower than what is listed here, depending on the destination
+ Surface for the image data. For example, for recording video,
+ the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ can provide.
+
+ Please reference the documentation for the image data destination to
+ check if it limits the maximum size for image data.
+ </details>
+ <hal_details>
+ For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+ the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
+ and each below resolution if it is smaller than or equal to the sensor
+ maximum resolution (if they are not listed in JPEG sizes already):
+
+ * 240p (320 x 240)
+ * 480p (640 x 480)
+ * 720p (1280 x 720)
+ * 1080p (1920 x 1080)
+
+ For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
+ the HAL only has to list up to the maximum video size supported by the device.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableRawMinDurations" type="int64" deprecated="true"
+ container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ For each available raw output size (defined in
+ android.scaler.availableRawSizes), this property lists the minimum
+ supportable frame duration for that size.
+ </description>
+ <units>Nanoseconds</units>
+ <details>
+ Should correspond to the frame duration when only the raw stream is
+ active.
+
+ When multiple streams are configured, the minimum
+ frame duration will be &gt;= max(individual stream min
+ durations).</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="availableRawSizes" type="int32" deprecated="true"
+ container="array" typedef="size">
+ <array>
+ <size>n</size>
+ <size>2</size>
+ </array>
+ <description>The resolutions available for use with raw
+ sensor output streams, listed as width,
+ height</description>
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.scaler.cropRegion" kind="controls">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
+ typedef="reprocessFormatsMap">
+ <description>The mapping of image formats that are supported by this
+ camera device for input streams, to their corresponding output formats.
+ </description>
+ <details>
+ All camera devices with at least 1
+ android.request.maxNumInputStreams will have at least one
+ available input format.
+
+ The camera device will support the following map of formats,
+ if its dependent capability (android.request.availableCapabilities) is supported:
+
+ Input Format | Output Format | Capability
+ :-------------------------------------------------|:--------------------------------------------------|:----------
+ {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#JPEG} | PRIVATE_REPROCESSING
+ {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#YUV_420_888} | PRIVATE_REPROCESSING
+ {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING
+ {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#YUV_420_888} | YUV_REPROCESSING
+
+ PRIVATE refers to a device-internal format that is not directly application-visible. A
+ PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
+ with {@link android.graphics.ImageFormat#PRIVATE} as the format.
+
+ For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
+ or output will never hurt maximum frame rate (i.e. {@link
+ android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
+ getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0).
+
+ Attempting to configure an input stream with output streams not
+ listed as available in this map is not valid.
+ </details>
+ <hal_details>
+ For the formats, see `system/core/include/system/graphics.h` for a definition
+ of the image format enumerations. The PRIVATE format refers to the
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
+ the actual format by using the gralloc usage flags.
+ For the ZSL use case in particular, the HAL could choose an appropriate format (a
+ partially processed YUV or RAW based format) by checking the format and
+ GRALLOC_USAGE_HW_CAMERA_ZSL.
+ See camera3.h for more details.
+
+ This value is encoded as a variable-size array-of-arrays.
+ The inner array always contains `[format, length, ...]` where
+ `...` has `length` elements. An inner array is followed by another
+ inner array if the total metadata entry size hasn't yet been exceeded.
+
+ A code sample to read/write this encoding (with a device that
+ supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
+ and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
+
+     // Reading: walk the array-of-arrays, one [format, length, ...]
+     // inner array at a time.
+     int32_t* contents = &amp;entry.data.i32[0];
+     for (size_t i = 0; i &lt; entry.count; ) {
+         int32_t format = contents[i++];
+         int32_t length = contents[i++];
+         int32_t output_formats[length];
+         memcpy(&amp;output_formats[0], &amp;contents[i],
+                length * sizeof(int32_t));
+         i += length;
+     }
+
+     // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
+     int32_t contents[] = {
+         IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
+         YUV_420_888, 2, YUV_420_888, BLOB,
+     };
+     update_camera_metadata_entry(metadata, index, &amp;contents[0],
+         sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
+
+ If the HAL claims to support any of the capabilities listed in the
+ above details, then it must also support all the input-output
+ combinations listed for that capability. It can optionally support
+ additional formats if it so chooses.
+ </hal_details>
+ <tag id="REPROC" />
+ </entry>
+ <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public"
+ enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ <size>4</size>
+ </array>
+ <enum>
+ <value>OUTPUT</value>
+ <value>INPUT</value>
+ </enum>
+ <description>The available stream configurations that this
+ camera device supports
+ (i.e. format, width, height, output/input stream).
+ </description>
+ <details>
+ The configurations are listed as `(format, width, height, input?)`
+ tuples.
+
+ For a given use case, the actual maximum supported resolution
+ may be lower than what is listed here, depending on the destination
+ Surface for the image data. For example, for recording video,
+ the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ can provide.
+
+ Please reference the documentation for the image data destination to
+ check if it limits the maximum size for image data.
+
+ Not all output formats may be supported in a configuration with
+ an input stream of a particular format. For more details, see
+ android.scaler.availableInputOutputFormatsMap.
+
+ The following table describes the minimum required output stream
+ configurations based on the hardware level
+ (android.info.supportedHardwareLevel):
+
+ Format | Size | Hardware Level | Notes
+ :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+ JPEG | android.sensor.info.activeArraySize | Any |
+ JPEG | 1920x1080 (1080p) | Any | if 1080p &lt;= activeArraySize
+ JPEG | 1280x720 (720p) | Any | if 720p &lt;= activeArraySize
+ JPEG | 640x480 (480p) | Any | if 480p &lt;= activeArraySize
+ JPEG | 320x240 (240p) | Any | if 240p &lt;= activeArraySize
+ YUV_420_888 | all output sizes available for JPEG | FULL |
+ YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED |
+ IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
+
+ Refer to android.request.availableCapabilities for additional
+ mandatory stream configurations on a per-capability basis.
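+
+ As a sketch of consuming this entry's raw layout (`entry`, `wanted_format`, and
+ the `best_*` names are illustrative, not part of the API), the largest output
+ size for one format can be found by scanning the (format, width, height, input?)
+ tuples:
+
+     // Scan the n x 4 int32 tuples for the largest OUTPUT size of a format.
+     int64_t best_area = 0;
+     int32_t best_w = 0, best_h = 0;
+     for (size_t i = 0; i + 3 &lt; entry.count; i += 4) {
+         int32_t format   = entry.data.i32[i + 0];
+         int32_t width    = entry.data.i32[i + 1];
+         int32_t height   = entry.data.i32[i + 2];
+         int32_t is_input = entry.data.i32[i + 3]; // per the enum: OUTPUT == 0
+         if (format != wanted_format || is_input) continue;
+         int64_t area = (int64_t)width * height;
+         if (area &gt; best_area) {
+             best_area = area;
+             best_w = width;
+             best_h = height;
+         }
+     }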
+ </details>
+ <hal_details>
+ It is recommended (but not mandatory) to also include half/quarter
+ of sensor maximum resolution for JPEG formats (regardless of hardware
+ level).
+
+ (The following is a rewording of the above required table):
+
+ For JPEG format, the sizes may be restricted by below conditions:
+
+ * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
+ (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
+ (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
+ it does not have to be included in the supported JPEG sizes.
+ * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
+ the dimensions being a multiple of 16.
+
+ Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
+ However, the largest JPEG size must be as close as possible to the sensor maximum
+ resolution given the above constraints. After aspect ratio adjustments, any
+ additional size reduction due to other issues must be less than 3% in area. For
+ example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has
+ aspect ratio 4:3, and the JPEG encoder alignment requirement is 16, then the
+ maximum JPEG size will be 3264x2448.
+
+ For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+ the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
+ here as output streams.
+
+ It must also include each below resolution if it is smaller than or
+ equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
+ formats), as output streams:
+
+ * 240p (320 x 240)
+ * 480p (640 x 480)
+ * 720p (1280 x 720)
+ * 1080p (1920 x 1080)
+
+ For LIMITED capability devices
+ (`android.info.supportedHardwareLevel == LIMITED`),
+ the HAL only has to list up to the maximum video size
+ supported by the device.
+
+ Regardless of hardware level, every output resolution available for
+ YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
+
+ This supersedes the following fields, which are now deprecated:
+
+ * availableFormats
+ * available[Processed,Raw,Jpeg]Sizes
+ </hal_details>
+ </entry>
+ <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public"
+ container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
+ <array>
+ <size>4</size>
+ <size>n</size>
+ </array>
+ <description>This lists the minimum frame duration for each
+ format/size combination.
+ </description>
+ <units>(format, width, height, ns) x n</units>
+ <details>
+ This should correspond to the frame duration when only that
+ stream is active, with all processing (typically in android.*.mode)
+ set to either OFF or FAST.
+
+ When multiple streams are used in a request, the minimum frame
+ duration will be max(individual stream min durations).
+
+ The minimum frame duration of a stream (of a particular format, size)
+ is the same regardless of whether the stream is input or output.
+
+ See android.sensor.frameDuration and
+ android.scaler.availableStallDurations for more details about
+ calculating the max frame rate.
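+
+ A sketch of the lookup (`entry`, the `wanted_*` inputs, and `min_duration` are
+ illustrative names; all four values of each tuple are stored as int64):
+
+     // Find the minimum frame duration for one (format, size) combination.
+     int64_t min_duration = -1; // -1: combination not listed
+     for (size_t i = 0; i + 3 &lt; entry.count; i += 4) {
+         if (entry.data.i64[i + 0] == wanted_format &amp;&amp;
+             entry.data.i64[i + 1] == wanted_width &amp;&amp;
+             entry.data.i64[i + 2] == wanted_height) {
+             min_duration = entry.data.i64[i + 3]; // nanoseconds
+             break;
+         }
+     }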
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="availableStallDurations" type="int64" visibility="ndk_public"
+ container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
+ <array>
+ <size>4</size>
+ <size>n</size>
+ </array>
+ <description>This lists the maximum stall duration for each
+ output format/size combination.
+ </description>
+ <units>(format, width, height, ns) x n</units>
+ <details>
+ A stall duration is how much extra time would get added
+ to the normal minimum frame duration for a repeating request
+ that has streams with non-zero stall.
+
+ For example, consider JPEG captures which have the following
+ characteristics:
+
+ * JPEG streams act like processed YUV streams in requests for which
+ they are not included; in requests in which they are directly
+ referenced, they act as JPEG streams. This is because supporting a
+ JPEG stream requires the underlying YUV data to always be ready for
+ use by a JPEG encoder, but the encoder will only be used (and impact
+ frame duration) on requests that actually reference a JPEG stream.
+ * The JPEG processor can run concurrently to the rest of the camera
+ pipeline, but cannot process more than 1 capture at a time.
+
+ In other words, using a repeating YUV request would result
+ in a steady frame rate (let's say it's 30 FPS). If a single
+ JPEG request is submitted periodically, the frame rate will stay
+ at 30 FPS (as long as we wait for the previous JPEG to return each
+ time). If we try to submit a repeating YUV + JPEG request, then
+ the frame rate will drop from 30 FPS.
+
+ In general, submitting a new request with a non-0 stall time
+ stream will _not_ cause a frame rate drop unless there are still
+ outstanding buffers for that stream from previous requests.
+
+ Submitting a repeating request with streams (call this `S`)
+ is the same as setting the minimum frame duration to the
+ normal minimum frame duration corresponding to `S`, plus the
+ maximum stall duration for `S`.
+
+ If interleaving requests with and without a stall duration,
+ a request will stall by the maximum of the remaining times
+ for each can-stall stream with outstanding buffers.
+
+ This means that a stalling request will not have an exposure start
+ until the stall has completed.
+
+ This should correspond to the stall duration when only that stream is
+ active, with all processing (typically in android.*.mode) set to FAST
+ or OFF. Setting any of the processing modes to HIGH_QUALITY
+ effectively results in an indeterminate stall duration for all
+ streams in a request (the regular stall calculation rules are
+ ignored).
+
+ The following formats may always have a stall duration:
+
+ * {@link AIMAGE_FORMAT_JPEG}
+ * {@link AIMAGE_FORMAT_RAW16}
+
+ The following formats will never have a stall duration:
+
+ * {@link AIMAGE_FORMAT_YUV_420_888}
+ * {@link AIMAGE_FORMAT_RAW10}
+
+ All other formats may or may not have an allowed stall duration on
+ a per-capability basis; refer to android.request.availableCapabilities
+ for more details.
+
+ See android.sensor.frameDuration for more information about
+ calculating the max frame rate (absent stalls).
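+
+ As a sketch of the rule above (the `min_dur`, `stall`, and `num_streams` inputs
+ are hypothetical, taken from android.scaler.availableMinFrameDurations and this
+ key for the streams in the request):
+
+     // Steady-state frame duration for a repeating request: the maximum
+     // of the per-stream minimum durations, plus the largest stall.
+     int64_t frame_duration = 0;
+     int64_t max_stall = 0;
+     for (size_t i = 0; i &lt; num_streams; i++) {
+         if (min_dur[i] &gt; frame_duration) frame_duration = min_dur[i];
+         if (stall[i] &gt; max_stall) max_stall = stall[i];
+     }
+     frame_duration += max_stall;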
+ </details>
+ <hal_details>
+ If possible, it is recommended that all non-JPEG formats
+ (such as RAW16) not have a stall duration. RAW10, RAW12, RAW_OPAQUE
+ and IMPLEMENTATION_DEFINED must not have stall durations.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ <entry name="streamConfigurationMap" type="int32" visibility="java_public"
+ synthetic="true" typedef="streamConfigurationMap"
+ hwlevel="legacy">
+ <description>The available stream configurations that this
+ camera device supports; also includes the minimum frame durations
+ and the stall durations for each format/size combination.
+ </description>
+ <details>
+ All camera devices will support sensor maximum resolution (defined by
+ android.sensor.info.activeArraySize) for the JPEG format.
+
+ For a given use case, the actual maximum supported resolution
+ may be lower than what is listed here, depending on the destination
+ Surface for the image data. For example, for recording video,
+ the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ can provide.
+
+ Please reference the documentation for the image data destination to
+ check if it limits the maximum size for image data.
+
+ The following table describes the minimum required output stream
+ configurations based on the hardware level
+ (android.info.supportedHardwareLevel):
+
+ Format | Size | Hardware Level | Notes
+ :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
+ {@link android.graphics.ImageFormat#JPEG} | android.sensor.info.activeArraySize (*1) | Any |
+ {@link android.graphics.ImageFormat#JPEG} | 1920x1080 (1080p) | Any | if 1080p &lt;= activeArraySize
+ {@link android.graphics.ImageFormat#JPEG} | 1280x720 (720p) | Any | if 720p &lt;= activeArraySize
+ {@link android.graphics.ImageFormat#JPEG} | 640x480 (480p) | Any | if 480p &lt;= activeArraySize
+ {@link android.graphics.ImageFormat#JPEG} | 320x240 (240p) | Any | if 240p &lt;= activeArraySize
+ {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG | FULL |
+ {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG, up to the maximum video size | LIMITED |
+ {@link android.graphics.ImageFormat#PRIVATE} | same as YUV_420_888 | Any |
+
+ Refer to android.request.availableCapabilities and {@link
+ android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
+ stream configurations on a per-capability basis.
+
+ *1: For JPEG format, the sizes may be restricted by below conditions:
+
+ * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
+ (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
+ (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
+ it does not have to be included in the supported JPEG sizes.
+ * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
+ the dimensions being a multiple of 16.
+
+ Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
+ However, the largest JPEG size will be as close as possible to the sensor maximum
+ resolution given the above constraints. After aspect ratio adjustments, any
+ additional size reduction due to other issues must be less than 3% in area. For
+ example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has
+ aspect ratio 4:3, and the JPEG encoder alignment requirement is 16, then the
+ maximum JPEG size will be 3264x2448.
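+
+ A sketch of the arithmetic in that example (values hardcoded from the text above):
+
+     // Largest 4:3, 16-pixel-aligned JPEG size inside a 3280x2464 sensor.
+     int32_t sensor_w = 3280, sensor_h = 2464, align = 16;
+     // At full width, a 4:3 frame needs 3280 * 3 / 4 = 2460 rows, which
+     // fits in the 2464 available but is not 16-aligned, so round the
+     // height down to the alignment boundary first.
+     int32_t jpeg_h = (sensor_w * 3 / 4 / align) * align; // 2460 -> 2448
+     int32_t jpeg_w = jpeg_h * 4 / 3;                     // -> 3264
+     // 3264x2448 is ~1.1% smaller in area than 3280x2464, within the
+     // 3% bound required above.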
+ </details>
+ <hal_details>
+ Do not set this property directly
+ (it is synthetic and will not be available at the HAL layer);
+ set the android.scaler.availableStreamConfigurations instead.
+
+ Not all output formats may be supported in a configuration with
+ an input stream of a particular format. For more details, see
+ android.scaler.availableInputOutputFormatsMap.
+
+ It is recommended (but not mandatory) to also include half/quarter
+ of sensor maximum resolution for JPEG formats (regardless of hardware
+ level).
+
+ (The following is a rewording of the above required table):
+
+ The HAL must include sensor maximum resolution (defined by
+ android.sensor.info.activeArraySize).
+
+ For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+ the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
+ here as output streams.
+
+ It must also include each below resolution if it is smaller than or
+ equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
+ formats), as output streams:
+
+ * 240p (320 x 240)
+ * 480p (640 x 480)
+ * 720p (1280 x 720)
+ * 1080p (1920 x 1080)
+
+ For LIMITED capability devices
+ (`android.info.supportedHardwareLevel == LIMITED`),
+ the HAL only has to list up to the maximum video size
+ supported by the device.
+
+ Regardless of hardware level, every output resolution available for
+ YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
+
+ This supersedes the following fields, which are now deprecated:
+
+ * availableFormats
+ * available[Processed,Raw,Jpeg]Sizes
+ </hal_details>
+ </entry>
+ <entry name="croppingType" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>CENTER_ONLY
+ <notes>
+ The camera device only supports centered crop regions.
+ </notes>
+ </value>
+ <value>FREEFORM
+ <notes>
+ The camera device supports arbitrarily chosen crop regions.
+ </notes>
+ </value>
+ </enum>
+ <description>The crop type that this camera device supports.</description>
+ <details>
+ When passing a non-centered crop region (android.scaler.cropRegion) to a camera
+ device that only supports CENTER_ONLY cropping, the camera device will move the
+ crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
+ and keep the crop region width and height unchanged. The camera device will return the
+ final used crop region in metadata result android.scaler.cropRegion.
+
+ Camera devices that support FREEFORM cropping will support any crop region that
+ is inside of the active array. The camera device will apply the same crop region and
+ return the final used crop region in capture result metadata android.scaler.cropRegion.
+
+ LEGACY capability devices will only support CENTER_ONLY cropping.
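+
+ A sketch of the CENTER_ONLY behavior (variable names are illustrative;
+ coordinates are relative to the top-left of the active array, as for
+ android.scaler.cropRegion):
+
+     // Recenter a requested crop region, keeping its size unchanged.
+     int32_t centered_left = (active_width  - crop_width)  / 2;
+     int32_t centered_top  = (active_height - crop_height) / 2;
+     // The capture result will report (centered_left, centered_top,
+     // crop_width, crop_height) in android.scaler.cropRegion.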
+ </details>
+ </entry>
+ </static>
+ </section>
+ <section name="sensor">
+ <controls>
+ <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
+ <description>Duration each pixel is exposed to
+ light.</description>
+ <units>Nanoseconds</units>
+ <range>android.sensor.info.exposureTimeRange</range>
+ <details>If the sensor can't expose this exact duration, it will shorten the
+ duration exposed to the nearest possible value (rather than expose longer).
+ The final exposure time used will be available in the output capture result.
+
+ This control is only effective if android.control.aeMode or android.control.mode is set to
+ OFF; otherwise the auto-exposure algorithm will override this value.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
+ <description>Duration from start of frame exposure to
+ start of next frame exposure.</description>
+ <units>Nanoseconds</units>
+ <range>See android.sensor.info.maxFrameDuration,
+ ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS. The duration
+ is capped to `max(duration, exposureTime + overhead)`.</range>
+ <details>
+ The maximum frame rate that can be supported by a camera subsystem is
+ a function of many factors:
+
+ * Requested resolutions of output image streams
+ * Availability of binning / skipping modes on the imager
+ * The bandwidth of the imager interface
+ * The bandwidth of the various ISP processing blocks
+
+ Since these factors can vary greatly between different ISPs and
+ sensors, the camera abstraction tries to represent the bandwidth
+ restrictions with as simple a model as possible.
+
+ The model presented has the following characteristics:
+
+ * The image sensor is always configured to output the smallest
+ resolution possible given the application's requested output stream
+ sizes. The smallest resolution is defined as being at least as large
+ as the largest requested output stream size; the camera pipeline must
+ never digitally upsample sensor data when the crop region covers the
+ whole sensor. In general, this means that if only small output stream
+ resolutions are configured, the sensor can provide a higher frame
+ rate.
+ * Since any request may use any or all the currently configured
+ output streams, the sensor and ISP must be configured to support
+ scaling a single capture to all the streams at the same time. This
+ means the camera pipeline must be ready to produce the largest
+ requested output size without any delay. Therefore, the overall
+ frame rate of a given configured stream set is governed only by the
+ largest requested stream resolution.
+ * Using more than one output stream in a request does not affect the
+ frame duration.
+ * Certain format-streams may need to do additional background processing
+ before data is consumed/produced by that stream. These processors
+ can run concurrently to the rest of the camera pipeline, but
+ cannot process more than 1 capture at a time.
+
+ The necessary information for the application, given the model above,
+ is provided via
+ {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
+ These are used to determine the maximum frame rate / minimum frame
+ duration that is possible for a given stream configuration.
+
+ Specifically, the application can use the following rules to
+ determine the minimum frame duration it can request from the camera
+ device:
+
+ 1. Let the set of currently configured input/output streams
+ be called `S`.
+ 1. Find the minimum frame durations for each stream in `S`, by looking
+ it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
+ (with its respective size/format). Let this set of frame durations be
+ called `F`.
+ 1. For any given request `R`, the minimum frame duration allowed
+ for `R` is the maximum out of all values in `F`. Let the streams
+ used in `R` be called `S_r`.
+
+ If none of the streams in `S_r` have a stall time (listed in {@link
+ ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
+ using its respective size/format), then the frame duration in `F`
+ determines the steady state frame rate that the application will get
+ if it uses `R` as a repeating request. Let this special kind of
+ request be called `Rsimple`.
+
+ A repeating request `Rsimple` can be _occasionally_ interleaved
+ by a single capture of a new request `Rstall` (which has at least
+ one in-use stream with a non-0 stall time) and if `Rstall` has the
+ same minimum frame duration this will not cause a frame rate loss
+ if all buffers from the previous `Rstall` have already been
+ delivered.
+
+ For more details about stalling, see
+ {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
+
+ This control is only effective if android.control.aeMode or android.control.mode is set to
+ OFF; otherwise the auto-exposure algorithm will override this value.
+ </details>
+ <hal_details>
+ For more details about stalling, see
+ android.scaler.availableStallDurations.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
+ <description>The amount of gain applied to sensor data
+ before processing.</description>
+ <units>ISO arithmetic units</units>
+ <range>android.sensor.info.sensitivityRange</range>
+ <details>
+ The sensitivity is the standard ISO sensitivity value,
+ as defined in ISO 12232:2006.
+
+ The sensitivity must be within android.sensor.info.sensitivityRange, and
+ if it is less than android.sensor.maxAnalogSensitivity, the camera device
+ is guaranteed to use only analog amplification for applying the gain.
+
+ If the camera device cannot apply the exact sensitivity
+ requested, it will reduce the gain to the nearest supported
+ value. The final sensitivity used will be available in the
+ output capture result.
+
+ This control is only effective if android.control.aeMode or android.control.mode is set to
+ OFF; otherwise the auto-exposure algorithm will override this value.
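+
+ A sketch of one way a HAL might honor the analog-only guarantee (names are
+ illustrative; `max_analog_sensitivity` stands for
+ android.sensor.maxAnalogSensitivity):
+
+     // Split a requested sensitivity into analog and digital gain.
+     int32_t analog_sensitivity = requested;
+     float digital_gain = 1.0f;
+     if (requested &gt; max_analog_sensitivity) {
+         analog_sensitivity = max_analog_sensitivity;
+         digital_gain = (float)requested / max_analog_sensitivity;
+     }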
+ </details>
+ <hal_details>The ISO 12232:2006 REI method is acceptable.</hal_details>
+ <tag id="V1" />
+ </entry>
+ </controls>
+ <static>
+ <namespace name="info">
+ <entry name="activeArraySize" type="int32" visibility="public"
+ type_notes="Four ints defining the active pixel rectangle"
+ container="array" typedef="rectangle" hwlevel="legacy">
+ <array>
+ <size>4</size>
+ </array>
+ <description>
+ The area of the image sensor which corresponds to active pixels after any geometric
+ distortion correction has been applied.
+ </description>
+ <units>Pixel coordinates on the image sensor</units>
+ <details>
+ This is the rectangle representing the size of the active region of the sensor (i.e.
+ the region that actually receives light from the scene) after any geometric correction
+ has been applied, and should be treated as the maximum size in pixels of any of the
+ image output formats aside from the raw formats.
+
+ This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ the full pixel array, and the size of the full pixel array is given by
+ android.sensor.info.pixelArraySize.
+
+ The data representation is int[4], which maps to (left, top, width, height).
+
+ The coordinate system for most other keys that list pixel coordinates, including
+ android.scaler.cropRegion, is defined relative to the active array rectangle given in
+ this field, with `(0, 0)` being the top-left of this rectangle.
+
+ The active array may be smaller than the full pixel array, since the full array may
+ include black calibration pixels or other inactive regions, and geometric correction
+ resulting in scaling or cropping may have been applied.
+ </details>
+ <hal_details>
+ This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
+ &gt;= `(0,0)`.
+ The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="sensitivityRange" type="int32" visibility="public"
+ type_notes="Range of supported sensitivities"
+ container="array" typedef="rangeInt"
+ hwlevel="full">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Range of sensitivities for android.sensor.sensitivity supported by this
+ camera device.</description>
+ <range>Min &lt;= 100, Max &gt;= 800</range>
+ <details>
+ The values are the standard ISO sensitivity values,
+ as defined in ISO 12232:2006.
+ </details>
+
+ <tag id="BC" />
+ <tag id="V1" />
+ </entry>
+ <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
+ hwlevel="full">
+ <enum>
+ <value>RGGB</value>
+ <value>GRBG</value>
+ <value>GBRG</value>
+ <value>BGGR</value>
+ <value>RGB
+ <notes>Sensor is not Bayer; output has 3 16-bit
+ values for each pixel, instead of just 1 16-bit value
+ per pixel.</notes></value>
+ </enum>
+ <description>The arrangement of color filters on sensor;
+ represents the colors in the top-left 2x2 section of
+ the sensor, in reading order.</description>
+ <tag id="RAW" />
+ </entry>
+ <entry name="exposureTimeRange" type="int64" visibility="public"
+ type_notes="nanoseconds" container="array" typedef="rangeLong"
+ hwlevel="full">
+ <array>
+ <size>2</size>
+ </array>
+ <description>The range of image exposure times for android.sensor.exposureTime supported
+ by this camera device.
+ </description>
+ <units>Nanoseconds</units>
+ <range>The minimum exposure time will be less than 100 us. For FULL
+ capability devices (android.info.supportedHardwareLevel == FULL),
+ the maximum exposure time will be greater than 100ms.</range>
+ <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
+ the maximum of the range SHOULD be at least 1 second (1e9), and MUST be at least
+ 100ms.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ <entry name="maxFrameDuration" type="int64" visibility="public"
+ hwlevel="full">
+ <description>The maximum possible frame duration (minimum frame rate) for
+ android.sensor.frameDuration that is supported by this camera device.</description>
+ <units>Nanoseconds</units>
+ <range>For FULL capability devices
+ (android.info.supportedHardwareLevel == FULL), at least 100ms.
+ </range>
+ <details>Attempting to use frame durations beyond the maximum will result in the frame
+ duration being clipped to the maximum. See that control for a full definition of frame
+ durations.
+
+ Refer to {@link
+ ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
+ for the minimum frame duration values.
+ </details>
+ <hal_details>
+ For FULL capability devices (android.info.supportedHardwareLevel == FULL),
+ the maximum of the range SHOULD be at least
+ 1 second (1e9), and MUST be at least 100ms (100e6).
+
+ android.sensor.info.maxFrameDuration must be greater than or
+ equal to the android.sensor.info.exposureTimeRange max
+ value (since exposure time overrides frame duration).
+
+ Available minimum frame durations for JPEG must be no greater
+ than that of the YUV_420_888/IMPLEMENTATION_DEFINED
+ minimum frame durations (for that respective size).
+
+ Since JPEG processing is considered offline and can take longer than
+ a single uncompressed capture, refer to
+ android.scaler.availableStallDurations
+ for details about encoding this scenario.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ <entry name="physicalSize" type="float" visibility="public"
+ type_notes="width x height"
+ container="array" typedef="sizeF" hwlevel="legacy">
+ <array>
+ <size>2</size>
+ </array>
+ <description>The physical dimensions of the full pixel
+ array.</description>
+ <units>Millimeters</units>
+ <details>This is the physical size of the sensor pixel
+ array defined by android.sensor.info.pixelArraySize.
+ </details>
+ <hal_details>Needed for FOV calculation for old API</hal_details>
+ <tag id="V1" />
+ <tag id="BC" />
+ </entry>
+ <entry name="pixelArraySize" type="int32" visibility="public"
+ container="array" typedef="size" hwlevel="legacy">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Dimensions of the full pixel array, possibly
+ including black calibration pixels.</description>
+ <units>Pixels</units>
+ <details>The pixel count of the full pixel array of the image sensor, which covers
+ android.sensor.info.physicalSize area. This represents the full pixel dimensions of
+ the raw buffers produced by this sensor.
+
+ If a camera device supports raw sensor formats, either this or
+ android.sensor.info.preCorrectionActiveArraySize gives the maximum dimensions for the
+ raw output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this
+ depends on whether or not the image sensor returns buffers containing pixels that are
+ not part of the active array region, for black level calibration or other purposes).
+
+ Some parts of the full pixel array may not receive light from the scene,
+ or be otherwise inactive. The android.sensor.info.preCorrectionActiveArraySize key
+ defines the rectangle of active pixels that will be included in processed image
+ formats.
+ </details>
+ <tag id="RAW" />
+ <tag id="BC" />
+ </entry>
+ <entry name="whiteLevel" type="int32" visibility="public">
+ <description>
+ Maximum raw value output by sensor.
+ </description>
+ <range>&gt; 255 (8-bit output)</range>
+ <details>
+ This specifies the fully-saturated encoding level for the raw
+ sample values from the sensor. This is typically caused by the
+ sensor becoming highly non-linear or clipping. The minimum for
+ each channel is specified by the offset in the
+ android.sensor.blackLevelPattern key.
+
+ The white level is typically determined either by sensor bit depth
+ (8-14 bits is expected), or by the point where the sensor response
+ becomes too non-linear to be useful. The default value for this is the
+ maximum representable value for a 16-bit raw sample (2^16 - 1).
+
+ The white level values of captured images may vary for different
+ capture settings (e.g., android.sensor.sensitivity). This key
+ represents a coarse approximation for such cases. It is recommended
+ to use android.sensor.dynamicWhiteLevel for captures when supported
+ by the camera device, which provides more accurate white level values.
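+
+ A sketch relating this key to the black level offsets (variables are
+ illustrative; `black` is the offset for the sample's CFA channel from
+ android.sensor.blackLevelPattern):
+
+     // Normalize one raw sample to [0, 1] between black and white levels.
+     double normalized =
+         (double)(raw - black) / (double)(white_level - black);
+     if (normalized &lt; 0.0) normalized = 0.0; // below the black level
+     if (normalized &gt; 1.0) normalized = 1.0; // saturated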
+ </details>
+ <hal_details>
+ The full bit depth of the sensor must be available in the raw data,
+ so the value for linear sensors should not be significantly lower
+ than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="timestampSource" type="byte" visibility="public"
+ enum="true" hwlevel="legacy">
+ <enum>
+ <value>UNKNOWN
+ <notes>
+ Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
+ but can not be compared to timestamps from other subsystems
+ (e.g. accelerometer, gyro etc.), or other instances of the same or different
+ camera devices in the same system. Timestamps between streams and results for
+ a single camera instance are comparable, and the timestamps for all buffers
+ and the result metadata generated by a single capture are identical.
+ </notes>
+ </value>
+ <value>REALTIME
+ <notes>
+ Timestamps from android.sensor.timestamp are in the same timebase as
+ [elapsedRealtimeNanos](https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos)
+ (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.
+ </notes>
+ </value>
+ </enum>
+ <description>The time base source for sensor capture start timestamps.</description>
+ <details>
+ The timestamps provided for captures are always in nanoseconds and monotonic, but
+ may not be based on a time source that can be compared to other system time sources.
+
+ This characteristic defines the source for the timestamps, and therefore whether they
+ can be compared against other system time sources/timestamps.
+ </details>
+ <tag id="V1" />
+ </entry>
+ <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
+ typedef="boolean">
+ <enum>
+ <value>FALSE</value>
+ <value>TRUE</value>
+ </enum>
+ <description>Whether the RAW images output from this camera device are subject to
+ lens shading correction.</description>
+ <details>
+ If TRUE, all images produced by the camera device in the RAW image formats will
+ have lens shading correction already applied to them. If FALSE, the images will
+ not be adjusted for lens shading correction.
+ See android.request.maxNumOutputRaw for a list of RAW image formats.
+
+ This key will be `null` for all devices that do not report this information.
+ Devices with RAW capability will always report this information in this key.
+ </details>
+ </entry>
+ <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
+ type_notes="Four ints defining the active pixel rectangle" container="array"
+ typedef="rectangle" hwlevel="legacy">
+ <array>
+ <size>4</size>
+ </array>
+ <description>
+ The area of the image sensor which corresponds to active pixels prior to the
+ application of any geometric distortion correction.
+ </description>
+ <units>Pixel coordinates on the image sensor</units>
+ <details>
+ The data representation is int[4], which maps to (left, top, width, height).
+
+ This is the rectangle representing the size of the active region of the sensor (i.e.
+ the region that actually receives light from the scene) before any geometric correction
+ has been applied, and should be treated as the active region rectangle for any of the
+ raw formats. All metadata associated with raw processing (e.g. the lens shading
+ correction map, and radial distortion fields) treats the top, left of this rectangle as
+ the origin, (0,0).
+
+ The size of this region determines the maximum field of view and the maximum number of
+ pixels that an image from this sensor can contain, prior to the application of
+ geometric distortion correction. The effective maximum pixel dimensions of a
+ post-distortion-corrected image is given by the android.sensor.info.activeArraySize
+ field, and the effective maximum field of view for a post-distortion-corrected image
+ can be calculated by applying the geometric distortion correction fields to this
+ rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
+
+ E.g., to calculate the position of a pixel, (x,y), in a processed YUV output image with
+ the dimensions in android.sensor.info.activeArraySize, given the position of a pixel,
+ (x', y'), in the raw pixel array with dimensions given in
+ android.sensor.info.pixelArraySize:
+
+ 1. Choose a pixel (x', y') within the active array region of the raw buffer given in
+ android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
+ to be outside of the FOV, and will not be shown in the processed output image.
+ 1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
+ (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
+ buffers is defined relative to the top, left of the
+ android.sensor.info.preCorrectionActiveArraySize rectangle.
+ 1. If the resulting corrected pixel coordinate is within the region given in
+ android.sensor.info.activeArraySize, then the position of this pixel in the
+ processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
+ when the top, left coordinate of that buffer is treated as (0, 0).
+
+ Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
+ is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
+ android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
+ correction doesn't change the pixel coordinate, the resulting pixel selected in
+ pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
+ with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
+ relative to the top,left of post-processed YUV output buffer with dimensions given in
+ android.sensor.info.activeArraySize.
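+
+ A sketch of that worked example in code (coordinates hardcoded from the values
+ above, with a no-op distortion correction):
+
+     int32_t xp = 25, yp = 25;   // pixel in raw pixel array coordinates
+     // Step 1: (25, 25) lies inside the pre-correction active array
+     // rectangle (10, 10, 100, 100), so it is within the FOV.
+     // Step 2: the correction is a no-op here, so the post-distortion
+     // coordinate is unchanged.
+     int32_t xi = xp, yi = yp;
+     // Step 3: subtract the active array top-left, (20, 20).
+     int32_t out_x = xi - 20;    // = 5
+     int32_t out_y = yi - 20;    // = 5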
+
+ The currently supported fields that correct for geometric distortion are:
+
+ 1. android.lens.radialDistortion.
+
+ If all of the geometric distortion fields are no-ops, this rectangle will be the same
+ as the post-distortion-corrected rectangle given in
+ android.sensor.info.activeArraySize.
+
+ This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ the full pixel array, and the size of the full pixel array is given by
+ android.sensor.info.pixelArraySize.
+
+ The pre-correction active array may be smaller than the full pixel array, since the
+ full array may include black calibration pixels or other inactive regions.
+ </details>
+ <hal_details>
+ This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
+ &gt;= `(0,0)`.
+ The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`.
+
+ If omitted by the HAL implementation, the camera framework will assume that this is
+ the same as the post-correction active array region given in
+ android.sensor.info.activeArraySize.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ </namespace>
+ <entry name="referenceIlluminant1" type="byte" visibility="public"
+ enum="true">
+ <enum>
+ <value id="1">DAYLIGHT</value>
+ <value id="2">FLUORESCENT</value>
+ <value id="3">TUNGSTEN
+ <notes>Incandescent light</notes>
+ </value>
+ <value id="4">FLASH</value>
+ <value id="9">FINE_WEATHER</value>
+ <value id="10">CLOUDY_WEATHER</value>
+ <value id="11">SHADE</value>
+ <value id="12">DAYLIGHT_FLUORESCENT
+ <notes>D 5700 - 7100K</notes>
+ </value>
+ <value id="13">DAY_WHITE_FLUORESCENT
+ <notes>N 4600 - 5400K</notes>
+ </value>
+ <value id="14">COOL_WHITE_FLUORESCENT
+ <notes>W 3900 - 4500K</notes>
+ </value>
+ <value id="15">WHITE_FLUORESCENT
+ <notes>WW 3200 - 3700K</notes>
+ </value>
+ <value id="17">STANDARD_A</value>
+ <value id="18">STANDARD_B</value>
+ <value id="19">STANDARD_C</value>
+ <value id="20">D55</value>
+ <value id="21">D65</value>
+ <value id="22">D75</value>
+ <value id="23">D50</value>
+ <value id="24">ISO_STUDIO_TUNGSTEN</value>
+ </enum>
+ <description>
+ The standard reference illuminant used as the scene light source when
+ calculating the android.sensor.colorTransform1,
+ android.sensor.calibrationTransform1, and
+ android.sensor.forwardMatrix1 matrices.
+ </description>
+ <details>
+ The values in this key correspond to the values defined for the
+ EXIF LightSource tag. These illuminants are standard light sources
+ that are often used when calibrating camera devices.
+
+ If this key is present, then android.sensor.colorTransform1,
+ android.sensor.calibrationTransform1, and
+ android.sensor.forwardMatrix1 will also be present.
+
+ Some devices may choose to provide a second set of calibration
+ information for improved quality, including
+ android.sensor.referenceIlluminant2 and its corresponding matrices.
+ </details>
+ <hal_details>
+ The first reference illuminant (android.sensor.referenceIlluminant1)
+ and corresponding matrices must be present to support the RAW capability
+ and DNG output.
+
+ When producing raw images with a color profile that has only been
+ calibrated against a single light source, it is valid to omit
+ android.sensor.referenceIlluminant2 along with the
+ android.sensor.colorTransform2, android.sensor.calibrationTransform2,
+ and android.sensor.forwardMatrix2 matrices.
+
+ If only android.sensor.referenceIlluminant1 is included, it should be
+ chosen so that it is representative of typical scene lighting. In
+ general, D50 or DAYLIGHT will be chosen for this case.
+
+ If both android.sensor.referenceIlluminant1 and
+ android.sensor.referenceIlluminant2 are included, they should be
+ chosen to represent the typical range of scene lighting conditions.
+ In general, a low color temperature illuminant such as Standard-A will
+ be chosen for the first reference illuminant, and a higher color
+ temperature illuminant such as D65 will be chosen for the second
+ reference illuminant.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="referenceIlluminant2" type="byte" visibility="public">
+ <description>
+ The standard reference illuminant used as the scene light source when
+ calculating the android.sensor.colorTransform2,
+ android.sensor.calibrationTransform2, and
+ android.sensor.forwardMatrix2 matrices.
+ </description>
+ <range>Any value listed in android.sensor.referenceIlluminant1</range>
+ <details>
+ See android.sensor.referenceIlluminant1 for more details.
+
+ If this key is present, then android.sensor.colorTransform2,
+ android.sensor.calibrationTransform2, and
+ android.sensor.forwardMatrix2 will also be present.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="calibrationTransform1" type="rational"
+ visibility="public" optional="true"
+ type_notes="3x3 matrix in row-major-order" container="array"
+ typedef="colorSpaceTransform">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>
+ A per-device calibration transform matrix that maps from the
+ reference sensor colorspace to the actual device sensor colorspace.
+ </description>
+ <details>
+ This matrix is used to correct for per-device variations in the
+ sensor colorspace, and is used for processing raw buffer data.
+
+ The matrix is expressed as a 3x3 matrix in row-major-order, and
+ contains a per-device calibration transform that maps colors
+ from reference sensor color space (i.e. the "golden module"
+ colorspace) into this camera device's native sensor color
+ space under the first reference illuminant
+ (android.sensor.referenceIlluminant1).
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="calibrationTransform2" type="rational"
+ visibility="public" optional="true"
+ type_notes="3x3 matrix in row-major-order" container="array"
+ typedef="colorSpaceTransform">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>
+ A per-device calibration transform matrix that maps from the
+ reference sensor colorspace to the actual device sensor colorspace
+ (this is the colorspace of the raw buffer data).
+ </description>
+ <details>
+ This matrix is used to correct for per-device variations in the
+ sensor colorspace, and is used for processing raw buffer data.
+
+ The matrix is expressed as a 3x3 matrix in row-major-order, and
+ contains a per-device calibration transform that maps colors
+ from reference sensor color space (i.e. the "golden module"
+ colorspace) into this camera device's native sensor color
+ space under the second reference illuminant
+ (android.sensor.referenceIlluminant2).
+
+ This matrix will only be present if the second reference
+ illuminant is present.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="colorTransform1" type="rational"
+ visibility="public" optional="true"
+ type_notes="3x3 matrix in row-major-order" container="array"
+ typedef="colorSpaceTransform">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>
+ A matrix that transforms color values from CIE XYZ color space to
+ reference sensor color space.
+ </description>
+ <details>
+ This matrix is used to convert from the standard CIE XYZ color
+ space to the reference sensor colorspace, and is used when processing
+ raw buffer data.
+
+ The matrix is expressed as a 3x3 matrix in row-major-order, and
+ contains a color transform matrix that maps colors from the CIE
+ XYZ color space to the reference sensor color space (i.e. the
+ "golden module" colorspace) under the first reference illuminant
+ (android.sensor.referenceIlluminant1).
+
+ The white points chosen in both the reference sensor color space
+ and the CIE XYZ colorspace when calculating this transform will
+ match the standard white point for the first reference illuminant
+ (i.e. no chromatic adaptation will be applied by this transform).
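+
+ A sketch of applying the transform (names are illustrative; `m` is assumed to
+ point at this entry's nine rational values, and `xyz` at a CIE XYZ triple):
+
+     // Row-major 3x3 rational matrix times an XYZ column vector.
+     double sensor_rgb[3];
+     for (int row = 0; row &lt; 3; row++) {
+         sensor_rgb[row] = 0.0;
+         for (int col = 0; col &lt; 3; col++) {
+             camera_metadata_rational_t r = m[row * 3 + col];
+             sensor_rgb[row] +=
+                 ((double)r.numerator / r.denominator) * xyz[col];
+         }
+     }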
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="colorTransform2" type="rational"
+ visibility="public" optional="true"
+ type_notes="3x3 matrix in row-major-order" container="array"
+ typedef="colorSpaceTransform">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>
+ A matrix that transforms color values from CIE XYZ color space to
+ reference sensor color space.
+ </description>
+ <details>
+ This matrix is used to convert from the standard CIE XYZ color
+ space to the reference sensor colorspace, and is used when processing
+ raw buffer data.
+
+ The matrix is expressed as a 3x3 matrix in row-major-order, and
+ contains a color transform matrix that maps colors from the CIE
+ XYZ color space to the reference sensor color space (i.e. the
+ "golden module" colorspace) under the second reference illuminant
+ (android.sensor.referenceIlluminant2).
+
+ The white points chosen in both the reference sensor color space
+ and the CIE XYZ colorspace when calculating this transform will
+ match the standard white point for the second reference illuminant
+ (i.e. no chromatic adaptation will be applied by this transform).
+
+ This matrix will only be present if the second reference
+ illuminant is present.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="forwardMatrix1" type="rational"
+ visibility="public" optional="true"
+ type_notes="3x3 matrix in row-major-order" container="array"
+ typedef="colorSpaceTransform">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>
+ A matrix that transforms white balanced camera colors from the reference
+ sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
+ </description>
+ <details>
+ This matrix is used to convert to the standard CIE XYZ colorspace, and
+ is used when processing raw buffer data.
+
+ This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ a color transform matrix that maps white balanced colors from the
+ reference sensor color space to the CIE XYZ color space with a D50 white
+ point.
+
+ Under the first reference illuminant (android.sensor.referenceIlluminant1)
+ this matrix is chosen so that the standard white point for this reference
+ illuminant in the reference sensor colorspace is mapped to D50 in the
+ CIE XYZ colorspace.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="forwardMatrix2" type="rational"
+ visibility="public" optional="true"
+ type_notes="3x3 matrix in row-major-order" container="array"
+ typedef="colorSpaceTransform">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>
+ A matrix that transforms white balanced camera colors from the reference
+ sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
+ </description>
+ <details>
+ This matrix is used to convert to the standard CIE XYZ colorspace, and
+ is used when processing raw buffer data.
+
+ This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ a color transform matrix that maps white balanced colors from the
+ reference sensor color space to the CIE XYZ color space with a D50 white
+ point.
+
+ Under the second reference illuminant (android.sensor.referenceIlluminant2)
+ this matrix is chosen so that the standard white point for this reference
+ illuminant in the reference sensor colorspace is mapped to D50 in the
+ CIE XYZ colorspace.
+
+ This matrix will only be present if the second reference
+ illuminant is present.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="baseGainFactor" type="rational"
+ optional="true">
+ <description>Gain factor from electrons to raw units when
+ ISO=100</description>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="blackLevelPattern" type="int32" visibility="public"
+ optional="true" type_notes="2x2 raw count block" container="array"
+ typedef="blackLevelPattern">
+ <array>
+ <size>4</size>
+ </array>
+ <description>
+ A fixed black level offset for each of the color filter arrangement
+ (CFA) mosaic channels.
+ </description>
+ <range>&gt;= 0 for each.</range>
+ <details>
+ This key specifies the zero light value for each of the CFA mosaic
+ channels in the camera sensor. The maximal value output by the
+ sensor is represented by the value in android.sensor.info.whiteLevel.
+
+ The values are given in the same order as channels listed for the CFA
+ layout key (see android.sensor.info.colorFilterArrangement), i.e. the
+ nth value given corresponds to the black level offset for the nth
+ color channel listed in the CFA.
+
+ The black level values of captured images may vary for different
+ capture settings (e.g., android.sensor.sensitivity). This key
+ represents a coarse approximation for such case. It is recommended to
+ use android.sensor.dynamicBlackLevel or use pixels from
+ android.sensor.opticalBlackRegions directly for captures when
+ supported by the camera device, which provides more accurate black
+ level values. For raw capture in particular, it is recommended to use
+ pixels from android.sensor.opticalBlackRegions to calculate black
+ level values for each frame.
+ </details>
+ <hal_details>
+ The values are given in row-column scan order, with the first value
+ corresponding to the element of the CFA in row=0, column=0.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="maxAnalogSensitivity" type="int32" visibility="public"
+ optional="true" hwlevel="full">
+ <description>Maximum sensitivity that is implemented
+ purely through analog gain.</description>
+ <details>For android.sensor.sensitivity values less than or
+ equal to this, all applied gain must be analog. For
+ values above this, the gain applied can be a mix of analog and
+ digital.</details>
+ <tag id="V1" />
+ <tag id="FULL" />
+ </entry>
+ <entry name="orientation" type="int32" visibility="public"
+ hwlevel="legacy">
+ <description>Clockwise angle through which the output image needs to be rotated to be
+ upright on the device screen in its native orientation.
+ </description>
+ <units>Degrees of clockwise rotation; always a multiple of
+ 90</units>
+ <range>0, 90, 180, 270</range>
+ <details>
+ Also defines the direction of rolling shutter readout, which is from top to bottom in
+ the sensor's coordinate system.
+ </details>
+ <tag id="BC" />
+ </entry>
+ <entry name="profileHueSatMapDimensions" type="int32"
+ visibility="system" optional="true"
+ type_notes="Number of samples for hue, saturation, and value"
+ container="array">
+ <array>
+ <size>3</size>
+ </array>
+ <description>
+ The number of input samples for each dimension of
+ android.sensor.profileHueSatMap.
+ </description>
+ <range>
+ Hue &gt;= 1,
+ Saturation &gt;= 2,
+ Value &gt;= 1
+ </range>
+ <details>
+ The number of input samples for the hue, saturation, and value
+ dimension of android.sensor.profileHueSatMap. The order of the
+ dimensions given is hue, saturation, value; where hue is the 0th
+ element.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.sensor.exposureTime" kind="controls">
+ </clone>
+ <clone entry="android.sensor.frameDuration"
+ kind="controls"></clone>
+ <clone entry="android.sensor.sensitivity" kind="controls">
+ </clone>
+ <entry name="timestamp" type="int64" visibility="public"
+ hwlevel="legacy">
+ <description>Time at start of exposure of first
+ row of the image sensor active array, in nanoseconds.</description>
+ <units>Nanoseconds</units>
+ <range>&gt; 0</range>
+ <details>The timestamps are also included in all image
+ buffers produced for the same capture, and will be identical
+ on all the outputs.
+
+ When android.sensor.info.timestampSource `==` UNKNOWN,
+ the timestamps measure time since an unspecified starting point,
+ and are monotonically increasing. They can be compared with the
+ timestamps for other captures from the same camera device, but are
+ not guaranteed to be comparable to any other time source.
+
+ When android.sensor.info.timestampSource `==` REALTIME, the
+ timestamps measure time in the same timebase as
+ [elapsedRealtimeNanos](https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos)
+ (or CLOCK_BOOTTIME), and they can
+ be compared to other timestamps from other subsystems that
+ are using that base.
+
+ For reprocessing, the timestamp will match the start of exposure of
+ the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
+ timestamp} in the TotalCaptureResult that was used to create the
+ reprocess capture request.
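+
+ For example, when the timestamp source is REALTIME, an application
+ could estimate capture-to-processing latency with a sketch like the
+ following (`characteristics` and `result` are assumed to be the
+ relevant CameraCharacteristics and TotalCaptureResult):
+
+     // Sketch: only meaningful when the timestamp source is REALTIME.
+     Integer source = characteristics.get(
+             CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
+     if (source != null &amp;&amp; source ==
+             CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
+         long exposureStartNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
+         long latencyNs = SystemClock.elapsedRealtimeNanos() - exposureStartNs;
+     }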
+ </details>
+ <hal_details>
+ All timestamps must be in reference to the kernel's
+ CLOCK_BOOTTIME monotonic clock, which properly accounts for
+ time spent asleep. This allows for synchronization with
+ sensors that continue to operate while the system is
+ otherwise asleep.
+
+ If android.sensor.info.timestampSource `==` REALTIME,
+ the timestamp must be synchronized with the timestamps from other
+ sensor subsystems that are using the same timebase.
+
+ For reprocessing, the input image's start of exposure can be looked up
+ with android.sensor.timestamp from the metadata included in the
+ capture request.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="temperature" type="float"
+ optional="true">
+ <description>The temperature of the sensor, sampled at the time
+ exposure began for this frame.
+
+ The thermal diode being queried should be inside the sensor PCB, or
+ somewhere close to it.
+ </description>
+
+ <units>Celsius</units>
+ <range>Optional. This value is missing if no temperature is available.</range>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="neutralColorPoint" type="rational" visibility="public"
+ optional="true" container="array">
+ <array>
+ <size>3</size>
+ </array>
+ <description>
+ The estimated camera neutral color in the native sensor colorspace at
+ the time of capture.
+ </description>
+ <details>
+ This value gives the neutral color point encoded as an RGB value in the
+ native sensor color space. The neutral color point indicates the
+ currently estimated white point of the scene illumination. It can be
+ used to interpolate between the provided color transforms when
+ processing raw sensor data.
+
+ The order of the values is R, G, B; where R is in the lowest index.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="noiseProfile" type="double" visibility="public"
+ optional="true" type_notes="Pairs of noise model coefficients"
+ container="array" typedef="pairDoubleDouble">
+ <array>
+ <size>2</size>
+ <size>CFA Channels</size>
+ </array>
+ <description>
+ Noise model coefficients for each CFA mosaic channel.
+ </description>
+ <details>
+ This key contains two noise model coefficients for each CFA channel
+ corresponding to the sensor amplification (S) and sensor readout
+ noise (O). These are given as pairs of coefficients for each channel
+ in the same order as channels listed for the CFA layout key
+ (see android.sensor.info.colorFilterArrangement). This is
+ represented as an array of Pair&lt;Double, Double&gt;, where
+ the first member of the Pair at index n is the S coefficient and the
+ second member is the O coefficient for the nth color channel in the CFA.
+
+ These coefficients are used in a two parameter noise model to describe
+ the amount of noise present in the image for each CFA channel. The
+ noise model used here is:
+
+ N(x) = sqrt(Sx + O)
+
+ Where x represents the recorded signal of a CFA channel normalized to
+ the range [0, 1], and S and O are the noise model coefficients for
+ that channel.
+
+ A more detailed description of the noise model can be found in the
+ Adobe DNG specification for the NoiseProfile tag.
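+
+ As an illustrative sketch (assuming a TotalCaptureResult `result`, a
+ channel index `n`, and a normalized sample value `x`), the expected
+ noise for one sample could be evaluated as:
+
+     // Sketch: evaluate N(x) = sqrt(S*x + O) for the nth CFA channel.
+     Pair&lt;Double, Double&gt;[] profile =
+             result.get(CaptureResult.SENSOR_NOISE_PROFILE);
+     double s = profile[n].first;   // amplification coefficient S
+     double o = profile[n].second;  // readout noise coefficient O
+     double noise = Math.sqrt(s * x + o);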
+ </details>
+ <hal_details>
+ For a CFA layout of RGGB, the list of coefficients would be given as
+ an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
+ for the red channel, S1 and O1 are the coefficients for the first green
+ channel, etc.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="profileHueSatMap" type="float"
+ visibility="system" optional="true"
+ type_notes="Mapping for hue, saturation, and value"
+ container="array">
+ <array>
+ <size>hue_samples</size>
+ <size>saturation_samples</size>
+ <size>value_samples</size>
+ <size>3</size>
+ </array>
+ <description>
+ A mapping containing a hue shift, saturation scale, and value scale
+ for each pixel.
+ </description>
+ <units>
+ The hue shift is given in degrees; saturation and value scale factors are
+ unitless and are between 0 and 1 inclusive
+ </units>
+ <details>
+ hue_samples, saturation_samples, and value_samples are given in
+ android.sensor.profileHueSatMapDimensions.
+
+ Each entry of this map contains three floats corresponding to the
+ hue shift, saturation scale, and value scale, respectively; where the
+ hue shift has the lowest index. The map entries are stored in the key
+ in nested loop order, with the value divisions in the outer loop, the
+ hue divisions in the middle loop, and the saturation divisions in the
+ inner loop. All zero input saturation entries are required to have a
+ value scale factor of 1.0.
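+
+ As a sketch of the resulting layout (`hueSamples`, `satSamples`, and
+ `map` are assumptions standing in for the dimensions from
+ android.sensor.profileHueSatMapDimensions and the flattened map data),
+ the three floats for sample `(h, s, v)` would start at:
+
+     // Value divisions outermost, hue in the middle, saturation
+     // innermost; each sample holds 3 floats.
+     int base = 3 * ((v * hueSamples + h) * satSamples + s);
+     float hueShift = map[base];      // degrees
+     float satScale = map[base + 1];  // unitless
+     float valScale = map[base + 2];  // unitless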
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="profileToneCurve" type="float"
+ visibility="system" optional="true"
+ type_notes="Samples defining a spline for a tone-mapping curve"
+ container="array">
+ <array>
+ <size>samples</size>
+ <size>2</size>
+ </array>
+ <description>
+ A list of x,y samples defining a tone-mapping curve for gamma adjustment.
+ </description>
+ <range>
+ Each sample has an input range of `[0, 1]` and an output range of
+ `[0, 1]`. The first sample is required to be `(0, 0)`, and the last
+ sample is required to be `(1, 1)`.
+ </range>
+ <details>
+ This key contains a default tone curve that can be applied while
+ processing the image as a starting point for user adjustments.
+ The curve is specified as a list of value pairs in linear gamma.
+ The curve is interpolated using a cubic spline.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="greenSplit" type="float" visibility="public" optional="true">
+ <description>
+ The worst-case divergence between Bayer green channels.
+ </description>
+ <range>
+ &gt;= 0
+ </range>
+ <details>
+ This value is an estimate of the worst case split between the
+ Bayer green channels in the red and blue rows in the sensor color
+ filter array.
+
+ The green split is calculated as follows:
+
+ 1. A 5x5 pixel (or larger) window W within the active sensor array is
+ chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
+ mosaic channels (R, Gr, Gb, B). The location and size of the window
+ are implementation defined, and should be chosen so that the green
+ split estimate is both representative of the entire image for this
+ camera sensor and quick to calculate.
+ 1. The arithmetic mean of the green channels from the red
+ rows (mean_Gr) within W is computed.
+ 1. The arithmetic mean of the green channels from the blue
+ rows (mean_Gb) within W is computed.
+ 1. The maximum ratio R of the two means is computed as follows:
+ `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
+
+ The ratio R is the green split divergence reported for this property,
+ which represents how much the green channels differ in the mosaic
+ pattern. This value is typically used to determine the treatment of
+ the green mosaic channels when demosaicing.
+
+ The green split value can be roughly interpreted as follows:
+
+ * R &lt; 1.03 is a negligible split (&lt;3% divergence).
+ * 1.03 &lt;= R &lt;= 1.20 will require some software
+ correction to avoid demosaic errors (3-20% divergence).
+ * R &gt; 1.20 will require strong software correction to produce
+ a usable image (&gt;20% divergence).
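+
+ As a sketch of step 4 and the interpretation above (`meanGr` and
+ `meanGb` are assumed to be the window means from steps 2-3):
+
+     double r = Math.max((meanGr + 1) / (meanGb + 1),
+                         (meanGb + 1) / (meanGr + 1));
+     boolean negligible = r &lt; 1.03;            // no correction needed
+     boolean needsStrongCorrection = r &gt; 1.20; // heavy correction needed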
+ </details>
+ <hal_details>
+ The green split given may be a static value based on prior
+ characterization of the camera sensor using the green split
+ calculation method given here over a large, representative, sample
+ set of images. Other methods of calculation that produce equivalent
+ results, and can be interpreted in the same manner, may be used.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ </dynamic>
+ <controls>
+ <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
+ <array>
+ <size>4</size>
+ </array>
+ <description>
+ A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
+ when android.sensor.testPatternMode is SOLID_COLOR.
+ </description>
+ <details>
+ Each color channel is treated as an unsigned 32-bit integer.
+ The camera device then uses the X most significant bits,
+ where X is the number of bits in its Bayer raw sensor
+ output.
+
+ For example, a sensor with RAW10 Bayer output would use the
+ 10 most significant bits from each color channel.
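+
+ As a sketch, the RAW10 value for one 32-bit channel entry could be
+ derived as follows (the unsigned shift keeps the 10 most significant
+ bits):
+
+     int channelValue = 0xFFFFFFFF;            // 100% intensity
+     int raw10 = channelValue &gt;&gt;&gt; (32 - 10);   // yields 0x3FF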
+ </details>
+ <hal_details>
+ </hal_details>
+ </entry>
+ <entry name="testPatternMode" type="int32" visibility="public" optional="true"
+ enum="true">
+ <enum>
+ <value>OFF
+ <notes>No test pattern mode is used, and the camera
+ device returns captures from the image sensor.
+
+ This is the default if the key is not set.</notes>
+ </value>
+ <value>SOLID_COLOR
+ <notes>
+ Each pixel in `[R, G_even, G_odd, B]` is replaced by its
+ respective color channel provided in
+ android.sensor.testPatternData.
+
+ For example:
+
+ android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+
+ All green pixels are 100% green. All red/blue pixels are black.
+
+ android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+
+ All red pixels are 100% red. Only the odd green pixels
+ are 100% green. All blue pixels are 100% black.
+ </notes>
+ </value>
+ <value>COLOR_BARS
+ <notes>
+ All pixel data is replaced with an 8-bar color pattern.
+
+ The vertical bars (left-to-right) are as follows:
+
+ * 100% white
+ * yellow
+ * cyan
+ * green
+ * magenta
+ * red
+ * blue
+ * black
+
+ In general the image would look like the following:
+
+ W Y C G M R B K
+ W Y C G M R B K
+ W Y C G M R B K
+ W Y C G M R B K
+ W Y C G M R B K
+ . . . . . . . .
+ . . . . . . . .
+ . . . . . . . .
+
+ (B = Blue, K = Black)
+
+ Each bar should take up 1/8 of the sensor pixel array width.
+ When this is not possible, the bar size should be rounded
+ down to the nearest integer and the pattern can repeat
+ on the right side.
+
+ Each bar's height must always take up the full sensor
+ pixel array height.
+
+ Each pixel in this test pattern must be set to either
+ 0% intensity or 100% intensity.
+ </notes>
+ </value>
+ <value>COLOR_BARS_FADE_TO_GRAY
+ <notes>
+ The test pattern is similar to COLOR_BARS, except that
+ each bar should start at its specified color at the top,
+ and fade to gray at the bottom.
+
+ Furthermore each bar is further subdivided into a left and
+ right half. The left half should have a smooth gradient,
+ and the right half should have a quantized gradient.
+
+ In particular, the right half should consist of blocks of the
+ same color, each spanning 1/16 of the active sensor pixel array width.
+
+ The least significant bits in the quantized gradient should
+ be copied from the most significant bits of the smooth gradient.
+
+ The height of each bar should always be a multiple of 128.
+ When this is not the case, the pattern should repeat at the bottom
+ of the image.
+ </notes>
+ </value>
+ <value>PN9
+ <notes>
+ All pixel data is replaced by a pseudo-random sequence
+ generated from a PN9 512-bit sequence (typically implemented
+ in hardware with a linear feedback shift register).
+
+ The generator should be reset at the beginning of each frame,
+ and thus each subsequent raw frame with this test pattern should
+ be exactly the same as the last.
+ </notes>
+ </value>
+ <value id="256">CUSTOM1
+ <notes>The first custom test pattern. All custom patterns that are
+ available only on this camera device are at least this numeric
+ value.
+
+ All of the custom test patterns will be static
+ (that is the raw image must not vary from frame to frame).
+ </notes>
+ </value>
+ </enum>
+ <description>When enabled, the sensor sends a test pattern instead of
+ doing a real exposure from the camera.
+ </description>
+ <range>android.sensor.availableTestPatternModes</range>
+ <details>
+ When a test pattern is enabled, all manual sensor controls specified
+ by android.sensor.* will be ignored. All other controls should
+ work as normal.
+
+ For example, if manual flash is enabled, flash firing should still
+ occur (and the test pattern should remain unmodified, since the
+ flash would not actually affect it).
+
+ Defaults to OFF.
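+
+ As a usage sketch, a request for a solid green test pattern could be
+ built as follows (`builder` is an assumed CaptureRequest.Builder):
+
+     builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
+             CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+     // [R, G_even, G_odd, B]: all-green pixel.
+     builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
+             new int[] {0, 0xFFFFFFFF, 0xFFFFFFFF, 0});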
+ </details>
+ <hal_details>
+ All test patterns are specified in the Bayer domain.
+
+ The HAL may choose to substitute test patterns from the sensor
+ with test patterns from on-device memory. In that case, it should be
+ indistinguishable to the ISP whether the data came from the
+ sensor interconnect bus (such as CSI2) or memory.
+ </hal_details>
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.sensor.testPatternData" kind="controls">
+ </clone>
+ <clone entry="android.sensor.testPatternMode" kind="controls">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
+ type_notes="list of enums" container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List of sensor test pattern modes for android.sensor.testPatternMode
+ supported by this camera device.
+ </description>
+ <range>Any value listed in android.sensor.testPatternMode</range>
+ <details>
+ Defaults to OFF, and always includes OFF if defined.
+ </details>
+ <hal_details>
+ All custom modes must be &gt;= CUSTOM1.
+ </hal_details>
+ </entry>
+ </static>
+ <dynamic>
+ <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
+ <description>Duration between the start of first row exposure
+ and the start of last row exposure.</description>
+ <units>Nanoseconds</units>
+ <range> &gt;= 0 and &lt;
+ {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
+ <details>
+ This is the exposure time skew between the first and last
+ row exposure start times. The first row and the last row are
+ the first and last rows inside of the
+ android.sensor.info.activeArraySize.
+
+ For typical camera sensors that use rolling shutters, this is also equivalent
+ to the frame readout time.
+ </details>
+ <hal_details>
+ The HAL must report `0` if the sensor is using global shutter, where all pixels begin
+ exposure at the same time.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ </dynamic>
+ <static>
+ <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true"
+ container="array" typedef="rectangle">
+ <array>
+ <size>4</size>
+ <size>num_regions</size>
+ </array>
+ <description>List of disjoint rectangles indicating the sensor
+ optically shielded black pixel regions.
+ </description>
+ <details>
+ In most camera sensors, the active array is surrounded by some
+ optically shielded pixel areas. By blocking light, these pixels
+ provide a reliable black reference for black level compensation
+ in the active array region.
+
+ The data representation is int[4], which maps to (left, top, width, height).
+
+ This key provides a list of disjoint rectangles specifying the
+ regions of optically shielded (with metal shield) black pixel
+ regions if the camera device is capable of reading out these black
+ pixels in the output raw images. In comparison to the fixed black
+ level values reported by android.sensor.blackLevelPattern, this key
+ may provide a more accurate way for the application to calculate the
+ black level of each captured raw image.
+
+ When this key is reported, android.sensor.dynamicBlackLevel and
+ android.sensor.dynamicWhiteLevel will also be reported.
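+
+ As an illustrative sketch of using these regions, an application could
+ average the shielded pixels of a RAW16 frame to estimate a black level
+ (`rawPixels` and `rowStride` are assumptions about how the image
+ buffer was unpacked, and a real implementation would keep the four CFA
+ channels separate):
+
+     Rect[] regions = characteristics.get(
+             CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
+     long sum = 0;
+     long count = 0;
+     for (Rect region : regions) {
+         for (int y = region.top; y &lt; region.bottom; y++) {
+             for (int x = region.left; x &lt; region.right; x++) {
+                 sum += rawPixels[y * rowStride + x] &amp; 0xFFFF;
+                 count++;
+             }
+         }
+     }
+     double estimatedBlackLevel = (double) sum / count;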
+ </details>
+ <hal_details>
+ This array contains (xmin, ymin, width, height). The (xmin, ymin)
+ must be &gt;= (0,0) and &lt;=
+ android.sensor.info.pixelArraySize. The (width, height) must be
+ &lt;= android.sensor.info.pixelArraySize. Each region must be
+ outside the region reported by
+ android.sensor.info.preCorrectionActiveArraySize.
+
+ The HAL must report the minimal number of disjoint regions for the
+ optically shielded black pixel regions. For example, if a region can
+ be covered by one rectangle, the HAL must not split this region into
+ multiple rectangles.
+ </hal_details>
+ </entry>
+ </static>
+ <dynamic>
+ <entry name="dynamicBlackLevel" type="float" visibility="public"
+ optional="true" type_notes="2x2 raw count block" container="array">
+ <array>
+ <size>4</size>
+ </array>
+ <description>
+ A per-frame dynamic black level offset for each of the color filter
+ arrangement (CFA) mosaic channels.
+ </description>
+ <range>&gt;= 0 for each.</range>
+ <details>
+ Camera sensor black levels may vary dramatically for different
+ capture settings (e.g. android.sensor.sensitivity). The fixed black
+ level reported by android.sensor.blackLevelPattern may be too
+ inaccurate to represent the actual value on a per-frame basis. The
+ camera device internal pipeline relies on reliable black level values
+ to process the raw images appropriately. To get the best image
+ quality, the camera device may choose to estimate the per frame black
+ level values either based on optically shielded black regions
+ (android.sensor.opticalBlackRegions) or its internal model.
+
+ This key reports the camera device estimated per-frame zero light
+ value for each of the CFA mosaic channels in the camera sensor. The
+ android.sensor.blackLevelPattern may only represent a coarse
+ approximation of the actual black level values. This value is the
+ black level used in the camera device's internal image processing
+ pipeline and is generally more accurate than the fixed black level values.
+ However, since they are estimated values by the camera device, they
+ may not be as accurate as the black level values calculated from the
+ optical black pixels reported by android.sensor.opticalBlackRegions.
+
+ The values are given in the same order as channels listed for the CFA
+ layout key (see android.sensor.info.colorFilterArrangement), i.e. the
+ nth value given corresponds to the black level offset for the nth
+ color channel listed in the CFA.
+
+ This key will be available if android.sensor.opticalBlackRegions is
+ available or the camera device advertises this key via
+ {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
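+
+ A sketch of preferring the per-frame values with a fallback to the
+ static pattern (`result` and `characteristics` are assumed to be in
+ scope):
+
+     float[] blackLevels = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
+     if (blackLevels == null) {
+         // Fall back to the fixed 2x2 offsets, read in row-column scan order.
+         BlackLevelPattern pattern = characteristics.get(
+                 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
+         blackLevels = new float[4];
+         for (int i = 0; i &lt; 4; i++) {
+             blackLevels[i] = pattern.getOffsetForIndex(i % 2, i / 2);
+         }
+     }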
+ </details>
+ <hal_details>
+ The values are given in row-column scan order, with the first value
+ corresponding to the element of the CFA in row=0, column=0.
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ <entry name="dynamicWhiteLevel" type="int32" visibility="public"
+ optional="true" >
+ <description>
+ Maximum raw value output by sensor for this frame.
+ </description>
+ <range> &gt;= 0</range>
+ <details>
+ Since the android.sensor.blackLevelPattern may change for different
+ capture settings (e.g., android.sensor.sensitivity), the white
+ level will change accordingly. This key is similar to
+ android.sensor.info.whiteLevel, but specifies the camera device
+ estimated white level for each frame.
+
+ This key will be available if android.sensor.opticalBlackRegions is
+ available or the camera device advertises this key via
+ {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
+ </details>
+ <hal_details>
+ The full bit depth of the sensor must be available in the raw data,
+ so the value for linear sensors should not be significantly lower
+ than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
+ </hal_details>
+ <tag id="RAW" />
+ </entry>
+ </dynamic>
+ <static>
+ <entry name="opaqueRawSize" type="int32" visibility="system" container="array">
+ <array>
+ <size>n</size>
+ <size>3</size>
+ </array>
+ <description>Size in bytes for all the listed opaque RAW buffer sizes</description>
+ <range>Must be large enough to fit the opaque RAW of corresponding size produced by
+ the camera</range>
+ <details>
+ These configurations are listed as `(width, height, size_in_bytes)` tuples.
+ This is used for sizing the gralloc buffers for opaque RAW buffers.
+ All RAW_OPAQUE output stream configuration listed in
+ android.scaler.availableStreamConfigurations will have a corresponding tuple in
+ this key.
+ </details>
+ <hal_details>
+ This key is added in HAL3.4.
+ For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key.
+ For HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera framework
+ will derive this key by assuming each pixel takes two bytes and no padding bytes
+ between rows.
+ </hal_details>
+ </entry>
+ </static>
+ </section>
+ <section name="shading">
+ <controls>
+ <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+ <enum>
+ <value>OFF
+ <notes>No lens shading correction is applied.</notes></value>
+ <value>FAST
+ <notes>Apply lens shading corrections, without slowing
+ frame rate relative to sensor raw output</notes></value>
+ <value>HIGH_QUALITY
+ <notes>Apply high-quality lens shading correction, at the
+ cost of possibly reduced frame rate.</notes></value>
+ </enum>
+ <description>Quality of lens shading correction applied
+ to the image data.</description>
+ <range>android.shading.availableModes</range>
+ <details>
+ When set to OFF mode, no lens shading correction will be applied by the
+ camera device, and an identity lens shading map data will be provided
+ if `android.statistics.lensShadingMapMode == ON`. For example, for a
+ lens shading map of size `[ 4, 3 ]`,
+ the output android.statistics.lensShadingCorrectionMap for this case will be an identity
+ map shown below:
+
+ [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
+
+ When set to other modes, lens shading correction will be applied by the camera
+ device. Applications can request lens shading map data by setting
+ android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
+ shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
+ data will be the one applied by the camera device for this capture request.
+
+ The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
+ the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
+ AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=`
+ OFF), to get the best results, it is recommended that applications wait for the AE and AWB
+ to converge before using the returned shading map data.
+ </details>
+ </entry>
+ <entry name="strength" type="byte">
+ <description>Control the amount of shading correction
+ applied to the images</description>
+ <units>unitless: 1-10; 10 is full shading
+ compensation</units>
+ <tag id="FUTURE" />
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.shading.mode" kind="controls">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="availableModes" type="byte" visibility="public"
+ type_notes="List of enums (android.shading.mode)." container="array"
+ typedef="enumList" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of lens shading modes for android.shading.mode that are supported by this camera device.
+ </description>
+ <range>Any value listed in android.shading.mode</range>
+ <details>
+ This list contains lens shading modes that can be set for the camera device.
+ Camera devices that support the MANUAL_POST_PROCESSING capability will always
+ list OFF and FAST mode. This includes all FULL level devices.
+ LEGACY devices will always only support FAST mode.
+ </details>
+ <hal_details>
+ HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
+ available on the camera device, but the underlying implementation can be the same for
+ both modes. That is, if the highest quality implementation on the camera device does not
+ slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
+ </hal_details>
+ </entry>
+ </static>
+ </section>
+ <section name="statistics">
+ <controls>
+ <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value>OFF
+ <notes>Do not include face detection statistics in capture
+ results.</notes></value>
+ <value optional="true">SIMPLE
+ <notes>Return face rectangle and confidence values only.
+ </notes></value>
+ <value optional="true">FULL
+ <notes>Return all face
+ metadata.
+
+ In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
+ </notes></value>
+ </enum>
+ <description>Operating mode for the face detector
+ unit.</description>
+ <range>android.statistics.info.availableFaceDetectModes</range>
+ <details>Whether face detection is enabled, and whether it
+ should output just the basic fields or the full set of
+ fields.</details>
+ <hal_details>
+ SIMPLE mode must fill in android.statistics.faceRectangles and
+ android.statistics.faceScores.
+ FULL mode must also fill in android.statistics.faceIds, and
+ android.statistics.faceLandmarks.
+ </hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
+ <enum>
+ <value>OFF</value>
+ <value>ON</value>
+ </enum>
+ <description>Operating mode for histogram
+ generation</description>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
+ <enum>
+ <value>OFF</value>
+ <value>ON</value>
+ </enum>
+ <description>Operating mode for sharpness map
+ generation</description>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
+ typedef="boolean">
+ <enum>
+ <value>OFF
+ <notes>Hot pixel map production is disabled.
+ </notes></value>
+ <value>ON
+ <notes>Hot pixel map production is enabled.
+ </notes></value>
+ </enum>
+ <description>
+ Operating mode for hot pixel map generation.
+ </description>
+ <range>android.statistics.info.availableHotPixelMapModes</range>
+ <details>
+ If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
+ If set to `false`, no hot pixel map will be returned.
+ </details>
+ <tag id="V1" />
+ <tag id="RAW" />
+ </entry>
+ </controls>
+ <static>
+ <namespace name="info">
+ <entry name="availableFaceDetectModes" type="byte"
+ visibility="public"
+ type_notes="List of enums from android.statistics.faceDetectMode"
+ container="array"
+ typedef="enumList"
+ hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List of face detection modes for android.statistics.faceDetectMode that are
+ supported by this camera device.
+ </description>
+ <range>Any value listed in android.statistics.faceDetectMode</range>
+ <details>OFF is always supported.
+ </details>
+ </entry>
+ <entry name="histogramBucketCount" type="int32">
+ <description>Number of histogram buckets
+ supported</description>
+ <range>&gt;= 64</range>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
+ <description>The maximum number of simultaneously detectable
+ faces.</description>
+ <range>0 for cameras without available face detection; otherwise:
+ `>=4` for LIMITED or FULL hwlevel devices or
+ `>0` for LEGACY devices.</range>
+ <tag id="BC" />
+ </entry>
+ <entry name="maxHistogramCount" type="int32">
+ <description>Maximum value possible for a histogram
+ bucket</description>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="maxSharpnessMapValue" type="int32">
+ <description>Maximum value possible for a sharpness map
+ region.</description>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="sharpnessMapSize" type="int32"
+ type_notes="width x height" container="array" typedef="size">
+ <array>
+ <size>2</size>
+ </array>
+ <description>Dimensions of the sharpness
+ map</description>
+ <range>Must be at least 32 x 32</range>
+ <tag id="FUTURE" />
+ </entry>
+ <entry name="availableHotPixelMapModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="boolean">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
+ supported by this camera device.
+ </description>
+ <range>Any value listed in android.statistics.hotPixelMapMode</range>
+ <details>
+ If no hot pixel map output is available for this camera device, this will contain only
+ `false`.
+
+ ON is always supported on devices with the RAW capability.
+ </details>
+ <tag id="V1" />
+ <tag id="RAW" />
+ </entry>
+ <entry name="availableLensShadingMapModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of lens shading map output modes for android.statistics.lensShadingMapMode that
+ are supported by this camera device.
+ </description>
+ <range>Any value listed in android.statistics.lensShadingMapMode</range>
+ <details>
+ If no lens shading map output is available for this camera device, this key will
+ contain only OFF.
+
+ ON is always supported on devices with the RAW capability.
+ LEGACY mode devices will always only support OFF.
+ </details>
+ </entry>
+ </namespace>
+ </static>
+ <dynamic>
+ <clone entry="android.statistics.faceDetectMode"
+ kind="controls"></clone>
+ <entry name="faceIds" type="int32" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List of unique IDs for detected faces.</description>
+ <details>
+ Each detected face is given a unique ID that is valid for as long as the face is visible
+ to the camera device. A face that leaves the field of view and later returns may be
+ assigned a new ID.
+
+ Only available if android.statistics.faceDetectMode == FULL</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="faceLandmarks" type="int32" visibility="ndk_public"
+ type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ <size>6</size>
+ </array>
+ <description>List of landmarks for detected
+ faces.</description>
+ <details>
+ The coordinate system is that of android.sensor.info.activeArraySize, with
+ `(0, 0)` being the top-left pixel of the active array.
+
+ Only available if android.statistics.faceDetectMode == FULL</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="faceRectangles" type="int32" visibility="ndk_public"
+ type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
+ container="array" typedef="rectangle" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ <size>4</size>
+ </array>
+ <description>List of the bounding rectangles for detected
+ faces.</description>
+ <details>
+ The data representation is int[4], which maps to (left, top, width, height).
+
+ The coordinate system is that of android.sensor.info.activeArraySize, with
+ `(0, 0)` being the top-left pixel of the active array.
+
+ Only available if android.statistics.faceDetectMode != OFF</details>
+ <tag id="BC" />
+ </entry>
+ <entry name="faceScores" type="byte" visibility="ndk_public"
+ container="array" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List of the face confidence scores for
+ detected faces</description>
+ <range>1-100</range>
+ <details>Only available if android.statistics.faceDetectMode != OFF.
+ </details>
+ <hal_details>
+ The value should be meaningful (for example, setting 100 at
+ all times is illegal).</hal_details>
+ <tag id="BC" />
+ </entry>
+ <entry name="faces" type="int32" visibility="java_public" synthetic="true"
+ container="array" typedef="face" hwlevel="legacy">
+ <array>
+ <size>n</size>
+ </array>
+ <description>List of the faces detected through camera face detection
+ in this capture.</description>
+ <details>
+ Only available if android.statistics.faceDetectMode `!=` OFF.
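+
+ A usage sketch (`builder` and `result` are an assumed
+ CaptureRequest.Builder and TotalCaptureResult):
+
+     builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
+             CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL);
+     // Later, in the capture callback:
+     for (Face face : result.get(CaptureResult.STATISTICS_FACES)) {
+         Rect bounds = face.getBounds(); // activeArraySize coordinates
+         int score = face.getScore();    // 1 (lowest) to 100 (highest)
+     }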
+ </details>
+ </entry>
+ <entry name="histogram" type="int32"
+ type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
+ container="array">
+ <array>
+ <size>n</size>
+ <size>3</size>
+ </array>
+ <description>A 3-channel histogram based on the raw
+ sensor data</description>
+ <details>The k'th bucket (0-based) covers the input range
+ (with w = android.sensor.info.whiteLevel) of [ k * w/N,
+ (k + 1) * w / N ). If only a monochrome histogram is
+ supported, all channels should have the same data.</details>
+ <tag id="FUTURE" />
+ </entry>
+ <clone entry="android.statistics.histogramMode"
+ kind="controls"></clone>
+ <entry name="sharpnessMap" type="int32"
+ type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
+ container="array">
+ <array>
+ <size>n</size>
+ <size>m</size>
+ <size>3</size>
+ </array>
+ <description>A 3-channel sharpness map, based on the raw
+ sensor data</description>
+ <details>If only a monochrome sharpness map is supported,
+ all channels should have the same data.</details>
+ <tag id="FUTURE" />
+ </entry>
+ <clone entry="android.statistics.sharpnessMapMode"
+ kind="controls"></clone>
+ <entry name="lensShadingCorrectionMap" type="byte" visibility="public"
+ typedef="lensShadingMap" hwlevel="full">
+ <description>The shading map is a low-resolution floating-point map
+ that lists the coefficients used to correct for vignetting, for each
+ Bayer color channel.</description>
+ <range>Each gain factor is &gt;= 1</range>
+ <details>The least shaded section of the image should have a gain factor
+ of 1; all other sections should have gains above 1.
+
+ When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
+ must take into account the colorCorrection settings.
+
+ The shading map is for the entire active pixel array, and is not
+ affected by the crop region specified in the request. Each shading map
+ entry is the value of the shading compensation map over a specific
+ pixel on the sensor. Specifically, with a (N x M) resolution shading
+ map, and an active pixel array size (W x H), shading map entry
+ (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ The map is assumed to be bilinearly interpolated between the sample points.
+
+ The channel order is [R, Geven, Godd, B], where Geven is the green
+ channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ The shading map is stored in a fully interleaved format.
+
+ The shading map should have on the order of 30-40 rows and columns,
+ and must be smaller than 64x64.
+
+ As an example, given a very small map defined as:
+
+ width,height = [ 4, 3 ]
+ values =
+ [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
+ 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
+ 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
+ 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
+ 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
+
+ The low-resolution scaling map images for each channel are
+ (displayed using nearest-neighbor interpolation):
+
+ ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
+ ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
+ ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
+ ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
+
+ As a visualization only, inverting the full-color map to recover an
+ image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
+
+ ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
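+
+ As a usage sketch, the gain factors can be read back per channel and
+ sample point (`result` is an assumed TotalCaptureResult):
+
+     LensShadingMap map =
+             result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
+     int columns = map.getColumnCount(); // N
+     int rows = map.getRowCount();       // M
+     // Gain for the red channel at sample point (column 0, row 0).
+     float redGain = map.getGainFactor(RggbChannelVector.RED, 0, 0);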
+ </details>
+ </entry>
+ <entry name="lensShadingMap" type="float" visibility="ndk_public"
+ type_notes="2D array of float gain factors per channel to correct lens shading"
+ container="array" hwlevel="full">
+ <array>
+ <size>4</size>
+ <size>n</size>
+ <size>m</size>
+ </array>
+ <description>The shading map is a low-resolution floating-point map
+ that lists the coefficients used to correct for vignetting and color shading,
+ for each Bayer color channel of RAW image data.</description>
+ <range>Each gain factor is &gt;= 1</range>
+ <details>
+ The lens shading correction is defined as a full shading correction that
+ corrects both color shading and vignetting for the output non-RAW images. After the
+ shading map is applied, the output non-RAW images will be flat-field images
+ for flat scenes under uniform illumination.
+
+ When there is no lens shading correction applied to RAW output images
+ (android.sensor.info.lensShadingApplied `==` false), this map is a full lens
+ shading correction map; when there is some lens shading correction applied
+ to the RAW output image (android.sensor.info.lensShadingApplied `==` true),
+ this map reports the remaining lens shading correction map that needs to be
+ applied to get fully shading corrected images.
+
+ For a full shading correction map, the least shaded section of the image
+ should have a gain factor of 1; all other sections should have gains above 1.
+
+ When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
+ must take into account the colorCorrection settings.
+
+ The shading map is for the entire active pixel array, and is not
+ affected by the crop region specified in the request. Each shading map
+ entry is the value of the shading compensation map over a specific
+ pixel on the sensor. Specifically, with a (N x M) resolution shading
+ map, and an active pixel array size (W x H), shading map entry
+ (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ The map is assumed to be bilinearly interpolated between the sample points.
+
+ The channel order is [R, Geven, Godd, B], where Geven is the green
+ channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ The shading map is stored in a fully interleaved format, and its size
+ is provided in the camera static metadata by android.lens.info.shadingMapSize.
+
+ The shading map should have on the order of 30-40 rows and columns,
+ and must be smaller than 64x64.
+
+ As an example, given a very small map defined as:
+
+ android.lens.info.shadingMapSize = [ 4, 3 ]
+ android.statistics.lensShadingMap =
+ [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
+ 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
+ 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
+ 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
+ 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
+ 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
+
+ The low-resolution scaling map images for each channel are
+ (displayed using nearest-neighbor interpolation):
+
+ ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
+ ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
+ ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
+ ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
+
+ As a visualization only, inverting the full-color map to recover an
+ image of a gray wall (using bicubic interpolation for visual quality)
+ as captured by the sensor gives:
+
+ ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
+
+ Note that the RAW image data might be subject to lens shading
+ correction not reported on this map. Query
+ android.sensor.info.lensShadingApplied to see if RAW image data has been
+ subject to lens shading correction. If android.sensor.info.lensShadingApplied
+ is TRUE, the RAW image data is subject to partial or full lens shading
+ correction. In the case that full lens shading correction is applied to RAW
+ images, the gain factor map reported in this key will contain all 1.0 gains.
+ In other words, the map reported in this key is the remaining lens shading
+ that needs to be applied on the RAW image to get images without lens shading
+ artifacts. See android.request.maxNumOutputRaw for a list of RAW image
+ formats.
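+
+ As an illustrative sketch of the sampling scheme above (`map` is an
+ assumed flattened N x M x 4 gain array, with `n`, `m` its dimensions
+ and `w`, `h` the active array size), the gain for channel `c` at an
+ arbitrary pixel `(px, py)` could be computed by bilinear interpolation:
+
+     // Map the pixel position onto the (N-1) x (M-1) grid of samples.
+     float gx = px * (n - 1.0f) / (w - 1.0f);
+     float gy = py * (m - 1.0f) / (h - 1.0f);
+     int x0 = (int) gx, y0 = (int) gy;
+     int x1 = Math.min(x0 + 1, n - 1), y1 = Math.min(y0 + 1, m - 1);
+     float fx = gx - x0, fy = gy - y0;
+     // Fully interleaved layout: gain(x, y, c) = map[(y * n + x) * 4 + c].
+     float top = map[(y0 * n + x0) * 4 + c] * (1 - fx)
+             + map[(y0 * n + x1) * 4 + c] * fx;
+     float bottom = map[(y1 * n + x0) * 4 + c] * (1 - fx)
+             + map[(y1 * n + x1) * 4 + c] * fx;
+     float gain = top * (1 - fy) + bottom * fy;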
+ </details>
+ <hal_details>
+ The lens shading map calculation may depend on exposure and white balance statistics.
+ When AE and AWB are in AUTO modes
+ (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
+ may have all the information it needs to generate the most accurate lens shading map. When
+ AE or AWB are in manual mode
+ (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
+ may be adversely impacted by manual exposure or white balance parameters. To avoid
+ generating unreliable shading map data, the HAL may choose to lock the shading map with
+ the latest known good map generated when the AE and AWB are in AUTO modes.
+ </hal_details>
+ </entry>
+ <entry name="predictedColorGains" type="float"
+ visibility="hidden"
+ deprecated="true"
+ optional="true"
+ type_notes="A 1D array of floats for 4 color channel gains"
+ container="array">
+ <array>
+ <size>4</size>
+ </array>
+ <description>The best-fit color channel gains calculated
+ by the camera device's statistics units for the current output frame.
+ </description>
+ <details>
+ This may be different than the gains used for this frame,
+ since statistics processing on data from a new frame
+ typically completes after the transform has already been
+ applied to that frame.
+
+ The 4 channel gains are defined in Bayer domain,
+ see android.colorCorrection.gains for details.
+
+ This value should always be calculated by the auto-white balance (AWB) block,
+ regardless of the android.control.* current values.
+ </details>
+ </entry>
+ <entry name="predictedColorTransform" type="rational"
+ visibility="hidden"
+ deprecated="true"
+ optional="true"
+ type_notes="3x3 rational matrix in row-major order"
+ container="array">
+ <array>
+ <size>3</size>
+ <size>3</size>
+ </array>
+ <description>The best-fit color transform matrix estimate
+ calculated by the camera device's statistics units for the current
+ output frame.</description>
+ <details>The camera device will provide the estimate from its
+ statistics unit on the white balance transforms to use
+ for the next frame. These are the values the camera device believes
+ are the best fit for the current output frame. This may
+ be different than the transform used for this frame, since
+ statistics processing on data from a new frame typically
+ completes after the transform has already been applied to
+ that frame.
+
+ These estimates must be provided for all frames, even if
+ capture settings and color transforms are set by the application.
+
+ This value should always be calculated by the auto-white balance (AWB) block,
+ regardless of the android.control.* current values.
+ </details>
+ </entry>
+ <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
+ hwlevel="full">
+ <enum>
+ <value>NONE
+ <notes>The camera device does not detect any flickering illumination
+ in the current scene.</notes></value>
+ <value>50HZ
+ <notes>The camera device detects illumination flickering at 50Hz
+ in the current scene.</notes></value>
+ <value>60HZ
+ <notes>The camera device detects illumination flickering at 60Hz
+ in the current scene.</notes></value>
+ </enum>
+ <description>The camera device estimated scene illumination lighting
+ frequency.</description>
+ <details>
+ Many light sources, such as most fluorescent lights, flicker at a rate
+ that depends on the local utility power standards. This flicker must be
+ accounted for by auto-exposure routines to avoid artifacts in captured images.
+ The camera device uses this entry to tell the application what the scene
+ illuminant frequency is.
+
+ When manual exposure control is enabled
+ (`android.control.aeMode == OFF` or `android.control.mode ==
+ OFF`), the android.control.aeAntibandingMode doesn't perform
+ antibanding, and the application can ensure it selects
+ exposure times that do not cause banding issues by looking
+ into this metadata field. See
+ android.control.aeAntibandingMode for more details.
+
+ Reports NONE if there doesn't appear to be flickering illumination.
+ </details>
+ </entry>
+ <clone entry="android.statistics.hotPixelMapMode" kind="controls">
+ </clone>
+ <entry name="hotPixelMap" type="int32" visibility="public"
+ type_notes="list of coordinates based on android.sensor.pixelArraySize"
+ container="array" typedef="point">
+ <array>
+ <size>2</size>
+ <size>n</size>
+ </array>
+ <description>
+ List of `(x, y)` coordinates of hot/defective pixels on the sensor.
+ </description>
+ <range>
+ n &lt;= number of pixels on the sensor.
+ The `(x, y)` coordinates must be bounded by
+ android.sensor.info.pixelArraySize.
+ </range>
+ <details>
+ A coordinate `(x, y)` must lie between `(0, 0)`, and
+ `(width - 1, height - 1)` (inclusive), which are the top-left and
+ bottom-right of the pixel array, respectively. The width and
+ height dimensions are given in android.sensor.info.pixelArraySize.
+ This may include hot pixels that lie outside of the active array
+ bounds given by android.sensor.info.activeArraySize.
+ </details>
+ <hal_details>
+ A hotpixel map contains the coordinates of pixels on the camera
+ sensor that do not report valid values (usually due to defects in
+ the camera sensor). This includes pixels that are stuck at certain
+ values, or have a response that does not accurately encode the
+ incoming light from the scene.
+
+ To avoid performance issues, there should be significantly fewer hot
+ pixels than actual pixels on the camera sensor.
+ </hal_details>
+ <tag id="V1" />
+ <tag id="RAW" />
+ </entry>
+ </dynamic>
+ <controls>
+ <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
+ <enum>
+ <value>OFF
+ <notes>Do not include a lens shading map in the capture result.</notes></value>
+ <value>ON
+ <notes>Include a lens shading map in the capture result.</notes></value>
+ </enum>
+ <description>Whether the camera device will output the lens
+ shading map in output result metadata.</description>
+ <range>android.statistics.info.availableLensShadingMapModes</range>
+ <details>When set to ON,
+ android.statistics.lensShadingMap will be provided in
+ the output result metadata.
+
+ ON is always supported on devices with the RAW capability.
+ </details>
+ <tag id="RAW" />
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.statistics.lensShadingMapMode" kind="controls">
+ </clone>
+ </dynamic>
+ </section>
+ <section name="tonemap">
+ <controls>
+ <entry name="curveBlue" type="float" visibility="ndk_public"
+ type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
+ container="array" hwlevel="full">
+ <array>
+ <size>n</size>
+ <size>2</size>
+ </array>
+ <description>Tonemapping / contrast / gamma curve for the blue
+ channel, to use when android.tonemap.mode is
+ CONTRAST_CURVE.</description>
+ <details>See android.tonemap.curveRed for more details.</details>
+ </entry>
+ <entry name="curveGreen" type="float" visibility="ndk_public"
+ type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
+ container="array" hwlevel="full">
+ <array>
+ <size>n</size>
+ <size>2</size>
+ </array>
+ <description>Tonemapping / contrast / gamma curve for the green
+ channel, to use when android.tonemap.mode is
+ CONTRAST_CURVE.</description>
+ <details>See android.tonemap.curveRed for more details.</details>
+ </entry>
+ <entry name="curveRed" type="float" visibility="ndk_public"
+ type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
+ container="array" hwlevel="full">
+ <array>
+ <size>n</size>
+ <size>2</size>
+ </array>
+ <description>Tonemapping / contrast / gamma curve for the red
+ channel, to use when android.tonemap.mode is
+ CONTRAST_CURVE.</description>
+ <range>0-1 on both input and output coordinates, normalized
+ as a floating-point value such that 0 == black and 1 == white.
+ </range>
+ <details>
+ Each channel's curve is defined by an array of control points:
+
+ android.tonemap.curveRed =
+ [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
+ 2 &lt;= N &lt;= android.tonemap.maxCurvePoints
+
+ These are sorted in order of increasing `Pin`; it is
+ required that input values 0.0 and 1.0 are included in the list to
+ define a complete mapping. For input values between control points,
+ the camera device must linearly interpolate between the control
+ points.
+
+ Each curve can have an independent number of points, and the number
+ of points can be less than max (that is, the request doesn't have to
+ always provide a curve with number of points equivalent to
+ android.tonemap.maxCurvePoints).
+
+ A few examples, and their corresponding graphical mappings; these
+ only specify the red channel and the precision is limited to 4
+ digits, for conciseness.
+
+ Linear mapping:
+
+ android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
+
+ ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
+
+ Invert mapping:
+
+ android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
+
+ ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
+
+ Gamma 1/2.2 mapping, with 16 control points:
+
+ android.tonemap.curveRed = [
+ 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
+ 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
+ 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
+ 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
+
+ ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
+
+ Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
+
+ android.tonemap.curveRed = [
+ 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
+ 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
+ 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
+ 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
+
+ ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
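+
+ As a sketch, the gamma 1/2.2 table above could be generated
+ programmatically rather than hand-entered:
+
+     int points = 16;
+     float[] curveRed = new float[points * 2];
+     for (int i = 0; i &lt; points; i++) {
+         float pIn = i / (points - 1.0f);
+         curveRed[2 * i] = pIn;                                // P_IN
+         curveRed[2 * i + 1] = (float) Math.pow(pIn, 1 / 2.2); // P_OUT
+     }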
+ </details>
+ <hal_details>
+ For good quality of mapping, at least 128 control points are
+ preferred.
+
+ A typical use case of this would be a gamma-1/2.2 curve, with as many
+ control points used as are available.
+ </hal_details>
+ </entry>
+ <entry name="curve" type="float" visibility="java_public" synthetic="true"
+ typedef="tonemapCurve"
+ hwlevel="full">
+ <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
+ is CONTRAST_CURVE.</description>
+ <details>
+ The tonemapCurve consists of three curves, one each for the red, green,
+ and blue channels. The following example uses the red channel; the
+ same logic applies to the green and blue channels.
+ Each channel's curve is defined by an array of control points:
+
+ curveRed =
+ [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
+ 2 &lt;= N &lt;= android.tonemap.maxCurvePoints
+
+ These are sorted in order of increasing `Pin`; it is always
+ guaranteed that input values 0.0 and 1.0 are included in the list to
+ define a complete mapping. For input values between control points,
+ the camera device must linearly interpolate between the control
+ points.
+
+ Each curve can have an independent number of points, and the number
+ of points can be less than max (that is, the request doesn't have to
+ always provide a curve with number of points equivalent to
+ android.tonemap.maxCurvePoints).
+
+ A few examples, and their corresponding graphical mappings; these
+ only specify the red channel and the precision is limited to 4
+ digits, for conciseness.
+
+ Linear mapping:
+
+ curveRed = [ (0, 0), (1.0, 1.0) ]
+
+ ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
+
+ Invert mapping:
+
+ curveRed = [ (0, 1.0), (1.0, 0) ]
+
+ ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
+
+ Gamma 1/2.2 mapping, with 16 control points:
+
+ curveRed = [
+ (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
+ (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
+ (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
+ (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
+
+ ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
+
+ Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
+
+ curveRed = [
+ (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
+ (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
+ (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
+ (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
+
+ ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
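+
+ As a usage sketch in Java (`builder` is an assumed
+ CaptureRequest.Builder), a linear curve could be applied with:
+
+     float[] linear = {
+             0.0f, 0.0f,   // P0(in, out)
+             1.0f, 1.0f }; // P1(in, out)
+     builder.set(CaptureRequest.TONEMAP_MODE,
+             CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
+     builder.set(CaptureRequest.TONEMAP_CURVE,
+             new TonemapCurve(linear, linear, linear));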
+ </details>
+ <hal_details>
+ This entry is created by the framework from the curveRed, curveGreen and
+ curveBlue entries.
+ </hal_details>
+ </entry>
+ <entry name="mode" type="byte" visibility="public" enum="true"
+ hwlevel="full">
+ <enum>
+ <value>CONTRAST_CURVE
+ <notes>Use the tone mapping curve specified in
+ the android.tonemap.curve* entries.
+
+ All color enhancement and tonemapping must be disabled, except
+ for applying the tonemapping curve specified by
+ android.tonemap.curve.
+
+ Must not slow down frame rate relative to raw
+ sensor output.
+ </notes>
+ </value>
+ <value>FAST
+ <notes>
+ Advanced gamma mapping and color enhancement may be applied, without
+ reducing frame rate compared to raw sensor output.
+ </notes>
+ </value>
+ <value>HIGH_QUALITY
+ <notes>
+ High-quality gamma mapping and color enhancement will be applied, at
+ the cost of possibly reduced frame rate compared to raw sensor output.
+ </notes>
+ </value>
+ <value>GAMMA_VALUE
+ <notes>
+ Use the gamma value specified in android.tonemap.gamma to perform
+ tonemapping.
+
+ All color enhancement and tonemapping must be disabled, except
+ for applying the tonemapping curve specified by android.tonemap.gamma.
+
+ Must not slow down frame rate relative to raw sensor output.
+ </notes>
+ </value>
+ <value>PRESET_CURVE
+ <notes>
+ Use the preset tonemapping curve specified in
+ android.tonemap.presetCurve to perform tonemapping.
+
+ All color enhancement and tonemapping must be disabled, except
+ for applying the tonemapping curve specified by
+ android.tonemap.presetCurve.
+
+ Must not slow down frame rate relative to raw sensor output.
+ </notes>
+ </value>
+ </enum>
+ <description>High-level global contrast/gamma/tonemapping control.
+ </description>
+ <range>android.tonemap.availableToneMapModes</range>
+ <details>
+ When switching to an application-defined contrast curve by setting
+ android.tonemap.mode to CONTRAST_CURVE, the curve is defined
+ per-channel with a set of `(in, out)` points that specify the
+ mapping from input high-bit-depth pixel value to the output
+ low-bit-depth value. Since the actual pixel ranges of both input
+ and output may change depending on the camera pipeline, the values
+ are specified by normalized floating-point numbers.
+
+ More-complex color mapping operations such as 3D color look-up
+ tables, selective chroma enhancement, or other non-linear color
+ transforms will be disabled when android.tonemap.mode is
+ CONTRAST_CURVE.
+
+ When using either FAST or HIGH_QUALITY, the camera device will
+ emit its own tonemap curve in android.tonemap.curve.
+ These values are always available, and as close as possible to the
+ actually used nonlinear/nonglobal transforms.
+
+ If a request is sent with CONTRAST_CURVE with the camera device's
+ provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
+ roughly the same.</details>
+ </entry>
+ </controls>
+ <static>
+ <entry name="maxCurvePoints" type="int32" visibility="public"
+ hwlevel="full">
+ <description>Maximum number of supported points in the
+ tonemap curve that can be used for android.tonemap.curve.
+ </description>
+ <details>
+ If the actual number of points provided by the application (in android.tonemap.curve*) is
+ less than this maximum, the camera device will resample the curve to its internal
+ representation, using linear interpolation.
+
+ The output curves in the result metadata may have a different number
+ of points than the input curves, and will represent the actual
+ hardware curves used as closely as possible when linearly interpolated.
+ </details>
+ <hal_details>
+ This value must be at least 64; a value of at least 128 is recommended.
+ </hal_details>
+ </entry>
+ <entry name="availableToneMapModes" type="byte" visibility="public"
+ type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
+ <array>
+ <size>n</size>
+ </array>
+ <description>
+ List of tonemapping modes for android.tonemap.mode that are supported by this camera
+ device.
+ </description>
+ <range>Any value listed in android.tonemap.mode</range>
+ <details>
+ Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
+ at least one of the following mode combinations:
+
+ * CONTRAST_CURVE, FAST and HIGH_QUALITY
+ * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
+
+ This includes all FULL level devices.
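+
+ For example, an application could check for GAMMA_VALUE support with a
+ snippet like the following (illustrative; `c` is assumed to be a
+ CameraCharacteristics instance):
+
+ int[] modes = c.get(CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES);
+ boolean gammaSupported = false;
+ for (int mode : modes) {
+ gammaSupported |= (mode == CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
+ }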
+ </details>
+ <hal_details>
+ HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
+ on the camera device, but the underlying implementation can be the same for both modes.
+ That is, if the highest quality implementation on the camera device does not slow down
+ capture rate, then FAST and HIGH_QUALITY will generate the same output.
+ </hal_details>
+ </entry>
+ </static>
+ <dynamic>
+ <clone entry="android.tonemap.curveBlue" kind="controls">
+ </clone>
+ <clone entry="android.tonemap.curveGreen" kind="controls">
+ </clone>
+ <clone entry="android.tonemap.curveRed" kind="controls">
+ </clone>
+ <clone entry="android.tonemap.curve" kind="controls">
+ </clone>
+ <clone entry="android.tonemap.mode" kind="controls">
+ </clone>
+ </dynamic>
+ <controls>
+ <entry name="gamma" type="float" visibility="public">
+ <description> Tonemapping curve to use when android.tonemap.mode is
+ GAMMA_VALUE
+ </description>
+ <details>
+ The tonemap curve will be defined by the following formula:
+ * OUT = pow(IN, 1.0 / gamma)
+ where IN and OUT are the input and output pixel values scaled to the range
+ [0.0, 1.0], pow is the power function, and gamma is the gamma value
+ specified by this key.
+
+ The same curve will be applied to all color channels. The camera device
+ may clip the input gamma value to its supported range. The actual applied
+ value will be returned in the capture result.
+
+ The valid range of the gamma value varies between devices, but values
+ within [1.0, 5.0] are guaranteed not to be clipped.
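+
+ For example (a sketch; `builder` is assumed to be an existing
+ CaptureRequest.Builder), requesting a standard 2.2 gamma curve:
+
+ builder.set(CaptureRequest.TONEMAP_MODE,
+ CaptureRequest.TONEMAP_MODE_GAMMA_VALUE);
+ builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
+ // With gamma = 2.2, an input of 0.25 maps to pow(0.25, 1 / 2.2) ~= 0.53.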
+ </details>
+ </entry>
+ <entry name="presetCurve" type="byte" visibility="public" enum="true">
+ <enum>
+ <value>SRGB
+ <notes>Tonemapping curve is defined by sRGB</notes>
+ </value>
+ <value>REC709
+ <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
+ </value>
+ </enum>
+ <description> Tonemapping curve to use when android.tonemap.mode is
+ PRESET_CURVE
+ </description>
+ <details>
+ The tonemap curve will be defined by the specified standard.
+
+ sRGB (approximated by 16 control points):
+
+ ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
+
+ Rec. 709 (approximated by 16 control points):
+
+ ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
+
+ Note that the figures above show a 16-control-point approximation of the
+ preset curves. Camera devices may apply a different approximation to the curve.
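+
+ For reference, the exact sRGB transfer function that the SRGB preset
+ approximates is the standard formula below (shown here as a sketch; camera
+ devices are only required to approximate it):
+
+ // Exact sRGB opto-electronic transfer function; in and out are in [0.0, 1.0].
+ static float srgb(float in) {
+ return (in <= 0.0031308f)
+ ? 12.92f * in
+ : 1.055f * (float) Math.pow(in, 1.0 / 2.4) - 0.055f;
+ }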
+ </details>
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.tonemap.gamma" kind="controls">
+ </clone>
+ <clone entry="android.tonemap.presetCurve" kind="controls">
+ </clone>
+ </dynamic>
+ </section>
+ <section name="led">
+ <controls>
+ <entry name="transmit" type="byte" visibility="hidden" optional="true"
+ enum="true" typedef="boolean">
+ <enum>
+ <value>OFF</value>
+ <value>ON</value>
+ </enum>
+ <description>This LED is nominally used to indicate to the user
+ that the camera is powered on and may be streaming images back to the
+ Application Processor. In certain rare circumstances, the OS may
+ disable this when video is processed locally and not transmitted to
+ any untrusted applications.
+
+ In particular, the LED *must* always be on when the data could be
+ transmitted off the device. The LED *should* always be on whenever
+ data is stored locally on the device.
+
+ The LED *may* be off if a trusted application is using the data in a way
+ that doesn't violate the above rules.
+ </description>
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.led.transmit" kind="controls"></clone>
+ </dynamic>
+ <static>
+ <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
+ enum="true"
+ container="array">
+ <array>
+ <size>n</size>
+ </array>
+ <enum>
+ <value>TRANSMIT
+ <notes>android.led.transmit control is used.</notes>
+ </value>
+ </enum>
+ <description>A list of camera LEDs that are available on this system.
+ </description>
+ </entry>
+ </static>
+ </section>
+ <section name="info">
+ <static>
+ <entry name="supportedHardwareLevel" type="byte" visibility="public"
+ enum="true" hwlevel="legacy">
+ <enum>
+ <value>
+ LIMITED
+ <notes>
+ This camera device does not have enough capabilities to qualify as a `FULL` device or
+ better.
+
+ Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
+ {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
+
+ All `LIMITED` devices support the `BACKWARD_COMPATIBLE` capability, indicating basic
+ support for color image capture. The only exception is that the device may
+ alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth
+ measurements and not color images.
+
+ `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger
+ to lock exposure metering (and calculate flash power, for cameras with flash) before
+ capturing a high-quality still image.
+
+ A `LIMITED` device that only lists the `BACKWARD_COMPATIBLE` capability is only
+ required to support full-automatic operation and post-processing (`OFF` is not
+ supported for android.control.aeMode, android.control.afMode, or
+ android.control.awbMode).
+
+ Additional capabilities may optionally be supported by a `LIMITED`-level device, and
+ can be checked for in android.request.availableCapabilities.
+ </notes>
+ </value>
+ <value>
+ FULL
+ <notes>
+ This camera device is capable of supporting advanced imaging applications.
+
+ The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
+ {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
+
+ A `FULL` device will support the following capabilities:
+
+ * `BURST_CAPTURE` capability (android.request.availableCapabilities contains
+ `BURST_CAPTURE`)
+ * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
+ * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`)
+ * Manual post-processing control (android.request.availableCapabilities contains
+ `MANUAL_POST_PROCESSING`)
+ * The required exposure time range defined in android.sensor.info.exposureTimeRange
+ * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
+
+ Note:
+ Before API level 23, `FULL` devices also supported arbitrary cropping regions
+ (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level
+ 23, and `FULL` devices may only support `CENTERED` cropping.
+ </notes>
+ </value>
+ <value>
+ LEGACY
+ <notes>
+ This camera device is running in backward compatibility mode.
+
+ Only the stream configurations listed in the `LEGACY` table in the {@link
+ ACameraDevice_createCaptureSession} documentation are supported.
+
+ A `LEGACY` device does not support per-frame control, manual sensor control, manual
+ post-processing, or arbitrary cropping regions, and has relaxed performance constraints.
+ No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a
+ `LEGACY` device in android.request.availableCapabilities.
+
+ In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY`
+ devices. Instead, every request that includes a JPEG-format output target is treated
+ as triggering a still capture, internally executing a precapture trigger. This may
+ fire the flash for flash power metering during precapture, and then fire the flash
+ for the final capture, if a flash is available on the device and the AE mode is set to
+ enable the flash.
+ </notes>
+ </value>
+ <value>
+ 3
+ <notes>
+ This camera device is capable of YUV reprocessing and RAW data capture, in addition to
+ FULL-level capabilities.
+
+ The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
+ `LIMITED` tables in the {@link
+ ACameraDevice_createCaptureSession}
+ documentation are guaranteed to be supported.
+
+ The following additional capabilities are guaranteed to be supported:
+
+ * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains
+ `YUV_REPROCESSING`)
+ * `RAW` capability (android.request.availableCapabilities contains
+ `RAW`)
+ </notes>
+ </value>
+ </enum>
+ <description>
+ Generally classifies the overall set of the camera device functionality.
+ </description>
+ <details>
+ The supported hardware level is a high-level description of the camera device's
+ capabilities, summarizing several capabilities into one field. Each level adds additional
+ features to the previous one, and is always a strict superset of the previous level.
+ The ordering is `LEGACY < LIMITED < FULL < LEVEL_3`.
+
+ Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing
+ numerical value as well. To check if a given device is at least at a given hardware level,
+ the following code snippet can be used:
+
+ // Returns true if the device supports the required hardware level, or better.
+ boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
+ int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ return requiredLevel == deviceLevel;
+ }
+ // deviceLevel is not LEGACY, can use numerical sort
+ return requiredLevel <= deviceLevel;
+ }
+
+ At a high level, the levels are:
+
+ * `LEGACY` devices operate in a backwards-compatibility mode for older
+ Android devices, and have very limited capabilities.
+ * `LIMITED` devices represent the
+ baseline feature set, and may also include additional capabilities that are
+ subsets of `FULL`.
+ * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and
+ post-processing settings, and image capture at a high rate.
+ * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along
+ with additional output stream configurations.
+
+ See the individual level enums for full descriptions of the supported capabilities. The
+ android.request.availableCapabilities entry describes the device's capabilities at a
+ finer-grain level, if needed. In addition, many controls have their available settings or
+ ranges defined in individual metadata tag entries in this document.
+
+ Some features are not part of any particular hardware level or capability and must be
+ queried separately. These include:
+
+ * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
+ * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
+ * Face detection (android.statistics.info.availableFaceDetectModes)
+ * Optical or electrical image stabilization
+ (android.lens.info.availableOpticalStabilization,
+ android.control.availableVideoStabilizationModes)
+
+ </details>
+ <hal_details>
+ The camera 3 HAL device can implement one of three possible operational modes: LIMITED,
+ FULL, and LEVEL_3.
+
+ FULL support or better is expected from new higher-end devices. Limited
+ mode has hardware requirements roughly in line with those for a camera HAL device v1
+ implementation, and is expected from older or inexpensive devices. Each level is a strict
+ superset of the previous level, and they share the same essential operational flow.
+
+ For full details refer to "S3. Operational Modes" in camera3.h.
+
+ Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in
+ the `android.hardware.camera2` user-facing API only on HALv1 devices, and is implemented
+ by the camera framework code.
+ </hal_details>
+ </entry>
+ </static>
+ </section>
+ <section name="blackLevel">
+ <controls>
+ <entry name="lock" type="byte" visibility="public" enum="true"
+ typedef="boolean" hwlevel="full">
+ <enum>
+ <value>OFF</value>
+ <value>ON</value>
+ </enum>
+ <description> Whether black-level compensation is locked
+ to its current values, or is free to vary.</description>
+ <details>When set to `true` (ON), the values used for black-level
+ compensation will not change until the lock is set to
+ `false` (OFF).
+
+ Since changes to certain capture parameters (such as
+ exposure time) may require resetting of black level
+ compensation, the camera device must report whether setting
+ the black level lock was successful in the output result
+ metadata.
+
+ For example, if a sequence of requests is as follows:
+
+ * Request 1: Exposure = 10ms, Black level lock = OFF
+ * Request 2: Exposure = 10ms, Black level lock = ON
+ * Request 3: Exposure = 10ms, Black level lock = ON
+ * Request 4: Exposure = 20ms, Black level lock = ON
+ * Request 5: Exposure = 20ms, Black level lock = ON
+ * Request 6: Exposure = 20ms, Black level lock = ON
+
+ And the exposure change in Request 4 requires the camera
+ device to reset the black level offsets, then the output
+ result metadata is expected to be:
+
+ * Result 1: Exposure = 10ms, Black level lock = OFF
+ * Result 2: Exposure = 10ms, Black level lock = ON
+ * Result 3: Exposure = 10ms, Black level lock = ON
+ * Result 4: Exposure = 20ms, Black level lock = OFF
+ * Result 5: Exposure = 20ms, Black level lock = ON
+ * Result 6: Exposure = 20ms, Black level lock = ON
+
+ This indicates to the application that on frame 4, black
+ levels were reset due to exposure value changes, and pixel
+ values may not be consistent across captures.
+
+ The camera device will maintain the lock to the extent
+ possible, only overriding the lock to OFF when changes to
+ other request parameters require a black level recalculation
+ or reset.
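+
+ A sketch of typical application usage (illustrative; `builder` and `result`
+ are assumed to be an existing CaptureRequest.Builder and a received
+ TotalCaptureResult):
+
+ builder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
+ // ... submit the request and wait for `result` ...
+ Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
+ if (locked != null && !locked) {
+ // The device reset black levels for this frame; pixel values may not
+ // be consistent with prior frames.
+ }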
+ </details>
+ <hal_details>
+ If for some reason black level locking is no longer possible
+ (for example, the analog gain has changed, which forces
+ black level offsets to be recalculated), then the HAL must
+ override this request (and it must report 'OFF' when this
+ does happen) until the next capture for which locking is
+ possible again.</hal_details>
+ <tag id="HAL2" />
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.blackLevel.lock"
+ kind="controls">
+ <details>
+ Whether the black level offset was locked for this frame. Should be
+ ON if android.blackLevel.lock was ON in the capture request, unless
+ a change in other capture settings forced the camera device to
+ perform a black level reset.
+ </details>
+ </clone>
+ </dynamic>
+ </section>
+ <section name="sync">
+ <dynamic>
+ <entry name="frameNumber" type="int64" visibility="ndk_public"
+ enum="true" hwlevel="legacy">
+ <enum>
+ <value id="-1">CONVERGING
+ <notes>
+ The current result is not yet fully synchronized to any request.
+
+ Synchronization is in progress, and reading metadata from this
+ result may include a mix of data that have taken effect since the
+ last synchronization time.
+
+ In some future result, within android.sync.maxLatency frames,
+ this value will update to the actual frame number that the result
+ is guaranteed to be synchronized to (as long as the request
+ settings remain constant).
+ </notes>
+ </value>
+ <value id="-2">UNKNOWN
+ <notes>
+ The current result's synchronization status is unknown.
+
+ The result may have already converged, or it may be in
+ progress. Reading from this result may include some mix
+ of settings from past requests.
+
+ After a settings change, the new settings will eventually all
+ take effect for the output buffers and results. However, this
+ value will not change when that happens. Altering settings
+ rapidly may provide outcomes using mixes of settings from recent
+ requests.
+
+ This value is intended primarily for backwards compatibility with
+ the older camera implementations (for android.hardware.Camera).
+ </notes>
+ </value>
+ </enum>
+ <description>The frame number corresponding to the last request
+ with which the output result (metadata + buffers) has been fully
+ synchronized.</description>
+ <range>Either a non-negative value corresponding to a
+ `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
+ </range>
+ <details>
+ When a request is submitted to the camera device, there is usually a
+ delay of several frames before the controls get applied. A camera
+ device may either choose to account for this delay by implementing a
+ pipeline and carefully submit well-timed atomic control updates, or
+ it may start streaming control changes that span over several frame
+ boundaries.
+
+ In the latter case, whenever a request's settings change relative to
+ the previous submitted request, the full set of changes may take
+ multiple frame durations to fully take effect. Some settings may
+ take effect sooner (in less frame durations) than others.
+
+ While a set of control changes are being propagated, this value
+ will be CONVERGING.
+
+ Once it is fully known that a set of control changes have been
+ finished propagating, and the resulting updated control settings
+ have been read back by the camera device, this value will be set
+ to a non-negative frame number (corresponding to the request to
+ which the results have synchronized).
+
+ Older camera device implementations may not have a way to detect
+ when all camera controls have been applied, and will always set this
+ value to UNKNOWN.
+
+ FULL capability devices will always have this value set to the
+ frame number of the request corresponding to this result.
+
+ _Further details_:
+
+ * Whenever a request differs from the last request, any future
+ results not yet returned may have this value set to CONVERGING (this
+ could include any in-progress captures not yet returned by the camera
+ device, for more details see pipeline considerations below).
+ * Submitting a series of multiple requests that differ from the
+ previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
+ moves the new synchronization frame to the last non-repeating
+ request (using the smallest frame number from the contiguous list of
+ repeating requests).
+ * Submitting the same request repeatedly will not change this value
+ to CONVERGING, if it was already a non-negative value.
+ * When this value changes to non-negative, that means that all of the
+ metadata controls from the request have been applied, all of the
+ metadata controls from the camera device have been read to the
+ updated values (into the result), and all of the graphics buffers
+ corresponding to this result are also synchronized to the request.
+
+ _Pipeline considerations_:
+
+ Submitting a request with updated controls relative to the previously
+ submitted requests may also invalidate the synchronization state
+ of all the results corresponding to currently in-flight requests.
+
+ In other words, results for this current request and up to
+ android.request.pipelineMaxDepth prior requests may have their
+ android.sync.frameNumber change to CONVERGING.
+ </details>
+ <hal_details>
+ Using UNKNOWN here is illegal unless android.sync.maxLatency
+ is also UNKNOWN.
+
+ FULL capability devices should simply set this value to the
+ `frame_number` of the request this result corresponds to.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ </dynamic>
+ <static>
+ <entry name="maxLatency" type="int32" visibility="public" enum="true"
+ hwlevel="legacy">
+ <enum>
+ <value id="0">PER_FRAME_CONTROL
+ <notes>
+ Every frame has the requests immediately applied.
+
+ Changing controls over multiple requests one after another will
+ produce results that have those controls applied atomically
+ each frame.
+
+ All FULL capability devices will have this as their maxLatency.
+ </notes>
+ </value>
+ <value id="-1">UNKNOWN
+ <notes>
+ Each new frame has some subset (potentially the entire set)
+ of the past requests applied to the camera settings.
+
+ By submitting a series of identical requests, the camera device
+ will eventually have the camera settings applied, but it is
+ unknown when that exact point will be.
+
+ All LEGACY capability devices will have this as their maxLatency.
+ </notes>
+ </value>
+ </enum>
+ <description>
+ The maximum number of frames that can occur after a request
+ (different from the previous one) has been submitted, and before the
+ result's state becomes synchronized.
+ </description>
+ <units>Frame counts</units>
+ <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
+ <details>
+ This defines the maximum distance (in number of metadata results),
+ between the frame number of the request that has new controls to apply
+ and the frame number of the result that has all the controls applied.
+
+ In other words, this acts as an upper bound on how many frames
+ must occur before the camera device knows for a fact that the newly
+ submitted camera settings have been applied to outgoing frames.
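+
+ For example, an application can detect per-frame control with a check like
+ this (illustrative; `c` is assumed to be a CameraCharacteristics instance):
+
+ Integer latency = c.get(CameraCharacteristics.SYNC_MAX_LATENCY);
+ boolean perFrameControl = (latency != null)
+ && (latency == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL);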
+ </details>
+ <hal_details>
+ For example if maxLatency was 2,
+
+ initial request = X (repeating)
+ request1 = X
+ request2 = Y
+ request3 = Y
+ request4 = Y
+
+ where requestN has frameNumber N, and the first of the repeating
+ initial requests has frameNumber F (and F < 1).
+
+ initial result = X' + { android.sync.frameNumber == F }
+ result1 = X' + { android.sync.frameNumber == F }
+ result2 = X' + { android.sync.frameNumber == CONVERGING }
+ result3 = X' + { android.sync.frameNumber == CONVERGING }
+ result4 = X' + { android.sync.frameNumber == 2 }
+
+ where resultN has frameNumber N.
+
+ Since `result4` has a `frameNumber == 4` and
+ `android.sync.frameNumber == 2`, the distance is clearly
+ `4 - 2 = 2`.
+
+ Use `frame_count` from camera3_request_t instead of
+ android.request.frameCount or
+ `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`.
+
+ LIMITED devices are strongly encouraged to use a non-negative
+ value. If UNKNOWN is used here then app developers do not have a way
+ to know when sensor settings have been applied.
+ </hal_details>
+ <tag id="V1" />
+ </entry>
+ </static>
+ </section>
+ <section name="reprocess">
+ <controls>
+ <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited">
+ <description>
+ The exposure time increase factor that was applied to the original output
+ frame by the application's processing before sending it for reprocessing.
+ </description>
+ <units>Relative exposure time increase factor.</units>
+ <range> &gt;= 1.0</range>
+ <details>
+ This is optional, and will be supported if the camera device supports YUV_REPROCESSING
+ capability (android.request.availableCapabilities contains YUV_REPROCESSING).
+
+ For some YUV reprocessing use cases, the application may choose to filter the original
+ output frames to effectively reduce the noise to the same level as a frame that was
+ captured with longer exposure time. To be more specific, assuming the original captured
+ images were captured with a sensitivity of S and an exposure time of T, the model in
+ the camera device is that the amount of noise in the image would be approximately what
+ would be expected if the original capture parameters had been a sensitivity of
+ S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
+ than S and T respectively. If the captured images were processed by the application
+ before being sent for reprocessing, then the application may have used image processing
+ algorithms and/or multi-frame image fusion to reduce the noise in the
+ application-processed images (input images). By using the effectiveExposureFactor
+ control, the application can communicate to the camera device the actual noise level
+ improvement in the application-processed image. With this information, the camera
+ device can select appropriate noise reduction and edge enhancement parameters to avoid
+ excessive noise reduction (android.noiseReduction.mode) and insufficient edge
+ enhancement (android.edge.mode) being applied to the reprocessed frames.
+
+ For example, for a multi-frame image fusion use case, the application may fuse
+ multiple output frames together into a final frame for reprocessing. When N images are
+ fused into 1 image for reprocessing, the exposure time increase factor could be up to
+ square root of N (based on a simple photon shot noise model). The camera device will
+ adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
+ produce the best quality images.
+
+ This is a relative factor; 1.0 indicates that the application hasn't processed the
+ input buffer in a way that affects its effective exposure time.
+
+ This control is only effective for YUV reprocessing capture requests. For noise
+ reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
+ Similarly, for edge enhancement reprocessing, it is only effective when
+ `android.edge.mode != OFF`.
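+
+ For instance (a sketch; `builder` is assumed to be a reprocess
+ CaptureRequest.Builder), after fusing four output frames into one input
+ image:
+
+ int fusedFrameCount = 4; // hypothetical fusion of 4 frames
+ // Simple photon shot noise model: noise improves by up to sqrt(N).
+ float factor = (float) Math.sqrt(fusedFrameCount); // 2.0
+ builder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, factor);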
+ </details>
+ <tag id="REPROC" />
+ </entry>
+ </controls>
+ <dynamic>
+ <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
+ </clone>
+ </dynamic>
+ <static>
+ <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited">
+ <description>
+ The maximal camera capture pipeline stall (in units of frame count) introduced by a
+ reprocess capture request.
+ </description>
+ <units>Number of frames.</units>
+ <range> &lt;= 4</range>
+ <details>
+ The key describes the maximal interference that one reprocess (input) request
+ can introduce to the camera's simultaneous streaming of regular (output) capture
+ requests, including repeating requests.
+
+ When a reprocessing capture request is submitted while a camera output repeating request
+ (e.g. preview) is being served by the camera device, it may preempt the camera capture
+ pipeline for at least one frame duration so that the camera device is unable to process
+ the following capture request in time for the next sensor start of exposure boundary.
+ When this happens, the application may observe a capture time gap (longer than one frame
+ duration) between adjacent capture output frames, which usually appears as a preview
+ glitch if the repeating request output targets include a preview surface. This key gives
+ the worst-case number of frames of stall introduced by one reprocess request with any
+ kind of format/size combination.
+
+ If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
+ ongoing camera repeating request outputs, as if the reprocess request were never issued.
+
+ This key is supported if the camera device supports PRIVATE or YUV reprocessing
+ (i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
+ YUV_REPROCESSING).
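+
+ An application can read this limit as follows (illustrative; `c` is assumed
+ to be a CameraCharacteristics instance):
+
+ Integer maxStall = c.get(CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);
+ // 0 means reprocess requests add no glitch to the repeating request outputs.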
+ </details>
+ <tag id="REPROC" />
+ </entry>
+ </static>
+ </section>
+ <section name="depth">
+ <static>
+ <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
+ <description>Maximum number of points that a depth point cloud may contain.
+ </description>
+ <details>
+ If a camera device supports outputting depth range data in the form of a depth point
+ cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
+ number of points an output buffer may contain.
+
+ Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
+ If output in the depth point cloud format is not supported, this entry will
+ not be defined.
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public"
+ enum="true" container="array" typedef="streamConfiguration" hwlevel="limited">
+ <array>
+ <size>n</size>
+ <size>4</size>
+ </array>
+ <enum>
+ <value>OUTPUT</value>
+ <value>INPUT</value>
+ </enum>
+ <description>The available depth dataspace stream
+ configurations that this camera device supports
+ (i.e. format, width, height, output/input stream).
+ </description>
+ <details>
+ These are output stream configurations for use with
+ dataSpace HAL_DATASPACE_DEPTH. The configurations are
+ listed as `(format, width, height, input?)` tuples.
+
+ Only devices that support depth output for at least
+ the HAL_PIXEL_FORMAT_Y16 dense depth map may include
+ this entry.
+
+ A device that also supports the HAL_PIXEL_FORMAT_BLOB
+ sparse depth point cloud must report a single entry for
+ the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
+ android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
+ the entries for HAL_PIXEL_FORMAT_Y16.
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public"
+ container="array" typedef="streamConfigurationDuration" hwlevel="limited">
+ <array>
+ <size>4</size>
+ <size>n</size>
+ </array>
+ <description>This lists the minimum frame duration for each
+ format/size combination for depth output formats.
+ </description>
+ <units>(format, width, height, ns) x n</units>
+ <details>
+ This should correspond to the frame duration when only that
+ stream is active, with all processing (typically in android.*.mode)
+ set to either OFF or FAST.
+
+ When multiple streams are used in a request, the minimum frame
+ duration will be max(individual stream min durations).
+
+ The minimum frame duration of a stream (of a particular format, size)
+ is the same regardless of whether the stream is input or output.
+
+ See android.sensor.frameDuration and
+ android.scaler.availableStallDurations for more details about
+ calculating the max frame rate.
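+
+ As an arithmetic sketch (the duration value here is hypothetical), the
+ maximum frame rate for a single active stream follows directly from its
+ minimum frame duration:
+
+ long minFrameDurationNs = 33333333L; // hypothetical depth configuration
+ double maxFps = 1e9 / minFrameDurationNs; // ~30 fps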
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public"
+ container="array" typedef="streamConfigurationDuration" hwlevel="limited">
+ <array>
+ <size>4</size>
+ <size>n</size>
+ </array>
+ <description>This lists the maximum stall duration for each
+ output format/size combination for depth streams.
+ </description>
+ <units>(format, width, height, ns) x n</units>
+ <details>
+ A stall duration is how much extra time would get added
+ to the normal minimum frame duration for a repeating request
+ that has streams with non-zero stall.
+
+ This functions similarly to
+ android.scaler.availableStallDurations for depth
+ streams.
+
+ All depth output stream formats may have a nonzero stall
+ duration.
+ </details>
+ <tag id="DEPTH" />
+ </entry>
+ <entry name="depthIsExclusive" type="byte" visibility="public"
+ enum="true" typedef="boolean" hwlevel="limited">
+ <enum>
+ <value>FALSE</value>
+ <value>TRUE</value>
+ </enum>
+ <description>Indicates whether a capture request may target both a
+ DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
+ YUV_420_888, JPEG, or RAW) simultaneously.
+ </description>
+ <details>
+ If TRUE, including both depth and color outputs in a single
+ capture request is not supported. An application must interleave color
+ and depth requests. If FALSE, a single request can target both types
+ of output.
+
+ Typically, this restriction exists on camera devices that
+ need to emit a specific pattern or wavelength of light to
+ measure depth values, which causes the color image to be
+ corrupted during depth measurement.
+ </details>
+ </entry>
+ </static>
+ </section>
+ </namespace>
+</metadata>