1 <?xml version="1.0" encoding="utf-8"?>
2 <!-- Copyright (C) 2012 The Android Open Source Project
4 Licensed under the Apache License, Version 2.0 (the "License");
5 you may not use this file except in compliance with the License.
6 You may obtain a copy of the License at
8 http://www.apache.org/licenses/LICENSE-2.0
10 Unless required by applicable law or agreed to in writing, software
11 distributed under the License is distributed on an "AS IS" BASIS,
12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 See the License for the specific language governing permissions and
14 limitations under the License.
16 <metadata xmlns="http://schemas.android.com/service/camera/metadata/"
17 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
18 xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
22 Needed for backwards compatibility with old Java API
25 New features for first camera 2 release (API1)
28 Needed for useful RAW image processing and DNG file support
31 Entry is only used by camera device legacy HAL 2.x
34 Entry is required for full hardware level devices, and optional for other hardware levels
37 Entry is required for the depth capability.
40 Entry is required for the YUV or PRIVATE reprocessing capability.
43 Entry is under-specified and is not required for now. This is for book-keeping purposes;
44 do not implement or use it, as it may be revised in the future.
49 <typedef name="pairFloatFloat">
50 <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
52 <typedef name="pairDoubleDouble">
53 <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
55 <typedef name="rectangle">
56 <language name="java">android.graphics.Rect</language>
59 <language name="java">android.util.Size</language>
61 <typedef name="string">
62 <language name="java">String</language>
64 <typedef name="boolean">
65 <language name="java">boolean</language>
67 <typedef name="imageFormat">
68 <language name="java">int</language>
70 <typedef name="streamConfigurationMap">
71 <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
73 <typedef name="streamConfiguration">
74 <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
76 <typedef name="streamConfigurationDuration">
77 <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
80 <language name="java">android.hardware.camera2.params.Face</language>
82 <typedef name="meteringRectangle">
83 <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
85 <typedef name="rangeFloat">
86 <language name="java">android.util.Range&lt;Float&gt;</language>
88 <typedef name="rangeInt">
89 <language name="java">android.util.Range&lt;Integer&gt;</language>
91 <typedef name="rangeLong">
92 <language name="java">android.util.Range&lt;Long&gt;</language>
94 <typedef name="colorSpaceTransform">
95 <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
97 <typedef name="rggbChannelVector">
98 <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
100 <typedef name="blackLevelPattern">
101 <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
103 <typedef name="enumList">
104 <language name="java">int</language>
106 <typedef name="sizeF">
107 <language name="java">android.util.SizeF</language>
109 <typedef name="point">
110 <language name="java">android.graphics.Point</language>
112 <typedef name="tonemapCurve">
113 <language name="java">android.hardware.camera2.params.TonemapCurve</language>
115 <typedef name="lensShadingMap">
116 <language name="java">android.hardware.camera2.params.LensShadingMap</language>
118 <typedef name="location">
119 <language name="java">android.location.Location</language>
121 <typedef name="highSpeedVideoConfiguration">
122 <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
124 <typedef name="reprocessFormatsMap">
125 <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
129 <namespace name="android">
130 <section name="colorCorrection">
132 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
134 <value>TRANSFORM_MATRIX
135 <notes>Use the android.colorCorrection.transform matrix
136 and android.colorCorrection.gains to do color conversion.
138 All advanced white balance adjustments (not specified
139 by our white balance pipeline) must be disabled.
141 If AWB is enabled with `android.control.awbMode != OFF`, then
142 TRANSFORM_MATRIX is ignored. The camera device will override
143 this value to either FAST or HIGH_QUALITY.
147 <notes>Color correction processing must not slow down
148 capture rate relative to sensor raw output.
150 Advanced white balance adjustments above and beyond
151 the specified white balance pipeline may be applied.
153 If AWB is enabled with `android.control.awbMode != OFF`, then
154 the camera device uses the last frame's AWB values
155 (or defaults if AWB has never been run).
159 <notes>Color correction processing operates at improved
160 quality but the capture rate might be reduced (relative to sensor
163 Advanced white balance adjustments above and beyond
164 the specified white balance pipeline may be applied.
166 If AWB is enabled with `android.control.awbMode != OFF`, then
167 the camera device uses the last frame's AWB values
168 (or defaults if AWB has never been run).
174 The mode control selects how the image data is converted from the
175 sensor's native color into linear sRGB color.
178 When auto-white balance (AWB) is enabled with android.control.awbMode, this
179 control is overridden by the AWB routine. When AWB is disabled, the
180 application controls how the color mapping is performed.
182 We define the expected processing pipeline below. For consistency
183 across devices, this is always the case with TRANSFORM_MATRIX.
185 When either FULL or HIGH_QUALITY is used, the camera device may
186 do additional processing but android.colorCorrection.gains and
187 android.colorCorrection.transform will still be provided by the
188 camera device (in the results) and be roughly correct.
190 Switching to TRANSFORM_MATRIX and using the data provided from
191 FAST or HIGH_QUALITY will yield a picture with the same white point
192 as what was produced by the camera device in the earlier frame.
194 The expected processing pipeline is as follows:
196 ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
198 The white balance is encoded by two values, a 4-channel white-balance
199 gain vector (applied in the Bayer domain), and a 3x3 color transform
200 matrix (applied after demosaic).
202 The 4-channel white-balance gains are defined as:
204 android.colorCorrection.gains = [ R G_even G_odd B ]
206 where `G_even` is the gain for green pixels on even rows of the
207 output, and `G_odd` is the gain for green pixels on the odd rows.
208 These may be identical for a given camera device implementation; if
209 the camera device does not support a separate gain for even/odd green
210 channels, it will use the `G_even` value, and write `G_odd` equal to
211 `G_even` in the output result metadata.
213 The matrices for color transforms are defined as a 9-entry vector:
215 android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
217 which define a transform from input sensor colors, `P_in = [ r g b ]`,
218 to output linear sRGB, `P_out = [ r' g' b' ]`,
220 with colors as follows:
226 Both the input and output value ranges must match. Overflow/underflow
227 values are clipped to fit within the range.
230 HAL must support both FAST and HIGH_QUALITY if color correction control is available
231 on the camera device, but the underlying implementation can be the same for both modes.
232 That is, if the highest quality implementation on the camera device does not slow down
233 capture rate, then FAST and HIGH_QUALITY should generate the same output.
236 <entry name="transform" type="rational" visibility="public"
237 type_notes="3x3 rational matrix in row-major order"
238 container="array" typedef="colorSpaceTransform" hwlevel="full">
243 <description>A color transform matrix to use to transform
244 from sensor RGB color space to output linear sRGB color space.
246 <units>Unitless scale factors</units>
247 <details>This matrix is either set by the camera device when the request
248 android.colorCorrection.mode is not TRANSFORM_MATRIX, or
249 directly by the application in the request when the
250 android.colorCorrection.mode is TRANSFORM_MATRIX.
252 In the latter case, the camera device may round the matrix to account
253 for precision issues; the final rounded matrix should be reported back
254 in this matrix result metadata. The transform should keep the magnitude
255 of the output color values within `[0, 1.0]` (assuming input color
256 values is within the normalized range `[0, 1.0]`), or clipping may occur.
258 The valid range of each matrix element varies on different devices, but
259 values within [-1.5, 3.0] are guaranteed not to be clipped.
262 <entry name="gains" type="float" visibility="public"
263 type_notes="A 1D array of floats for 4 color channel gains"
264 container="array" typedef="rggbChannelVector" hwlevel="full">
268 <description>Gains applying to Bayer raw color channels for
269 white-balance.</description>
270 <units>Unitless gain factors</units>
272 These per-channel gains are either set by the camera device
273 when the request android.colorCorrection.mode is not
274 TRANSFORM_MATRIX, or directly by the application in the
275 request when the android.colorCorrection.mode is
278 The gains in the result metadata are the gains actually
279 applied by the camera device to the current frame.
281 The valid range of gains varies on different devices, but gains
282 between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
283 device allows gains below 1.0, this is usually not recommended because
284 this can create color artifacts.
287 The 4-channel white-balance gains are defined in
288 the order of `[R G_even G_odd B]`, where `G_even` is the gain
289 for green pixels on even rows of the output, and `G_odd`
290 is the gain for green pixels on the odd rows.
292 If a HAL does not support a separate gain for even/odd green
293 channels, it must use the `G_even` value, and write
294 `G_odd` equal to `G_even` in the output result metadata.
297 <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
301 No aberration correction is applied.
306 Aberration correction will not slow down capture rate
307 relative to sensor raw output.
312 Aberration correction operates at improved quality but the capture rate might be
313 reduced (relative to sensor raw output rate)
318 Mode of operation for the chromatic aberration correction algorithm.
320 <range>android.colorCorrection.availableAberrationModes</range>
322 Chromatic (color) aberration is caused by the fact that different wavelengths of light
323 cannot focus on the same point after exiting from the lens. This metadata defines
324 the high level control of chromatic aberration correction algorithm, which aims to
325 minimize the chromatic artifacts that may occur along the object boundaries in an
328 FAST/HIGH_QUALITY both mean that camera device determined aberration
329 correction will be applied. HIGH_QUALITY mode indicates that the camera device will
330 use the highest-quality aberration correction algorithms, even if it slows down
331 capture rate. FAST means the camera device will not slow down capture rate when
332 applying aberration correction.
334 LEGACY devices will always be in FAST mode.
339 <clone entry="android.colorCorrection.mode" kind="controls">
341 <clone entry="android.colorCorrection.transform" kind="controls">
343 <clone entry="android.colorCorrection.gains" kind="controls">
345 <clone entry="android.colorCorrection.aberrationMode" kind="controls">
349 <entry name="availableAberrationModes" type="byte" visibility="public"
350 type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
355 List of aberration correction modes for android.colorCorrection.aberrationMode that are
356 supported by this camera device.
358 <range>Any value listed in android.colorCorrection.aberrationMode</range>
360 This key lists the valid modes for android.colorCorrection.aberrationMode. If no
361 aberration correction modes are available for a device, this list will solely include
362 OFF mode. All camera devices will support either OFF or FAST mode.
364 Camera devices that support the MANUAL_POST_PROCESSING capability will always list
365 OFF mode. This includes all FULL level devices.
367 LEGACY devices will always only support FAST mode.
370 HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
371 on the camera device, but the underlying implementation can be the same for both modes.
372 That is, if the highest quality implementation on the camera device does not slow down
373 capture rate, then FAST and HIGH_QUALITY will generate the same output.
379 <section name="control">
381 <entry name="aeAntibandingMode" type="byte" visibility="public"
382 enum="true" hwlevel="legacy">
386 The camera device will not adjust exposure duration to
387 avoid banding problems.
392 The camera device will adjust exposure duration to
393 avoid banding problems with 50Hz illumination sources.
398 The camera device will adjust exposure duration to
399 avoid banding problems with 60Hz illumination
405 The camera device will automatically adapt its
406 antibanding routine to the current illumination
407 condition. This is the default mode if AUTO is
408 available on the given camera device.
413 The desired setting for the camera device's auto-exposure
414 algorithm's antibanding compensation.
417 android.control.aeAvailableAntibandingModes
420 Some kinds of lighting fixtures, such as some fluorescent
421 lights, flicker at the rate of the power supply frequency
422 (60Hz or 50Hz, depending on country). While this is
423 typically not noticeable to a person, it can be visible to
424 a camera device. If a camera sets its exposure time to the
425 wrong value, the flicker may become visible in the
426 viewfinder as flicker or in a final captured image, as a
427 set of variable-brightness bands across the image.
429 Therefore, the auto-exposure routines of camera devices
430 include antibanding routines that ensure that the chosen
431 exposure value will not cause such banding. The choice of
432 exposure time depends on the rate of flicker, which the
433 camera device can detect automatically, or the expected
434 rate can be selected by the application using this
437 A given camera device may not support all of the possible
438 options for the antibanding mode. The
439 android.control.aeAvailableAntibandingModes key contains
440 the available modes for a given camera device.
442 AUTO mode is the default if it is available on the given
443 camera device. When AUTO mode is not available, the
444 default will be either 50HZ or 60HZ, and both 50HZ
445 and 60HZ will be available.
447 If manual exposure control is enabled (by setting
448 android.control.aeMode or android.control.mode to OFF),
449 then this setting has no effect, and the application must
450 ensure it selects exposure times that do not cause banding
451 issues. The android.statistics.sceneFlicker key can assist
452 the application in this.
455 For all capture request templates, this field must be set
456 to AUTO if AUTO mode is available. If AUTO is not available,
457 the default must be either 50HZ or 60HZ, and both 50HZ and
458 60HZ must be available.
460 If manual exposure control is enabled (by setting
461 android.control.aeMode or android.control.mode to OFF),
462 then the exposure values provided by the application must not be
463 adjusted for antibanding.
467 <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
468 <description>Adjustment to auto-exposure (AE) target image
469 brightness.</description>
470 <units>Compensation steps</units>
471 <range>android.control.aeCompensationRange</range>
473 The adjustment is measured as a count of steps, with the
474 step size defined by android.control.aeCompensationStep and the
475 allowed range by android.control.aeCompensationRange.
477 For example, if the exposure value (EV) step is 0.333, '6'
478 will mean an exposure compensation of +2 EV; -3 will mean an
479 exposure compensation of -1 EV. One EV represents a doubling
480 of image brightness. Note that this control will only be
481 effective if android.control.aeMode `!=` OFF. This control
482 will take effect even when android.control.aeLock `== true`.
484 In the event of the exposure compensation value being changed, the camera device
485 may take several frames to reach the newly requested exposure target.
486 During that time, android.control.aeState field will be in the SEARCHING
487 state. Once the new exposure target is reached, android.control.aeState will
488 change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
489 FLASH_REQUIRED (if the scene is too dark for still capture).
493 <entry name="aeLock" type="byte" visibility="public" enum="true"
494 typedef="boolean" hwlevel="legacy">
497 <notes>Auto-exposure lock is disabled; the AE algorithm
498 is free to update its parameters.</notes></value>
500 <notes>Auto-exposure lock is enabled; the AE algorithm
501 must not update the exposure and sensitivity parameters
502 while the lock is active.
504 android.control.aeExposureCompensation setting changes
505 will still take effect while auto-exposure is locked.
507 Some rare LEGACY devices may not support
508 this, in which case the value will always be overridden to OFF.
511 <description>Whether auto-exposure (AE) is currently locked to its latest
512 calculated values.</description>
514 When set to `true` (ON), the AE algorithm is locked to its latest parameters,
515 and will not change exposure settings until the lock is set to `false` (OFF).
517 Note that even when AE is locked, the flash may be fired if
518 the android.control.aeMode is ON_AUTO_FLASH /
519 ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
521 When android.control.aeExposureCompensation is changed, even if the AE lock
522 is ON, the camera device will still adjust its exposure value.
524 If AE precapture is triggered (see android.control.aePrecaptureTrigger)
525 when AE is already locked, the camera device will not change the exposure time
526 (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
527 parameters. The flash may be fired if the android.control.aeMode
528 is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
529 android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
530 Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
532 When an AE precapture sequence is triggered, AE unlock will not be able to unlock
533 the AE if AE is locked by the camera device internally during precapture metering
534 sequence. In other words, submitting requests with AE unlock has no effect for an
535 ongoing precapture metering sequence. Otherwise, the precapture metering sequence
536 will never succeed in a sequence of preview requests where AE lock is always set
539 Since the camera device has a pipeline of in-flight requests, the settings that
540 get locked do not necessarily correspond to the settings that were present in the
541 latest capture result received from the camera device, since additional captures
542 and AE updates may have occurred even before the result was sent out. If an
543 application is switching between automatic and manual control and wishes to eliminate
544 any flicker during the switch, the following procedure is recommended:
546 1. Starting in auto-AE mode:
548 3. Wait for the first result to be output that has the AE locked
549 4. Copy exposure settings from that result into a request, set the request to manual AE
550 5. Submit the capture request, proceed to run manual AE as desired.
552 See android.control.aeState for AE lock related state transition details.
556 <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
560 The camera device's autoexposure routine is disabled.
562 The application-selected android.sensor.exposureTime,
563 android.sensor.sensitivity and
564 android.sensor.frameDuration are used by the camera
565 device, along with android.flash.* fields, if there's
566 a flash unit for this camera device.
568 Note that auto-white balance (AWB) and auto-focus (AF)
569 behavior is device dependent when AE is in OFF mode.
570 To have consistent behavior across different devices,
571 it is recommended to either set AWB and AF to OFF mode
572 or lock AWB and AF before setting AE to OFF.
573 See android.control.awbMode, android.control.afMode,
574 android.control.awbLock, and android.control.afTrigger
577 LEGACY devices do not support the OFF mode and will
578 override attempts to use this value to ON.
583 The camera device's autoexposure routine is active,
584 with no flash control.
586 The application's values for
587 android.sensor.exposureTime,
588 android.sensor.sensitivity, and
589 android.sensor.frameDuration are ignored. The
590 application has control over the various
591 android.flash.* fields.
596 Like ON, except that the camera device also controls
597 the camera's flash unit, firing it in low-light
600 The flash may be fired during a precapture sequence
601 (triggered by android.control.aePrecaptureTrigger) and
602 may be fired for captures for which the
603 android.control.captureIntent field is set to
607 <value>ON_ALWAYS_FLASH
609 Like ON, except that the camera device also controls
610 the camera's flash unit, always firing it for still
613 The flash may be fired during a precapture sequence
614 (triggered by android.control.aePrecaptureTrigger) and
615 will always be fired for captures for which the
616 android.control.captureIntent field is set to
620 <value>ON_AUTO_FLASH_REDEYE
622 Like ON_AUTO_FLASH, but with automatic red eye
625 If deemed necessary by the camera device, a red eye
626 reduction flash will fire during the precapture
631 <description>The desired mode for the camera device's
632 auto-exposure routine.</description>
633 <range>android.control.aeAvailableModes</range>
635 This control is only effective if android.control.mode is
638 When set to any of the ON modes, the camera device's
639 auto-exposure routine is enabled, overriding the
640 application's selected exposure time, sensor sensitivity,
641 and frame duration (android.sensor.exposureTime,
642 android.sensor.sensitivity, and
643 android.sensor.frameDuration). If one of the FLASH modes
644 is selected, the camera device's flash unit controls are
647 The FLASH modes are only available if the camera device
648 has a flash unit (android.flash.info.available is `true`).
650 If flash TORCH mode is desired, this field must be set to
651 ON or OFF, and android.flash.mode set to TORCH.
653 When set to any of the ON modes, the values chosen by the
654 camera device auto-exposure routine for the overridden
655 fields for a given capture will be available in its
660 <entry name="aeRegions" type="int32" visibility="public"
661 optional="true" container="array" typedef="meteringRectangle">
664 <size>area_count</size>
666 <description>List of metering areas to use for auto-exposure adjustment.</description>
667 <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
668 <range>Coordinates must be between `[(0,0), (width, height))` of
669 android.sensor.info.activeArraySize</range>
671 Not available if android.control.maxRegionsAe is 0.
672 Otherwise will always be present.
674 The maximum number of regions supported by the device is determined by the value
675 of android.control.maxRegionsAe.
677 The coordinate system is based on the active pixel array,
678 with (0,0) being the top-left pixel in the active pixel array, and
679 (android.sensor.info.activeArraySize.width - 1,
680 android.sensor.info.activeArraySize.height - 1) being the
681 bottom-right pixel in the active pixel array.
683 The weight must be within `[0, 1000]`, and represents a weight
684 for every pixel in the area. This means that a large metering area
685 with the same weight as a smaller area will have more effect in
686 the metering result. Metering areas can partially overlap and the
687 camera device will add the weights in the overlap region.
689 The weights are relative to weights of other exposure metering regions, so if only one
690 region is used, all non-zero weights will have the same effect. A region with 0
693 If all regions have 0 weight, then no specific metering area needs to be used by the
696 If the metering region is outside the used android.scaler.cropRegion returned in
697 capture result metadata, the camera device will ignore the sections outside the crop
698 region and output only the intersection rectangle as the metering region in the result
699 metadata. If the region is entirely outside the crop region, it will be ignored and
700 not reported in the result metadata.
703 The data representation is `int[5 * area_count]`.
704 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`.
705 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
709 The HAL level representation of MeteringRectangle[] is a
711 Every five elements represent a metering region of
712 (xmin, ymin, xmax, ymax, weight).
713 The rectangle is defined to be inclusive on xmin and ymin, but
714 exclusive on xmax and ymax.
718 <entry name="aeTargetFpsRange" type="int32" visibility="public"
719 container="array" typedef="rangeInt" hwlevel="legacy">
723 <description>Range over which the auto-exposure routine can
724 adjust the capture frame rate to maintain good
725 exposure.</description>
726 <units>Frames per second (FPS)</units>
727 <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
728 <details>Only constrains auto-exposure (AE) algorithm, not
729 manual control of android.sensor.exposureTime and
730 android.sensor.frameDuration.</details>
733 <entry name="aePrecaptureTrigger" type="byte" visibility="public"
734 enum="true" hwlevel="limited">
737 <notes>The trigger is idle.</notes>
740 <notes>The precapture metering sequence will be started
741 by the camera device.
743 The exact effect of the precapture trigger depends on
744 the current AE mode and state.</notes>
747 <notes>The camera device will cancel any currently active or completed
748 precapture metering sequence, the auto-exposure routine will return to its
749 initial state.</notes>
752 <description>Whether the camera device will trigger a precapture
753 metering sequence when it processes this request.</description>
754 <details>This entry is normally set to IDLE, or is not
755 included at all in the request settings. When included and
756 set to START, the camera device will trigger the auto-exposure (AE)
757 precapture metering sequence.
759 When set to CANCEL, the camera device will cancel any active
760 precapture metering trigger, and return to its initial AE state.
761 If a precapture metering sequence is already completed, and the camera
762 device has implicitly locked the AE for subsequent still capture, the
763 CANCEL trigger will unlock the AE and return to its initial AE state.
765 The precapture sequence should be triggered before starting a
766 high-quality still capture for final metering decisions to
767 be made, and for firing pre-capture flash pulses to estimate
768 scene brightness and required final capture flash power, when
769 the flash is enabled.
771 Normally, this entry should be set to START for only a
772 single request, and the application should wait until the
773 sequence completes before starting a new one.
775 When a precapture metering sequence is finished, the camera device
776 may lock the auto-exposure routine internally to be able to accurately expose the
777 subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
778 For this case, the AE may not resume normal scan if no subsequent still capture is
779 submitted. To ensure that the AE routine restarts normal scan, the application should
780 submit a request with `android.control.aeLock == true`, followed by a request
781 with `android.control.aeLock == false`, if the application decides not to submit a
782 still capture request after the precapture sequence completes. Alternatively, for
783 API level 23 or newer devices, the CANCEL can be used to unlock the camera device
784 internally locked AE if the application doesn't submit a still capture request after
785 the AE precapture trigger. Note that, the CANCEL was added in API level 23, and must not
786 be used in devices that have earlier API levels.
788 The exact effect of auto-exposure (AE) precapture trigger
789 depends on the current AE mode and state; see
790 android.control.aeState for AE precapture state transition
793 On LEGACY-level devices, the precapture trigger is not supported;
794 capturing a high-resolution JPEG image will automatically trigger a
795 precapture sequence before the high-resolution capture, including
796 potentially firing a pre-capture flash.
798 Using the precapture trigger and the auto-focus trigger android.control.afTrigger
799 simultaneously is allowed. However, since these triggers often require cooperation between
800 the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
801 focus sweep), the camera device may delay acting on a later trigger until the previous
802 trigger has been fully handled. This may lead to longer intervals between the trigger and
803 changes to android.control.aeState indicating the start of the precapture sequence, for
806 If both the precapture and the auto-focus trigger are activated on the same request, then
807 the camera device will complete them in the optimal order for that device.
810 The HAL must support triggering the AE precapture trigger while an AF trigger is active
811 (and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to
812 treat these as two consecutive triggers, for example handling the AF trigger and then the
813 AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once,
814 to minimize the latency for converging both focus and exposure/flash usage.
818 <entry name="afMode" type="byte" visibility="public" enum="true"
822 <notes>The auto-focus routine does not control the lens;
823 android.lens.focusDistance is controlled by the
824 application.</notes></value>
826 <notes>Basic automatic focus mode.
828 In this mode, the lens does not move unless
829 the autofocus trigger action is called. When that trigger
830 is activated, AF will transition to ACTIVE_SCAN, then to
831 the outcome of the scan (FOCUSED or NOT_FOCUSED).
833 Always supported if lens is not fixed focus.
835 Use android.lens.info.minimumFocusDistance to determine if lens
838 Triggering AF_CANCEL resets the lens position to default,
839 and sets the AF state to INACTIVE.</notes></value>
841 <notes>Close-up focusing mode.
843 In this mode, the lens does not move unless the
844 autofocus trigger action is called. When that trigger is
845 activated, AF will transition to ACTIVE_SCAN, then to
846 the outcome of the scan (FOCUSED or NOT_FOCUSED). This
847 mode is optimized for focusing on objects very close to
850 When that trigger is activated, AF will transition to
851 ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or
852 NOT_FOCUSED). Triggering cancel AF resets the lens
853 position to default, and sets the AF state to
854 INACTIVE.</notes></value>
855 <value>CONTINUOUS_VIDEO
856 <notes>In this mode, the AF algorithm modifies the lens
857 position continually to attempt to provide a
858 constantly-in-focus image stream.
860 The focusing behavior should be suitable for good quality
861 video recording; typically this means slower focus
862 movement and no overshoots. When the AF trigger is not
863 involved, the AF algorithm should start in INACTIVE state,
864 and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
865 states as appropriate. When the AF trigger is activated,
866 the algorithm should immediately transition into
867 AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
868 lens position until a cancel AF trigger is received.
870 Once cancel is received, the algorithm should transition
871 back to INACTIVE and resume passive scan. Note that this
872 behavior is not identical to CONTINUOUS_PICTURE, since an
873 ongoing PASSIVE_SCAN must immediately be
874 canceled.</notes></value>
875 <value>CONTINUOUS_PICTURE
876 <notes>In this mode, the AF algorithm modifies the lens
877 position continually to attempt to provide a
878 constantly-in-focus image stream.
880 The focusing behavior should be suitable for still image
881 capture; typically this means focusing as fast as
882 possible. When the AF trigger is not involved, the AF
883 algorithm should start in INACTIVE state, and then
884 transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
885 appropriate as it attempts to maintain focus. When the AF
886 trigger is activated, the algorithm should finish its
887 PASSIVE_SCAN if active, and then transition into
888 AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
889 lens position until a cancel AF trigger is received.
891 When the AF cancel trigger is activated, the algorithm
892 should transition back to INACTIVE and then act as if it
893 has just been started.</notes></value>
895 <notes>Extended depth of field (digital focus) mode.
897 The camera device will produce images with an extended
898 depth of field automatically; no special focusing
899 operations need to be done before taking a picture.
901 AF triggers are ignored, and the AF state will always be
902 INACTIVE.</notes></value>
904 <description>Whether auto-focus (AF) is currently enabled, and what
905 mode it is set to.</description>
906 <range>android.control.afAvailableModes</range>
907 <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus
908 (i.e. `android.lens.info.minimumFocusDistance > 0`). Also note that
909 when android.control.aeMode is OFF, the behavior of AF is device
910 dependent. It is recommended to lock AF by using android.control.afTrigger before
911 setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
913 If the lens is controlled by the camera device auto-focus algorithm,
914 the camera device will report the current AF status in android.control.afState
915 in result metadata.</details>
917 When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
918 request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
919 up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
920 android.control.afState for detailed state transitions), which indicates that the lens is
921 locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
922 after the lens is locked, the HAL must compensate this movement appropriately such that
923 the same focal plane remains in focus.
925 When afMode is one of the continuous auto focus modes, the HAL is free to start an AF
926 scan whenever it's not locked. When the lens is locked after an AF trigger
927 (see android.control.afState for detailed state transitions), the HAL should maintain the
928 same lock behavior as above.
930 When afMode is OFF, the application controls focus manually. The accuracy of the
931 focus distance control depends on the android.lens.info.focusDistanceCalibration.
932 However, the lens must not move regardless of the camera movement for any focus distance
935 To put this in concrete terms, if the camera has lens elements which may move based on
936 camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
937 remain in a fixed position invariant to the camera's orientation or motion, for example,
938 by using accelerometer measurements in the lens control logic. This is a typical issue
939 that will arise on camera modules with open-loop VCMs.
943 <entry name="afRegions" type="int32" visibility="public"
944 optional="true" container="array" typedef="meteringRectangle">
947 <size>area_count</size>
949 <description>List of metering areas to use for auto-focus.</description>
950 <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
951 <range>Coordinates must be between `[(0,0), (width, height))` of
952 android.sensor.info.activeArraySize</range>
954 Not available if android.control.maxRegionsAf is 0.
955 Otherwise will always be present.
957 The maximum number of focus areas supported by the device is determined by the value
958 of android.control.maxRegionsAf.
960 The coordinate system is based on the active pixel array,
961 with (0,0) being the top-left pixel in the active pixel array, and
962 (android.sensor.info.activeArraySize.width - 1,
963 android.sensor.info.activeArraySize.height - 1) being the
964 bottom-right pixel in the active pixel array.
966 The weight must be within `[0, 1000]`, and represents a weight
967 for every pixel in the area. This means that a large metering area
968 with the same weight as a smaller area will have more effect in
969 the metering result. Metering areas can partially overlap and the
970 camera device will add the weights in the overlap region.
972 The weights are relative to weights of other metering regions, so if only one region
973 is used, all non-zero weights will have the same effect. A region with 0 weight is
976 If all regions have 0 weight, then no specific metering area needs to be used by the
979 If the metering region is outside the used android.scaler.cropRegion returned in
980 capture result metadata, the camera device will ignore the sections outside the crop
981 region and output only the intersection rectangle as the metering region in the result
982 metadata. If the region is entirely outside the crop region, it will be ignored and
983 not reported in the result metadata.
986 The data representation is `int[5 * area_count]`.
987 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`.
988 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
992 The HAL level representation of MeteringRectangle[] is a
994 Every five elements represent a metering region of
995 (xmin, ymin, xmax, ymax, weight).
996 The rectangle is defined to be inclusive on xmin and ymin, but
997 exclusive on xmax and ymax.
1001 <entry name="afTrigger" type="byte" visibility="public" enum="true"
1005 <notes>The trigger is idle.</notes>
1008 <notes>Autofocus will trigger now.</notes>
1011 <notes>Autofocus will return to its initial
1012 state, and cancel any currently active trigger.</notes>
1016 Whether the camera device will trigger autofocus for this request.
1018 <details>This entry is normally set to IDLE, or is not
1019 included at all in the request settings.
1021 When included and set to START, the camera device will trigger the
1022 autofocus algorithm. If autofocus is disabled, this trigger has no effect.
1024 When set to CANCEL, the camera device will cancel any active trigger,
1025 and return to its initial AF state.
1027 Generally, applications should set this entry to START or CANCEL for only a
1028 single capture, and then return it to IDLE (or not set at all). Specifying
1029 START for multiple captures in a row means restarting the AF operation over
1032 See android.control.afState for what the trigger means for each AF mode.
1034 Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
1035 simultaneously is allowed. However, since these triggers often require cooperation between
1036 the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
1037 focus sweep), the camera device may delay acting on a later trigger until the previous
1038 trigger has been fully handled. This may lead to longer intervals between the trigger and
1039 changes to android.control.afState, for example.
1042 The HAL must support triggering the AF trigger while an AE precapture trigger is active
1043 (and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to
1044 treat these as two consecutive triggers, for example handling the AF trigger and then the
1045 AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once,
1046 to minimize the latency for converging both focus and exposure/flash usage.
1050 <entry name="awbLock" type="byte" visibility="public" enum="true"
1051 typedef="boolean" hwlevel="legacy">
1054 <notes>Auto-white balance lock is disabled; the AWB
1055 algorithm is free to update its parameters if in AUTO
1056 mode.</notes></value>
1058 <notes>Auto-white balance lock is enabled; the AWB
1059 algorithm will not update its parameters while the lock
1060 is active.</notes></value>
1062 <description>Whether auto-white balance (AWB) is currently locked to its
1063 latest calculated values.</description>
1065 When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
1066 and will not change color balance settings until the lock is set to `false` (OFF).
1068 Since the camera device has a pipeline of in-flight requests, the settings that
1069 get locked do not necessarily correspond to the settings that were present in the
1070 latest capture result received from the camera device, since additional captures
1071 and AWB updates may have occurred even before the result was sent out. If an
1072 application is switching between automatic and manual control and wishes to eliminate
1073 any flicker during the switch, the following procedure is recommended:
1075 1. Starting in auto-AWB mode:
1077 3. Wait for the first result to be output that has the AWB locked
1078 4. Copy AWB settings from that result into a request, set the request to manual AWB
1079 5. Submit the capture request, proceed to run manual AWB as desired.
1081 Note that AWB lock is only meaningful when
1082 android.control.awbMode is in the AUTO mode; in other modes,
1083 AWB is already fixed to a specific setting.
1085 Some LEGACY devices may not support ON; the value is then overridden to OFF.
1089 <entry name="awbMode" type="byte" visibility="public" enum="true"
1094 The camera device's auto-white balance routine is disabled.
1096 The application-selected color transform matrix
1097 (android.colorCorrection.transform) and gains
1098 (android.colorCorrection.gains) are used by the camera
1099 device for manual white balance control.
1104 The camera device's auto-white balance routine is active.
1106 The application's values for android.colorCorrection.transform
1107 and android.colorCorrection.gains are ignored.
1108 For devices that support the MANUAL_POST_PROCESSING capability, the
1109 values used by the camera device for the transform and gains
1110 will be available in the capture result for this request.
1115 The camera device's auto-white balance routine is disabled;
1116 the camera device uses incandescent light as the assumed scene
1117 illumination for white balance.
1119 While the exact white balance transforms are up to the
1120 camera device, they will approximately match the CIE
1121 standard illuminant A.
1123 The application's values for android.colorCorrection.transform
1124 and android.colorCorrection.gains are ignored.
1125 For devices that support the MANUAL_POST_PROCESSING capability, the
1126 values used by the camera device for the transform and gains
1127 will be available in the capture result for this request.
1132 The camera device's auto-white balance routine is disabled;
1133 the camera device uses fluorescent light as the assumed scene
1134 illumination for white balance.
1136 While the exact white balance transforms are up to the
1137 camera device, they will approximately match the CIE
1138 standard illuminant F2.
1140 The application's values for android.colorCorrection.transform
1141 and android.colorCorrection.gains are ignored.
1142 For devices that support the MANUAL_POST_PROCESSING capability, the
1143 values used by the camera device for the transform and gains
1144 will be available in the capture result for this request.
1147 <value>WARM_FLUORESCENT
1149 The camera device's auto-white balance routine is disabled;
1150 the camera device uses warm fluorescent light as the assumed scene
1151 illumination for white balance.
1153 While the exact white balance transforms are up to the
1154 camera device, they will approximately match the CIE
1155 standard illuminant F4.
1157 The application's values for android.colorCorrection.transform
1158 and android.colorCorrection.gains are ignored.
1159 For devices that support the MANUAL_POST_PROCESSING capability, the
1160 values used by the camera device for the transform and gains
1161 will be available in the capture result for this request.
1166 The camera device's auto-white balance routine is disabled;
1167 the camera device uses daylight light as the assumed scene
1168 illumination for white balance.
1170 While the exact white balance transforms are up to the
1171 camera device, they will approximately match the CIE
1172 standard illuminant D65.
1174 The application's values for android.colorCorrection.transform
1175 and android.colorCorrection.gains are ignored.
1176 For devices that support the MANUAL_POST_PROCESSING capability, the
1177 values used by the camera device for the transform and gains
1178 will be available in the capture result for this request.
1181 <value>CLOUDY_DAYLIGHT
1183 The camera device's auto-white balance routine is disabled;
1184 the camera device uses cloudy daylight light as the assumed scene
1185 illumination for white balance.
1187 The application's values for android.colorCorrection.transform
1188 and android.colorCorrection.gains are ignored.
1189 For devices that support the MANUAL_POST_PROCESSING capability, the
1190 values used by the camera device for the transform and gains
1191 will be available in the capture result for this request.
1196 The camera device's auto-white balance routine is disabled;
1197 the camera device uses twilight light as the assumed scene
1198 illumination for white balance.
1200 The application's values for android.colorCorrection.transform
1201 and android.colorCorrection.gains are ignored.
1202 For devices that support the MANUAL_POST_PROCESSING capability, the
1203 values used by the camera device for the transform and gains
1204 will be available in the capture result for this request.
1209 The camera device's auto-white balance routine is disabled;
1210 the camera device uses shade light as the assumed scene
1211 illumination for white balance.
1213 The application's values for android.colorCorrection.transform
1214 and android.colorCorrection.gains are ignored.
1215 For devices that support the MANUAL_POST_PROCESSING capability, the
1216 values used by the camera device for the transform and gains
1217 will be available in the capture result for this request.
1221 <description>Whether auto-white balance (AWB) is currently setting the color
1222 transform fields, and what its illumination target
1224 <range>android.control.awbAvailableModes</range>
1226 This control is only effective if android.control.mode is AUTO.
1228 When set to the ON mode, the camera device's auto-white balance
1229 routine is enabled, overriding the application's selected
1230 android.colorCorrection.transform, android.colorCorrection.gains and
1231 android.colorCorrection.mode. Note that when android.control.aeMode
1232 is OFF, the behavior of AWB is device dependent. It is recommended to
1233 also set AWB mode to OFF or lock AWB by using android.control.awbLock before
1234 setting AE mode to OFF.
1236 When set to the OFF mode, the camera device's auto-white balance
1237 routine is disabled. The application manually controls the white
1238 balance by android.colorCorrection.transform, android.colorCorrection.gains
1239 and android.colorCorrection.mode.
1241 When set to any other modes, the camera device's auto-white
1242 balance routine is disabled. The camera device uses each
1243 particular illumination target for white balance
1244 adjustment. The application's values for
1245 android.colorCorrection.transform,
1246 android.colorCorrection.gains and
1247 android.colorCorrection.mode are ignored.
1251 <entry name="awbRegions" type="int32" visibility="public"
1252 optional="true" container="array" typedef="meteringRectangle">
1255 <size>area_count</size>
1257 <description>List of metering areas to use for auto-white-balance illuminant
1258 estimation.</description>
1259 <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
1260 <range>Coordinates must be between `[(0,0), (width, height))` of
1261 android.sensor.info.activeArraySize</range>
1263 Not available if android.control.maxRegionsAwb is 0.
1264 Otherwise will always be present.
1266 The maximum number of regions supported by the device is determined by the value
1267 of android.control.maxRegionsAwb.
1269 The coordinate system is based on the active pixel array,
1270 with (0,0) being the top-left pixel in the active pixel array, and
1271 (android.sensor.info.activeArraySize.width - 1,
1272 android.sensor.info.activeArraySize.height - 1) being the
1273 bottom-right pixel in the active pixel array.
1275 The weight must range from 0 to 1000, and represents a weight
1276 for every pixel in the area. This means that a large metering area
1277 with the same weight as a smaller area will have more effect in
1278 the metering result. Metering areas can partially overlap and the
1279 camera device will add the weights in the overlap region.
1281 The weights are relative to weights of other white balance metering regions, so if
1282 only one region is used, all non-zero weights will have the same effect. A region with
1283 0 weight is ignored.
1285 If all regions have 0 weight, then no specific metering area needs to be used by the
1288 If the metering region is outside the used android.scaler.cropRegion returned in
1289 capture result metadata, the camera device will ignore the sections outside the crop
1290 region and output only the intersection rectangle as the metering region in the result
1291 metadata. If the region is entirely outside the crop region, it will be ignored and
1292 not reported in the result metadata.
1295 The data representation is `int[5 * area_count]`.
1296 Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`.
1297 The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
1301 The HAL level representation of MeteringRectangle[] is a
1302 int[5 * area_count].
1303 Every five elements represent a metering region of
1304 (xmin, ymin, xmax, ymax, weight).
1305 The rectangle is defined to be inclusive on xmin and ymin, but
1306 exclusive on xmax and ymax.
1310 <entry name="captureIntent" type="byte" visibility="public" enum="true"
1314 <notes>The goal of this request doesn't fall into the other
1315 categories. The camera device will default to preview-like
1316 behavior.</notes></value>
1318 <notes>This request is for a preview-like use case.
1320 The precapture trigger may be used to start off a metering
1323 <value>STILL_CAPTURE
1324 <notes>This request is for a still capture-type
1327 If the flash unit is under automatic control, it may fire as needed.
1330 <notes>This request is for a video recording
1331 use case.</notes></value>
1332 <value>VIDEO_SNAPSHOT
1333 <notes>This request is for a video snapshot (still
1334 image while recording video) use case.
1336 The camera device should take the highest-quality image
1337 possible (given the other settings) without disrupting the
1338 frame rate of video recording. </notes></value>
1339 <value>ZERO_SHUTTER_LAG
1340 <notes>This request is for a ZSL use case; the
1341 application will stream full-resolution images and
1342 reprocess one or several later for a final
1346 <notes>This request is for manual capture use case where
1347 the applications want to directly control the capture parameters.
1349 For example, the application may wish to manually control
1350 android.sensor.exposureTime, android.sensor.sensitivity, etc.
1353 <description>Information to the camera device 3A (auto-exposure,
1354 auto-focus, auto-white balance) routines about the purpose
1355 of this capture, to help the camera device to decide optimal 3A
1356 strategy.</description>
1357 <details>This control (except for MANUAL) is only effective if
1358 `android.control.mode != OFF` and any 3A routine is active.
1360 ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
1361 contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
1362 android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
1367 <entry name="effectMode" type="byte" visibility="public" enum="true"
1372 No color effect will be applied.
1375 <value optional="true">MONO
1377 A "monocolor" effect where the image is mapped into
1380 This will typically be grayscale.
1383 <value optional="true">NEGATIVE
1385 A "photo-negative" effect where the image's colors
1389 <value optional="true">SOLARIZE
1391 A "solarisation" effect (Sabattier effect) where the
1392 image is wholly or partially reversed in
1396 <value optional="true">SEPIA
1398 A "sepia" effect where the image is mapped into warm
1399 gray, red, and brown tones.
1402 <value optional="true">POSTERIZE
1404 A "posterization" effect where the image uses
1405 discrete regions of tone rather than a continuous
1409 <value optional="true">WHITEBOARD
1411 A "whiteboard" effect where the image is typically displayed
1412 as regions of white, with black or grey details.
1415 <value optional="true">BLACKBOARD
1417 A "blackboard" effect where the image is typically displayed
1418 as regions of black, with white or grey details.
1421 <value optional="true">AQUA
1423 An "aqua" effect where a blue hue is added to the image.
1427 <description>A special color effect to apply.</description>
1428 <range>android.control.availableEffects</range>
1430 When this mode is set, a color effect will be applied
1431 to images produced by the camera device. The interpretation
1432 and implementation of these color effects is left to the
1433 implementor of the camera device, and should not be
1434 depended on to be consistent (or present) across all
1439 <entry name="mode" type="byte" visibility="public" enum="true"
1443 <notes>Full application control of pipeline.
1445 All control by the device's metering and focusing (3A)
1446 routines is disabled, and no other settings in
1447 android.control.* have any effect, except that
1448 android.control.captureIntent may be used by the camera
1449 device to select post-processing values for processing
1450 blocks that do not allow for manual control, or are not
1451 exposed by the camera API.
1453 However, the camera device's 3A routines may continue to
1454 collect statistics and update their internal state so that
1455 when control is switched to AUTO mode, good control values
1456 can be immediately applied.
1459 <notes>Use settings for each individual 3A routine.
1461 Manual control of capture parameters is disabled. All
1462 controls in android.control.* besides sceneMode take
1463 effect.</notes></value>
1464 <value optional="true">USE_SCENE_MODE
1465 <notes>Use a specific scene mode.
1467 Enabling this disables control.aeMode, control.awbMode and
1468 control.afMode controls; the camera device will ignore
1469 those settings while USE_SCENE_MODE is active (except for
1470 FACE_PRIORITY scene mode). Other control entries are still active.
1471 This setting can only be used if scene mode is supported (i.e.
1472 android.control.availableSceneModes
1473 contain some modes other than DISABLED).</notes></value>
1474 <value optional="true">OFF_KEEP_STATE
1475 <notes>Same as OFF mode, except that this capture will not be
1476 used by camera device background auto-exposure, auto-white balance and
1477 auto-focus algorithms (3A) to update their statistics.
1479 Specifically, the 3A routines are locked to the last
1480 values set from a request with AUTO, OFF, or
1481 USE_SCENE_MODE, and any statistics or state updates
1482 collected from manual captures with OFF_KEEP_STATE will be
1483 discarded by the camera device.
1486 <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
1487 routines.</description>
1488 <range>android.control.availableModes</range>
1490 This is a top-level 3A control switch. When set to OFF, all 3A control
1491 by the camera device is disabled. The application must set the fields for
1492 capture parameters itself.
1494 When set to AUTO, the individual algorithm controls in
1495 android.control.* are in effect, such as android.control.afMode.
1497 When set to USE_SCENE_MODE, the individual controls in
1498 android.control.* are mostly disabled, and the camera device
1499 implements one of the scene mode settings (such as ACTION,
1500 SUNSET, or PARTY) as it wishes. The camera device scene mode
1501 3A settings are provided by {@link
1502 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result
1505 When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
1506 is that this frame will not be used by camera device background 3A statistics
1507 update, as if this frame is never captured. This mode can be used in the scenario
1508 where the application doesn't want a 3A manual control capture to affect
1509 the subsequent auto 3A capture results.
1513 <entry name="sceneMode" type="byte" visibility="public" enum="true"
1516 <value id="0">DISABLED
1518 Indicates that no scene modes are set for a given capture request.
1521 <value>FACE_PRIORITY
1522 <notes>If face detection support exists, use face
1523 detection data for auto-focus, auto-white balance, and
1524 auto-exposure routines.
1526 If face detection statistics are disabled
1527 (i.e. android.statistics.faceDetectMode is set to OFF),
1528 this should still operate correctly (but will not return
1529 face detection statistics to the framework).
1531 Unlike the other scene modes, android.control.aeMode,
1532 android.control.awbMode, and android.control.afMode
1533 remain active when FACE_PRIORITY is set.
1536 <value optional="true">ACTION
1538 Optimized for photos of quickly moving objects.
1543 <value optional="true">PORTRAIT
1545 Optimized for still photos of people.
1548 <value optional="true">LANDSCAPE
1550 Optimized for photos of distant macroscopic objects.
1553 <value optional="true">NIGHT
1555 Optimized for low-light settings.
1558 <value optional="true">NIGHT_PORTRAIT
1560 Optimized for still photos of people in low-light
1564 <value optional="true">THEATRE
1566 Optimized for dim, indoor settings where flash must
1570 <value optional="true">BEACH
1572 Optimized for bright, outdoor beach settings.
1575 <value optional="true">SNOW
1577 Optimized for bright, outdoor settings containing snow.
1580 <value optional="true">SUNSET
1582 Optimized for scenes of the setting sun.
1585 <value optional="true">STEADYPHOTO
1587 Optimized to avoid blurry photos due to small amounts of
1588 device motion (for example: due to hand shake).
1591 <value optional="true">FIREWORKS
1593 Optimized for nighttime photos of fireworks.
1596 <value optional="true">SPORTS
1598 Optimized for photos of quickly moving people.
1603 <value optional="true">PARTY
1605 Optimized for dim, indoor settings with multiple moving
1609 <value optional="true">CANDLELIGHT
1611 Optimized for dim settings where the main light source
1615 <value optional="true">BARCODE
1617 Optimized for accurately capturing a photo of barcode
1618 for use by camera applications that wish to read the
1622 <value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO
1624 This is deprecated, please use {@link
1625 android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
1627 android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
1628 for high speed video recording.
1630 Optimized for high speed video recording (frame rate >=60fps) use case.
1632 The supported high speed video sizes and fps ranges are specified in
1633 android.control.availableHighSpeedVideoConfigurations. To get desired
1634 output frame rates, the application is only allowed to select video size
1635 and fps range combinations listed in this static metadata. The fps range
1636 can be control via android.control.aeTargetFpsRange.
1638 In this mode, the camera device will override aeMode, awbMode, and afMode to
1639 ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
1640 controls will be overridden to be FAST. Therefore, no manual control of capture
1641 and post-processing parameters is possible. All other controls operate the
1642 same as when android.control.mode == AUTO. This means that all other
1643 android.control.* fields continue to work, such as
1645 * android.control.aeTargetFpsRange
1646 * android.control.aeExposureCompensation
1647 * android.control.aeLock
1648 * android.control.awbLock
1649 * android.control.effectMode
1650 * android.control.aeRegions
1651 * android.control.afRegions
1652 * android.control.awbRegions
1653 * android.control.afTrigger
1654 * android.control.aePrecaptureTrigger
1656 Outside of android.control.*, the following controls will work:
1658 * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
1659 * android.lens.opticalStabilizationMode (if it is supported)
1660 * android.scaler.cropRegion
1661 * android.statistics.faceDetectMode
1663 For high speed recording use case, the actual maximum supported frame rate may
1664 be lower than what camera can output, depending on the destination Surfaces for
1665 the image data. For example, if the destination surface is from video encoder,
1666 the application needs to check if the video encoder is capable of supporting the
1667 high frame rate for a given video size, or it will end up with lower recording
1668 frame rate. If the destination surface is from preview window, the preview frame
1669 rate will be bounded by the screen refresh rate.
1671 The camera device will only support up to 2 output high speed streams
1672 (processed non-stalling format defined in android.request.maxNumOutputStreams)
1673 in this mode. This control will be effective only if all of below conditions are true:
1675 * The application created no more than maxNumHighSpeedStreams processed non-stalling
1676 format output streams, where maxNumHighSpeedStreams is calculated as
1677 min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
1678 * The stream sizes are selected from the sizes reported by
1679 android.control.availableHighSpeedVideoConfigurations.
1680 * No processed non-stalling or raw streams are configured.
1682 When the above conditions are NOT satisfied, the controls of this mode and
1683 android.control.aeTargetFpsRange will be ignored by the camera device,
1684 the camera device will fall back to android.control.mode `==` AUTO,
1685 and the returned capture result metadata will give the fps range chosen
1686 by the camera device.
1688 Switching into or out of this mode may trigger some camera ISP/sensor
1689 reconfigurations, which may introduce extra latency. It is recommended that
1690 the application avoids unnecessary scene mode switch as much as possible.
1693 <value optional="true">HDR
1695 Turn on a device-specific high dynamic range (HDR) mode.
1697 In this scene mode, the camera device captures images
1698 that keep a larger range of scene illumination levels
1699 visible in the final image. For example, when taking a
1700 picture of a object in front of a bright window, both
1701 the object and the scene through the window may be
1702 visible when using HDR mode, while in normal AUTO mode,
1703 one or the other may be poorly exposed. As a tradeoff,
1704 HDR mode generally takes much longer to capture a single
1705 image, has no user control, and may have other artifacts
1706 depending on the HDR method used.
1708 Therefore, HDR captures operate at a much slower rate
1709 than regular captures.
1711 In this mode, on LIMITED or FULL devices, when a request
1712 is made with a android.control.captureIntent of
1713 STILL_CAPTURE, the camera device will capture an image
1714 using a high dynamic range capture technique. On LEGACY
1715 devices, captures that target a JPEG-format output will
1716 be captured with HDR, and the capture intent is not
1719 The HDR capture may involve the device capturing a burst
1720 of images internally and combining them into one, or it
1721 may involve the device using specialized high dynamic
1722 range capture hardware. In all cases, a single image is
1723 produced in response to a capture request submitted
1726 Since substantial post-processing is generally needed to
1727 produce an HDR image, only YUV, PRIVATE, and JPEG
1728 outputs are supported for LIMITED/FULL device HDR
1729 captures, and only JPEG outputs are supported for LEGACY
1730 HDR captures. Using a RAW output for HDR capture is not
1733 Some devices may also support always-on HDR, which
1734 applies HDR processing at full frame rate. For these
1735 devices, intents other than STILL_CAPTURE will also
1736 produce an HDR output with no frame rate impact compared
1737 to normal operation, though the quality may be lower
1738 than for STILL_CAPTURE intents.
1740 If SCENE_MODE_HDR is used with unsupported output types
1741 or capture intents, the images captured will be as if
1742 the SCENE_MODE was not enabled at all.
1745 <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
1746 <notes>Same as FACE_PRIORITY scene mode, except that the camera
1747 device will choose higher sensitivity values (android.sensor.sensitivity)
1748 under low light conditions.
1750 The camera device may be tuned to expose the images in a reduced
1751 sensitivity range to produce the best quality images. For example,
1752 if the android.sensor.info.sensitivityRange gives range of [100, 1600],
1753 the camera device auto-exposure routine tuning process may limit the actual
1754 exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
1755 excessive in order to preserve the image quality. Under this situation, the image under
1756 low light may be under-exposed when the sensor max exposure time (bounded by the
1757 android.control.aeTargetFpsRange when android.control.aeMode is one of the
1758 ON_* modes) and effective max sensitivity are reached. This scene mode allows the
1759 camera device auto-exposure routine to increase the sensitivity up to the max
1760 sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
1761 dark and the max exposure time is reached. The captured images may be noisier
1762 compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
1763 recommended that the application only use this scene mode when it is capable of
1764 reducing the noise level of the captured images.
1766 Unlike the other scene modes, android.control.aeMode,
1767 android.control.awbMode, and android.control.afMode
1768 remain active when FACE_PRIORITY_LOW_LIGHT is set.
1771 <value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START
1773 Scene mode values within the range of
1774 `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
1775 customized scene modes.
1778 <value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
1780 Scene mode values within the range of
1781 `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
1782 customized scene modes.
1787 Control for which scene mode is currently active.
1789 <range>android.control.availableSceneModes</range>
1791 Scene modes are custom camera modes optimized for a certain set of conditions and
1794 This is the mode that is active when
1795 `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
1796 disable android.control.aeMode, android.control.awbMode, and android.control.afMode
1799 The interpretation and implementation of these scene modes is left
1800 to the implementor of the camera device. Their behavior will not be
1801 consistent across all devices, and any given device may only implement
1802 a subset of these modes.
1805 HAL implementations that include scene modes are expected to provide
1806 the per-scene settings to use for android.control.aeMode,
1807 android.control.awbMode, and android.control.afMode in
1808 android.control.sceneModeOverrides.
1810 For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the
1811 HAL must list supported video size and fps range in
1812 android.control.availableHighSpeedVideoConfigurations. For a given size, e.g. 1280x720,
1813 if the HAL has two different sensor configurations for normal streaming mode and high
1814 speed streaming, when this scene mode is set/reset in a sequence of capture requests, the
1815 HAL may have to switch between different sensor modes. This mode is deprecated in legacy
1816 HAL3.3, to support high speed video recording, please implement
1817 android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
1818 capability defined in android.request.availableCapabilities.
1822 <entry name="videoStabilizationMode" type="byte" visibility="public"
1823 enum="true" hwlevel="legacy">
1827 Video stabilization is disabled.
1831 Video stabilization is enabled.
1834 <description>Whether video stabilization is
1835 active.</description>
1837 Video stabilization automatically warps images from
1838 the camera in order to stabilize motion between consecutive frames.
1840 If enabled, video stabilization can modify the
1841 android.scaler.cropRegion to keep the video stream stabilized.
1843 Switching between different video stabilization modes may take several
1844 frames to initialize, the camera device will report the current mode
1845 in capture result metadata. For example, When "ON" mode is requested,
1846 the video stabilization modes in the first several capture results may
1847 still be "OFF", and it will become "ON" when the initialization is
1850 In addition, not all recording sizes or frame rates may be supported for
1851 stabilization by a device that reports stabilization support. It is guaranteed
1852 that an output targeting a MediaRecorder or MediaCodec will be stabilized if
1853 the recording resolution is less than or equal to 1920 x 1080 (width less than
1854 or equal to 1920, height less than or equal to 1080), and the recording
1855 frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
1856 android.control.videoStabilizationMode field will return
1857 OFF if the recording output is not stabilized, or if there are no output
1858 Surface types that can be stabilized.
1860 If a camera device supports both this mode and OIS
1861 (android.lens.opticalStabilizationMode), turning both modes on may
1862 produce undesirable interaction, so it is recommended not to enable
1863 both at the same time.
1869 <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
1870 type_notes="list of enums" container="array" typedef="enumList"
1876 List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
1877 supported by this camera device.
1879 <range>Any value listed in android.control.aeAntibandingMode</range>
1881 Not all of the auto-exposure anti-banding modes may be
1882 supported by a given camera device. This field lists the
1883 valid anti-banding modes that the application may request
1884 for this camera device with the
1885 android.control.aeAntibandingMode control.
1889 <entry name="aeAvailableModes" type="byte" visibility="public"
1890 type_notes="list of enums" container="array" typedef="enumList"
1896 List of auto-exposure modes for android.control.aeMode that are supported by this camera
1899 <range>Any value listed in android.control.aeMode</range>
1901 Not all the auto-exposure modes may be supported by a
1902 given camera device, especially if no flash unit is
1903 available. This entry lists the valid modes for
1904 android.control.aeMode for this camera device.
1906 All camera devices support ON, and all camera devices with flash
1907 units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
1909 FULL mode camera devices always support OFF mode,
1910 which enables application control of camera exposure time,
1911 sensitivity, and frame duration.
1913 LEGACY mode camera devices never support OFF mode.
1914 LIMITED mode devices support OFF if they support the MANUAL_SENSOR
1919 <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
1920 type_notes="list of pairs of frame rates"
1921 container="array" typedef="rangeInt"
1927 <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
1928 this camera device.</description>
1929 <units>Frames per second (FPS)</units>
1931 For devices at the LEGACY level or above:
1933 * For constant-framerate recording, for each normal
1934 {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
1935 {@link android.media.CamcorderProfile CamcorderProfile} that has
1936 {@link android.media.CamcorderProfile#quality quality} in
1937 the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
1938 {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
1939 supported by the device and has
1940 {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will
1941 always include (`x`,`x`).
1943 * Also, a camera device must either not support any
1944 {@link android.media.CamcorderProfile CamcorderProfile},
1945 or support at least one
1946 normal {@link android.media.CamcorderProfile CamcorderProfile} that has
1947 {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x` >= 24.
1949 For devices at the LIMITED level or above:
1951 * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
1952 and (`max`, `max`) where `min` <= 15 and `max` = the maximum output frame rate of the
1953 maximum YUV_420_888 output size.
1957 <entry name="aeCompensationRange" type="int32" visibility="public"
1958 container="array" typedef="rangeInt"
1963 <description>Maximum and minimum exposure compensation values for
1964 android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
1965 that are supported by this camera device.</description>
1967 Range [0,0] indicates that exposure compensation is not supported.
1969 For LIMITED and FULL devices, range must follow below requirements if exposure
1970 compensation is supported (`range != [0, 0]`):
1972 `Min.exposure compensation * android.control.aeCompensationStep <= -2 EV`
1974 `Max.exposure compensation * android.control.aeCompensationStep >= 2 EV`
1976 LEGACY devices may support a smaller range than this.
1980 <entry name="aeCompensationStep" type="rational" visibility="public"
1982 <description>Smallest step by which the exposure compensation
1983 can be changed.</description>
1984 <units>Exposure Value (EV)</units>
1986 This is the unit for android.control.aeExposureCompensation. For example, if this key has
1987 a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
1988 that the target EV offset for the auto-exposure routine is -1 EV.
1990 One unit of EV compensation changes the brightness of the captured image by a factor
1991 of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
1994 This must be less than or equal to 1/2.
1998 <entry name="afAvailableModes" type="byte" visibility="public"
1999 type_notes="List of enums" container="array" typedef="enumList"
2005 List of auto-focus (AF) modes for android.control.afMode that are
2006 supported by this camera device.
2008 <range>Any value listed in android.control.afMode</range>
2010 Not all the auto-focus modes may be supported by a
2011 given camera device. This entry lists the valid modes for
2012 android.control.afMode for this camera device.
2014 All LIMITED and FULL mode camera devices will support OFF mode, and all
2015 camera devices with adjustable focuser units
2016 (`android.lens.info.minimumFocusDistance > 0`) will support AUTO mode.
2018 LEGACY devices will support OFF mode only if they support
2019 focusing to infinity (by also setting android.lens.focusDistance to
2024 <entry name="availableEffects" type="byte" visibility="public"
2025 type_notes="List of enums (android.control.effectMode)." container="array"
2026 typedef="enumList" hwlevel="legacy">
2031 List of color effects for android.control.effectMode that are supported by this camera
2034 <range>Any value listed in android.control.effectMode</range>
2036 This list contains the color effect modes that can be applied to
2037 images produced by the camera device.
2038 Implementations are not expected to be consistent across all devices.
2039 If no color effect modes are available for a device, this will only list
2042 A color effect will only be applied if
2043 android.control.mode != OFF. OFF is always included in this list.
2045 This control has no effect on the operation of other control routines such
2046 as auto-exposure, white balance, or focus.
2050 <entry name="availableSceneModes" type="byte" visibility="public"
2051 type_notes="List of enums (android.control.sceneMode)."
2052 container="array" typedef="enumList" hwlevel="legacy">
2057 List of scene modes for android.control.sceneMode that are supported by this camera
2060 <range>Any value listed in android.control.sceneMode</range>
2062 This list contains scene modes that can be set for the camera device.
2063 Only scene modes that have been fully implemented for the
2064 camera device may be included here. Implementations are not expected
2065 to be consistent across all devices.
2067 If no scene modes are supported by the camera device, this
2068 will be set to DISABLED. Otherwise DISABLED will not be listed.
2070 FACE_PRIORITY is always listed if face detection is
2071 supported (i.e.`android.statistics.info.maxFaceCount >
2076 <entry name="availableVideoStabilizationModes" type="byte"
2077 visibility="public" type_notes="List of enums." container="array"
2078 typedef="enumList" hwlevel="legacy">
2083 List of video stabilization modes for android.control.videoStabilizationMode
2084 that are supported by this camera device.
2086 <range>Any value listed in android.control.videoStabilizationMode</range>
2088 OFF will always be listed.
2092 <entry name="awbAvailableModes" type="byte" visibility="public"
2093 type_notes="List of enums"
2094 container="array" typedef="enumList" hwlevel="legacy">
2099 List of auto-white-balance modes for android.control.awbMode that are supported by this
2102 <range>Any value listed in android.control.awbMode</range>
2104 Not all the auto-white-balance modes may be supported by a
2105 given camera device. This entry lists the valid modes for
2106 android.control.awbMode for this camera device.
2108 All camera devices will support ON mode.
2110 Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
2111 mode, which enables application control of white balance, by using
2112 android.colorCorrection.transform and android.colorCorrection.gains
2113 (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
2114 mode camera devices.
2118 <entry name="maxRegions" type="int32" visibility="ndk_public"
2119 container="array" hwlevel="legacy">
2124 List of the maximum number of regions that can be used for metering in
2125 auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
2126 this corresponds to the maximum number of elements in
2127 android.control.aeRegions, android.control.awbRegions,
2128 and android.control.afRegions.
2131 Value must be &gt;= 0 for each element. For full-capability devices
2132 this value must be &gt;= 1 for AE and AF. The order of the elements is:
2133 `(AE, AWB, AF)`.</range>
2136 <entry name="maxRegionsAe" type="int32" visibility="java_public"
2137 synthetic="true" hwlevel="legacy">
2139 The maximum number of metering regions that can be used by the auto-exposure (AE)
2142 <range>Value will be &gt;= 0. For FULL-capability devices, this
2143 value will be &gt;= 1.
2146 This corresponds to the maximum allowed number of elements in
2147 android.control.aeRegions.
2149 <hal_details>This entry is private to the framework. Fill in
2150 maxRegions to have this entry be automatically populated.
2153 <entry name="maxRegionsAwb" type="int32" visibility="java_public"
2154 synthetic="true" hwlevel="legacy">
2156 The maximum number of metering regions that can be used by the auto-white balance (AWB)
2159 <range>Value will be &gt;= 0.
2162 This corresponds to the maximum allowed number of elements in
2163 android.control.awbRegions.
2165 <hal_details>This entry is private to the framework. Fill in
2166 maxRegions to have this entry be automatically populated.
2169 <entry name="maxRegionsAf" type="int32" visibility="java_public"
2170 synthetic="true" hwlevel="legacy">
2172 The maximum number of metering regions that can be used by the auto-focus (AF) routine.
2174 <range>Value will be &gt;= 0. For FULL-capability devices, this
2175 value will be &gt;= 1.
2178 This corresponds to the maximum allowed number of elements in
2179 android.control.afRegions.
2181 <hal_details>This entry is private to the framework. Fill in
2182 maxRegions to have this entry be automatically populated.
2185 <entry name="sceneModeOverrides" type="byte" visibility="system"
2186 container="array" hwlevel="limited">
2189 <size>length(availableSceneModes)</size>
2192 Ordered list of auto-exposure, auto-white balance, and auto-focus
2193 settings to use with each available scene mode.
2196 For each available scene mode, the list must contain three
2197 entries containing the android.control.aeMode,
2198 android.control.awbMode, and android.control.afMode values used
2199 by the camera device. The entry order is `(aeMode, awbMode, afMode)`
2200 where aeMode has the lowest index position.
2203 When a scene mode is enabled, the camera device is expected
2204 to override android.control.aeMode, android.control.awbMode,
2205 and android.control.afMode with its preferred settings for
2208 The order of this list matches that of availableSceneModes,
2209 with 3 entries for each mode. The overrides listed
2210 for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
2211 since for that mode the application-set android.control.aeMode,
2212 android.control.awbMode, and android.control.afMode values are
2213 used instead, matching the behavior when android.control.mode
2214 is set to AUTO. It is recommended that the FACE_PRIORITY and
2215 FACE_PRIORITY_LOW_LIGHT (if supported) overrides should be set to 0.
2217 For example, if availableSceneModes contains
2218 `(FACE_PRIORITY, ACTION, NIGHT)`, then the camera framework
2219 expects sceneModeOverrides to have 9 entries formatted like:
2220 `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
2221 ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
2224 To maintain backward compatibility, this list will be made available
2225 in the static metadata of the camera service. The camera service will
2226 use these values to set android.control.aeMode,
2227 android.control.awbMode, and android.control.afMode when using a scene
2228 mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
2234 <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
2235 <description>The ID sent with the latest
2236 CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
2237 <details>Must be 0 if no
2238 CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
2239 by HAL. Always updated even if AE algorithm ignores the
2242 <clone entry="android.control.aeAntibandingMode" kind="controls">
2244 <clone entry="android.control.aeExposureCompensation" kind="controls">
2246 <clone entry="android.control.aeLock" kind="controls">
2248 <clone entry="android.control.aeMode" kind="controls">
2250 <clone entry="android.control.aeRegions" kind="controls">
2252 <clone entry="android.control.aeTargetFpsRange" kind="controls">
2254 <clone entry="android.control.aePrecaptureTrigger" kind="controls">
2256 <entry name="aeState" type="byte" visibility="public" enum="true"
2260 <notes>AE is off or recently reset.
2262 When a camera device is opened, it starts in
2263 this state. This is a transient state, the camera device may skip reporting
2264 this state in capture result.</notes></value>
2266 <notes>AE doesn't yet have a good set of control values
2267 for the current scene.
2269 This is a transient state, the camera device may skip
2270 reporting this state in capture result.</notes></value>
2272 <notes>AE has a good set of control values for the
2273 current scene.</notes></value>
2275 <notes>AE has been locked.</notes></value>
2276 <value>FLASH_REQUIRED
2277 <notes>AE has a good set of control values, but flash
2278 needs to be fired for good quality still
2279 capture.</notes></value>
2281 <notes>AE has been asked to do a precapture sequence
2282 and is currently executing it.
2284 Precapture can be triggered through setting
2285 android.control.aePrecaptureTrigger to START. Currently
2286 active and completed (if it causes camera device internal AE lock) precapture
2287 metering sequence can be canceled through setting
2288 android.control.aePrecaptureTrigger to CANCEL.
2290 Once PRECAPTURE completes, AE will transition to CONVERGED
2291 or FLASH_REQUIRED as appropriate. This is a transient
2292 state, the camera device may skip reporting this state in
2293 capture result.</notes></value>
2295 <description>Current state of the auto-exposure (AE) algorithm.</description>
2296 <details>Switching between or enabling AE modes (android.control.aeMode) always
2297 resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
2298 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2299 the algorithm states to INACTIVE.
2301 The camera device can do several state transitions between two results, if it is
2302 allowed by the state transition table. For example: INACTIVE may never actually be
2305 The state in the result is the state for this image (in sync with this image): if
2306 AE state becomes CONVERGED, then the image data associated with this result should
2309 Below are state transition tables for different AE modes.
2311 State | Transition Cause | New State | Notes
2312 :------------:|:----------------:|:---------:|:-----------------------:
2313 INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled
2315 When android.control.aeMode is AE_MODE_ON_*:
2317 State | Transition Cause | New State | Notes
2318 :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
2319 INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing
2320 INACTIVE | android.control.aeLock is ON | LOCKED | Values locked
2321 SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing
2322 SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash
2323 SEARCHING | android.control.aeLock is ON | LOCKED | Values locked
2324 CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing
2325 CONVERGED | android.control.aeLock is ON | LOCKED | Values locked
2326 FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing
2327 FLASH_REQUIRED | android.control.aeLock is ON | LOCKED | Values locked
2328 LOCKED | android.control.aeLock is OFF | SEARCHING | Values not good after unlock
2329 LOCKED | android.control.aeLock is OFF | CONVERGED | Values good after unlock
2330 LOCKED | android.control.aeLock is OFF | FLASH_REQUIRED | Exposure good, but too dark
2331 PRECAPTURE | Sequence done. android.control.aeLock is OFF | CONVERGED | Ready for high-quality capture
2332 PRECAPTURE | Sequence done. android.control.aeLock is ON | LOCKED | Ready for high-quality capture
2333 LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked
2334 LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED | Precapture trigger is ignored when AE is already locked
2335 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE | Start AE precapture metering sequence
2336 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE | Currently active precapture metering sequence is canceled
2338 For the above table, the camera device may skip reporting any state changes that happen
2339 without application intervention (i.e. mode switch, trigger, locking). Any state that
2340 can be skipped in that manner is called a transient state.
2342 For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
2343 listed in above table, it is also legal for the camera device to skip one or more
2344 transient states between two results. See below table for examples:
2346 State | Transition Cause | New State | Notes
2347 :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
2348 INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device.
2349 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
2350 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device.
2351 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
2352 Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | CONVERGED | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
2353 CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
2354 FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
2357 <clone entry="android.control.afMode" kind="controls">
2359 <clone entry="android.control.afRegions" kind="controls">
2361 <clone entry="android.control.afTrigger" kind="controls">
2363 <entry name="afState" type="byte" visibility="public" enum="true"
2367 <notes>AF is off or has not yet tried to scan/been asked
2370 When a camera device is opened, it starts in this
2371 state. This is a transient state, the camera device may
2372 skip reporting this state in capture
2373 result.</notes></value>
2375 <notes>AF is currently performing an AF scan initiated by the
2376 camera device in a continuous autofocus mode.
2378 Only used by CONTINUOUS_* AF modes. This is a transient
2379 state, the camera device may skip reporting this state in
2380 capture result.</notes></value>
2381 <value>PASSIVE_FOCUSED
2382 <notes>AF currently believes it is in focus, but may
2383 restart scanning at any time.
2385 Only used by CONTINUOUS_* AF modes. This is a transient
2386 state, the camera device may skip reporting this state in
2387 capture result.</notes></value>
2389 <notes>AF is performing an AF scan because it was
2390 triggered by AF trigger.
2392 Only used by AUTO or MACRO AF modes. This is a transient
2393 state, the camera device may skip reporting this state in
2394 capture result.</notes></value>
2395 <value>FOCUSED_LOCKED
2396 <notes>AF believes it is focused correctly and has locked
2399 This state is reached only after an explicit START AF trigger has been
2400 sent (android.control.afTrigger), when good focus has been obtained.
2402 The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2403 a new AF trigger is sent to the camera device (android.control.afTrigger).
2405 <value>NOT_FOCUSED_LOCKED
2406 <notes>AF has failed to focus successfully and has locked
2409 This state is reached only after an explicit START AF trigger has been
2410 sent (android.control.afTrigger), when good focus cannot be obtained.
2412 The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2413 a new AF trigger is sent to the camera device (android.control.afTrigger).
2415 <value>PASSIVE_UNFOCUSED
2416 <notes>AF finished a passive scan without finding focus,
2417 and may restart scanning at any time.
2419 Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
2420 device may skip reporting this state in capture result.
2422 LEGACY camera devices do not support this state. When a passive
2423 scan has finished, it will always go to PASSIVE_FOCUSED.
2426 <description>Current state of auto-focus (AF) algorithm.</description>
2428 Switching between or enabling AF modes (android.control.afMode) always
2429 resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
2430 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2431 the algorithm states to INACTIVE.
2433 The camera device can do several state transitions between two results, if it is
2434 allowed by the state transition table. For example: INACTIVE may never actually be
2437 The state in the result is the state for this image (in sync with this image): if
2438 AF state becomes FOCUSED, then the image data associated with this result should
2441 Below are state transition tables for different AF modes.
2443 When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
2445 State | Transition Cause | New State | Notes
2446 :------------:|:----------------:|:---------:|:-----------:
2447 INACTIVE | | INACTIVE | Never changes
2449 When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
2451 State | Transition Cause | New State | Notes
2452 :-----------------:|:----------------:|:------------------:|:--------------:
2453 INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving
2454 ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked
2455 ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
2456 ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked
2457 FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
2458 FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
2459 NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
2460 NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
2461 Any state | Mode change | INACTIVE |
2463 For the above table, the camera device may skip reporting any state changes that happen
2464 without application intervention (i.e. mode switch, trigger, locking). Any state that
2465 can be skipped in that manner is called a transient state.
2467 For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
2468 state transitions listed in above table, it is also legal for the camera device to skip
2469 one or more transient states between two results. See below table for examples:
2471 State | Transition Cause | New State | Notes
2472 :-----------------:|:----------------:|:------------------:|:--------------:
2473 INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
2474 INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
2475 FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
2476 NOT_FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is good after a scan, lens is now locked.
2479 When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
2481 State | Transition Cause | New State | Notes
2482 :-----------------:|:-----------------------------------:|:------------------:|:--------------:
2483 INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
2484 INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2485 PASSIVE_SCAN | Camera device completes current scan| PASSIVE_FOCUSED | End AF scan, Lens now locked
2486 PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
2487 PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, if focus is good. Lens now locked
2488 PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
2489 PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
2490 PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
2491 PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
2492 PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, lens now locked
2493 PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
2494 FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
2495 FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
2496 NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
2497 NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
2499 When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
2501 State | Transition Cause | New State | Notes
2502 :-----------------:|:------------------------------------:|:------------------:|:--------------:
2503 INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
2504 INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2505 PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked
2506 PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
2507 PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual transition once the focus is good. Lens now locked
2508 PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
2509 PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
2510 PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
2511 PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
2512 PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. Lens now locked
2513 PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
2514 FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
2515 FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
2516 NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
2517 NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
2519 When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
2520 (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
2521 camera device. When a trigger is included in a mode switch request, the trigger
2522 will be evaluated in the context of the new mode in the request.
2523 See below table for examples:
2525 State | Transition Cause | New State | Notes
2526 :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
2527 any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE
2528 any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped
2529 any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
2532 <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
2533 <description>The ID sent with the latest
2534 CAMERA2_TRIGGER_AUTOFOCUS call</description>
2535 <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
2536 received yet by HAL. Always updated even if AF algorithm
2537 ignores the trigger</details>
2539 <clone entry="android.control.awbLock" kind="controls">
2541 <clone entry="android.control.awbMode" kind="controls">
2543 <clone entry="android.control.awbRegions" kind="controls">
2545 <clone entry="android.control.captureIntent" kind="controls">
2547 <entry name="awbState" type="byte" visibility="public" enum="true"
2551 <notes>AWB is not in auto mode, or has not yet started metering.
2553 When a camera device is opened, it starts in this
2554 state. This is a transient state, the camera device may
2555 skip reporting this state in capture
2556 result.</notes></value>
2558 <notes>AWB doesn't yet have a good set of control
2559 values for the current scene.
2561 This is a transient state, the camera device
2562 may skip reporting this state in capture result.</notes></value>
2564 <notes>AWB has a good set of control values for the
2565 current scene.</notes></value>
2567 <notes>AWB has been locked.
2570 <description>Current state of auto-white balance (AWB) algorithm.</description>
2571 <details>Switching between or enabling AWB modes (android.control.awbMode) always
2572 resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
2573 or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2574 the algorithm states to INACTIVE.
2576 The camera device can do several state transitions between two results, if it is
2577 allowed by the state transition table. So INACTIVE may never actually be seen in
2580 The state in the result is the state for this image (in sync with this image): if
2581 AWB state becomes CONVERGED, then the image data associated with this result should
2584 Below are state transition tables for different AWB modes.
2586 When `android.control.awbMode != AWB_MODE_AUTO`:
2588 State | Transition Cause | New State | Notes
2589 :------------:|:----------------:|:---------:|:-----------------------:
2590 INACTIVE | |INACTIVE |Camera device auto white balance algorithm is disabled
2592 When android.control.awbMode is AWB_MODE_AUTO:
2594 State | Transition Cause | New State | Notes
2595 :-------------:|:--------------------------------:|:-------------:|:-----------------:
2596 INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing
2597 INACTIVE | android.control.awbLock is ON | LOCKED | Values locked
2598 SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing
2599 SEARCHING | android.control.awbLock is ON | LOCKED | Values locked
2600 CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing
2601 CONVERGED | android.control.awbLock is ON | LOCKED | Values locked
2602 LOCKED | android.control.awbLock is OFF | SEARCHING | Values not good after unlock
2604 For the above table, the camera device may skip reporting any state changes that happen
2605 without application intervention (i.e. mode switch, trigger, locking). Any state that
2606 can be skipped in that manner is called a transient state.
2608 For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
2609 listed in above table, it is also legal for the camera device to skip one or more
2610 transient states between two results. See below table for examples:
2612 State | Transition Cause | New State | Notes
2613 :-------------:|:--------------------------------:|:-------------:|:-----------------:
2614 INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device.
2615 LOCKED | android.control.awbLock is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device.
2618 <clone entry="android.control.effectMode" kind="controls">
2620 <clone entry="android.control.mode" kind="controls">
2622 <clone entry="android.control.sceneMode" kind="controls">
2624 <clone entry="android.control.videoStabilizationMode" kind="controls">
2628 <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
2629 container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
2635 List of available high speed video size, fps range and max batch size configurations
2636 supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
2639 For each configuration, the fps_max &gt;= 120fps.
2642 When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
2643 this metadata will list the supported high speed video size, fps range and max batch size
2644 configurations. All the sizes listed in this configuration will be a subset of the sizes
2645 reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
2646 for processed non-stalling formats.
2648 For the high speed video use case, the application must
2649 select the video size and fps range from this metadata to configure the recording and
2650 preview streams and setup the recording requests. For example, if the application intends
2651 to do high speed recording, it can select the maximum size reported by this metadata to
2652 configure output streams. Once the size is selected, application can filter this metadata
2653 by selected size and get the supported fps ranges, and use these fps ranges to setup the
2654 recording requests. Note that for the use case of multiple output streams, application
2655 must select one unique size from this metadata to use (e.g., preview and recording streams
2656 must have the same size). Otherwise, the high speed capture session creation will fail.
2658 The min and max fps will be multiples of 30fps.
2660 High speed video streaming exerts significant performance pressure on camera hardware;
2661 to achieve efficient high speed streaming, the camera device may have to aggregate
2662 multiple frames together for processing as a batch, where the request
2663 controls are the same for all the frames in this batch. Max batch size indicates
2664 the max possible number of frames the camera device will group together for this high
2665 speed stream configuration. This max batch size will be used to generate a high speed
2666 recording request list by
2667 {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
2668 The max batch size for each configuration will satisfy below conditions:
2670 * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
2671 if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
2672 * The camera device may choose smaller internal batch size for each configuration, but
2673 the actual batch size will be a divisor of max batch size. For example, if the max batch
2674 size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
2675 * The max batch size in each configuration entry must be no larger than 32.
2677 The camera device doesn't have to support batch mode to achieve high speed video recording,
2678 in such case, batch_size_max will be reported as 1 in each configuration entry.
2680 The fps ranges in this configuration list can only be used to create requests
2681 that are submitted to a high speed camera capture session created by
2682 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
2683 The fps ranges reported in this metadata must not be used to setup capture requests for
2684 normal capture session, or it will cause request error.
2687 All the sizes listed in this configuration will be a subset of the sizes reported by
2688 android.scaler.availableStreamConfigurations for processed non-stalling output formats.
2689 Note that for all high speed video configurations, HAL must be able to support a minimum
2690 of two streams, though the application might choose to configure just one stream.
2692 The HAL may support multiple sensor modes for high speed outputs, for example, 120fps
2693 sensor mode and 120fps recording, 240fps sensor mode for 240fps recording. The application
2694 usually starts preview first, then starts recording. To avoid sensor mode switch caused
2695 stutter when starting recording as much as possible, the application may want to ensure
2696 the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
2697 the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
2698 For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
2699 [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
2700 do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
2701 recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
2702 values that are suitable for smooth preview streaming. If the HAL sees different max_fps
2703 values that fall into different sensor modes in a sequence of requests, the HAL must
2704 switch the sensor mode as quickly as possible to minimize the mode switch caused stutter.
2708 <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
2709 typedef="boolean" hwlevel="legacy">
2711 <value>FALSE</value>
2714 <description>Whether the camera device supports android.control.aeLock</description>
2716 Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
2717 list `true`. This includes FULL devices.
2721 <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
2722 typedef="boolean" hwlevel="legacy">
2724 <value>FALSE</value>
2727 <description>Whether the camera device supports android.control.awbLock</description>
2729 Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
2730 always list `true`. This includes FULL devices.
2734 <entry name="availableModes" type="byte" visibility="public"
2735 type_notes="List of enums (android.control.mode)." container="array"
2736 typedef="enumList" hwlevel="legacy">
2741 List of control modes for android.control.mode that are supported by this camera
2744 <range>Any value listed in android.control.mode</range>
2746 This list contains control modes that can be set for the camera device.
2747 LEGACY mode devices will always support AUTO mode. LIMITED and FULL
2748 devices will always support OFF, AUTO modes.
2751 <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
2752 type_notes="Range of supported post RAW sensitivitiy boosts"
2753 container="array" typedef="rangeInt">
2757 <description>Range of boosts for android.control.postRawSensitivityBoost supported
2758 by this camera device.
2760 <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2762 Devices that support post RAW sensitivity boost will advertise
2763 android.control.postRawSensitivityBoost key for controlling
2764 post RAW sensitivity boost.
2766 This key will be `null` for devices that do not support any RAW format
2767 outputs. For devices that do support RAW format outputs, this key will always
2768 be present, and if a device does not support post RAW sensitivity boost, it will
2769 list `(100, 100)` in this key.
2772 This key is added in legacy HAL3.4. For legacy HAL3.3 or earlier devices, camera
2773 framework will generate this key as `(100, 100)` if device supports any of RAW output
2774 formats. All legacy HAL3.4 and above devices should list this key if device supports
2775 any of RAW output formats.
2780 <entry name="postRawSensitivityBoost" type="int32" visibility="public">
2781 <description>The amount of additional sensitivity boost applied to output images
2782 after RAW sensor data is captured.
2784 <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2785 <range>android.control.postRawSensitivityBoostRange</range>
2787 Some camera devices support additional digital sensitivity boosting in the
2788 camera processing pipeline after sensor RAW image is captured.
2789 Such a boost will be applied to YUV/JPEG format output images but will not
2790 have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.
2792 This key will be `null` for devices that do not support any RAW format
2793 outputs. For devices that do support RAW format outputs, this key will always
2794 be present, and if a device does not support post RAW sensitivity boost, it will
2795 list `100` in this key.
2797 If the camera device cannot apply the exact boost requested, it will reduce the
2798 boost to the nearest supported value.
2799 The final boost value used will be available in the output capture result.
2801 For devices that support post RAW sensitivity boost, the YUV/JPEG output images
2802 of such device will have the total sensitivity of
2803 `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`
2804 The sensitivity of RAW format images will always be `android.sensor.sensitivity`
2806 This control is only effective if android.control.aeMode or android.control.mode is set to
2807 OFF; otherwise the auto-exposure algorithm will override this value.
2812 <clone entry="android.control.postRawSensitivityBoost" kind="controls">
2816 <entry name="enableZsl" type="byte" visibility="public" enum="true" typedef="boolean">
2819 <notes>Requests with android.control.captureIntent == STILL_CAPTURE must be captured
2820 after previous requests.</notes></value>
2822 <notes>Requests with android.control.captureIntent == STILL_CAPTURE may or may not be
2823 captured before previous requests.</notes></value>
2825 <description>Allow camera device to enable zero-shutter-lag mode for requests with
2826 android.control.captureIntent == STILL_CAPTURE.
2829 If enableZsl is `true`, the camera device may enable zero-shutter-lag mode for requests with
2830 STILL_CAPTURE capture intent. The camera device may use images captured in the past to
2831 produce output images for a zero-shutter-lag request. The result metadata including the
2832 android.sensor.timestamp reflects the source frames used to produce output images.
2833 Therefore, the contents of the output images and the result metadata may be out of order
2834 compared to previous regular requests. enableZsl does not affect requests with other
2837 For example, when requests are submitted in the following order:
2838 Request A: enableZsl is ON, android.control.captureIntent is PREVIEW
2839 Request B: enableZsl is ON, android.control.captureIntent is STILL_CAPTURE
2841 The output images for request B may have contents captured before the output images for
2842 request A, and the result metadata for request B may be older than the result metadata for
2845 Note that when enableZsl is `true`, it is not guaranteed to get output images captured in
2846 the past for requests with STILL_CAPTURE capture intent.
2848 For applications targeting SDK versions O and newer, the value of enableZsl in
2849 TEMPLATE_STILL_CAPTURE template may be `true`. The value in other templates is always
2852 For applications targeting SDK versions older than O, the value of enableZsl in all
2853 capture templates is always `false` if present.
2855 For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
2858 It is valid for HAL to produce regular output images for requests with STILL_CAPTURE
2864 <clone entry="android.control.enableZsl" kind="controls">
2866 <entry name="afSceneChange" type="int32" visibility="public" enum="true" hal_version="3.3">
2869 <notes>Scene change is not detected within the AF region(s).</notes></value>
2871 <notes>Scene change is detected within the AF region(s).</notes></value>
2873 <description>Whether a significant scene change is detected within the currently-set AF
2874 region(s).</description>
2875 <details>When the camera focus routine detects a change in the scene it is looking at,
2876 such as a large shift in camera viewpoint, significant motion in the scene, or a
2877 significant illumination change, this value will be set to DETECTED for a single capture
2878 result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar
2879 to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.
2881 afSceneChange may be DETECTED only if afMode is AF_MODE_CONTINUOUS_VIDEO or
2882 AF_MODE_CONTINUOUS_PICTURE. In other AF modes, afSceneChange must be NOT_DETECTED.
2884 This key will be available if the camera device advertises this key via {@link
2885 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
2890 <section name="demosaic">
2892 <entry name="mode" type="byte" enum="true">
2895 <notes>Minimal or no slowdown of frame rate compared to
2896 Bayer RAW output.</notes></value>
2898 <notes>Improved processing quality but the frame rate might be slowed down
2899 relative to raw output.</notes></value>
2901 <description>Controls the quality of the demosaicing
2902 processing.</description>
2907 <section name="edge">
2909 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
2912 <notes>No edge enhancement is applied.</notes></value>
2914 <notes>Apply edge enhancement at a quality level that does not slow down frame rate
2915 relative to sensor output. It may be the same as OFF if edge enhancement will
2916 slow down frame rate relative to sensor.</notes></value>
2918 <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
2920 <value optional="true">ZERO_SHUTTER_LAG <notes>Edge enhancement is applied at different
2921 levels for different output streams, based on resolution. Streams at maximum recording
2922 resolution (see {@link
2923 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession})
2924 or below have edge enhancement applied, while higher-resolution streams have no edge
2925 enhancement applied. The level of edge enhancement for low-resolution streams is tuned
2926 so that frame rate is not impacted, and the quality is equal to or better than FAST
2927 (since it is only applied to lower-resolution outputs, quality may improve from FAST).
2929 This mode is intended to be used by applications operating in a zero-shutter-lag mode
2930 with YUV or PRIVATE reprocessing, where the application continuously captures
2931 high-resolution intermediate buffers into a circular buffer, from which a final image is
2932 produced via reprocessing when a user takes a picture. For such a use case, the
2933 high-resolution buffers must not have edge enhancement applied to maximize efficiency of
2934 preview and to avoid double-applying enhancement when reprocessed, while low-resolution
2935 buffers (used for recording or preview, generally) need edge enhancement applied for
2936 reasonable preview quality.
2938 This mode is guaranteed to be supported by devices that support either the
2939 YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
2940 (android.request.availableCapabilities lists either of those capabilities) and it will
2941 be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
2944 <description>Operation mode for edge
2945 enhancement.</description>
2946 <range>android.edge.availableEdgeModes</range>
2947 <details>Edge enhancement improves sharpness and details in the captured image. OFF means
2948 no enhancement will be applied by the camera device.
2950 FAST/HIGH_QUALITY both mean camera device determined enhancement
2951 will be applied. HIGH_QUALITY mode indicates that the
2952 camera device will use the highest-quality enhancement algorithms,
2953 even if it slows down capture rate. FAST means the camera device will
2954 not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
2955 edge enhancement will slow down capture rate. Every output stream will have a similar
2956 amount of enhancement applied.
2958 ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
2959 buffer of high-resolution images during preview and reprocess image(s) from that buffer
2960 into a final capture when triggered by the user. In this mode, the camera device applies
2961 edge enhancement to low-resolution streams (below maximum recording resolution) to
2962 maximize preview quality, but does not apply edge enhancement to high-resolution streams,
2963 since those will be reprocessed later if necessary.
2965 For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
2966 device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
2967 The camera device may adjust its internal edge enhancement parameters for best
2968 image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
2971 For YUV_REPROCESSING The HAL can use android.reprocess.effectiveExposureFactor to
2972 adjust the internal edge enhancement reduction parameters appropriately to get the best
2978 <entry name="strength" type="byte">
2979 <description>Control the amount of edge enhancement
2980 applied to the images</description>
2981 <units>1-10; 10 is maximum sharpening</units>
2986 <entry name="availableEdgeModes" type="byte" visibility="public"
2987 type_notes="list of enums" container="array" typedef="enumList"
2993 List of edge enhancement modes for android.edge.mode that are supported by this camera
2996 <range>Any value listed in android.edge.mode</range>
2998 Full-capability camera devices must always support OFF; camera devices that support
2999 YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
3003 HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
3004 on the camera device, but the underlying implementation can be the same for both modes.
3005 That is, if the highest quality implementation on the camera device does not slow down
3006 capture rate, then FAST and HIGH_QUALITY will generate the same output.
3013 <clone entry="android.edge.mode" kind="controls">
3019 <section name="flash">
3021 <entry name="firingPower" type="byte">
3022 <description>Power for flash firing/torch</description>
3023 <units>10 is max power; 0 is no flash. Linear</units>
3024 <range>0 - 10</range>
3025 <details>Power for snapshot may use a different scale than
3026 for torch mode. Only one entry for torch mode will be
3030 <entry name="firingTime" type="int64">
3031 <description>Firing time of flash relative to start of
3032 exposure</description>
3033 <units>nanoseconds</units>
3034 <range>0-(exposure time-flash duration)</range>
3035 <details>Clamped to (0, exposure time - flash
3036 duration).</details>
3039 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
3043 Do not fire the flash for this capture.
3048 If the flash is available and charged, fire flash
3054 Transition flash to continuously on.
3058 <description>The desired mode for the camera device's flash control.</description>
3060 This control is only effective when flash unit is available
3061 (`android.flash.info.available == true`).
3063 When this control is used, the android.control.aeMode must be set to ON or OFF.
3064 Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
3065 ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
3067 When set to OFF, the camera device will not fire flash for this capture.
3069 When set to SINGLE, the camera device will fire flash regardless of the camera
3070 device's auto-exposure routine's result. When used in still capture case, this
3071 control should be used along with auto-exposure (AE) precapture metering sequence
3072 (android.control.aePrecaptureTrigger), otherwise, the image may be incorrectly exposed.
3074 When set to TORCH, the flash will be on continuously. This mode can be used
3075 for use cases such as preview, auto-focus assist, still capture, or video recording.
3077 The flash status will be reported by android.flash.state in the capture result metadata.
3083 <namespace name="info">
3084 <entry name="available" type="byte" visibility="public" enum="true"
3085 typedef="boolean" hwlevel="legacy">
3087 <value>FALSE</value>
3090 <description>Whether this camera device has a
3091 flash unit.</description>
3093 Will be `false` if no flash is available.
3095 If there is no flash unit, none of the flash controls do
3099 <entry name="chargeDuration" type="int64">
3100 <description>Time taken before flash can fire
3102 <units>nanoseconds</units>
3103 <range>0-1e9</range>
3104 <details>1 second too long/too short for recharge? Should
3105 this be power-dependent?</details>
3109 <entry name="colorTemperature" type="byte">
3110 <description>The x,y whitepoint of the
3112 <units>pair of floats</units>
3113 <range>0-1 for both</range>
3116 <entry name="maxEnergy" type="byte">
3117 <description>Max energy output of the flash for a full
3118 power single flash</description>
3119 <units>lumen-seconds</units>
3120 <range>&gt;= 0</range>
3125 <clone entry="android.flash.firingPower" kind="controls">
3127 <clone entry="android.flash.firingTime" kind="controls">
3129 <clone entry="android.flash.mode" kind="controls"></clone>
3130 <entry name="state" type="byte" visibility="public" enum="true"
3134 <notes>No flash on camera.</notes></value>
3136 <notes>Flash is charging and cannot be fired.</notes></value>
3138 <notes>Flash is ready to fire.</notes></value>
3140 <notes>Flash fired for this capture.</notes></value>
3142 <notes>Flash partially illuminated this frame.
3144 This is usually due to the next or previous frame having
3145 the flash fire, and the flash spilling into this capture
3146 due to hardware limitations.</notes></value>
3148 <description>Current state of the flash
3151 When the camera device doesn't have flash unit
3152 (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
3153 Other states indicate the current flash status.
3155 In certain conditions, this will be available on LEGACY devices:
3157 * Flash-less cameras always return UNAVAILABLE.
3158 * Using android.control.aeMode `==` ON_ALWAYS_FLASH
3159 will always return FIRED.
3160 * Using android.flash.mode `==` TORCH
3161 will always return FIRED.
3163 In all other conditions the state will not be available on
3164 LEGACY devices (i.e. it will be `null`).
3169 <section name="hotPixel">
3171 <entry name="mode" type="byte" visibility="public" enum="true">
3175 No hot pixel correction is applied.
3177 The frame rate must not be reduced relative to sensor raw output
3180 The hotpixel map may be returned in android.statistics.hotPixelMap.
3185 Hot pixel correction is applied, without reducing frame
3186 rate relative to sensor raw output.
3188 The hotpixel map may be returned in android.statistics.hotPixelMap.
3193 High-quality hot pixel correction is applied, at a cost
3194 of possibly reduced frame rate relative to sensor raw output.
3196 The hotpixel map may be returned in android.statistics.hotPixelMap.
3201 Operational mode for hot pixel correction.
3203 <range>android.hotPixel.availableHotPixelModes</range>
3205 Hotpixel correction interpolates out, or otherwise removes, pixels
3206 that do not accurately measure the incoming light (i.e. pixels that
3207 are stuck at an arbitrary value or are oversensitive).
3214 <entry name="availableHotPixelModes" type="byte" visibility="public"
3215 type_notes="list of enums" container="array" typedef="enumList">
3220 List of hot pixel correction modes for android.hotPixel.mode that are supported by this
3223 <range>Any value listed in android.hotPixel.mode</range>
3225 FULL mode camera devices will always support FAST.
3228 To avoid performance issues, there will be significantly fewer hot
3229 pixels than actual pixels on the camera sensor.
3230 HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
3231 on the camera device, but the underlying implementation can be the same for both modes.
3232 That is, if the highest quality implementation on the camera device does not slow down
3233 capture rate, then FAST and HIGH_QUALITY will generate the same output.
3240 <clone entry="android.hotPixel.mode" kind="controls">
3246 <section name="jpeg">
3248 <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true"
3249 typedef="location" hwlevel="legacy">
3251 A location object to use when generating image GPS metadata.
3254 Setting a location object in a request will include the GPS coordinates of the location
3255 into any JPEG images captured based on the request. These coordinates can then be
3256 viewed by anyone who receives the JPEG image.
3259 <entry name="gpsCoordinates" type="double" visibility="ndk_public"
3260 type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
3261 container="array" hwlevel="legacy">
3265 <description>GPS coordinates to include in output JPEG
3267 <range>(-180 - 180], [-90,90], [-inf, inf]</range>
3270 <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public"
3271 typedef="string" hwlevel="legacy">
3272 <description>32 characters describing GPS algorithm to
3273 include in EXIF.</description>
3274 <units>UTF-8 null-terminated string</units>
3277 <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy">
3278 <description>Time GPS fix was made to include in
3280 <units>UTC in seconds since January 1, 1970</units>
3283 <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
3284 <description>The orientation for a JPEG image.</description>
3285 <units>Degrees in multiples of 90</units>
3286 <range>0, 90, 180, 270</range>
3288 The clockwise rotation angle in degrees, relative to the orientation
3289 of the camera, that the JPEG picture needs to be rotated by, to be viewed
3292 Camera devices may either encode this value into the JPEG EXIF header, or
3293 rotate the image data to match this orientation. When the image data is rotated,
3294 the thumbnail data will also be rotated.
3296 Note that this orientation is relative to the orientation of the camera sensor, given
3297 by android.sensor.orientation.
3299 To translate from the device orientation given by the Android sensor APIs, the following
3300 sample code may be used:
3302 private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
3303 if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
3304 int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
3306 // Round device orientation to a multiple of 90
3307 deviceOrientation = (deviceOrientation + 45) / 90 * 90;
3309 // Reverse device orientation for front-facing cameras
3310 boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
3311 if (facingFront) deviceOrientation = -deviceOrientation;
3313 // Calculate desired JPEG orientation relative to camera orientation to make
3314 // the image upright relative to the device orientation
3315 int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
3317 return jpegOrientation;
3322 <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
3323 <description>Compression quality of the final JPEG
3324 image.</description>
3325 <range>1-100; larger is higher quality</range>
3326 <details>85-95 is typical usage range.</details>
3329 <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
3330 <description>Compression quality of JPEG
3331 thumbnail.</description>
3332 <range>1-100; larger is higher quality</range>
3335 <entry name="thumbnailSize" type="int32" visibility="public"
3336 container="array" typedef="size" hwlevel="legacy">
3340 <description>Resolution of embedded JPEG thumbnail.</description>
3341 <range>android.jpeg.availableThumbnailSizes</range>
3342 <details>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
3343 but the captured JPEG will still be a valid image.
3345 For best results, when issuing a request for a JPEG image, the thumbnail size selected
3346 should have the same aspect ratio as the main JPEG output.
3348 If the thumbnail image aspect ratio differs from the JPEG primary image aspect
3349 ratio, the camera device creates the thumbnail by cropping it from the primary image.
3350 For example, if the primary image has 4:3 aspect ratio, the thumbnail image has
3351 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
3352 generate the thumbnail image. The thumbnail image will always have a smaller Field
3353 Of View (FOV) than the primary image when aspect ratios differ.
3355 When an android.jpeg.orientation of non-zero degree is requested,
3356 the camera device will handle thumbnail rotation in one of the following ways:
3358 * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
3359 and keep jpeg and thumbnail image data unrotated.
3360 * Rotate the jpeg and thumbnail image data and not set
3361 {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
3362 case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
3363 capture result, so the width and height will be interchanged if 90 or 270 degree
3364 orientation is requested. LEGACY device will always report unrotated thumbnail
3368 The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.
3369 The cropping must be done on the primary jpeg image rather than the sensor active array.
3370 The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
3371 thumbnail image cropping.
3377 <entry name="availableThumbnailSizes" type="int32" visibility="public"
3378 container="array" typedef="size" hwlevel="legacy">
3383 <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
3384 camera device.</description>
3386 This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
3387 thumbnail should be generated.
3389 The following conditions will be satisfied for this size list:
3391 * The sizes will be sorted by increasing pixel area (width x height).
3392 If several resolutions have the same area, they will be sorted by increasing width.
3393 * The aspect ratio of the largest thumbnail size will be same as the
3394 aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
3395 The largest size is defined as the size that has the largest pixel area
3396 in a given size list.
3397 * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
3398 one corresponding size that has the same aspect ratio in availableThumbnailSizes,
3400 * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
3403 <entry name="maxSize" type="int32" visibility="system">
3404 <description>Maximum size in bytes for the compressed
3405 JPEG buffer</description>
3406 <range>Must be large enough to fit any JPEG produced by
3408 <details>This is used for sizing the gralloc buffers for
3413 <clone entry="android.jpeg.gpsLocation" kind="controls">
3415 <clone entry="android.jpeg.gpsCoordinates" kind="controls">
3417 <clone entry="android.jpeg.gpsProcessingMethod"
3418 kind="controls"></clone>
3419 <clone entry="android.jpeg.gpsTimestamp" kind="controls">
3421 <clone entry="android.jpeg.orientation" kind="controls">
3423 <clone entry="android.jpeg.quality" kind="controls">
3425 <entry name="size" type="int32">
3426 <description>The size of the compressed JPEG image, in
3428 <range>&gt;= 0</range>
3429 <details>If no JPEG output is produced for the request,
3432 Otherwise, this describes the real size of the compressed
3433 JPEG image placed in the output stream. More specifically,
3434 if android.jpeg.maxSize = 1000000, and a specific capture
3435 has android.jpeg.size = 500000, then the output buffer from
3436 the JPEG stream will be 1000000 bytes, of which the first
3437 500000 make up the real data.</details>
3440 <clone entry="android.jpeg.thumbnailQuality"
3441 kind="controls"></clone>
3442 <clone entry="android.jpeg.thumbnailSize" kind="controls">
3446 <section name="lens">
3448 <entry name="aperture" type="float" visibility="public" hwlevel="full">
3449 <description>The desired lens aperture size, as a ratio of lens focal length to the
3450 effective aperture diameter.</description>
3451 <units>The f-number (f/N)</units>
3452 <range>android.lens.info.availableApertures</range>
3453 <details>Setting this value is only supported on the camera devices that have a variable
3456 When this is supported and android.control.aeMode is OFF,
3457 this can be set along with android.sensor.exposureTime,
3458 android.sensor.sensitivity, and android.sensor.frameDuration
3459 to achieve manual exposure control.
3461 The requested aperture value may take several frames to reach the
3462 requested value; the camera device will report the current (intermediate)
3463 aperture size in capture result metadata while the aperture is changing.
3464 While the aperture is still changing, android.lens.state will be set to MOVING.
3466 When this is supported and android.control.aeMode is one of
3467 the ON modes, this will be overridden by the camera device
3468 auto-exposure algorithm, the overridden values are then provided
3469 back to the user in the corresponding result.</details>
3472 <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
3474 The desired setting for the lens neutral density filter(s).
3476 <units>Exposure Value (EV)</units>
3477 <range>android.lens.info.availableFilterDensities</range>
3479 This control will not be supported on most camera devices.
3481 Lens filters are typically used to lower the amount of light the
3482 sensor is exposed to (measured in steps of EV). As used here, an EV
3483 step is the standard logarithmic representation, which are
3484 non-negative, and inversely proportional to the amount of light
3485 hitting the sensor. For example, setting this to 0 would result
3486 in no reduction of the incoming light, and setting this to 2 would
3487 mean that the filter is set to reduce incoming light by two stops
3488 (allowing 1/4 of the prior amount of light to the sensor).
3490 It may take several frames before the lens filter density changes
3491 to the requested value. While the filter density is still changing,
3492 android.lens.state will be set to MOVING.
3496 <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
3498 The desired lens focal length; used for optical zoom.
3500 <units>Millimeters</units>
3501 <range>android.lens.info.availableFocalLengths</range>
3503 This setting controls the physical focal length of the camera
3504 device's lens. Changing the focal length changes the field of
3505 view of the camera device, and is usually used for optical zoom.
3507 Like android.lens.focusDistance and android.lens.aperture, this
3508 setting won't be applied instantaneously, and it may take several
3509 frames before the lens can change to the requested focal length.
3510 While the focal length is still changing, android.lens.state will
3513 Optical zoom will not be supported on most devices.
3517 <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
3518 <description>Desired distance to plane of sharpest focus,
3519 measured from frontmost surface of the lens.</description>
3520 <units>See android.lens.info.focusDistanceCalibration for details</units>
3521 <range>&gt;= 0</range>
3523 This control can be used for setting manual focus, on devices that support
3524 the MANUAL_SENSOR capability and have a variable-focus lens (see
3525 android.lens.info.minimumFocusDistance).
3527 A value of `0.0f` means infinity focus. The value set will be clamped to
3528 `[0.0f, android.lens.info.minimumFocusDistance]`.
3530 Like android.lens.focalLength, this setting won't be applied
3531 instantaneously, and it may take several frames before the lens
3532 can move to the requested focus distance. While the lens is still moving,
3533 android.lens.state will be set to MOVING.
3535 LEGACY devices support at most setting this to `0.0f`
3541 <entry name="opticalStabilizationMode" type="byte" visibility="public"
3542 enum="true" hwlevel="limited">
3545 <notes>Optical stabilization is unavailable.</notes>
3547 <value optional="true">ON
3548 <notes>Optical stabilization is enabled.</notes>
3552 Sets whether the camera device uses optical image stabilization (OIS)
3553 when capturing images.
3555 <range>android.lens.info.availableOpticalStabilization</range>
3557 OIS is used to compensate for motion blur due to small
3558 movements of the camera during capture. Unlike digital image
3559 stabilization (android.control.videoStabilizationMode), OIS
3560 makes use of mechanical elements to stabilize the camera
3561 sensor, and thus allows for longer exposure times before
3562 camera shake becomes apparent.
3564 Switching between different optical stabilization modes may take several
3565 frames to initialize, the camera device will report the current mode in
3566 capture result metadata. For example, when "ON" mode is requested, the
3567 optical stabilization modes in the first several capture results may still
3568 be "OFF", and it will become "ON" when the initialization is done.
3570 If a camera device supports both OIS and digital image stabilization
3571 (android.control.videoStabilizationMode), turning both modes on may produce undesirable
3572 interaction, so it is recommended not to enable both at the same time.
3574 Not all devices will support OIS; see
3575 android.lens.info.availableOpticalStabilization for
3582 <namespace name="info">
3583 <entry name="availableApertures" type="float" visibility="public"
3584 container="array" hwlevel="full">
3588 <description>List of aperture size values for android.lens.aperture that are
3589 supported by this camera device.</description>
3590 <units>The aperture f-number</units>
3591 <details>If the camera device doesn't support a variable lens aperture,
3592 this list will contain only one value, which is the fixed aperture size.
3594 If the camera device supports a variable aperture, the aperture values
3595 in this list will be sorted in ascending order.</details>
3598 <entry name="availableFilterDensities" type="float" visibility="public"
3599 container="array" hwlevel="full">
3604 List of neutral density filter values for
3605 android.lens.filterDensity that are supported by this camera device.
3607 <units>Exposure value (EV)</units>
3609 Values are &gt;= 0
3612 If a neutral density filter is not supported by this camera device,
3613 this list will contain only 0. Otherwise, this list will include every
3614 filter density supported by the camera device, in ascending order.
3618 <entry name="availableFocalLengths" type="float" visibility="public"
3619 type_notes="The list of available focal lengths"
3620 container="array" hwlevel="legacy">
3625 List of focal lengths for android.lens.focalLength that are supported by this camera
3628 <units>Millimeters</units>
3630 Values are &gt; 0
3633 If optical zoom is not supported, this list will only contain
3634 a single value corresponding to the fixed focal length of the
3635 device. Otherwise, this list will include every focal length supported
3636 by the camera device, in ascending order.
3641 <entry name="availableOpticalStabilization" type="byte"
3642 visibility="public" type_notes="list of enums" container="array"
3643 typedef="enumList" hwlevel="limited">
3648 List of optical image stabilization (OIS) modes for
3649 android.lens.opticalStabilizationMode that are supported by this camera device.
3651 <range>Any value listed in android.lens.opticalStabilizationMode</range>
3653 If OIS is not supported by a given camera device, this list will
3658 <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
3660 <description>Hyperfocal distance for this lens.</description>
3661 <units>See android.lens.info.focusDistanceCalibration for details</units>
3662 <range>If lens is fixed focus, &gt;= 0. If lens has focuser unit, the value is
3663 within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
3665 If the lens is not fixed focus, the camera device will report this
3666 field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
3669 <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
3671 <description>Shortest distance from frontmost surface
3672 of the lens that can be brought into sharp focus.</description>
3673 <units>See android.lens.info.focusDistanceCalibration for details</units>
3674 <range>&gt;= 0</range>
3675 <details>If the lens is fixed-focus, this will be
3677 <hal_details>Mandatory for FULL devices; LIMITED devices
3678 must always set this value to 0 for fixed-focus; and may omit
3679 the minimum focus distance otherwise.
3681 This field is also mandatory for all devices advertising
3682 the MANUAL_SENSOR capability.</hal_details>
3685 <entry name="shadingMapSize" type="int32" visibility="ndk_public"
3686 type_notes="width and height (N, M) of lens shading map provided by the camera device."
3687 container="array" typedef="size" hwlevel="full">
3691 <description>Dimensions of lens shading map.</description>
3692 <range>Both values &gt;= 1</range>
3694 The map should be on the order of 30-40 rows and columns, and
3695 must be smaller than 64x64.
3699 <entry name="focusDistanceCalibration" type="byte" visibility="public"
3700 enum="true" hwlevel="limited">
3704 The lens focus distance is not accurate, and the units used for
3705 android.lens.focusDistance do not correspond to any physical units.
3707 Setting the lens to the same focus distance on separate occasions may
3708 result in a different real focus distance, depending on factors such
3709 as the orientation of the device, the age of the focusing mechanism,
3710 and the device temperature. The focus distance value will still be
3711 in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
3712 represents the farthest focus.
3717 The lens focus distance is measured in diopters.
3719 However, setting the lens to the same focus distance
3720 on separate occasions may result in a different real
3721 focus distance, depending on factors such as the
3722 orientation of the device, the age of the focusing
3723 mechanism, and the device temperature.
3728 The lens focus distance is measured in diopters, and
3731 The lens mechanism is calibrated so that setting the
3732 same focus distance is repeatable on multiple
3733 occasions with good accuracy, and the focus distance
3734 corresponds to the real physical distance to the plane
3739 <description>The lens focus distance calibration quality.</description>
3741 The lens focus distance calibration quality determines the reliability of
3742 focus related metadata entries, i.e. android.lens.focusDistance,
3743 android.lens.focusRange, android.lens.info.hyperfocalDistance, and
3744 android.lens.info.minimumFocusDistance.
3746 APPROXIMATE and CALIBRATED devices report the focus metadata in
3747 units of diopters (1/meter), so `0.0f` represents focusing at infinity,
3748 and increasing positive numbers represent focusing closer and closer
3749 to the camera device. The focus distance control also uses diopters
3752 UNCALIBRATED devices do not use units that are directly comparable
3753 to any real physical measurement, but `0.0f` still represents farthest
3754 focus, and android.lens.info.minimumFocusDistance represents the
3755 nearest focus the device can achieve.
3758 For devices advertising APPROXIMATE quality or higher, diopters 0 (infinity
3759 focus) must work. When autofocus is disabled (android.control.afMode == OFF)
3760 and the lens focus distance is set to 0 diopters
3761 (android.lens.focusDistance == 0), the lens will move to focus at infinity
3762 and is stably focused at infinity even if the device tilts. It may take the
3763 lens some time to move; during the move the lens state should be MOVING and
3764 the output diopter value should be changing toward 0.
3769 <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
3773 The camera device faces the same direction as the device's screen.
3777 The camera device faces the opposite direction as the device's screen.
3781 The camera device is an external camera, and has no fixed facing relative to the
3785 <description>Direction the camera faces relative to
3786 device screen.</description>
3788 <entry name="poseRotation" type="float" visibility="public"
3794 The orientation of the camera relative to the sensor
3798 Quaternion coefficients
3801 The four coefficients that describe the quaternion
3802 rotation from the Android sensor coordinate system to a
3803 camera-aligned coordinate system where the X-axis is
3804 aligned with the long side of the image sensor, the Y-axis
3805 is aligned with the short side of the image sensor, and
3806 the Z-axis is aligned with the optical axis of the sensor.
3808 To convert from the quaternion coefficients `(x,y,z,w)`
3809 to the axis of rotation `(a_x, a_y, a_z)` and rotation
3810 amount `theta`, the following formulas can be used:
3813 a_x = x / sin(theta/2)
3814 a_y = y / sin(theta/2)
3815 a_z = z / sin(theta/2)
3817 To create a 3x3 rotation matrix that applies the rotation
3818 defined by this quaternion, the following matrix can be
3821 R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw,
3822 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw,
3823 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ]
3825 This matrix can then be used to apply the rotation to a
3826 column vector point with
3830 where `p` is in the device sensor coordinate system, and
3831 `p'` is in the camera-oriented coordinate system.
3835 <entry name="poseTranslation" type="float" visibility="public"
3840 <description>Position of the camera optical center.</description>
3841 <units>Meters</units>
3843 The position of the camera device's lens optical center,
3844 as a three-dimensional vector `(x,y,z)`, relative to the
3845 optical center of the largest camera device facing in the
3846 same direction as this camera, in the {@link
3847 android.hardware.SensorEvent Android sensor coordinate
3848 axes}. Note that only the axis definitions are shared with
3849 the sensor coordinate system, but not the origin.
3851 If this device is the largest or only camera device with a
3852 given facing, then this position will be `(0, 0, 0)`; a
3853 camera device with a lens optical center located 3 cm from
3854 the main sensor along the +X axis (to the right from the
3855 user's perspective) will report `(0.03, 0, 0)`.
3857 To transform a pixel coordinates between two cameras
3858 facing the same direction, first the source camera
3859 android.lens.radialDistortion must be corrected for. Then
3860 the source camera android.lens.intrinsicCalibration needs
3861 to be applied, followed by the android.lens.poseRotation
3862 of the source camera, the translation of the source camera
3863 relative to the destination camera, the
3864 android.lens.poseRotation of the destination camera, and
3865 finally the inverse of android.lens.intrinsicCalibration
3866 of the destination camera. This obtains a
3867 radial-distortion-free coordinate in the destination
3868 camera pixel coordinates.
3870 To compare this against a real image from the destination
3871 camera, the destination camera image then needs to be
3872 corrected for radial distortion before comparison or
3879 <clone entry="android.lens.aperture" kind="controls">
3882 <clone entry="android.lens.filterDensity" kind="controls">
3885 <clone entry="android.lens.focalLength" kind="controls">
3888 <clone entry="android.lens.focusDistance" kind="controls">
3889 <details>Should be zero for fixed-focus cameras</details>
3892 <entry name="focusRange" type="float" visibility="public"
3893 type_notes="Range of scene distances that are in focus"
3894 container="array" typedef="pairFloatFloat" hwlevel="limited">
3898 <description>The range of scene distances that are in
3899 sharp focus (depth of field).</description>
3900 <units>A pair of focus distances in diopters: (near,
3901 far); see android.lens.info.focusDistanceCalibration for details.</units>
3902 <range>&gt;=0</range>
3903 <details>If variable focus not supported, can still report
3904 fixed depth of field range</details>
3907 <clone entry="android.lens.opticalStabilizationMode"
3911 <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
3915 The lens parameters (android.lens.focalLength, android.lens.focusDistance,
3916 android.lens.filterDensity and android.lens.aperture) are not changing.
3921 One or several of the lens parameters
3922 (android.lens.focalLength, android.lens.focusDistance,
3923 android.lens.filterDensity or android.lens.aperture) is
3928 <description>Current lens status.</description>
3930 For lens parameters android.lens.focalLength, android.lens.focusDistance,
3931 android.lens.filterDensity and android.lens.aperture, when changes are requested,
3932 they may take several frames to reach the requested values. This state indicates
3933 the current status of the lens parameters.
3935 When the state is STATIONARY, the lens parameters are not changing. This could be
3936 either because the parameters are all fixed, or because the lens has had enough
3937 time to reach the most recently-requested values.
3938 If all these lens parameters are not changeable for a camera device, as listed below:
3940 * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
3941 android.lens.focusDistance parameter will always be 0.
3942 * Fixed focal length (android.lens.info.availableFocalLengths contains single value),
3943 which means the optical zoom is not supported.
3944 * No ND filter (android.lens.info.availableFilterDensities contains only 0).
3945 * Fixed aperture (android.lens.info.availableApertures contains single value).
3947 Then this state will always be STATIONARY.
3949 When the state is MOVING, it indicates that at least one of the lens parameters
3954 <clone entry="android.lens.poseRotation" kind="static">
3956 <clone entry="android.lens.poseTranslation" kind="static">
3960 <entry name="intrinsicCalibration" type="float" visibility="public"
3966 The parameters for this camera device's intrinsic
3971 android.sensor.info.preCorrectionActiveArraySize
3975 The five calibration parameters that describe the
3976 transform from camera-centric 3D coordinates to sensor
3979 [f_x, f_y, c_x, c_y, s]
3981 Where `f_x` and `f_y` are the horizontal and vertical
3982 focal lengths, `[c_x, c_y]` is the position of the optical
3983 axis, and `s` is a skew parameter for the sensor plane not
3984 being aligned with the lens plane.
3986 These are typically used within a transformation matrix K:
3992 which can then be combined with the camera pose rotation
3993 `R` and translation `t` (android.lens.poseRotation and
3994 android.lens.poseTranslation, respectively) to calculate the
3995 complete transform from world coordinates to pixel
4001 and with `p_w` being a point in the world coordinate system
4002 and `p_s` being a point in the camera active pixel array
4003 coordinate system, and with the mapping including the
4004 homogeneous division by z:
4006 p_h = (x_h, y_h, z_h) = P p_w
4009 so `[x_s, y_s]` is the pixel coordinates of the world
4010 point, `z_s = 1`, and `w_s` is a measurement of disparity
4011 (depth) in pixel coordinates.
4013 Note that the coordinate system for this transform is the
4014 android.sensor.info.preCorrectionActiveArraySize system,
4015 where `(0,0)` is the top-left of the
4016 preCorrectionActiveArraySize rectangle. Once the pose and
4017 intrinsic calibration transforms have been applied to a
4018 world point, then the android.lens.radialDistortion
4019 transform needs to be applied, and the result adjusted to
4020 be in the android.sensor.info.activeArraySize coordinate
4021 system (where `(0, 0)` is the top-left of the
4022 activeArraySize rectangle), to determine the final pixel
4023 coordinate of the world point for processed (non-RAW)
4028 <entry name="radialDistortion" type="float" visibility="public"
4034 The correction coefficients to correct for this camera device's
4035 radial and tangential lens distortion.
4038 Unitless coefficients.
4041 Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
4042 kappa_3]` and two tangential distortion coefficients
4043 `[kappa_4, kappa_5]` that can be used to correct the
4044 lens's geometric distortion with the mapping equations:
4046 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
4047 kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
4048 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
4049 kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
4051 Here, `[x_c, y_c]` are the coordinates to sample in the
4052 input image that correspond to the pixel values in the
4053 corrected image at the coordinate `[x_i, y_i]`:
4055 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
4057 The pixel coordinates are defined in a normalized
4058 coordinate system related to the
4059 android.lens.intrinsicCalibration calibration fields.
4060 Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
4061 lens optical center `[c_x, c_y]`. The maximum magnitudes
4062 of both x and y coordinates are normalized to be 1 at the
4063 edge further from the optical center, so the range
4064 for both dimensions is `-1 <= x <= 1`.
4066 Finally, `r` represents the radial distance from the
4067 optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
4068 is therefore no larger than `|r| <= sqrt(2)`.
4070 The distortion model used is the Brown-Conrady model.
4076 <clone entry="android.lens.intrinsicCalibration" kind="static">
4078 <clone entry="android.lens.radialDistortion" kind="static">
4082 <section name="noiseReduction">
4084 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
4087 <notes>No noise reduction is applied.</notes></value>
4089 <notes>Noise reduction is applied without reducing frame rate relative to sensor
4090 output. It may be the same as OFF if noise reduction will reduce frame rate
4091 relative to sensor.</notes></value>
4093 <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
4094 rate relative to sensor output.</notes></value>
4095 <value optional="true">MINIMAL
4096 <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
4097 sensor output. </notes></value>
4098 <value optional="true">ZERO_SHUTTER_LAG
4100 <notes>Noise reduction is applied at different levels for different output streams,
4101 based on resolution. Streams at maximum recording resolution (see {@link
4102 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession})
4103 or below have noise reduction applied, while higher-resolution streams have MINIMAL (if
4104 supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of
4105 noise reduction for low-resolution streams is tuned so that frame rate is not impacted,
4106 and the quality is equal to or better than FAST (since it is only applied to
4107 lower-resolution outputs, quality may improve from FAST).
4109 This mode is intended to be used by applications operating in a zero-shutter-lag mode
4110 with YUV or PRIVATE reprocessing, where the application continuously captures
4111 high-resolution intermediate buffers into a circular buffer, from which a final image is
4112 produced via reprocessing when a user takes a picture. For such a use case, the
4113 high-resolution buffers must not have noise reduction applied to maximize efficiency of
4114 preview and to avoid over-applying noise filtering when reprocessing, while
4115 low-resolution buffers (used for recording or preview, generally) need noise reduction
4116 applied for reasonable preview quality.
4118 This mode is guaranteed to be supported by devices that support either the
4119 YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
4120 (android.request.availableCapabilities lists either of those capabilities) and it will
4121 be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
4124 <description>Mode of operation for the noise reduction algorithm.</description>
4125 <range>android.noiseReduction.availableNoiseReductionModes</range>
4126 <details>The noise reduction algorithm attempts to improve image quality by removing
4127 excessive noise added by the capture process, especially in dark conditions.
4129 OFF means no noise reduction will be applied by the camera device, for both raw and
4132 MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
4133 demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
4134 This mode is optional and may not be supported by all devices. The application should check
4135 android.noiseReduction.availableNoiseReductionModes before using it.
4137 FAST/HIGH_QUALITY both mean camera device determined noise filtering
4138 will be applied. HIGH_QUALITY mode indicates that the camera device
4139 will use the highest-quality noise filtering algorithms,
4140 even if it slows down capture rate. FAST means the camera device will not
4141 slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
4142 MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
4143 Every output stream will have a similar amount of enhancement applied.
4145 ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
4146 buffer of high-resolution images during preview and reprocess image(s) from that buffer
4147 into a final capture when triggered by the user. In this mode, the camera device applies
4148 noise reduction to low-resolution streams (below maximum recording resolution) to maximize
4149 preview quality, but does not apply noise reduction to high-resolution streams, since
4150 those will be reprocessed later if necessary.
4152 For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
4153 will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
4154 may adjust the noise reduction parameters for best image quality based on the
4155 android.reprocess.effectiveExposureFactor if it is set.
4158 For YUV_REPROCESSING The HAL can use android.reprocess.effectiveExposureFactor to
4159 adjust the internal noise reduction parameters appropriately to get the best quality
4165 <entry name="strength" type="byte">
4166 <description>Control the amount of noise reduction
4167 applied to the images</description>
4168 <units>1-10; 10 is max noise reduction</units>
4169 <range>1 - 10</range>
4174 <entry name="availableNoiseReductionModes" type="byte" visibility="public"
4175 type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
4180 List of noise reduction modes for android.noiseReduction.mode that are supported
4181 by this camera device.
4183 <range>Any value listed in android.noiseReduction.mode</range>
4185 Full-capability camera devices will always support OFF and FAST.
4187 Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
4190 Legacy-capability camera devices will only support FAST mode.
4193 HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
4194 on the camera device, but the underlying implementation can be the same for both modes.
4195 That is, if the highest quality implementation on the camera device does not slow down
4196 capture rate, then FAST and HIGH_QUALITY will generate the same output.
4203 <clone entry="android.noiseReduction.mode" kind="controls">
4209 <section name="quirks">
4211 <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
4212 <description>If set to 1, the camera service does not
4213 scale 'normalized' coordinates with respect to the crop
4214 region. This applies to metering input (a{e,f,wb}Region)
4215 and output (face rectangles).</description>
4216 <details>Normalized coordinates refer to those in the
4217 (-1000,1000) range mentioned in the
4218 android.hardware.Camera API.
4220 HAL implementations should instead always use and emit
4221 sensor array-relative coordinates for all region data. Does
4222 not need to be listed in static metadata. Support will be
4223 removed in future versions of camera service.</details>
4225 <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
4226 <description>If set to 1, then the camera service always
4227 switches to FOCUS_MODE_AUTO before issuing a AF
4228 trigger.</description>
4229 <details>HAL implementations should implement AF trigger
4230 modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
4231 CONTINUOUS_PICTURE modes instead of using this flag. Does
4232 not need to be listed in static metadata. Support will be
4233 removed in future versions of camera service.</details>
4235 <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
4236 <description>If set to 1, the camera service uses
4237 CAMERA2_PIXEL_FORMAT_ZSL instead of
4238 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
4239 shutter lag stream</description>
4240 <details>HAL implementations should use gralloc usage flags
4241 to determine that a stream will be used for
4242 zero-shutter-lag, instead of relying on an explicit
4243 format setting. Does not need to be listed in static
4244 metadata. Support will be removed in future versions of
4245 camera service.</details>
4247 <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
4249 If set to 1, the HAL will always split result
4250 metadata for a single capture into multiple buffers,
4251 returned using multiple process_capture_result calls.
4254 Does not need to be listed in static
4255 metadata. Support for partial results will be reworked in
4256 future versions of camera service. This quirk will stop
4257 working at that point; DO NOT USE without careful
4258 consideration of future support.
4261 Refer to `camera3_capture_result::partial_result`
4262 for information on how to implement partial results.
4267 <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
4270 <notes>The last or only metadata result buffer
4271 for this capture.</notes>
4274 <notes>A partial buffer of result metadata for this
4275 capture. More result buffers for this capture will be sent
4276 by the camera device, the last of which will be marked
4281 Whether a result given to the framework is the
4282 final one for the capture, or only a partial that contains a
4283 subset of the full set of dynamic metadata
4284 values.</description>
4285 <range>Optional. Default value is FINAL.</range>
4287 The entries in the result metadata buffers for a
4288 single capture may not overlap, except for this entry. The
4289 FINAL buffers must retain FIFO ordering relative to the
4290 requests that generate them, so the FINAL buffer for frame 3 must
4291 always be sent to the framework after the FINAL buffer for frame 2, and
4292 before the FINAL buffer for frame 4. PARTIAL buffers may be returned
4293 in any order relative to other frames, but all PARTIAL buffers for a given
4294 capture must arrive before the FINAL buffer for that capture. This entry may
4295 only be used by the camera device if quirks.usePartialResult is set to 1.
4298 Refer to `camera3_capture_result::partial_result`
4299 for information on how to implement partial results.
4304 <section name="request">
4306 <entry name="frameCount" type="int32" visibility="system" deprecated="true">
4307 <description>A frame counter set by the framework. Must
4308 be maintained unchanged in output frame. This value monotonically
4309 increases with every new result (that is, each new result has a unique
4312 <units>incrementing integer</units>
4313 <range>Any int.</range>
4315 <entry name="id" type="int32" visibility="hidden">
4316 <description>An application-specified ID for the current
4317 request. Must be maintained unchanged in output
4319 <units>arbitrary integer assigned by application</units>
4320 <range>Any int</range>
4323 <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
4328 <description>List which camera reprocess stream is used
4329 for the source of reprocessing data.</description>
4330 <units>List of camera reprocess stream IDs</units>
4332 Typically, only one entry allowed, must be a valid reprocess stream ID.
4334 <details>Only meaningful when android.request.type ==
4335 REPROCESS. Ignored otherwise</details>
4338 <entry name="metadataMode" type="byte" visibility="system"
4342 <notes>No metadata should be produced on output, except
4343 for application-bound buffer data. If no
4344 application-bound streams exist, no frame should be
4345 placed in the output frame queue. If such streams
4346 exist, a frame should be placed on the output queue
4347 with null metadata but with the necessary output buffer
4348 information. Timestamp information should still be
4349 included with any output stream buffers</notes></value>
4351 <notes>All metadata should be produced. Statistics will
4352 only be produced if they are separately
4353 enabled</notes></value>
4355 <description>How much metadata to produce on
4356 output</description>
4359 <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
4364 <description>Lists which camera output streams image data
4365 from this capture must be sent to</description>
4366 <units>List of camera stream IDs</units>
4367 <range>List must only include streams that have been
4369 <details>If no output streams are listed, then the image
4370 data should simply be discarded. The image data must
4371 still be captured for metadata and statistics production,
4372 and the lens and flash must operate as requested.</details>
4375 <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
4378 <notes>Capture a new image from the imaging hardware,
4379 and process it according to the
4380 settings</notes></value>
4382 <notes>Process previously captured data; the
4383 android.request.inputStreams parameter determines the
4384 source reprocessing stream. TODO: Mark dynamic metadata
4385 needed for reprocessing with [RP]</notes></value>
4387 <description>The type of the request; either CAPTURE or
4388 REPROCESS. For legacy HAL3, this tag is redundant.
4394 <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public"
4395 container="array" hwlevel="legacy">
4399 <description>The maximum numbers of different types of output streams
4400 that can be configured and used simultaneously by a camera device.
4403 For processed (and stalling) format streams, &gt;= 1.
4405 For Raw format (either stalling or non-stalling) streams, &gt;= 0.
4407 For processed (but not stalling) format streams, &gt;= 3
4408 for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4409 &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4412 This is a 3 element tuple that contains the max number of output simultaneous
4413 streams for raw sensor, processed (but not stalling), and processed (and stalling)
4414 formats respectively. For example, assuming that JPEG is typically a processed and
4415 stalling stream, if max raw sensor format output stream number is 1, max YUV streams
4416 number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
4418 This lists the upper bound of the number of output streams supported by
4419 the camera device. Using more streams simultaneously may require more hardware and
4420 CPU resources that will consume more power. The image format for an output stream can
4421 be any supported format provided by android.scaler.availableStreamConfigurations.
4422 The formats defined in android.scaler.availableStreamConfigurations can be categorized
4423 into the 3 stream types as below:
4425 * Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
4426 Typically {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format}.
4427 * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16
4428 RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}, or
4429 {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}.
4430 * Processed (but not-stalling): any non-RAW format without a stall duration. Typically
4431 {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888},
4432 {@link android.graphics.ImageFormat#NV21 NV21}, or {@link
4433 android.graphics.ImageFormat#YV12 YV12}.
4437 <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true"
4439 <description>The maximum numbers of different types of output streams
4440 that can be configured and used simultaneously by a camera device
4441 for any `RAW` formats.
4447 This value contains the max number of output simultaneous
4448 streams from the raw sensor.
4450 This lists the upper bound of the number of output streams supported by
4451 the camera device. Using more streams simultaneously may require more hardware and
4452 CPU resources that will consume more power. The image format for this kind of an output stream can
4453 be any `RAW` and supported format provided by android.scaler.streamConfigurationMap.
4455 In particular, a `RAW` format is typically one of:
4457 * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR}
4458 * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}
4459 * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}
4461 LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
4462 never support raw streams.
4465 <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true"
4467 <description>The maximum numbers of different types of output streams
4468 that can be configured and used simultaneously by a camera device
4469 for any processed (but not-stalling) formats.
4473 for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4474 &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4477 This value contains the max number of output simultaneous
4478 streams for any processed (but not-stalling) formats.
4480 This lists the upper bound of the number of output streams supported by
4481 the camera device. Using more streams simultaneously may require more hardware and
4482 CPU resources that will consume more power. The image format for this kind of an output stream can
4483 be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
4485 Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
4488 * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888}
4489 * {@link android.graphics.ImageFormat#NV21 NV21}
4490 * {@link android.graphics.ImageFormat#YV12 YV12}
4491 * Implementation-defined formats, i.e. {@link
4492 android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
4494 For full guarantees, query {@link
4495 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4496 processed format -- it will return 0 for a non-stalling stream.
4498 LEGACY devices will support at least 2 processing/non-stalling streams.
4501 <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true"
4503 <description>The maximum numbers of different types of output streams
4504 that can be configured and used simultaneously by a camera device
4505 for any processed (and stalling) formats.
4511 This value contains the max number of output simultaneous
4512 streams for any processed (and stalling) formats.
4514 This lists the upper bound of the number of output streams supported by
4515 the camera device. Using more streams simultaneously may require more hardware and
4516 CPU resources that will consume more power. The image format for this kind of an output stream can
4517 be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
4519 A processed and stalling format is defined as any non-RAW format with a stallDurations
4520 &gt; 0. Typically only the {@link
4521 android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format} is a stalling format.
4523 For full guarantees, query {@link
4524 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4525 processed format -- it will return a non-0 value for a stalling stream.
4527 LEGACY devices will support up to 1 processing/stalling stream.
4530 <entry name="maxNumReprocessStreams" type="int32" visibility="system"
4531 deprecated="true" container="array">
4535 <description>How many reprocessing streams of any type
4536 can be allocated at the same time.</description>
4537 <range>&gt;= 0</range>
4539 Only used by HAL2.x.
4541 When set to 0, it means no reprocess stream is supported.
4545 <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full">
4547 The maximum numbers of any type of input streams
4548 that can be configured and used simultaneously by a camera device.
4553 <details>When set to 0, it means no input stream is supported.
4555 The image format for a input stream can be any supported format returned by {@link
4556 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
4557 input stream, there must be at least one output stream configured to receive the
4560 When an input stream and some output streams are used in a reprocessing request,
4561 only the input buffer will be used to produce these output stream buffers, and a
4562 new sensor image will not be captured.
4564 For example, for Zero Shutter Lag (ZSL) still capture use case, the input
4565 stream image format will be PRIVATE, the associated output stream image format
4569 For the reprocessing flow and controls, see
4570 hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
4576 <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
4577 <description>A frame counter set by the framework. This value monotonically
4578 increases with every new result (that is, each new result has a unique
4579 frameCount value).</description>
4580 <units>count of frames</units>
4581 <range>&gt; 0</range>
4582 <details>Reset on release()</details>
4584 <clone entry="android.request.id" kind="controls"></clone>
4585 <clone entry="android.request.metadataMode"
4586 kind="controls"></clone>
4587 <clone entry="android.request.outputStreams"
4588 kind="controls"></clone>
4589 <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
4590 <description>Specifies the number of pipeline stages the frame went
4591 through from when it was exposed to when the final completed result
4592 was available to the framework.</description>
4593 <range>&lt;= android.request.pipelineMaxDepth</range>
4594 <details>Depending on what settings are used in the request, and
4595 what streams are configured, the data may undergo less processing,
4596 and some pipeline stages skipped.
4598 See android.request.pipelineMaxDepth for more details.
4601 This value must always represent the accurate count of how many
4602 pipeline stages were actually used.
4607 <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
4608 <description>Specifies the number of maximum pipeline stages a frame
4609 has to go through from when it's exposed to when it's available
4610 to the framework.</description>
4611 <details>A typical minimum value for this is 2 (one stage to expose,
4612 one stage to readout) from the sensor. The ISP then usually adds
4613 its own stages to do custom HW processing. Further stages may be
4614 added by SW processing.
4616 Depending on what settings are used (e.g. YUV, JPEG) and what
4617 processing is enabled (e.g. face detection), the actual pipeline
4618 depth (specified by android.request.pipelineDepth) may be less than
4619 the max pipeline depth.
4621 A pipeline depth of X stages is equivalent to a pipeline latency of
4624 This value will normally be 8 or less, however, for high speed capture session,
4625 the max pipeline depth will be up to 8 x size of high speed capture request list.
4628 This value should be 4 or less, except for the high speed recording session, where the
4629 max batch sizes may be larger than 1.
4632 <entry name="partialResultCount" type="int32" visibility="public" optional="true">
4633 <description>Defines how many sub-components
4634 a result will be composed of.
4636 <range>&gt;= 1</range>
4637 <details>In order to combat the pipeline latency, partial results
4638 may be delivered to the application layer from the camera device as
4639 soon as they are available.
4641 Optional; defaults to 1. A value of 1 means that partial
4642 results are not supported, and only the final TotalCaptureResult will
4643 be produced by the camera device.
4645 A typical use case for this might be: after requesting an
4646 auto-focus (AF) lock the new AF state might be available 50%
4647 of the way through the pipeline. The camera device could
4648 then immediately dispatch this state via a partial result to
4649 the application, and the rest of the metadata via later
4653 <entry name="availableCapabilities" type="byte" visibility="public"
4654 enum="true" container="array" hwlevel="legacy">
4659 <value>BACKWARD_COMPATIBLE
4660 <notes>The minimal set of capabilities that every camera
4661 device (regardless of android.info.supportedHardwareLevel)
4664 This capability is listed by all normal devices, and
4665 indicates that the camera device has a feature set
4666 that's comparable to the baseline requirements for the
4667 older android.hardware.Camera API.
4669 Devices with the DEPTH_OUTPUT capability might not list this
4670 capability, indicating that they support only depth measurement,
4671 not standard color output.
4674 <value optional="true">MANUAL_SENSOR
4676 The camera device can be manually controlled (3A algorithms such
4677 as auto-exposure, and auto-focus can be bypassed).
4678 The camera device supports basic manual control of the sensor image
4679 acquisition related stages. This means the following controls are
4680 guaranteed to be supported:
4682 * Manual frame duration control
4683 * android.sensor.frameDuration
4684 * android.sensor.info.maxFrameDuration
4685 * Manual exposure control
4686 * android.sensor.exposureTime
4687 * android.sensor.info.exposureTimeRange
4688 * Manual sensitivity control
4689 * android.sensor.sensitivity
4690 * android.sensor.info.sensitivityRange
4691 * Manual lens control (if the lens is adjustable)
4693 * Manual flash control (if a flash unit is present)
4695 * Manual black level locking
4696 * android.blackLevel.lock
4697 * Auto exposure lock
4698 * android.control.aeLock
4700 If any of the above 3A algorithms are enabled, then the camera
4701 device will accurately report the values applied by 3A in the
4704 A given camera device may also support additional manual sensor controls,
4705 but this capability only covers the above list of controls.
4707 If this is supported, android.scaler.streamConfigurationMap will
4708 additionally return a min frame duration that is greater than
4709 zero for each supported size-format combination.
4712 <value optional="true">MANUAL_POST_PROCESSING
4714 The camera device post-processing stages can be manually controlled.
4715 The camera device supports basic manual control of the image post-processing
4716 stages. This means the following controls are guaranteed to be supported:
4718 * Manual tonemap control
4719 * android.tonemap.curve
4720 * android.tonemap.mode
4721 * android.tonemap.maxCurvePoints
4722 * android.tonemap.gamma
4723 * android.tonemap.presetCurve
4725 * Manual white balance control
4726 * android.colorCorrection.transform
4727 * android.colorCorrection.gains
4728 * Manual lens shading map control
4729 * android.shading.mode
4730 * android.statistics.lensShadingMapMode
4731 * android.statistics.lensShadingMap
4732 * android.lens.info.shadingMapSize
4733 * Manual aberration correction control (if aberration correction is supported)
4734 * android.colorCorrection.aberrationMode
4735 * android.colorCorrection.availableAberrationModes
4736 * Auto white balance lock
4737 * android.control.awbLock
4739 If auto white balance is enabled, then the camera device
4740 will accurately report the values applied by AWB in the result.
4742 A given camera device may also support additional post-processing
4743 controls, but this capability only covers the above list of controls.
4746 <value optional="true">RAW
4748 The camera device supports outputting RAW buffers and
4749 metadata for interpreting them.
4751 Devices supporting the RAW capability allow both for
4752 saving DNG files, and for direct application processing of
4755 * RAW_SENSOR is supported as an output format.
4756 * The maximum available resolution for RAW_SENSOR streams
4757 will match either the value in
4758 android.sensor.info.pixelArraySize or
4759 android.sensor.info.preCorrectionActiveArraySize.
4760 * All DNG-related optional metadata entries are provided
4761 by the camera device.
4764 <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING
4766 The camera device supports the Zero Shutter Lag reprocessing use case.
4768 * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4769 * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
4770 that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
4771 formats returned by {@link
4772 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
4773 android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4774 * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4775 returns non empty int[] for each supported input format returned by {@link
4776 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4777 * Each size returned by {@link
4778 android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4779 getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
4780 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4781 getOutputSizes(ImageFormat.PRIVATE)}
4782 * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
4783 relative to the sensor's maximum capture rate (at that resolution).
4784 * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
4785 {@link android.graphics.ImageFormat#YUV_420_888} and
4786 {@link android.graphics.ImageFormat#JPEG} formats.
4787 * The maximum available resolution for PRIVATE streams
4788 (both input/output) will match the maximum available
4789 resolution of JPEG streams.
4790 * Static metadata android.reprocess.maxCaptureStall.
4791 * Only below controls are effective for reprocessing requests and
4792 will be present in capture results, other controls in reprocess
4793 requests will be ignored by the camera device.
4795 * android.noiseReduction.mode
4797 * android.noiseReduction.availableNoiseReductionModes and
4798 android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4801 <value optional="true">READ_SENSOR_SETTINGS
4803 The camera device supports accurately reporting the sensor settings for many of
4804 the sensor controls while the built-in 3A algorithm is running. This allows
4805 reporting of sensor settings even when these settings cannot be manually changed.
4807 The values reported for the following controls are guaranteed to be available
4808 in the CaptureResult, including when 3A is enabled:
4811 * android.sensor.exposureTime
4812 * Sensitivity control
4813 * android.sensor.sensitivity
4814 * Lens controls (if the lens is adjustable)
4815 * android.lens.focusDistance
4816 * android.lens.aperture
4818 This capability is a subset of the MANUAL_SENSOR control capability, and will
4819 always be included if the MANUAL_SENSOR capability is available.
4822 <value optional="true">BURST_CAPTURE
4824 The camera device supports capturing high-resolution images at >= 20 frames per
4825 second, in at least the uncompressed YUV format, when post-processing settings are set
4826 to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames
4827 per second. Here, 'high resolution' means at least 8 megapixels, or the maximum
4828 resolution of the device, whichever is smaller.
4831 More specifically, this means that a size matching the camera device's active array
4832 size is listed as a supported size for the {@link
4833 android.graphics.ImageFormat#YUV_420_888} format in either {@link
4834 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
4835 android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
4836 with a minimum frame duration for that format and size of either <= 1/20 s, or
4837 <= 1/10 s, respectively; and the android.control.aeAvailableTargetFpsRanges entry
4838 lists at least one FPS range where the minimum FPS is >= 1 / minimumFrameDuration
4839 for the maximum-size YUV_420_888 format. If that maximum size is listed in {@link
4840 android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
4841 then the list of resolutions for YUV_420_888 from {@link
4842 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
4843 least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20
4846 If the device supports the {@link
4847 android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link
4848 android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, then those can also be
4849 captured at the same rate as the maximum-size YUV_420_888 resolution is.
4851 If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
4852 as for the YUV_420_888 format also apply to the {@link
4853 android.graphics.ImageFormat#PRIVATE} format.
4855 In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
4856 and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
4857 are also guaranteed to be `true` so burst capture with these two locks ON yields
4858 consistent image output.
4861 More specifically, this means that at least one output {@link
4862 android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in
4864 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
4865 is larger or equal to the 'high resolution' defined above, and can be captured at at
4866 least 20 fps. For the largest {@link
4867 android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in
4869 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS},
4870 camera device can capture this size for at least 10 frames per second. Also the
4871 android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range where
4872 the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
4874 If the device supports the {@link
4875 android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link
4876 android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, then those can also be
4877 captured at the same rate as the maximum-size YUV_420_888 resolution is.
4879 In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
4880 and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
4881 are also guaranteed to be `true` so burst capture with these two locks ON yields
4882 consistent image output.
4885 <value optional="true" ndk_hidden="true">YUV_REPROCESSING
4887 The camera device supports the YUV_420_888 reprocessing use case, similar as
4888 PRIVATE_REPROCESSING. This capability requires the camera device to support the
4891 * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4892 * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input
4893 format, that is, YUV_420_888 is included in the lists of formats returned by {@link
4894 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
4895 android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4897 android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4898 returns non-empty int[] for each supported input format returned by {@link
4899 android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4900 * Each size returned by {@link
4901 android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4902 getInputSizes(YUV_420_888)} is also included in {@link
4903 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4904 getOutputSizes(YUV_420_888)}
4905 * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate
4906 drop relative to the sensor's maximum capture rate (at that resolution).
4907 * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
4908 {@link android.graphics.ImageFormat#YUV_420_888} and {@link
4909 android.graphics.ImageFormat#JPEG} formats.
4910 * The maximum available resolution for {@link
4911 android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
4912 maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
4913 * Static metadata android.reprocess.maxCaptureStall.
4914 * Only the below controls are effective for reprocessing requests and will be present
4915 in capture results. The reprocess requests are from the original capture results
4916 that are associated with the intermediate {@link
4917 android.graphics.ImageFormat#YUV_420_888} output buffers. All other controls in the
4918 reprocess requests will be ignored by the camera device.
4920 * android.noiseReduction.mode
4922 * android.reprocess.effectiveExposureFactor
4923 * android.noiseReduction.availableNoiseReductionModes and
4924 android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4927 <value optional="true">DEPTH_OUTPUT
4929 The camera device can produce depth measurements from its field of view.
4931 This capability requires the camera device to support the following:
4933 * {@link android.graphics.ImageFormat#DEPTH16|AIMAGE_FORMAT_DEPTH16} is supported as
4936 android.graphics.ImageFormat#DEPTH_POINT_CLOUD|AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is
4937 optionally supported as an output format.
4938 * This camera device, and all camera devices with the same android.lens.facing, will
4939 list the following calibration metadata entries in both {@link
4940 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}
4942 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}:
4943 - android.lens.poseTranslation
4944 - android.lens.poseRotation
4945 - android.lens.intrinsicCalibration
4946 - android.lens.radialDistortion
4947 * The android.depth.depthIsExclusive entry is listed by this device.
4948 * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
4949 normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
4952 Generally, depth output operates at a slower frame rate than standard color capture,
4953 so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
4954 should be accounted for (see {@link
4955 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
4956 On a device that supports both depth and color-based output, to enable smooth preview,
4957 using a repeating burst is recommended, where a depth-output target is only included
4958 once every N frames, where N is the ratio between preview output rate and depth output
4959 rate, including depth stall time.
4962 <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
4964 The device supports constrained high speed video recording (frame rate >=120fps) use
4965 case. The camera device will support high speed capture session created by {@link
4966 android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
4967 only accepts high speed request lists created by {@link
4968 android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
4970 A camera device can still support high speed video streaming by advertising the high
4971 speed FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all
4972 normal capture request per frame control and synchronization requirements will apply
4973 to the high speed fps ranges, the same as all other fps ranges. This capability
4974 describes the capability of a specialized operating mode with many limitations (see
4975 below), which is only targeted at high speed video recording.
4977 The supported high speed video sizes and fps ranges are specified in {@link
4978 android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4979 To get desired output frame rates, the application is only allowed to select video
4980 size and FPS range combinations provided by {@link
4981 android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}. The
4982 fps range can be controlled via android.control.aeTargetFpsRange.
4984 In this capability, the camera device will override aeMode, awbMode, and afMode to
4985 ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
4986 controls will be overridden to be FAST. Therefore, no manual control of capture
4987 and post-processing parameters is possible. All other controls operate the
4988 same as when android.control.mode == AUTO. This means that all other
4989 android.control.* fields continue to work, such as
4991 * android.control.aeTargetFpsRange
4992 * android.control.aeExposureCompensation
4993 * android.control.aeLock
4994 * android.control.awbLock
4995 * android.control.effectMode
4996 * android.control.aeRegions
4997 * android.control.afRegions
4998 * android.control.awbRegions
4999 * android.control.afTrigger
5000 * android.control.aePrecaptureTrigger
5002 Outside of android.control.*, the following controls will work:
5004 * android.flash.mode (TORCH mode only, automatic flash for still capture will not
5005 work since aeMode is ON)
5006 * android.lens.opticalStabilizationMode (if it is supported)
5007 * android.scaler.cropRegion
5008 * android.statistics.faceDetectMode (if it is supported)
5010 For high speed recording use case, the actual maximum supported frame rate may
5011 be lower than what camera can output, depending on the destination Surfaces for
5012 the image data. For example, if the destination surface is from video encoder,
5013 the application needs to check if the video encoder is capable of supporting the
5014 high frame rate for a given video size, or it will end up with lower recording
5015 frame rate. If the destination surface is from preview window, the actual preview frame
5016 rate will be bounded by the screen refresh rate.
5018 The camera device will only support up to 2 high speed simultaneous output surfaces
5019 (preview and recording surfaces) in this mode. Above controls will be effective only
5020 if all of below conditions are true:
5022 * The application creates a camera capture session with no more than 2 surfaces via
5024 android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
5025 targeted surfaces must be preview surface (either from {@link
5026 android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or recording
5027 surface(either from {@link android.media.MediaRecorder#getSurface} or {@link
5028 android.media.MediaCodec#createInputSurface}).
5029 * The stream sizes are selected from the sizes reported by
5030 {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
5031 * The FPS ranges are selected from {@link
5032 android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
5034 When the above conditions are NOT satisfied,
5035 {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
5038 Switching to a FPS range that has different maximum FPS may trigger some camera device
5039 reconfigurations, which may introduce extra latency. It is recommended that
5040 the application avoids unnecessary maximum target FPS changes as much as possible
5041 during high speed streaming.
5045 <description>List of capabilities that this camera device
5046 advertises as fully supporting.</description>
5048 A capability is a contract that the camera device makes in order
5049 to be able to satisfy one or more use cases.
5051 Listing a capability guarantees that the whole set of features
5052 required to support a common use will all be available.
5054 Using a subset of the functionality provided by an unsupported
5055 capability may be possible on a specific camera device implementation;
5056 to do this query each of android.request.availableRequestKeys,
5057 android.request.availableResultKeys,
5058 android.request.availableCharacteristicsKeys.
5060 The following capabilities are guaranteed to be available on
5061 android.info.supportedHardwareLevel `==` FULL devices:
5064 * MANUAL_POST_PROCESSING
5066 Other capabilities may be available on either FULL or LIMITED
5067 devices, but the application should query this key to be sure.
5070 Additional constraint details per-capability will be available
5071 in the Compatibility Test Suite.
5073 Minimum baseline requirements required for the
5074 BACKWARD_COMPATIBLE capability are not explicitly listed.
5075 Instead refer to "BC" tags and the camera CTS tests in the
5076 android.hardware.camera2.cts package.
5078 Listed controls that can be either request or result (e.g.
5079 android.sensor.exposureTime) must be available both in the
5080 request and the result in order to be considered to be
5081 capability-compliant.
5083 For example, if the HAL claims to support MANUAL control,
5084 then exposure time must be configurable via the request _and_
5085 the actual exposure applied must be available via
5088 If MANUAL_SENSOR is omitted, the HAL may choose to omit the
5089 android.scaler.availableMinFrameDurations static property entirely.
5091 For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
5092 hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
5094 Devices that support the MANUAL_SENSOR capability must support the
5095 CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
5097 Devices that support the PRIVATE_REPROCESSING capability or the
5098 YUV_REPROCESSING capability must support the
5099 CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
5101 For DEPTH_OUTPUT, the depth-format keys
5102 android.depth.availableDepthStreamConfigurations,
5103 android.depth.availableDepthMinFrameDurations,
5104 android.depth.availableDepthStallDurations must be available, in
5105 addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
5106 enum notes. The entry android.depth.maxDepthSamples must be available
5107 if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
5111 <entry name="availableRequestKeys" type="int32" visibility="ndk_public"
5112 container="array" hwlevel="legacy">
5116 <description>A list of all keys that the camera device has available
5117 to use with {@link android.hardware.camera2.CaptureRequest|ACaptureRequest}.</description>
5119 <details>Attempting to set a key into a CaptureRequest that is not
5120 listed here will result in an invalid request and will be rejected
5121 by the camera device.
5123 This field can be used to query the feature set of a camera device
5124 at a more granular level than capabilities. This is especially
5125 important for optional keys that are not listed under any capability
5126 in android.request.availableCapabilities.
5129 Vendor tags can be listed here. Vendor tag metadata should also
5130 use the extensions C api (refer to camera3.h for more details).
5132 Setting/getting vendor tags will be checked against the metadata
5133 vendor extensions API and not against this field.
5135 The HAL must not consume any request tags that are not listed either
5136 here or in the vendor tag list.
5138 The public camera2 API will always make the vendor tags visible
5140 {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
5143 <entry name="availableResultKeys" type="int32" visibility="ndk_public"
5144 container="array" hwlevel="legacy">
5148 <description>A list of all keys that the camera device has available to use with {@link
5149 android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}.
5152 <details>Attempting to get a key from a CaptureResult that is not
5153 listed here will always return a `null` value. Getting a key from
5154 a CaptureResult that is listed here will generally never return a `null`
5157 The following keys may return `null` unless they are enabled:
5159 * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
5161 (Those sometimes-null keys will nevertheless be listed here
5162 if they are available.)
5164 This field can be used to query the feature set of a camera device
5165 at a more granular level than capabilities. This is especially
5166 important for optional keys that are not listed under any capability
5167 in android.request.availableCapabilities.
5170 Tags listed here must always have an entry in the result metadata,
5171 even if that size is 0 elements. Only array-type tags (e.g. lists,
5172 matrices, strings) are allowed to have 0 elements.
5174 Vendor tags can be listed here. Vendor tag metadata should also
5175 use the extensions C api (refer to camera3.h for more details).
5177 Setting/getting vendor tags will be checked against the metadata
5178 vendor extensions API and not against this field.
5180 The HAL must not produce any result tags that are not listed either
5181 here or in the vendor tag list.
5183 The public camera2 API will always make the vendor tags visible via {@link
5184 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
5187 <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public"
5188 container="array" hwlevel="legacy">
5192 <description>A list of all keys that the camera device has available to use with {@link
5193 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}.
5195 <details>This entry follows the same rules as
5196 android.request.availableResultKeys (except that it applies for
5197 CameraCharacteristics instead of CaptureResult). See above for more
5201 Keys listed here must always have an entry in the static info metadata,
5202 even if that size is 0 elements. Only array-type tags (e.g. lists,
5203 matrices, strings) are allowed to have 0 elements.
5205 Vendor tags can be listed here. Vendor tag metadata should also use
5206 the extensions C api (refer to camera3.h for more details).
5208 Setting/getting vendor tags will be checked against the metadata
5209 vendor extensions API and not against this field.
5211 The HAL must not have any tags in its static info that are not listed
5212 either here or in the vendor tag list.
5214 The public camera2 API will always make the vendor tags visible
5215 via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
5218 <entry name="availableSessionKeys" type="int32" visibility="ndk_public"
5219 container="array" hwlevel="legacy" hal_version="3.4">
5223 <description>A subset of the available request keys that the camera device
5224 can pass as part of the capture session initialization.</description>
5226 <details> This is a subset of android.request.availableRequestKeys which
5227 contains a list of keys that are difficult to apply per-frame and
5228 can result in unexpected delays when modified during the capture session
5229 lifetime. Typical examples include parameters that require a
5230 time-consuming hardware re-configuration or internal camera pipeline
5231 change. For performance reasons we advise clients to pass their initial
5233 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
5234 Once the camera capture session is enabled it is also recommended to avoid
5235 changing them from their initial values set in
5236 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
5237 Control over session parameters can still be exerted in capture requests
5238 but clients should be aware and expect delays during their application.
5239 An example usage scenario could look like this:
5241 * The camera client starts by querying the session parameter key list via
5242 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}.
5243 * Before triggering the capture session create sequence, a capture request
5245 {@link CameraDevice#createCaptureRequest|ACameraDevice_createCaptureRequest}
5246 using an appropriate template matching the particular use case.
5247 * The client should go over the list of session parameters and check
5248 whether some of the keys listed matches with the parameters that
5249 they intend to modify as part of the first capture request.
5250 * If there is no such match, the capture request can be passed
5252 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
5253 * If matches do exist, the client should update the respective values
5254 and pass the request to
5255 {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
5256 * After the capture session initialization completes the session parameter
5257 key list can continue to serve as reference when posting or updating
5258 further requests. As mentioned above further changes to session
5259 parameters should ideally be avoided, if updates are necessary
5260 however clients could expect a delay/glitch during the
5265 Vendor tags can be listed here. Vendor tag metadata should also
5266 use the extensions C api (refer to
5267 android.hardware.camera.device.V3_4.StreamConfiguration.sessionParams for more details).
5269 Setting/getting vendor tags will be checked against the metadata
5270 vendor extensions API and not against this field.
5272 The HAL must not consume any request tags in the session parameters that
5273 are not listed either here or in the vendor tag list.
5275 The public camera2 API will always make the vendor tags visible
5277 {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}.
5282 <section name="scaler">
5284 <entry name="cropRegion" type="int32" visibility="public"
5285 container="array" typedef="rectangle" hwlevel="legacy">
5289 <description>The desired region of the sensor to read out for this capture.</description>
5290 <units>Pixel coordinates relative to
5291 android.sensor.info.activeArraySize</units>
5293 This control can be used to implement digital zoom.
5295 The crop region coordinate system is based off
5296 android.sensor.info.activeArraySize, with `(0, 0)` being the
5297 top-left corner of the sensor active array.
5299 Output streams use this rectangle to produce their output,
5300 cropping to a smaller region if necessary to maintain the
5301 stream's aspect ratio, then scaling the sensor input to
5302 match the output's configured resolution.
5304 The crop region is applied after the RAW to other color
5305 space (e.g. YUV) conversion. Since raw streams
5306 (e.g. RAW16) don't have the conversion stage, they are not
5307 croppable. The crop region will be ignored by raw streams.
5309 For non-raw streams, any additional per-stream cropping will
5310 be done to maximize the final pixel area of the stream.
5312 For example, if the crop region is set to a 4:3 aspect
5313 ratio, then 4:3 streams will use the exact crop
5314 region. 16:9 streams will further crop vertically
5317 Conversely, if the crop region is set to a 16:9, then 4:3
5318 outputs will crop horizontally (pillarbox), and 16:9
5319 streams will match exactly. These additional crops will
5320 be centered within the crop region.
5322 The width and height of the crop region cannot
5323 be set to be smaller than
5324 `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
5325 `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
5327 The camera device may adjust the crop region to account
5328 for rounding and other hardware requirements; the final
5329 crop region used will be included in the output capture
5333 The data representation is int[4], which maps to (left, top, width, height).
5336 The output streams must maintain square pixels at all
5337 times, no matter what the relative aspect ratios of the
5338 crop region and the stream are. Negative values for
5339 corner are allowed for raw output if full pixel array is
5340 larger than active pixel array. Width and height may be
5341 rounded to nearest larger supportable width, especially
5342 for raw output, where only a few fixed scales may be
5345 For a set of output streams configured, if the sensor output is cropped to a smaller
5346 size than the active array size, the HAL needs to follow the cropping rules below:
5348 * The HAL needs to handle the cropRegion as if the sensor crop size is the effective active
5349 array size. More specifically, the HAL must transform the requested cropRegion from
5350 android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:
5351 1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor
5352 cropped pixel area by (tx, ty),
5353 where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
5354 and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
5355 (sensorCrop.top, sensorCrop.left) is the coordinate based off the
5356 android.sensor.info.activeArraySize.
5357 2. Scale the width and height of requested cropRegion with scaling factor of
5358 sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height
5360 Once this new cropRegion is calculated, the HAL must use this region to crop the image
5361 with regard to the sensor crop size (effective active array size). The HAL still needs to
5362 follow the general cropping rule for this new cropRegion and effective active
5365 * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
5366 The HAL needs to convert the new cropRegion generated above w.r.t. the full active array size.
5367 The reported cropRegion may be slightly different from the requested cropRegion since
5368 the HAL may adjust the crop region to account for rounding, conversion error, or other
5369 hardware limitations.
5371 HAL2.x uses only (x, y, width)
5377 <entry name="availableFormats" type="int32"
5378 visibility="hidden" deprecated="true" enum="true"
5379 container="array" typedef="imageFormat">
5384 <value optional="true" id="0x20">RAW16
5386 RAW16 is a standard, cross-platform format for raw image
5387 buffers with 16-bit pixels.
5389 Buffers of this format are typically expected to have a
5390 Bayer Color Filter Array (CFA) layout, which is given in
5391 android.sensor.info.colorFilterArrangement. Sensors with
5392 CFAs that are not representable by a format in
5393 android.sensor.info.colorFilterArrangement should not
5396 Buffers of this format will also follow the constraints given for
5397 RAW_OPAQUE buffers, but with relaxed performance constraints.
5399 This format is intended to give users access to the full contents
5400 of the buffers coming directly from the image sensor prior to any
5401 cropping or scaling operations, and all coordinate systems for
5402 metadata used for this format are relative to the size of the
5403 active region of the image sensor before any geometric distortion
5404 correction has been applied (i.e.
5405 android.sensor.info.preCorrectionActiveArraySize). Supported
5406 dimensions for this format are limited to the full dimensions of
5407 the sensor (e.g. either android.sensor.info.pixelArraySize or
5408 android.sensor.info.preCorrectionActiveArraySize will be the
5409 only supported output size).
5411 See android.scaler.availableInputOutputFormatsMap for
5412 the full set of performance guarantees.
5415 <value optional="true" id="0x24">RAW_OPAQUE
5418 {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}
5419 as referred in public API) is a format for raw image buffers
5420 coming from an image sensor.
5422 The actual structure of buffers of this format is
5423 platform-specific, but must follow several constraints:
5425 1. No image post-processing operations may have been applied to
5426 buffers of this type. These buffers contain raw image data coming
5427 directly from the image sensor.
5428 1. If a buffer of this format is passed to the camera device for
5429 reprocessing, the resulting images will be identical to the images
5430 produced if the buffer had come directly from the sensor and was
5431 processed with the same settings.
5433 The intended use for this format is to allow access to the native
5434 raw format buffers coming directly from the camera sensor without
5435 any additional conversions or decrease in framerate.
5437 See android.scaler.availableInputOutputFormatsMap for the full set of
5438 performance guarantees.
5441 <value optional="true" id="0x32315659">YV12
5442 <notes>YCrCb 4:2:0 Planar</notes>
5444 <value optional="true" id="0x11">YCrCb_420_SP
5447 <value id="0x22">IMPLEMENTATION_DEFINED
5448 <notes>System internal format, not application-accessible</notes>
5450 <value id="0x23">YCbCr_420_888
5451 <notes>Flexible YUV420 Format</notes>
5453 <value id="0x21">BLOB
5454 <notes>JPEG format</notes>
5457 <description>The list of image formats that are supported by this
5458 camera device for output streams.</description>
5460 All camera devices will support JPEG and YUV_420_888 formats.
5462 When set to YUV_420_888, application can access the YUV420 data directly.
5465 These format values are from HAL_PIXEL_FORMAT_* in
5466 system/core/include/system/graphics.h.
5468 When IMPLEMENTATION_DEFINED is used, the platform
5469 gralloc module will select a format based on the usage flags provided
5470 by the camera HAL device and the other endpoint of the stream. It is
5471 usually used by preview and recording streams, where the application doesn't
5472 need to access the image data.
5474 YCbCr_420_888 format must be supported by the HAL. When an image stream
5475 needs CPU/application direct access, this format will be used.
5477 The BLOB format must be supported by the HAL. This is used for the JPEG stream.
5479 A RAW_OPAQUE buffer should contain only pixel data. It is strongly
5480 recommended that any information used by the camera device when
5481 processing images is fully expressed by the result metadata
5482 for that image buffer.
5486 <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
5491 <description>The minimum frame duration that is supported
5492 for each resolution in android.scaler.availableJpegSizes.
5494 <units>Nanoseconds</units>
5495 <range>TODO: Remove property.</range>
5497 This corresponds to the minimum steady-state frame duration when only
5498 that JPEG stream is active and captured in a burst, with all
5499 processing (typically in android.*.mode) set to FAST.
5501 When multiple streams are configured, the minimum
5502 frame duration will be &gt;= max(individual stream min
5503 durations)</details>
5506 <entry name="availableJpegSizes" type="int32" visibility="hidden"
5507 deprecated="true" container="array" typedef="size">
5512 <description>The JPEG resolutions that are supported by this camera device.</description>
5513 <range>TODO: Remove property.</range>
5515 The resolutions are listed as `(width, height)` pairs. All camera devices will support
5516 sensor maximum resolution (defined by android.sensor.info.activeArraySize).
5519 The HAL must include sensor maximum resolution
5520 (defined by android.sensor.info.activeArraySize),
5521 and should include half/quarter of sensor maximum resolution.
5525 <entry name="availableMaxDigitalZoom" type="float" visibility="public"
5527 <description>The maximum ratio between both active area width
5528 and crop region width, and active area height and
5529 crop region height, for android.scaler.cropRegion.
5531 <units>Zoom scale factor</units>
5532 <range>&gt;=1</range>
5534 This represents the maximum amount of zooming possible by
5535 the camera device, or equivalently, the minimum cropping
5538 Crop regions that have a width or height that is smaller
5539 than this ratio allows will be rounded up to the minimum
5540 allowed size by the camera device.
5544 <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
5549 <description>For each available processed output size (defined in
5550 android.scaler.availableProcessedSizes), this property lists the
5551 minimum supportable frame duration for that size.
5553 <units>Nanoseconds</units>
5555 This should correspond to the frame duration when only that processed
5556 stream is active, with all processing (typically in android.*.mode)
5559 When multiple streams are configured, the minimum frame duration will
5560 be &gt;= max(individual stream min durations).
5564 <entry name="availableProcessedSizes" type="int32" visibility="hidden"
5565 deprecated="true" container="array" typedef="size">
5570 <description>The resolutions available for use with
5571 processed output streams, such as YV12, NV12, and
5572 platform opaque YUV/RGB streams to the GPU or video
5573 encoders.</description>
5575 The resolutions are listed as `(width, height)` pairs.
5577 For a given use case, the actual maximum supported resolution
5578 may be lower than what is listed here, depending on the destination
5579 Surface for the image data. For example, for recording video,
5580 the video encoder chosen may have a maximum size limit (e.g. 1080p)
5581 smaller than what the camera (e.g. maximum resolution is 3264x2448)
5584 Please reference the documentation for the image data destination to
5585 check if it limits the maximum size for image data.
5588 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5589 the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
5590 and each below resolution if it is smaller than or equal to the sensor
5591 maximum resolution (if they are not listed in JPEG sizes already):
5596 * 1080p (1920 x 1080)
5598 For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
5599 the HAL only has to list up to the maximum video size supported by the device.
5603 <entry name="availableRawMinDurations" type="int64" deprecated="true"
5609 For each available raw output size (defined in
5610 android.scaler.availableRawSizes), this property lists the minimum
5611 supportable frame duration for that size.
5613 <units>Nanoseconds</units>
5615 Should correspond to the frame duration when only the raw stream is
5618 When multiple streams are configured, the minimum
5619 frame duration will be &gt;= max(individual stream min
5620 durations)</details>
5623 <entry name="availableRawSizes" type="int32" deprecated="true"
5624 container="array" typedef="size">
5629 <description>The resolutions available for use with raw
5630 sensor output streams, listed as width,
5631 height</description>
5635 <clone entry="android.scaler.cropRegion" kind="controls">
5639 <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
5640 typedef="reprocessFormatsMap">
5641 <description>The mapping of image formats that are supported by this
5642 camera device for input streams, to their corresponding output formats.
5645 All camera devices with at least 1
5646 android.request.maxNumInputStreams will have at least one
5647 available input format.
5649 The camera device will support the following map of formats,
5650 if its dependent capability (android.request.availableCapabilities) is supported:
5652 Input Format | Output Format | Capability
5653 :-------------------------------------------------|:--------------------------------------------------|:----------
5654 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#JPEG} | PRIVATE_REPROCESSING
5655 {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#YUV_420_888} | PRIVATE_REPROCESSING
5656 {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING
5657 {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#YUV_420_888} | YUV_REPROCESSING
5659 PRIVATE refers to a device-internal format that is not directly application-visible. A
5660 PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
5661 with {@link android.graphics.ImageFormat#PRIVATE} as the format.
5663 For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
5664 or output will never hurt maximum frame rate (i.e. {@link
5665 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
5666 getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0),
5668 Attempting to configure an input stream with output streams not
5669 listed as available in this map is not valid.
5672 For the formats, see `system/core/include/system/graphics.h` for a definition
5673 of the image format enumerations. The PRIVATE format refers to the
5674 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
5675 the actual format by using the gralloc usage flags.
5676 For ZSL use case in particular, the HAL could choose appropriate format (partially
5677 processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL.
5678 See camera3.h for more details.
5680 This value is encoded as a variable-size array-of-arrays.
5681 The inner array always contains `[format, length, ...]` where
5682 `...` has `length` elements. An inner array is followed by another
5683 inner array if the total metadata entry size hasn't yet been exceeded.
5685 A code sample to read/write this encoding (with a device that
5686 supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
5687 and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
5690 int32_t* contents = &entry.i32[0];
5691 for (size_t i = 0; i < entry.count; ) {
5692 int32_t format = contents[i++];
5693 int32_t length = contents[i++];
5694 int32_t output_formats[length];
5695 memcpy(&output_formats[0], &contents[i],
5696 length * sizeof(int32_t));
5700 // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
5701 int32_t[] contents = {
5702 IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
5703 YUV_420_888, 2, YUV_420_888, BLOB,
5705 update_camera_metadata_entry(metadata, index, &contents[0],
5706 sizeof(contents)/sizeof(contents[0]), &updated_entry);
5708 If the HAL claims to support any of the capabilities listed in the
5709 above details, then it must also support all the input-output
5710 combinations listed for that capability. It can optionally support
5711 additional formats if it so chooses.
5715 <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public"
5716 enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy">
5722 <value>OUTPUT</value>
5723 <value>INPUT</value>
5725 <description>The available stream configurations that this
5726 camera device supports
5727 (i.e. format, width, height, output/input stream).
5730 The configurations are listed as `(format, width, height, input?)`
5733 For a given use case, the actual maximum supported resolution
5734 may be lower than what is listed here, depending on the destination
5735 Surface for the image data. For example, for recording video,
5736 the video encoder chosen may have a maximum size limit (e.g. 1080p)
5737 smaller than what the camera (e.g. maximum resolution is 3264x2448)
5740 Please reference the documentation for the image data destination to
5741 check if it limits the maximum size for image data.
5743 Not all output formats may be supported in a configuration with
5744 an input stream of a particular format. For more details, see
5745 android.scaler.availableInputOutputFormatsMap.
5747 The following table describes the minimum required output stream
5748 configurations based on the hardware level
5749 (android.info.supportedHardwareLevel):
5751 Format | Size | Hardware Level | Notes
5752 :-------------:|:--------------------------------------------:|:--------------:|:--------------:
5753 JPEG | android.sensor.info.activeArraySize | Any |
5754 JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize
5755 JPEG | 1280x720 (720p) | Any | if 720p <= activeArraySize
5756 JPEG | 640x480 (480p) | Any | if 480p <= activeArraySize
5757 JPEG | 320x240 (240p) | Any | if 240p <= activeArraySize
5758 YUV_420_888 | all output sizes available for JPEG | FULL |
5759 YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED |
5760 IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
5762 Refer to android.request.availableCapabilities for additional
5763 mandatory stream configurations on a per-capability basis.
5766 It is recommended (but not mandatory) to also include half/quarter
5767 of sensor maximum resolution for JPEG formats (regardless of hardware
5770 (The following is a rewording of the above required table):
5772 For JPEG format, the sizes may be restricted by below conditions:
5774 * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
5775 (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
5776 (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5777 it does not have to be included in the supported JPEG sizes.
5778 * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5779 the dimensions being a multiple of 16.
5781 Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
5782 However, the largest JPEG size must be as close as possible to the sensor maximum
5783 resolution given above constraints. It is required that after aspect ratio adjustments,
5784 additional size reduction due to other issues must be less than 3% in area. For example,
5785 if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect
5786 ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be
5789 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5790 the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5791 here as output streams.
5793 It must also include each below resolution if it is smaller than or
5794 equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5795 formats), as output streams:
5800 * 1080p (1920 x 1080)
5802 For LIMITED capability devices
5803 (`android.info.supportedHardwareLevel == LIMITED`),
5804 the HAL only has to list up to the maximum video size
5805 supported by the device.
5807 Regardless of hardware level, every output resolution available for
5808 YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5810 This supersedes the following fields, which are now deprecated:
5813 * available[Processed,Raw,Jpeg]Sizes
5816 <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public"
5817 container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5822 <description>This lists the minimum frame duration for each
5823 format/size combination.
5825 <units>(format, width, height, ns) x n</units>
5827 This should correspond to the frame duration when only that
5828 stream is active, with all processing (typically in android.*.mode)
5829 set to either OFF or FAST.
5831 When multiple streams are used in a request, the minimum frame
5832 duration will be max(individual stream min durations).
5834 The minimum frame duration of a stream (of a particular format, size)
5835 is the same regardless of whether the stream is input or output.
5837 See android.sensor.frameDuration and
5838 android.scaler.availableStallDurations for more details about
5839 calculating the max frame rate.
5843 <entry name="availableStallDurations" type="int64" visibility="ndk_public"
5844 container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5849 <description>This lists the maximum stall duration for each
5850 output format/size combination.
5852 <units>(format, width, height, ns) x n</units>
5854 A stall duration is how much extra time would get added
5855 to the normal minimum frame duration for a repeating request
5856 that has streams with non-zero stall.
5858 For example, consider JPEG captures which have the following
5861 * JPEG streams act like processed YUV streams in requests for which
5862 they are not included; in requests in which they are directly
5863 referenced, they act as JPEG streams. This is because supporting a
5864 JPEG stream requires the underlying YUV data to always be ready for
5865 use by a JPEG encoder, but the encoder will only be used (and impact
5866 frame duration) on requests that actually reference a JPEG stream.
5867 * The JPEG processor can run concurrently to the rest of the camera
5868 pipeline, but cannot process more than 1 capture at a time.
5870 In other words, using a repeating YUV request would result
5871 in a steady frame rate (let's say it's 30 FPS). If a single
5872 JPEG request is submitted periodically, the frame rate will stay
5873 at 30 FPS (as long as we wait for the previous JPEG to return each
5874 time). If we try to submit a repeating YUV + JPEG request, then
5875 the frame rate will drop from 30 FPS.
5877 In general, submitting a new request with a non-0 stall time
5878 stream will _not_ cause a frame rate drop unless there are still
5879 outstanding buffers for that stream from previous requests.
5881 Submitting a repeating request with streams (call this `S`)
5882 is the same as setting the minimum frame duration from
5883 the normal minimum frame duration corresponding to `S`, added with
5884 the maximum stall duration for `S`.
5886 If interleaving requests with and without a stall duration,
5887 a request will stall by the maximum of the remaining times
5888 for each can-stall stream with outstanding buffers.
5890 This means that a stalling request will not have an exposure start
5891 until the stall has completed.
5893 This should correspond to the stall duration when only that stream is
5894 active, with all processing (typically in android.*.mode) set to FAST
5895 or OFF. Setting any of the processing modes to HIGH_QUALITY
5896 effectively results in an indeterminate stall duration for all
5897 streams in a request (the regular stall calculation rules are
5900 The following formats may always have a stall duration:
5902 * {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG}
5903 * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16}
5905 The following formats will never have a stall duration:
5907 * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888}
5908 * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}
5909 * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}
5911 All other formats may or may not have an allowed stall duration on
5912 a per-capability basis; refer to android.request.availableCapabilities
5915 See android.sensor.frameDuration for more information about
5916 calculating the max frame rate (absent stalls).
5919 If possible, it is recommended that all non-JPEG formats
5920 (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE
5921 and IMPLEMENTATION_DEFINED must not have stall durations.
5925 <entry name="streamConfigurationMap" type="int32" visibility="java_public"
5926 synthetic="true" typedef="streamConfigurationMap"
5928 <description>The available stream configurations that this
5929 camera device supports; also includes the minimum frame durations
5930 and the stall durations for each format/size combination.
5933 All camera devices will support sensor maximum resolution (defined by
5934 android.sensor.info.activeArraySize) for the JPEG format.
5936 For a given use case, the actual maximum supported resolution
5937 may be lower than what is listed here, depending on the destination
5938 Surface for the image data. For example, for recording video,
5939 the video encoder chosen may have a maximum size limit (e.g. 1080p)
5940 smaller than what the camera (e.g. maximum resolution is 3264x2448)
5943 Please reference the documentation for the image data destination to
5944 check if it limits the maximum size for image data.
5946 The following table describes the minimum required output stream
5947 configurations based on the hardware level
5948 (android.info.supportedHardwareLevel):
5950 Format | Size | Hardware Level | Notes
5951 :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
5952 {@link android.graphics.ImageFormat#JPEG} | android.sensor.info.activeArraySize (*1) | Any |
5953 {@link android.graphics.ImageFormat#JPEG} | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize
5954 {@link android.graphics.ImageFormat#JPEG} | 1280x720 (720p) | Any | if 720p <= activeArraySize
5955 {@link android.graphics.ImageFormat#JPEG} | 640x480 (480p) | Any | if 480p <= activeArraySize
5956 {@link android.graphics.ImageFormat#JPEG} | 320x240 (240p) | Any | if 240p <= activeArraySize
5957 {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG | FULL |
5958 {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG, up to the maximum video size | LIMITED |
5959 {@link android.graphics.ImageFormat#PRIVATE} | same as YUV_420_888 | Any |
5961 Refer to android.request.availableCapabilities and {@link
5962 android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
5963 stream configurations on a per-capability basis.
5965 *1: For JPEG format, the sizes may be restricted by below conditions:
5967 * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
5968 (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
5969 (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5970 it does not have to be included in the supported JPEG sizes.
5971 * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5972 the dimensions being a multiple of 16.
5973 Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
5974 However, the largest JPEG size will be as close as possible to the sensor maximum
5975 resolution given above constraints. It is required that after aspect ratio adjustments,
5976 additional size reduction due to other issues must be less than 3% in area. For example,
5977 if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect
5978 ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be
5982 Do not set this property directly
5983 (it is synthetic and will not be available at the HAL layer);
5984 set the android.scaler.availableStreamConfigurations instead.
5986 Not all output formats may be supported in a configuration with
5987 an input stream of a particular format. For more details, see
5988 android.scaler.availableInputOutputFormatsMap.
5990 It is recommended (but not mandatory) to also include half/quarter
5991 of sensor maximum resolution for JPEG formats (regardless of hardware
5994 (The following is a rewording of the above required table):
5996 The HAL must include sensor maximum resolution (defined by
5997 android.sensor.info.activeArraySize).
5999 For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
6000 the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
6001 here as output streams.
6003 It must also include each below resolution if it is smaller than or
6004 equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
6005 formats), as output streams:
6010 * 1080p (1920 x 1080)
6012 For LIMITED capability devices
6013 (`android.info.supportedHardwareLevel == LIMITED`),
6014 the HAL only has to list up to the maximum video size
6015 supported by the device.
6017 Regardless of hardware level, every output resolution available for
6018 YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
6020 This supersedes the following fields, which are now deprecated:
6023 * available[Processed,Raw,Jpeg]Sizes
6026 <entry name="croppingType" type="byte" visibility="public" enum="true"
6031 The camera device only supports centered crop regions.
6036 The camera device supports arbitrarily chosen crop regions.
6040 <description>The crop type that this camera device supports.</description>
6042 When passing a non-centered crop region (android.scaler.cropRegion) to a camera
6043 device that only supports CENTER_ONLY cropping, the camera device will move the
6044 crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
6045 and keep the crop region width and height unchanged. The camera device will return the
6046 final used crop region in metadata result android.scaler.cropRegion.
6048 Camera devices that support FREEFORM cropping will support any crop region that
6049 is inside of the active array. The camera device will apply the same crop region and
6050 return the final used crop region in capture result metadata android.scaler.cropRegion.
6052 LEGACY capability devices will only support CENTER_ONLY cropping.
6057 <section name="sensor">
6059 <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
6060 <description>Duration each pixel is exposed to
6061 light.</description>
6062 <units>Nanoseconds</units>
6063 <range>android.sensor.info.exposureTimeRange</range>
6064 <details>If the sensor can't expose this exact duration, it will shorten the
6065 duration exposed to the nearest possible value (rather than expose longer).
6066 The final exposure time used will be available in the output capture result.
6068 This control is only effective if android.control.aeMode or android.control.mode is set to
6069 OFF; otherwise the auto-exposure algorithm will override this value.
6073 <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
6074 <description>Duration from start of frame exposure to
6075 start of next frame exposure.</description>
6076 <units>Nanoseconds</units>
6077 <range>See android.sensor.info.maxFrameDuration, {@link
6078 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
6079 The duration is capped to `max(duration, exposureTime + overhead)`.</range>
6081 The maximum frame rate that can be supported by a camera subsystem is
6082 a function of many factors:
6084 * Requested resolutions of output image streams
6085 * Availability of binning / skipping modes on the imager
6086 * The bandwidth of the imager interface
6087 * The bandwidth of the various ISP processing blocks
6089 Since these factors can vary greatly between different ISPs and
6090 sensors, the camera abstraction tries to represent the bandwidth
6091 restrictions with as simple a model as possible.
6093 The model presented has the following characteristics:
6095 * The image sensor is always configured to output the smallest
6096 resolution possible given the application's requested output stream
6097 sizes. The smallest resolution is defined as being at least as large
6098 as the largest requested output stream size; the camera pipeline must
6099 never digitally upsample sensor data when the crop region covers the
6100 whole sensor. In general, this means that if only small output stream
6101 resolutions are configured, the sensor can provide a higher frame
6103 * Since any request may use any or all the currently configured
6104 output streams, the sensor and ISP must be configured to support
6105 scaling a single capture to all the streams at the same time. This
6106 means the camera pipeline must be ready to produce the largest
6107 requested output size without any delay. Therefore, the overall
6108 frame rate of a given configured stream set is governed only by the
6109 largest requested stream resolution.
6110 * Using more than one output stream in a request does not affect the
6112 * Certain format-streams may need to do additional background processing
6113 before data is consumed/produced by that stream. These processors
6114 can run concurrently to the rest of the camera pipeline, but
6115 cannot process more than 1 capture at a time.
6117 The necessary information for the application, given the model above, is provided via
6119 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
6120 These are used to determine the maximum frame rate / minimum frame duration that is
6121 possible for a given stream configuration.
6123 Specifically, the application can use the following rules to
6124 determine the minimum frame duration it can request from the camera
6127 1. Let the set of currently configured input/output streams be called `S`.
6128 1. Find the minimum frame durations for each stream in `S`, by looking it up in {@link
6129 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
6130 (with its respective size/format). Let this set of frame durations be called `F`.
6131 1. For any given request `R`, the minimum frame duration allowed for `R` is the maximum
6132 out of all values in `F`. Let the streams used in `R` be called `S_r`.
6134 If none of the streams in `S_r` have a stall time (listed in {@link
6135 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
6136 using its respective size/format), then the frame duration in `F` determines the steady
6137 state frame rate that the application will get if it uses `R` as a repeating request. Let
6138 this special kind of request be called `Rsimple`.
6140 A repeating request `Rsimple` can be _occasionally_ interleaved by a single capture of a
6141 new request `Rstall` (which has at least one in-use stream with a non-0 stall time) and if
6142 `Rstall` has the same minimum frame duration this will not cause a frame rate loss if all
6143 buffers from the previous `Rstall` have already been delivered.
6145 For more details about stalling, see {@link
6146 android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
6148 This control is only effective if android.control.aeMode or android.control.mode is set to
6149 OFF; otherwise the auto-exposure algorithm will override this value.
6152 For more details about stalling, see
6153 android.scaler.availableStallDurations.
6157 <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
6158 <description>The amount of gain applied to sensor data
6159 before processing.</description>
6160 <units>ISO arithmetic units</units>
6161 <range>android.sensor.info.sensitivityRange</range>
6163 The sensitivity is the standard ISO sensitivity value,
6164 as defined in ISO 12232:2006.
6166 The sensitivity must be within android.sensor.info.sensitivityRange, and
6167 if it is less than android.sensor.maxAnalogSensitivity, the camera device
6168 is guaranteed to use only analog amplification for applying the gain.
6170 If the camera device cannot apply the exact sensitivity
6171 requested, it will reduce the gain to the nearest supported
6172 value. The final sensitivity used will be available in the
6173 output capture result.
6175 This control is only effective if android.control.aeMode or android.control.mode is set to
6176 OFF; otherwise the auto-exposure algorithm will override this value.
6178 <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
6183 <namespace name="info">
6184 <entry name="activeArraySize" type="int32" visibility="public"
6185 type_notes="Four ints defining the active pixel rectangle"
6186 container="array" typedef="rectangle" hwlevel="legacy">
6191 The area of the image sensor which corresponds to active pixels after any geometric
6192 distortion correction has been applied.
6194 <units>Pixel coordinates on the image sensor</units>
6196 This is the rectangle representing the size of the active region of the sensor (i.e.
6197 the region that actually receives light from the scene) after any geometric correction
6198 has been applied, and should be treated as the maximum size in pixels of any of the
6199 image output formats aside from the raw formats.
6201 This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6202 the full pixel array, and the size of the full pixel array is given by
6203 android.sensor.info.pixelArraySize.
6205 The coordinate system for most other keys that list pixel coordinates, including
6206 android.scaler.cropRegion, is defined relative to the active array rectangle given in
6207 this field, with `(0, 0)` being the top-left of this rectangle.
6209 The active array may be smaller than the full pixel array, since the full array may
6210 include black calibration pixels or other inactive regions, and geometric correction
6211 resulting in scaling or cropping may have been applied.
6214 The data representation is `int[4]`, which maps to `(left, top, width, height)`.
6217 This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6219 The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`.
6223 <entry name="sensitivityRange" type="int32" visibility="public"
6224 type_notes="Range of supported sensitivities"
6225 container="array" typedef="rangeInt"
6230 <description>Range of sensitivities for android.sensor.sensitivity supported by this
6231 camera device.</description>
6232 <range>Min <= 100, Max &gt;= 800</range>
6234 The values are the standard ISO sensitivity values,
6235 as defined in ISO 12232:2006.
6241 <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
6249 <notes>Sensor is not Bayer; output has 3 16-bit
6250 values for each pixel, instead of just 1 16-bit value
6251 per pixel.</notes></value>
6253 <description>The arrangement of color filters on sensor;
6254 represents the colors in the top-left 2x2 section of
6255 the sensor, in reading order.</description>
6258 <entry name="exposureTimeRange" type="int64" visibility="public"
6259 type_notes="nanoseconds" container="array" typedef="rangeLong"
6264 <description>The range of image exposure times for android.sensor.exposureTime supported
6265 by this camera device.
6267 <units>Nanoseconds</units>
6268 <range>The minimum exposure time will be less than 100 us. For FULL
6269 capability devices (android.info.supportedHardwareLevel == FULL),
6270 the maximum exposure time will be greater than 100ms.</range>
6271 <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
6272 The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least
6277 <entry name="maxFrameDuration" type="int64" visibility="public"
6279 <description>The maximum possible frame duration (minimum frame rate) for
6280 android.sensor.frameDuration that is supported by this camera device.</description>
6281 <units>Nanoseconds</units>
6282 <range>For FULL capability devices
6283 (android.info.supportedHardwareLevel == FULL), at least 100ms.
6285 <details>Attempting to use frame durations beyond the maximum will result in the frame
6286 duration being clipped to the maximum. See that control for a full definition of frame
6290 android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
6291 for the minimum frame duration values.
6294 For FULL capability devices (android.info.supportedHardwareLevel == FULL),
6295 The maximum of the range SHOULD be at least
6296 1 second (1e9), MUST be at least 100ms (100e6).
6298 android.sensor.info.maxFrameDuration must be greater or
6299 equal to the android.sensor.info.exposureTimeRange max
6300 value (since exposure time overrides frame duration).
6302 Available minimum frame durations for JPEG must be no greater
6303 than that of the YUV_420_888/IMPLEMENTATION_DEFINED
6304 minimum frame durations (for that respective size).
6306 Since JPEG processing is considered offline and can take longer than
6307 a single uncompressed capture, refer to
6308 android.scaler.availableStallDurations
6309 for details about encoding this scenario.
6313 <entry name="physicalSize" type="float" visibility="public"
6314 type_notes="width x height"
6315 container="array" typedef="sizeF" hwlevel="legacy">
6319 <description>The physical dimensions of the full pixel
6320 array.</description>
6321 <units>Millimeters</units>
6322 <details>This is the physical size of the sensor pixel
6323 array defined by android.sensor.info.pixelArraySize.
6325 <hal_details>Needed for FOV calculation for old API</hal_details>
6329 <entry name="pixelArraySize" type="int32" visibility="public"
6330 container="array" typedef="size" hwlevel="legacy">
6334 <description>Dimensions of the full pixel array, possibly
6335 including black calibration pixels.</description>
6336 <units>Pixels</units>
6337 <details>The pixel count of the full pixel array of the image sensor, which covers
6338 android.sensor.info.physicalSize area. This represents the full pixel dimensions of
6339 the raw buffers produced by this sensor.
6341 If a camera device supports raw sensor formats, either this or
6342 android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
6343 output formats listed in {@link
6344 android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
6345 (this depends on whether or not the image sensor returns buffers containing pixels that
6346 are not part of the active array region for blacklevel calibration or other purposes).
6348 Some parts of the full pixel array may not receive light from the scene,
6349 or be otherwise inactive. The android.sensor.info.preCorrectionActiveArraySize key
6350 defines the rectangle of active pixels that will be included in processed image
6356 <entry name="whiteLevel" type="int32" visibility="public">
6358 Maximum raw value output by sensor.
6360 <range>&gt; 255 (8-bit output)</range>
6362 This specifies the fully-saturated encoding level for the raw
6363 sample values from the sensor. This is typically caused by the
6364 sensor becoming highly non-linear or clipping. The minimum for
6365 each channel is specified by the offset in the
6366 android.sensor.blackLevelPattern key.
6368 The white level is typically determined either by sensor bit depth
6369 (8-14 bits is expected), or by the point where the sensor response
6370 becomes too non-linear to be useful. The default value for this is
6371 maximum representable value for a 16-bit raw sample (2^16 - 1).
6373 The white level values of captured images may vary for different
6374 capture settings (e.g., android.sensor.sensitivity). This key
6375 represents a coarse approximation for such a case. It is recommended
6376 to use android.sensor.dynamicWhiteLevel for captures when supported
6377 by the camera device, which provides more accurate white level values.
6380 The full bit depth of the sensor must be available in the raw data,
6381 so the value for linear sensors should not be significantly lower
6382 than maximum raw value supported, i.e. 2^(sensor bits per pixel).
6386 <entry name="timestampSource" type="byte" visibility="public"
6387 enum="true" hwlevel="legacy">
6391 Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
6392 but can not be compared to timestamps from other subsystems
6393 (e.g. accelerometer, gyro etc.), or other instances of the same or different
6394 camera devices in the same system. Timestamps between streams and results for
6395 a single camera instance are comparable, and the timestamps for all buffers
6396 and the result metadata generated by a single capture are identical.
6401 Timestamps from android.sensor.timestamp are in the same timebase as
6402 {@link android.os.SystemClock#elapsedRealtimeNanos},
6403 and they can be compared to other timestamps using that base.
6407 <description>The time base source for sensor capture start timestamps.</description>
6409 The timestamps provided for captures are always in nanoseconds and monotonic, but
6410 may not be based on a time source that can be compared to other system time sources.
6412 This characteristic defines the source for the timestamps, and therefore whether they
6413 can be compared against other system time sources/timestamps.
6416 For camera devices that implement UNKNOWN, the camera framework expects the timestamp
6417 source to be SYSTEM_TIME_MONOTONIC. For camera devices that implement REALTIME, the camera
6418 framework expects the timestamp source to be SYSTEM_TIME_BOOTTIME. See
6419 system/core/include/utils/Timers.h for the definition of SYSTEM_TIME_MONOTONIC and
6420 SYSTEM_TIME_BOOTTIME. Note that HAL must follow above expectation; otherwise video
6421 recording might suffer unexpected behavior.
6423 Also, camera devices which implement REALTIME must pass the ITS sensor fusion test which
6424 tests the alignment between camera timestamps and gyro sensor timestamps.
6428 <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
6431 <value>FALSE</value>
6434 <description>Whether the RAW images output from this camera device are subject to
6435 lens shading correction.</description>
6437 If TRUE, all images produced by the camera device in the RAW image formats will
6438 have lens shading correction already applied to it. If FALSE, the images will
6439 not be adjusted for lens shading correction.
6440 See android.request.maxNumOutputRaw for a list of RAW image formats.
6442 This key will be `null` for all devices that do not report this information.
6443 Devices with RAW capability will always report this information in this key.
6446 <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
6447 type_notes="Four ints defining the active pixel rectangle" container="array"
6448 typedef="rectangle" hwlevel="legacy">
6453 The area of the image sensor which corresponds to active pixels prior to the
6454 application of any geometric distortion correction.
6456 <units>Pixel coordinates on the image sensor</units>
6458 This is the rectangle representing the size of the active region of the sensor (i.e.
6459 the region that actually receives light from the scene) before any geometric correction
6460 has been applied, and should be treated as the active region rectangle for any of the
6461 raw formats. All metadata associated with raw processing (e.g. the lens shading
6462 correction map, and radial distortion fields) treats the top, left of this rectangle as
6465 The size of this region determines the maximum field of view and the maximum number of
6466 pixels that an image from this sensor can contain, prior to the application of
6467 geometric distortion correction. The effective maximum pixel dimensions of a
6468 post-distortion-corrected image is given by the android.sensor.info.activeArraySize
6469 field, and the effective maximum field of view for a post-distortion-corrected image
6470 can be calculated by applying the geometric distortion correction fields to this
6471 rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
6473 E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
6474 dimensions in android.sensor.info.activeArraySize given the position of a pixel,
6475 (x', y'), in the raw pixel array with dimensions given in
6476 android.sensor.info.pixelArraySize:
6478 1. Choose a pixel (x', y') within the active array region of the raw buffer given in
6479 android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
6480 to be outside of the FOV, and will not be shown in the processed output image.
6481 1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
6482 (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
6483 buffers is defined relative to the top, left of the
6484 android.sensor.info.preCorrectionActiveArraySize rectangle.
6485 1. If the resulting corrected pixel coordinate is within the region given in
6486 android.sensor.info.activeArraySize, then the position of this pixel in the
6487 processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
6488 when the top, left coordinate of that buffer is treated as (0, 0).
6490 Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
6491 is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
6492 android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
6493 correction doesn't change the pixel coordinate, the resulting pixel selected in
6494 pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
6495 with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
6496 relative to the top,left of post-processed YUV output buffer with dimensions given in
6497 android.sensor.info.activeArraySize.
6499 The currently supported fields that correct for geometric distortion are:
6501 1. android.lens.radialDistortion.
6503 If all of the geometric distortion fields are no-ops, this rectangle will be the same
6504 as the post-distortion-corrected rectangle given in
6505 android.sensor.info.activeArraySize.
6507 This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6508 the full pixel array, and the size of the full pixel array is given by
6509 android.sensor.info.pixelArraySize.
6511 The pre-correction active array may be smaller than the full pixel array, since the
6512 full array may include black calibration pixels or other inactive regions.
6515 The data representation is `int[4]`, which maps to `(left, top, width, height)`.
6518 This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6520 The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`.
6522 If omitted by the HAL implementation, the camera framework will assume that this is
6523 the same as the post-correction active array region given in
6524 android.sensor.info.activeArraySize.
6529 <entry name="referenceIlluminant1" type="byte" visibility="public"
6532 <value id="1">DAYLIGHT</value>
6533 <value id="2">FLUORESCENT</value>
6534 <value id="3">TUNGSTEN
6535 <notes>Incandescent light</notes>
6537 <value id="4">FLASH</value>
6538 <value id="9">FINE_WEATHER</value>
6539 <value id="10">CLOUDY_WEATHER</value>
6540 <value id="11">SHADE</value>
6541 <value id="12">DAYLIGHT_FLUORESCENT
6542 <notes>D 5700 - 7100K</notes>
6544 <value id="13">DAY_WHITE_FLUORESCENT
6545 <notes>N 4600 - 5400K</notes>
6547 <value id="14">COOL_WHITE_FLUORESCENT
6548 <notes>W 3900 - 4500K</notes>
6550 <value id="15">WHITE_FLUORESCENT
6551 <notes>WW 3200 - 3700K</notes>
6553 <value id="17">STANDARD_A</value>
6554 <value id="18">STANDARD_B</value>
6555 <value id="19">STANDARD_C</value>
6556 <value id="20">D55</value>
6557 <value id="21">D65</value>
6558 <value id="22">D75</value>
6559 <value id="23">D50</value>
6560 <value id="24">ISO_STUDIO_TUNGSTEN</value>
6563 The standard reference illuminant used as the scene light source when
6564 calculating the android.sensor.colorTransform1,
6565 android.sensor.calibrationTransform1, and
6566 android.sensor.forwardMatrix1 matrices.
6569 The values in this key correspond to the values defined for the
6570 EXIF LightSource tag. These illuminants are standard light sources
6571 that are often used in calibrating camera devices.
6573 If this key is present, then android.sensor.colorTransform1,
6574 android.sensor.calibrationTransform1, and
6575 android.sensor.forwardMatrix1 will also be present.
6577 Some devices may choose to provide a second set of calibration
6578 information for improved quality, including
6579 android.sensor.referenceIlluminant2 and its corresponding matrices.
6582 The first reference illuminant (android.sensor.referenceIlluminant1)
6583 and corresponding matrices must be present to support the RAW capability
6586 When producing raw images with a color profile that has only been
6587 calibrated against a single light source, it is valid to omit
6588 android.sensor.referenceIlluminant2 along with the
6589 android.sensor.colorTransform2, android.sensor.calibrationTransform2,
6590 and android.sensor.forwardMatrix2 matrices.
6592 If only android.sensor.referenceIlluminant1 is included, it should be
6593 chosen so that it is representative of typical scene lighting. In
6594 general, D50 or DAYLIGHT will be chosen for this case.
6596 If both android.sensor.referenceIlluminant1 and
6597 android.sensor.referenceIlluminant2 are included, they should be
6598 chosen to represent the typical range of scene lighting conditions.
6599 In general, a low color temperature illuminant such as Standard-A will
6600 be chosen for the first reference illuminant and a higher color
6601 temperature illuminant such as D65 will be chosen for the second
6602 reference illuminant.
6606 <entry name="referenceIlluminant2" type="byte" visibility="public">
6608 The standard reference illuminant used as the scene light source when
6609 calculating the android.sensor.colorTransform2,
6610 android.sensor.calibrationTransform2, and
6611 android.sensor.forwardMatrix2 matrices.
6613 <range>Any value listed in android.sensor.referenceIlluminant1</range>
6615 See android.sensor.referenceIlluminant1 for more details.
6617 If this key is present, then android.sensor.colorTransform2,
6618 android.sensor.calibrationTransform2, and
6619 android.sensor.forwardMatrix2 will also be present.
6623 <entry name="calibrationTransform1" type="rational"
6624 visibility="public" optional="true"
6625 type_notes="3x3 matrix in row-major-order" container="array"
6626 typedef="colorSpaceTransform">
6632 A per-device calibration transform matrix that maps from the
6633 reference sensor colorspace to the actual device sensor colorspace.
6636 This matrix is used to correct for per-device variations in the
6637 sensor colorspace, and is used for processing raw buffer data.
6639 The matrix is expressed as a 3x3 matrix in row-major-order, and
6640 contains a per-device calibration transform that maps colors
6641 from reference sensor color space (i.e. the "golden module"
6642 colorspace) into this camera device's native sensor color
6643 space under the first reference illuminant
6644 (android.sensor.referenceIlluminant1).
6648 <entry name="calibrationTransform2" type="rational"
6649 visibility="public" optional="true"
6650 type_notes="3x3 matrix in row-major-order" container="array"
6651 typedef="colorSpaceTransform">
6657 A per-device calibration transform matrix that maps from the
6658 reference sensor colorspace to the actual device sensor colorspace
6659 (this is the colorspace of the raw buffer data).
6662 This matrix is used to correct for per-device variations in the
6663 sensor colorspace, and is used for processing raw buffer data.
6665 The matrix is expressed as a 3x3 matrix in row-major-order, and
6666 contains a per-device calibration transform that maps colors
6667 from reference sensor color space (i.e. the "golden module"
6668 colorspace) into this camera device's native sensor color
6669 space under the second reference illuminant
6670 (android.sensor.referenceIlluminant2).
6672 This matrix will only be present if the second reference
6673 illuminant is present.
6677 <entry name="colorTransform1" type="rational"
6678 visibility="public" optional="true"
6679 type_notes="3x3 matrix in row-major-order" container="array"
6680 typedef="colorSpaceTransform">
6686 A matrix that transforms color values from CIE XYZ color space to
6687 reference sensor color space.
6690 This matrix is used to convert from the standard CIE XYZ color
6691 space to the reference sensor colorspace, and is used when processing
6694 The matrix is expressed as a 3x3 matrix in row-major-order, and
6695 contains a color transform matrix that maps colors from the CIE
6696 XYZ color space to the reference sensor color space (i.e. the
6697 "golden module" colorspace) under the first reference illuminant
6698 (android.sensor.referenceIlluminant1).
6700 The white points chosen in both the reference sensor color space
6701 and the CIE XYZ colorspace when calculating this transform will
6702 match the standard white point for the first reference illuminant
6703 (i.e. no chromatic adaptation will be applied by this transform).
6707 <entry name="colorTransform2" type="rational"
6708 visibility="public" optional="true"
6709 type_notes="3x3 matrix in row-major-order" container="array"
6710 typedef="colorSpaceTransform">
6716 A matrix that transforms color values from CIE XYZ color space to
6717 reference sensor color space.
6720 This matrix is used to convert from the standard CIE XYZ color
6721 space to the reference sensor colorspace, and is used when processing
6724 The matrix is expressed as a 3x3 matrix in row-major-order, and
6725 contains a color transform matrix that maps colors from the CIE
6726 XYZ color space to the reference sensor color space (i.e. the
6727 "golden module" colorspace) under the second reference illuminant
6728 (android.sensor.referenceIlluminant2).
6730 The white points chosen in both the reference sensor color space
6731 and the CIE XYZ colorspace when calculating this transform will
6732 match the standard white point for the second reference illuminant
6733 (i.e. no chromatic adaptation will be applied by this transform).
6735 This matrix will only be present if the second reference
6736 illuminant is present.
6740 <entry name="forwardMatrix1" type="rational"
6741 visibility="public" optional="true"
6742 type_notes="3x3 matrix in row-major-order" container="array"
6743 typedef="colorSpaceTransform">
6749 A matrix that transforms white balanced camera colors from the reference
6750 sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6753 This matrix is used to convert to the standard CIE XYZ colorspace, and
6754 is used when processing raw buffer data.
6756 This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6757 a color transform matrix that maps white balanced colors from the
6758 reference sensor color space to the CIE XYZ color space with a D50 white
6761 Under the first reference illuminant (android.sensor.referenceIlluminant1)
6762 this matrix is chosen so that the standard white point for this reference
6763 illuminant in the reference sensor colorspace is mapped to D50 in the
6768 <entry name="forwardMatrix2" type="rational"
6769 visibility="public" optional="true"
6770 type_notes="3x3 matrix in row-major-order" container="array"
6771 typedef="colorSpaceTransform">
6777 A matrix that transforms white balanced camera colors from the reference
6778 sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6781 This matrix is used to convert to the standard CIE XYZ colorspace, and
6782 is used when processing raw buffer data.
6784 This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6785 a color transform matrix that maps white balanced colors from the
6786 reference sensor color space to the CIE XYZ color space with a D50 white
6789 Under the second reference illuminant (android.sensor.referenceIlluminant2)
6790 this matrix is chosen so that the standard white point for this reference
6791 illuminant in the reference sensor colorspace is mapped to D50 in the
6794 This matrix will only be present if the second reference
6795 illuminant is present.
6799 <entry name="baseGainFactor" type="rational"
6801 <description>Gain factor from electrons to raw units when
6802 ISO=100</description>
6805 <entry name="blackLevelPattern" type="int32" visibility="public"
6806 optional="true" type_notes="2x2 raw count block" container="array"
6807 typedef="blackLevelPattern">
6812 A fixed black level offset for each of the color filter arrangement
6813 (CFA) mosaic channels.
6815 <range>&gt;= 0 for each.</range>
6817 This key specifies the zero light value for each of the CFA mosaic
6818 channels in the camera sensor. The maximal value output by the
6819 sensor is represented by the value in android.sensor.info.whiteLevel.
6821 The values are given in the same order as channels listed for the CFA
6822 layout key (see android.sensor.info.colorFilterArrangement), i.e. the
6823 nth value given corresponds to the black level offset for the nth
6824 color channel listed in the CFA.
6826 The black level values of captured images may vary for different
6827 capture settings (e.g., android.sensor.sensitivity). This key
6828 represents a coarse approximation for such a case. It is recommended to
6829 use android.sensor.dynamicBlackLevel or use pixels from
6830 android.sensor.opticalBlackRegions directly for captures when
6831 supported by the camera device, which provides more accurate black
6832 level values. For raw capture in particular, it is recommended to use
6833 pixels from android.sensor.opticalBlackRegions to calculate black
6834 level values for each frame.
6837 The values are given in row-column scan order, with the first value
6838 corresponding to the element of the CFA in row=0, column=0.
6842 <entry name="maxAnalogSensitivity" type="int32" visibility="public"
6843 optional="true" hwlevel="full">
6844 <description>Maximum sensitivity that is implemented
6845 purely through analog gain.</description>
6846 <details>For android.sensor.sensitivity values less than or
6847 equal to this, all applied gain must be analog. For
6848 values above this, the gain applied can be a mix of analog and
6853 <entry name="orientation" type="int32" visibility="public"
6855 <description>Clockwise angle through which the output image needs to be rotated to be
6856 upright on the device screen in its native orientation.
6858 <units>Degrees of clockwise rotation; always a multiple of
6860 <range>0, 90, 180, 270</range>
6862 Also defines the direction of rolling shutter readout, which is from top to bottom in
6863 the sensor's coordinate system.
6867 <entry name="profileHueSatMapDimensions" type="int32"
6868 visibility="system" optional="true"
6869 type_notes="Number of samples for hue, saturation, and value"
6875 The number of input samples for each dimension of
6876 android.sensor.profileHueSatMap.
6880 Saturation &gt;= 2,
6884 The number of input samples for the hue, saturation, and value
6885 dimension of android.sensor.profileHueSatMap. The order of the
6886 dimensions given is hue, saturation, value; where hue is the 0th
6893 <clone entry="android.sensor.exposureTime" kind="controls">
6895 <clone entry="android.sensor.frameDuration"
6896 kind="controls"></clone>
6897 <clone entry="android.sensor.sensitivity" kind="controls">
6899 <entry name="timestamp" type="int64" visibility="public"
6901 <description>Time at start of exposure of first
6902 row of the image sensor active array, in nanoseconds.</description>
6903 <units>Nanoseconds</units>
6904 <range>&gt; 0</range>
6905 <details>The timestamps are also included in all image
6906 buffers produced for the same capture, and will be identical
6909 When android.sensor.info.timestampSource `==` UNKNOWN,
6910 the timestamps measure time since an unspecified starting point,
6911 and are monotonically increasing. They can be compared with the
6912 timestamps for other captures from the same camera device, but are
6913 not guaranteed to be comparable to any other time source.
6915 When android.sensor.info.timestampSource `==` REALTIME, the
6916 timestamps measure time in the same timebase as {@link
6917 android.os.SystemClock#elapsedRealtimeNanos}, and they can
6918 be compared to other timestamps from other subsystems that
6919 are using that base.
6921 For reprocessing, the timestamp will match the start of exposure of
6922 the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
6923 timestamp} in the TotalCaptureResult that was used to create the
6924 reprocess capture request.
6927 All timestamps must be in reference to the kernel's
6928 CLOCK_BOOTTIME monotonic clock, which properly accounts for
6929 time spent asleep. This allows for synchronization with
6930 sensors that continue to operate while the system is
6933 If android.sensor.info.timestampSource `==` REALTIME,
6934 The timestamp must be synchronized with the timestamps from other
6935 sensor subsystems that are using the same timebase.
6937 For reprocessing, the input image's start of exposure can be looked up
6938 with android.sensor.timestamp from the metadata included in the
6943 <entry name="temperature" type="float"
6945 <description>The temperature of the sensor, sampled at the time
6946 exposure began for this frame.
6948 The thermal diode being queried should be inside the sensor PCB, or
6949 somewhere close to it.
6952 <units>Celsius</units>
6953 <range>Optional. This value is missing if no temperature is available.</range>
6956 <entry name="neutralColorPoint" type="rational" visibility="public"
6957 optional="true" container="array">
6962 The estimated camera neutral color in the native sensor colorspace at
6963 the time of capture.
6966 This value gives the neutral color point encoded as an RGB value in the
6967 native sensor color space. The neutral color point indicates the
6968 currently estimated white point of the scene illumination. It can be
6969 used to interpolate between the provided color transforms when
6970 processing raw sensor data.
6972 The order of the values is R, G, B; where R is in the lowest index.
6976 <entry name="noiseProfile" type="double" visibility="public"
6977 optional="true" type_notes="Pairs of noise model coefficients"
6978 container="array" typedef="pairDoubleDouble">
6981 <size>CFA Channels</size>
6984 Noise model coefficients for each CFA mosaic channel.
6987 This key contains two noise model coefficients for each CFA channel
6988 corresponding to the sensor amplification (S) and sensor readout
6989 noise (O). These are given as pairs of coefficients for each channel
6990 in the same order as channels listed for the CFA layout key
6991 (see android.sensor.info.colorFilterArrangement). This is
6992 represented as an array of Pair&lt;Double, Double&gt;, where
6993 the first member of the Pair at index n is the S coefficient and the
6994 second member is the O coefficient for the nth color channel in the CFA.
6996 These coefficients are used in a two parameter noise model to describe
6997 the amount of noise present in the image for each CFA channel. The
6998 noise model used here is:
7002 Where x represents the recorded signal of a CFA channel normalized to
7003 the range [0, 1], and S and O are the noise model coefficients for
7006 A more detailed description of the noise model can be found in the
7007 Adobe DNG specification for the NoiseProfile tag.
7010 For a CFA layout of RGGB, the list of coefficients would be given as
7011 an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
7012 for the red channel, S1 and O1 are the coefficients for the first green
7017 <entry name="profileHueSatMap" type="float"
7018 visibility="system" optional="true"
7019 type_notes="Mapping for hue, saturation, and value"
7022 <size>hue_samples</size>
7023 <size>saturation_samples</size>
7024 <size>value_samples</size>
7028 A mapping containing a hue shift, saturation scale, and value scale
7032 The hue shift is given in degrees; saturation and value scale factors are
7033 unitless and are between 0 and 1 inclusive
7036 hue_samples, saturation_samples, and value_samples are given in
7037 android.sensor.profileHueSatMapDimensions.
7039 Each entry of this map contains three floats corresponding to the
7040 hue shift, saturation scale, and value scale, respectively; where the
7041 hue shift has the lowest index. The map entries are stored in the key
7042 in nested loop order, with the value divisions in the outer loop, the
7043 hue divisions in the middle loop, and the saturation divisions in the
7044 inner loop. All zero input saturation entries are required to have a
7045 value scale factor of 1.0.
7049 <entry name="profileToneCurve" type="float"
7050 visibility="system" optional="true"
7051 type_notes="Samples defining a spline for a tone-mapping curve"
7054 <size>samples</size>
7058 A list of x,y samples defining a tone-mapping curve for gamma adjustment.
7061 Each sample has an input range of `[0, 1]` and an output range of
7062 `[0, 1]`. The first sample is required to be `(0, 0)`, and the last
7063 sample is required to be `(1, 1)`.
7066 This key contains a default tone curve that can be applied while
7067 processing the image as a starting point for user adjustments.
7068 The curve is specified as a list of value pairs in linear gamma.
7069 The curve is interpolated using a cubic spline.
7073 <entry name="greenSplit" type="float" visibility="public" optional="true">
7075 The worst-case divergence between Bayer green channels.
7081 This value is an estimate of the worst case split between the
7082 Bayer green channels in the red and blue rows in the sensor color
7085 The green split is calculated as follows:
7087 1. A 5x5 pixel (or larger) window W within the active sensor array is
7088 chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
7089 mosaic channels (R, Gr, Gb, B). The location and size of the window
7090 chosen is implementation defined, and should be chosen to provide a
7091 green split estimate that is both representative of the entire image
7092 for this camera sensor, and can be calculated quickly.
7093 1. The arithmetic mean of the green channels from the red
7094 rows (mean_Gr) within W is computed.
7095 1. The arithmetic mean of the green channels from the blue
7096 rows (mean_Gb) within W is computed.
7097 1. The maximum ratio R of the two means is computed as follows:
7098 `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
7100 The ratio R is the green split divergence reported for this property,
7101 which represents how much the green channels differ in the mosaic
7102 pattern. This value is typically used to determine the treatment of
7103 the green mosaic channels when demosaicing.
7105 The green split value can be roughly interpreted as follows:
7107 * R &lt; 1.03 is a negligible split (&lt;3% divergence).
7108 * 1.03 &lt;= R &lt;= 1.20 will require some software
7109 correction to avoid demosaic errors (3-20% divergence).
7110 * R &gt; 1.20 will require strong software correction to produce
7111 a usable image (&gt;20% divergence).
7114 The green split given may be a static value based on prior
7115 characterization of the camera sensor using the green split
7116 calculation method given here over a large, representative, sample
7117 set of images. Other methods of calculation that produce equivalent
7118 results, and can be interpreted in the same manner, may be used.
7124 <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
7129 A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
7130 when android.sensor.testPatternMode is SOLID_COLOR.
7133 Each color channel is treated as an unsigned 32-bit integer.
7134 The camera device then uses the most significant X bits
7135 that correspond to how many bits are in its Bayer raw sensor
7138 For example, a sensor with RAW10 Bayer output would use the
7139 10 most significant bits from each color channel.
7144 <entry name="testPatternMode" type="int32" visibility="public" optional="true"
7148 <notes>No test pattern mode is used, and the camera
7149 device returns captures from the image sensor.
7151 This is the default if the key is not set.</notes>
7155 Each pixel in `[R, G_even, G_odd, B]` is replaced by its
7156 respective color channel provided in
7157 android.sensor.testPatternData.
7161 android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
7163 All green pixels are 100% green. All red/blue pixels are black.
7165 android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
7167 All red pixels are 100% red. Only the odd green pixels
7168 are 100% green. All blue pixels are 100% black.
7173 All pixel data is replaced with an 8-bar color pattern.
7175 The vertical bars (left-to-right) are as follows:
7186 In general the image would look like the following:
7197 (B = Blue, K = Black)
7199 Each bar should take up 1/8 of the sensor pixel array width.
7200 When this is not possible, the bar size should be rounded
7201 down to the nearest integer and the pattern can repeat
7204 Each bar's height must always take up the full sensor
7207 Each pixel in this test pattern must be set to either
7208 0% intensity or 100% intensity.
7211 <value>COLOR_BARS_FADE_TO_GRAY
7213 The test pattern is similar to COLOR_BARS, except that
7214 each bar should start at its specified color at the top,
7215 and fade to gray at the bottom.
7217 Furthermore each bar is further subdivided into a left and
7218 right half. The left half should have a smooth gradient,
7219 and the right half should have a quantized gradient.
7221 In particular, the right half should consist of blocks of the
7222 same color for 1/16th active sensor pixel array width.
7224 The least significant bits in the quantized gradient should
7225 be copied from the most significant bits of the smooth gradient.
7227 The height of each bar should always be a multiple of 128.
7228 When this is not the case, the pattern should repeat at the bottom
7234 All pixel data is replaced by a pseudo-random sequence
7235 generated from a PN9 512-bit sequence (typically implemented
7236 in hardware with a linear feedback shift register).
7238 The generator should be reset at the beginning of each frame,
7239 and thus each subsequent raw frame with this test pattern should
7240 be exactly the same as the last.
7243 <value id="256">CUSTOM1
7244 <notes>The first custom test pattern. All custom patterns that are
7245 available only on this camera device are at least this numeric
7248 All of the custom test patterns will be static
7249 (that is the raw image must not vary from frame to frame).
7253 <description>When enabled, the sensor sends a test pattern instead of
7254 doing a real exposure from the camera.
7256 <range>android.sensor.availableTestPatternModes</range>
7258 When a test pattern is enabled, all manual sensor controls specified
7259 by android.sensor.* will be ignored. All other controls should
7262 For example, if manual flash is enabled, flash firing should still
7263 occur (and that the test pattern remain unmodified, since the flash
7264 would not actually affect it).
7269 All test patterns are specified in the Bayer domain.
7271 The HAL may choose to substitute test patterns from the sensor
7272 with test patterns from on-device memory. In that case, it should be
7273 indistinguishable to the ISP whether the data came from the
7274 sensor interconnect bus (such as CSI2) or memory.
7279 <clone entry="android.sensor.testPatternData" kind="controls">
7281 <clone entry="android.sensor.testPatternMode" kind="controls">
7285 <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
7286 type_notes="list of enums" container="array">
7290 <description>List of sensor test pattern modes for android.sensor.testPatternMode
7291 supported by this camera device.
7293 <range>Any value listed in android.sensor.testPatternMode</range>
7295 Defaults to OFF, and always includes OFF if defined.
7298 All custom modes must be >= CUSTOM1.
7303 <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
7304 <description>Duration between the start of first row exposure
7305 and the start of last row exposure.</description>
7306 <units>Nanoseconds</units>
7307 <range> &gt;= 0 and &lt;
7308 {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
7310 This is the exposure time skew between the first and last
7311 row exposure start times. The first row and the last row are
7312 the first and last rows inside of the
7313 android.sensor.info.activeArraySize.
7315 For typical camera sensors that use rolling shutters, this is also equivalent
7316 to the frame readout time.
7319 The HAL must report `0` if the sensor is using global shutter, where all pixels begin
7320 exposure at the same time.
7326 <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true"
7327 container="array" typedef="rectangle">
7330 <size>num_regions</size>
7332 <description>List of disjoint rectangles indicating the sensor
7333 optically shielded black pixel regions.
7336 In most camera sensors, the active array is surrounded by some
7337 optically shielded pixel areas. By blocking light, these pixels
7338 provide a reliable black reference for black level compensation
7339 in active array region.
7341 This key provides a list of disjoint rectangles specifying the
7342 regions of optically shielded (with metal shield) black pixel
7343 regions if the camera device is capable of reading out these black
7344 pixels in the output raw images. In comparison to the fixed black
7345 level values reported by android.sensor.blackLevelPattern, this key
7346 may provide a more accurate way for the application to calculate
7347 black level of each captured raw images.
7349 When this key is reported, the android.sensor.dynamicBlackLevel and
7350 android.sensor.dynamicWhiteLevel will also be reported.
7353 The data representation is `int[4]`, which maps to `(left, top, width, height)`.
7356 This array contains (xmin, ymin, width, height). The (xmin, ymin)
7357 must be &gt;= (0,0) and &lt;=
7358 android.sensor.info.pixelArraySize. The (width, height) must be
7359 &lt;= android.sensor.info.pixelArraySize. Each region must be
7360 outside the region reported by
7361 android.sensor.info.preCorrectionActiveArraySize.
7363 The HAL must report minimal number of disjoint regions for the
7364 optically shielded black pixel regions. For example, if a region can
7365 be covered by one rectangle, the HAL must not split this region into
7366 multiple rectangles.
7371 <entry name="dynamicBlackLevel" type="float" visibility="public"
7372 optional="true" type_notes="2x2 raw count block" container="array">
7377 A per-frame dynamic black level offset for each of the color filter
7378 arrangement (CFA) mosaic channels.
7380 <range>&gt;= 0 for each.</range>
7382 Camera sensor black levels may vary dramatically for different
7383 capture settings (e.g. android.sensor.sensitivity). The fixed black
7384 level reported by android.sensor.blackLevelPattern may be too
7385 inaccurate to represent the actual value on a per-frame basis. The
7386 camera device internal pipeline relies on reliable black level values
7387 to process the raw images appropriately. To get the best image
7388 quality, the camera device may choose to estimate the per frame black
7389 level values either based on optically shielded black regions
7390 (android.sensor.opticalBlackRegions) or its internal model.
7392 This key reports the camera device estimated per-frame zero light
7393 value for each of the CFA mosaic channels in the camera sensor. The
7394 android.sensor.blackLevelPattern may only represent a coarse
7395 approximation of the actual black level values. This value is the
7396 black level used in camera device internal image processing pipeline
7397 and is generally more accurate than the fixed black level values.
7398 However, since they are estimated values by the camera device, they
7399 may not be as accurate as the black level values calculated from the
7400 optical black pixels reported by android.sensor.opticalBlackRegions.
7402 The values are given in the same order as channels listed for the CFA
7403 layout key (see android.sensor.info.colorFilterArrangement), i.e. the
7404 nth value given corresponds to the black level offset for the nth
7405 color channel listed in the CFA.
7407 This key will be available if android.sensor.opticalBlackRegions is available or the
7408 camera device advertises this key via {@link
7409 android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
7412 The values are given in row-column scan order, with the first value
7413 corresponding to the element of the CFA in row=0, column=0.
7417 <entry name="dynamicWhiteLevel" type="int32" visibility="public"
7420 Maximum raw value output by sensor for this frame.
7422 <range> &gt;= 0</range>
7424 Since the android.sensor.blackLevelPattern may change for different
7425 capture settings (e.g., android.sensor.sensitivity), the white
7426 level will change accordingly. This key is similar to
7427 android.sensor.info.whiteLevel, but specifies the camera device
7428 estimated white level for each frame.
7430 This key will be available if android.sensor.opticalBlackRegions is
7431 available or the camera device advertises this key via
7432 {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
7435 The full bit depth of the sensor must be available in the raw data,
7436 so the value for linear sensors should not be significantly lower
7437 than maximum raw value supported, i.e. 2^(sensor bits per pixel).
7443 <entry name="opaqueRawSize" type="int32" visibility="system" container="array">
7448 <description>Size in bytes for all the listed opaque RAW buffer sizes</description>
7449 <range>Must be large enough to fit the opaque RAW of corresponding size produced by
7452 These configurations are listed as `(width, height, size_in_bytes)` tuples.
7453 This is used for sizing the gralloc buffers for opaque RAW buffers.
7454 All RAW_OPAQUE output stream configuration listed in
7455 android.scaler.availableStreamConfigurations will have a corresponding tuple in
7459 This key is added in legacy HAL3.4.
7461 For legacy HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this
7462 key. For legacy HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, camera
7463 framework will derive this key by assuming each pixel takes two bytes and no padding bytes
7469 <section name="shading">
7471 <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
7474 <notes>No lens shading correction is applied.</notes></value>
7476 <notes>Apply lens shading corrections, without slowing
7477 frame rate relative to sensor raw output</notes></value>
7479 <notes>Apply high-quality lens shading correction, at the
7480 cost of possibly reduced frame rate.</notes></value>
7482 <description>Quality of lens shading correction applied
7483 to the image data.</description>
7484 <range>android.shading.availableModes</range>
7486 When set to OFF mode, no lens shading correction will be applied by the
7487 camera device, and an identity lens shading map data will be provided
7488 if `android.statistics.lensShadingMapMode == ON`. For example, for lens
7489 shading map with size of `[ 4, 3 ]`,
7490 the output android.statistics.lensShadingCorrectionMap for this case will be an identity
7493 [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
7494 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
7495 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
7496 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
7497 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
7498 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
7500 When set to other modes, lens shading correction will be applied by the camera
7501 device. Applications can request lens shading map data by setting
7502 android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
7503 shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
7504 data will be the one applied by the camera device for this capture request.
7506 The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
7507 the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
7508 AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=`
7509 OFF), to get best results, it is recommended that the applications wait for the AE and AWB
7510 to be converged before using the returned shading map data.
7513 <entry name="strength" type="byte">
7514 <description>Control the amount of shading correction
7515 applied to the images</description>
7516 <units>unitless: 1-10; 10 is full shading
7517 compensation</units>
7522 <clone entry="android.shading.mode" kind="controls">
7526 <entry name="availableModes" type="byte" visibility="public"
7527 type_notes="List of enums (android.shading.mode)." container="array"
7528 typedef="enumList" hwlevel="legacy">
7533 List of lens shading modes for android.shading.mode that are supported by this camera device.
7535 <range>Any value listed in android.shading.mode</range>
7537 This list contains lens shading modes that can be set for the camera device.
7538 Camera devices that support the MANUAL_POST_PROCESSING capability will always
7539 list OFF and FAST mode. This includes all FULL level devices.
7540 LEGACY devices will always only support FAST mode.
7543 HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
7544 available on the camera device, but the underlying implementation can be the same for
7545 both modes. That is, if the highest quality implementation on the camera device does not
7546 slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
7551 <section name="statistics">
7553 <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
7557 <notes>Do not include face detection statistics in capture
7558 results.</notes></value>
7559 <value optional="true">SIMPLE
7560 <notes>Return face rectangle and confidence values only.
7562 <value optional="true">FULL
7563 <notes>Return all face
7566 In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
7569 <description>Operating mode for the face detector
7571 <range>android.statistics.info.availableFaceDetectModes</range>
7572 <details>Whether face detection is enabled, and whether it
7573 should output just the basic fields or the full set of
7576 SIMPLE mode must fill in android.statistics.faceRectangles and
7577 android.statistics.faceScores.
7578 FULL mode must also fill in android.statistics.faceIds, and
7579 android.statistics.faceLandmarks.
7583 <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
7588 <description>Operating mode for histogram
7589 generation</description>
7592 <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
7597 <description>Operating mode for sharpness map
7598 generation</description>
7601 <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
7605 <notes>Hot pixel map production is disabled.
7608 <notes>Hot pixel map production is enabled.
7612 Operating mode for hot pixel map generation.
7614 <range>android.statistics.info.availableHotPixelMapModes</range>
7616 If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
7617 If set to `false`, no hot pixel map will be returned.
7624 <namespace name="info">
7625 <entry name="availableFaceDetectModes" type="byte"
7627 type_notes="List of enums from android.statistics.faceDetectMode"
7634 <description>List of face detection modes for android.statistics.faceDetectMode that are
7635 supported by this camera device.
7637 <range>Any value listed in android.statistics.faceDetectMode</range>
7638 <details>OFF is always supported.
7641 <entry name="histogramBucketCount" type="int32">
7642 <description>Number of histogram buckets
7643 supported</description>
7644 <range>&gt;= 64</range>
7647 <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
7648 <description>The maximum number of simultaneously detectable
7649 faces.</description>
7650 <range>0 for cameras without available face detection; otherwise:
7651 `>=4` for LIMITED or FULL hwlevel devices or
7652 `>0` for LEGACY devices.</range>
7655 <entry name="maxHistogramCount" type="int32">
7656 <description>Maximum value possible for a histogram
7657 bucket</description>
7660 <entry name="maxSharpnessMapValue" type="int32">
7661 <description>Maximum value possible for a sharpness map
7662 region.</description>
7665 <entry name="sharpnessMapSize" type="int32"
7666 type_notes="width x height" container="array" typedef="size">
7670 <description>Dimensions of the sharpness
7672 <range>Must be at least 32 x 32</range>
7675 <entry name="availableHotPixelMapModes" type="byte" visibility="public"
7676 type_notes="list of enums" container="array" typedef="boolean">
7681 List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
7682 supported by this camera device.
7684 <range>Any value listed in android.statistics.hotPixelMapMode</range>
7686 If no hotpixel map output is available for this camera device, this will contain only
7689 ON is always supported on devices with the RAW capability.
7694 <entry name="availableLensShadingMapModes" type="byte" visibility="public"
7695 type_notes="list of enums" container="array" typedef="enumList">
7700 List of lens shading map output modes for android.statistics.lensShadingMapMode that
7701 are supported by this camera device.
7703 <range>Any value listed in android.statistics.lensShadingMapMode</range>
7705 If no lens shading map output is available for this camera device, this key will
7708 ON is always supported on devices with the RAW capability.
7709 LEGACY mode devices will always only support OFF.
7715 <clone entry="android.statistics.faceDetectMode"
7716 kind="controls"></clone>
7717 <entry name="faceIds" type="int32" visibility="ndk_public"
7718 container="array" hwlevel="legacy">
7722 <description>List of unique IDs for detected faces.</description>
7724 Each detected face is given a unique ID that is valid for as long as the face is visible
7725 to the camera device. A face that leaves the field of view and later returns may be
7728 Only available if android.statistics.faceDetectMode == FULL</details>
7731 <entry name="faceLandmarks" type="int32" visibility="ndk_public"
7732 type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
7733 container="array" hwlevel="legacy">
7738 <description>List of landmarks for detected
7739 faces.</description>
7741 The coordinate system is that of android.sensor.info.activeArraySize, with
7742 `(0, 0)` being the top-left pixel of the active array.
7744 Only available if android.statistics.faceDetectMode == FULL</details>
7747 <entry name="faceRectangles" type="int32" visibility="ndk_public"
7748 type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
7749 container="array" typedef="rectangle" hwlevel="legacy">
7754 <description>List of the bounding rectangles for detected
7755 faces.</description>
7757 The coordinate system is that of android.sensor.info.activeArraySize, with
7758 `(0, 0)` being the top-left pixel of the active array.
7760 Only available if android.statistics.faceDetectMode != OFF</details>
7762 The data representation is `int[4]`, which maps to `(left, top, width, height)`.
7766 <entry name="faceScores" type="byte" visibility="ndk_public"
7767 container="array" hwlevel="legacy">
7771 <description>List of the face confidence scores for
7772 detected faces</description>
7773 <range>1-100</range>
7774 <details>Only available if android.statistics.faceDetectMode != OFF.
7777 The value should be meaningful (for example, setting 100 at
7778 all times is illegal).</hal_details>
7781 <entry name="faces" type="int32" visibility="java_public" synthetic="true"
7782 container="array" typedef="face" hwlevel="legacy">
7786 <description>List of the faces detected through camera face detection
7787 in this capture.</description>
7789 Only available if android.statistics.faceDetectMode `!=` OFF.
7792 <entry name="histogram" type="int32"
7793 type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
7799 <description>A 3-channel histogram based on the raw
7800 sensor data</description>
7801 <details>The k'th bucket (0-based) covers the input range
7802 (with w = android.sensor.info.whiteLevel) of [ k * w/N,
7803 (k + 1) * w / N ). If only a monochrome sharpness map is
7804 supported, all channels should have the same data</details>
7807 <clone entry="android.statistics.histogramMode"
7808 kind="controls"></clone>
7809 <entry name="sharpnessMap" type="int32"
7810 type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
7817 <description>A 3-channel sharpness map, based on the raw
7818 sensor data</description>
7819 <details>If only a monochrome sharpness map is supported,
7820 all channels should have the same data</details>
7823 <clone entry="android.statistics.sharpnessMapMode"
7824 kind="controls"></clone>
7825 <entry name="lensShadingCorrectionMap" type="byte" visibility="java_public"
7826 typedef="lensShadingMap" hwlevel="full">
7827 <description>The shading map is a low-resolution floating-point map
7828 that lists the coefficients used to correct for vignetting, for each
7829 Bayer color channel.</description>
7830 <range>Each gain factor is &gt;= 1</range>
7832 The map provided here is the same map that is used by the camera device to
7833 correct both color shading and vignetting for output non-RAW images.
7835 When there is no lens shading correction applied to RAW
7836 output images (android.sensor.info.lensShadingApplied `==`
7837 false), this map is the complete lens shading correction
7838 map; when there is some lens shading correction applied to
7839 the RAW output image (android.sensor.info.lensShadingApplied
7840 `==` true), this map reports the remaining lens shading
7841 correction map that needs to be applied to get shading
7842 corrected images that match the camera device's output for
7845 For a complete shading correction map, the least shaded
7846 section of the image will have a gain factor of 1; all
7847 other sections will have gains above 1.
7849 When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7850 will take into account the colorCorrection settings.
7852 The shading map is for the entire active pixel array, and is not
7853 affected by the crop region specified in the request. Each shading map
7854 entry is the value of the shading compensation map over a specific
7855 pixel on the sensor. Specifically, with a (N x M) resolution shading
7856 map, and an active pixel array size (W x H), shading map entry
7857 (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7858 pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7859 The map is assumed to be bilinearly interpolated between the sample points.
7861 The channel order is [R, Geven, Godd, B], where Geven is the green
7862 channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7863 The shading map is stored in a fully interleaved format.
7865 The shading map will generally have on the order of 30-40 rows and columns,
7866 and will be smaller than 64x64.
7868 As an example, given a very small map defined as:
7870 width,height = [ 4, 3 ]
7872 [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
7873 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
7874 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
7875 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
7876 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
7877 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
7879 The low-resolution scaling map images for each channel are
7880 (displayed using nearest-neighbor interpolation):
7882 ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7883 ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7884 ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7885 ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7887 As a visualization only, inverting the full-color map to recover an
7888 image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
7890 ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
7893 <entry name="lensShadingMap" type="float" visibility="ndk_public"
7894 type_notes="2D array of float gain factors per channel to correct lens shading"
7895 container="array" hwlevel="full">
7901 <description>The shading map is a low-resolution floating-point map
7902 that lists the coefficients used to correct for vignetting and color shading,
7903 for each Bayer color channel of RAW image data.</description>
7904 <range>Each gain factor is &gt;= 1</range>
7906 The map provided here is the same map that is used by the camera device to
7907 correct both color shading and vignetting for output non-RAW images.
7909 When there is no lens shading correction applied to RAW
7910 output images (android.sensor.info.lensShadingApplied `==`
7911 false), this map is the complete lens shading correction
7912 map; when there is some lens shading correction applied to
7913 the RAW output image (android.sensor.info.lensShadingApplied
7914 `==` true), this map reports the remaining lens shading
7915 correction map that needs to be applied to get shading
7916 corrected images that match the camera device's output for
7919 For a complete shading correction map, the least shaded
7920 section of the image will have a gain factor of 1; all
7921 other sections will have gains above 1.
7923 When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7924 will take into account the colorCorrection settings.
7926 The shading map is for the entire active pixel array, and is not
7927 affected by the crop region specified in the request. Each shading map
7928 entry is the value of the shading compensation map over a specific
7929 pixel on the sensor. Specifically, with a (N x M) resolution shading
7930 map, and an active pixel array size (W x H), shading map entry
7931 (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7932 pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7933 The map is assumed to be bilinearly interpolated between the sample points.
7935 The channel order is [R, Geven, Godd, B], where Geven is the green
7936 channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7937 The shading map is stored in a fully interleaved format, and its size
7938 is provided in the camera static metadata by android.lens.info.shadingMapSize.
7940 The shading map will generally have on the order of 30-40 rows and columns,
7941 and will be smaller than 64x64.
7943 As an example, given a very small map defined as:
7945 android.lens.info.shadingMapSize = [ 4, 3 ]
7946 android.statistics.lensShadingMap =
7947 [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
7948 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
7949 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
7950 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
7951 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
7952 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
7954 The low-resolution scaling map images for each channel are
7955 (displayed using nearest-neighbor interpolation):
7957 ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7958 ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7959 ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7960 ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7962 As a visualization only, inverting the full-color map to recover an
7963 image of a gray wall (using bicubic interpolation for visual quality)
7964 as captured by the sensor gives:
7966 ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
7968 Note that the RAW image data might be subject to lens shading
7969 correction not reported on this map. Query
7970 android.sensor.info.lensShadingApplied to see if RAW image data has been subject
7971 to lens shading correction. If android.sensor.info.lensShadingApplied
7972 is TRUE, the RAW image data is subject to partial or full lens shading
7973 correction. In the case full lens shading correction is applied to RAW
7974 images, the gain factor map reported in this key will contain all 1.0 gains.
7975 In other words, the map reported in this key is the remaining lens shading
7976 that needs to be applied on the RAW image to get images without lens shading
7977 artifacts. See android.request.maxNumOutputRaw for a list of RAW image
7981 The lens shading map calculation may depend on exposure and white balance statistics.
7982 When AE and AWB are in AUTO modes
7983 (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
7984 may have all the information it needs to generate the most accurate lens shading map. When
7985 AE or AWB are in manual mode
7986 (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
7987 may be adversely impacted by manual exposure or white balance parameters. To avoid
7988 generating unreliable shading map data, the HAL may choose to lock the shading map with
7989 the latest known good map generated when the AE and AWB are in AUTO modes.
7992 <entry name="predictedColorGains" type="float"
7996 type_notes="A 1D array of floats for 4 color channel gains"
8001 <description>The best-fit color channel gains calculated
8002 by the camera device's statistics units for the current output frame.
8005 This may be different than the gains used for this frame,
8006 since statistics processing on data from a new frame
8007 typically completes after the transform has already been
8008 applied to that frame.
8010 The 4 channel gains are defined in Bayer domain,
8011 see android.colorCorrection.gains for details.
8013 This value should always be calculated by the auto-white balance (AWB) block,
8014 regardless of the android.control.* current values.
8017 <entry name="predictedColorTransform" type="rational"
8021 type_notes="3x3 rational matrix in row-major order"
8027 <description>The best-fit color transform matrix estimate
8028 calculated by the camera device's statistics units for the current
8029 output frame.</description>
8030 <details>The camera device will provide the estimate from its
8031 statistics unit on the white balance transforms to use
8032 for the next frame. These are the values the camera device believes
8033 are the best fit for the current output frame. This may
8034 be different than the transform used for this frame, since
8035 statistics processing on data from a new frame typically
8036 completes after the transform has already been applied to
8039 These estimates must be provided for all frames, even if
8040 capture settings and color transforms are set by the application.
8042 This value should always be calculated by the auto-white balance (AWB) block,
8043 regardless of the android.control.* current values.
8046 <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
8050 <notes>The camera device does not detect any flickering illumination
8051 in the current scene.</notes></value>
8053 <notes>The camera device detects illumination flickering at 50Hz
8054 in the current scene.</notes></value>
8056 <notes>The camera device detects illumination flickering at 60Hz
8057 in the current scene.</notes></value>
8059 <description>The camera device estimated scene illumination lighting
8060 frequency.</description>
8062 Many light sources, such as most fluorescent lights, flicker at a rate
8063 that depends on the local utility power standards. This flicker must be
8064 accounted for by auto-exposure routines to avoid artifacts in captured images.
8065 The camera device uses this entry to tell the application what the scene
8066 illuminant frequency is.
8068 When manual exposure control is enabled
8069 (`android.control.aeMode == OFF` or `android.control.mode ==
8070 OFF`), the android.control.aeAntibandingMode doesn't perform
8071 antibanding, and the application can ensure it selects
8072 exposure times that do not cause banding issues by looking
8073 into this metadata field. See
8074 android.control.aeAntibandingMode for more details.
8076 Reports NONE if there doesn't appear to be flickering illumination.
8079 <clone entry="android.statistics.hotPixelMapMode" kind="controls">
8081 <entry name="hotPixelMap" type="int32" visibility="public"
8082 type_notes="list of coordinates based on android.sensor.pixelArraySize"
8083 container="array" typedef="point">
8089 List of `(x, y)` coordinates of hot/defective pixels on the sensor.
8092 n <= number of pixels on the sensor.
8093 The `(x, y)` coordinates must be bounded by
8094 android.sensor.info.pixelArraySize.
8097 A coordinate `(x, y)` must lie between `(0, 0)`, and
8098 `(width - 1, height - 1)` (inclusive), which are the top-left and
8099 bottom-right of the pixel array, respectively. The width and
8100 height dimensions are given in android.sensor.info.pixelArraySize.
8101 This may include hot pixels that lie outside of the active array
8102 bounds given by android.sensor.info.activeArraySize.
8105 A hotpixel map contains the coordinates of pixels on the camera
8106 sensor that do not report valid values (usually due to defects in
8107 the camera sensor). This includes pixels that are stuck at certain
8108 values, or have a response that does not accurately encode the
8109 incoming light from the scene.
8111 To avoid performance issues, there should be significantly fewer hot
8112 pixels than actual pixels on the camera sensor.
8119 <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
8122 <notes>Do not include a lens shading map in the capture result.</notes></value>
8124 <notes>Include a lens shading map in the capture result.</notes></value>
8126 <description>Whether the camera device will output the lens
8127 shading map in output result metadata.</description>
8128 <range>android.statistics.info.availableLensShadingMapModes</range>
8129 <details>When set to ON,
8130 android.statistics.lensShadingMap will be provided in
8131 the output result metadata.
8133 ON is always supported on devices with the RAW capability.
8139 <clone entry="android.statistics.lensShadingMapMode" kind="controls">
8143 <section name="tonemap">
8145 <entry name="curveBlue" type="float" visibility="ndk_public"
8146 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
8147 container="array" hwlevel="full">
8152 <description>Tonemapping / contrast / gamma curve for the blue
8153 channel, to use when android.tonemap.mode is
8154 CONTRAST_CURVE.</description>
8155 <details>See android.tonemap.curveRed for more details.</details>
8157 <entry name="curveGreen" type="float" visibility="ndk_public"
8158 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
8159 container="array" hwlevel="full">
8164 <description>Tonemapping / contrast / gamma curve for the green
8165 channel, to use when android.tonemap.mode is
8166 CONTRAST_CURVE.</description>
8167 <details>See android.tonemap.curveRed for more details.</details>
8169 <entry name="curveRed" type="float" visibility="ndk_public"
8170 type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
8171 container="array" hwlevel="full">
8176 <description>Tonemapping / contrast / gamma curve for the red
8177 channel, to use when android.tonemap.mode is
8178 CONTRAST_CURVE.</description>
8179 <range>0-1 on both input and output coordinates, normalized
8180 as a floating-point value such that 0 == black and 1 == white.
8183 Each channel's curve is defined by an array of control points:
8185 android.tonemap.curveRed =
8186 [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
8187 2 <= N <= android.tonemap.maxCurvePoints
8189 These are sorted in order of increasing `Pin`; it is
8190 required that input values 0.0 and 1.0 are included in the list to
8191 define a complete mapping. For input values between control points,
8192 the camera device must linearly interpolate between the control
8195 Each curve can have an independent number of points, and the number
8196 of points can be less than max (that is, the request doesn't have to
8197 always provide a curve with number of points equivalent to
8198 android.tonemap.maxCurvePoints).
8200 A few examples, and their corresponding graphical mappings; these
8201 only specify the red channel and the precision is limited to 4
8202 digits, for conciseness.
8206 android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
8208 ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
8212 android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
8214 ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
8216 Gamma 1/2.2 mapping, with 16 control points:
8218 android.tonemap.curveRed = [
8219 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
8220 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
8221 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
8222 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
8224 ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
8226 Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
8228 android.tonemap.curveRed = [
8229 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
8230 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
8231 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
8232 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
8234 ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8237 For good quality of mapping, at least 128 control points are
8240 A typical use case of this would be a gamma-1/2.2 curve, with as many
8241 control points used as are available.
8244 <entry name="curve" type="float" visibility="java_public" synthetic="true"
8245 typedef="tonemapCurve"
8247 <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
8248 is CONTRAST_CURVE.</description>
8250 The tonemapCurve consists of three curves for each of red, green, and blue
8251 channels respectively. The following example uses the red channel as an
8252 example. The same logic applies to the green and blue channels.
8253 Each channel's curve is defined by an array of control points:
8256 [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
8257 2 <= N <= android.tonemap.maxCurvePoints
8259 These are sorted in order of increasing `Pin`; it is always
8260 guaranteed that input values 0.0 and 1.0 are included in the list to
8261 define a complete mapping. For input values between control points,
8262 the camera device must linearly interpolate between the control
8265 Each curve can have an independent number of points, and the number
8266 of points can be less than max (that is, the request doesn't have to
8267 always provide a curve with number of points equivalent to
8268 android.tonemap.maxCurvePoints).
8270 A few examples, and their corresponding graphical mappings; these
8271 only specify the red channel and the precision is limited to 4
8272 digits, for conciseness.
8276 curveRed = [ (0, 0), (1.0, 1.0) ]
8278 ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
8282 curveRed = [ (0, 1.0), (1.0, 0) ]
8284 ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
8286 Gamma 1/2.2 mapping, with 16 control points:
8289 (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
8290 (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
8291 (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
8292 (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
8294 ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
8296 Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
8299 (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
8300 (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
8301 (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
8302 (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
8304 ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8307 This entry is created by the framework from the curveRed, curveGreen and
8311 <entry name="mode" type="byte" visibility="public" enum="true"
8314 <value>CONTRAST_CURVE
8315 <notes>Use the tone mapping curve specified in
8316 the android.tonemap.curve* entries.
8318 All color enhancement and tonemapping must be disabled, except
8319 for applying the tonemapping curve specified by
8320 android.tonemap.curve.
8322 Must not slow down frame rate relative to raw
8328 Advanced gamma mapping and color enhancement may be applied, without
8329 reducing frame rate compared to raw sensor output.
8334 High-quality gamma mapping and color enhancement will be applied, at
8335 the cost of possibly reduced frame rate compared to raw sensor output.
8340 Use the gamma value specified in android.tonemap.gamma to perform
8343 All color enhancement and tonemapping must be disabled, except
8344 for applying the tonemapping curve specified by android.tonemap.gamma.
8346 Must not slow down frame rate relative to raw sensor output.
8351 Use the preset tonemapping curve specified in
8352 android.tonemap.presetCurve to perform tonemapping.
8354 All color enhancement and tonemapping must be disabled, except
8355 for applying the tonemapping curve specified by
8356 android.tonemap.presetCurve.
8358 Must not slow down frame rate relative to raw sensor output.
8362 <description>High-level global contrast/gamma/tonemapping control.
8364 <range>android.tonemap.availableToneMapModes</range>
8366 When switching to an application-defined contrast curve by setting
8367 android.tonemap.mode to CONTRAST_CURVE, the curve is defined
8368 per-channel with a set of `(in, out)` points that specify the
8369 mapping from input high-bit-depth pixel value to the output
8370 low-bit-depth value. Since the actual pixel ranges of both input
8371 and output may change depending on the camera pipeline, the values
8372 are specified by normalized floating-point numbers.
8374 More-complex color mapping operations such as 3D color look-up
8375 tables, selective chroma enhancement, or other non-linear color
8376 transforms will be disabled when android.tonemap.mode is
8379 When using either FAST or HIGH_QUALITY, the camera device will
8380 emit its own tonemap curve in android.tonemap.curve.
8381 These values are always available, and as close as possible to the
8382 actually used nonlinear/nonglobal transforms.
8384 If a request is sent with CONTRAST_CURVE with the camera device's
8385 provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
8386 roughly the same.</details>
8390 <entry name="maxCurvePoints" type="int32" visibility="public"
8392 <description>Maximum number of supported points in the
8393 tonemap curve that can be used for android.tonemap.curve.
8396 If the actual number of points provided by the application (in android.tonemap.curve*) is
8397 less than this maximum, the camera device will resample the curve to its internal
8398 representation, using linear interpolation.
8400 The output curves in the result metadata may have a different number
8401 of points than the input curves, and will represent the actual
8402 hardware curves used as closely as possible when linearly interpolated.
8405 This value must be at least 64. This should be at least 128.
8408 <entry name="availableToneMapModes" type="byte" visibility="public"
8409 type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
8414 List of tonemapping modes for android.tonemap.mode that are supported by this camera
8417 <range>Any value listed in android.tonemap.mode</range>
8419 Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
8420 at least one of the below mode combinations:
8422 * CONTRAST_CURVE, FAST and HIGH_QUALITY
8423 * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
8425 This includes all FULL level devices.
8428 HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
8429 on the camera device, but the underlying implementation can be the same for both modes.
8430 That is, if the highest quality implementation on the camera device does not slow down
8431 capture rate, then FAST and HIGH_QUALITY will generate the same output.
8436 <clone entry="android.tonemap.curveBlue" kind="controls">
8438 <clone entry="android.tonemap.curveGreen" kind="controls">
8440 <clone entry="android.tonemap.curveRed" kind="controls">
8442 <clone entry="android.tonemap.curve" kind="controls">
8444 <clone entry="android.tonemap.mode" kind="controls">
8448 <entry name="gamma" type="float" visibility="public">
8449 <description> Tonemapping curve to use when android.tonemap.mode is
8453 The tonemap curve will be defined by the following formula:
8454 * OUT = pow(IN, 1.0 / gamma)
8455 where IN and OUT are the input and output pixel values, scaled to range [0.0, 1.0],
8456 pow is the power function and gamma is the gamma value specified by this
8459 The same curve will be applied to all color channels. The camera device
8460 may clip the input gamma value to its supported range. The actual applied
8461 value will be returned in capture result.
8463 The valid range of gamma value varies on different devices, but values
8464 within [1.0, 5.0] are guaranteed not to be clipped.
8467 <entry name="presetCurve" type="byte" visibility="public" enum="true">
8470 <notes>Tonemapping curve is defined by sRGB</notes>
8473 <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
8476 <description> Tonemapping curve to use when android.tonemap.mode is
8480 The tonemap curve will be defined by the specified standard.
8482 sRGB (approximated by 16 control points):
8484 ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8486 Rec. 709 (approximated by 16 control points):
8488 ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
8490 Note that above figures show a 16 control points approximation of preset
8491 curves. Camera devices may apply a different approximation to the curve.
8496 <clone entry="android.tonemap.gamma" kind="controls">
8498 <clone entry="android.tonemap.presetCurve" kind="controls">
8502 <section name="led">
8504 <entry name="transmit" type="byte" visibility="hidden" optional="true"
8505 enum="true" typedef="boolean">
8510 <description>This LED is nominally used to indicate to the user
8511 that the camera is powered on and may be streaming images back to the
8512 Application Processor. In certain rare circumstances, the OS may
8513 disable this when video is processed locally and not transmitted to
8514 any untrusted applications.
8516 In particular, the LED *must* always be on when the data could be
8517 transmitted off the device. The LED *should* always be on whenever
8518 data is stored locally on the device.
8520 The LED *may* be off if a trusted application is using the data that
8521 doesn't violate the above rules.
8526 <clone entry="android.led.transmit" kind="controls"></clone>
8529 <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
8537 <notes>android.led.transmit control is used.</notes>
8540 <description>A list of camera LEDs that are available on this system.
8545 <section name="info">
8547 <entry name="supportedHardwareLevel" type="byte" visibility="public"
8548 enum="true" hwlevel="legacy">
8553 This camera device does not have enough capabilities to qualify as a `FULL` device or
8556 Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
8557 {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
8558 createCaptureSession} documentation are guaranteed to be supported.
8560 All `LIMITED` devices support the `BACKWARD_COMPATIBLE` capability, indicating basic
8561 support for color image capture. The only exception is that the device may
8562 alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth
8563 measurements and not color images.
8565 `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger
8566 to lock exposure metering (and calculate flash power, for cameras with flash) before
8567 capturing a high-quality still image.
8569 A `LIMITED` device that only lists the `BACKWARD_COMPATIBLE` capability is only
8570 required to support full-automatic operation and post-processing (`OFF` is not
8571 supported for android.control.aeMode, android.control.afMode, or
8572 android.control.awbMode)
8574 Additional capabilities may optionally be supported by a `LIMITED`-level device, and
8575 can be checked for in android.request.availableCapabilities.
8581 This camera device is capable of supporting advanced imaging applications.
8583 The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
8584 {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
8585 createCaptureSession} documentation are guaranteed to be supported.
8587 A `FULL` device will support below capabilities:
8589 * `BURST_CAPTURE` capability (android.request.availableCapabilities contains
8591 * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
8592 * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`)
8593 * Manual post-processing control (android.request.availableCapabilities contains
8594 `MANUAL_POST_PROCESSING`)
8595 * The required exposure time range defined in android.sensor.info.exposureTimeRange
8596 * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
8599 Pre-API level 23, FULL devices also supported arbitrary cropping region
8600 (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level
8601 23, and `FULL` devices may only support `CENTERED` cropping.
8607 This camera device is running in backward compatibility mode.
8609 Only the stream configurations listed in the `LEGACY` table in the {@link
8610 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
8611 createCaptureSession} documentation are supported.
8613 A `LEGACY` device does not support per-frame control, manual sensor control, manual
8614 post-processing, arbitrary cropping regions, and has relaxed performance constraints.
8615 No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a
8616 `LEGACY` device in android.request.availableCapabilities.
8618 In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY`
8619 devices. Instead, every request that includes a JPEG-format output target is treated
8620 as triggering a still capture, internally executing a precapture trigger. This may
8621 fire the flash for flash power metering during precapture, and then fire the flash
8622 for the final capture, if a flash is available on the device and the AE mode is set to
8629 This camera device is capable of YUV reprocessing and RAW data capture, in addition to
8630 FULL-level capabilities.
8632 The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
8633 `LIMITED` tables in the {@link
8634 android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
8635 createCaptureSession} documentation are guaranteed to be supported.
8637 The following additional capabilities are guaranteed to be supported:
8639 * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains
8641 * `RAW` capability (android.request.availableCapabilities contains
8647 Generally classifies the overall set of the camera device functionality.
8650 The supported hardware level is a high-level description of the camera device's
8651 capabilities, summarizing several capabilities into one field. Each level adds additional
8652 features to the previous one, and is always a strict superset of the previous level.
8653 The ordering is `LEGACY < LIMITED < FULL < LEVEL_3`.
8655 Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing
8656 numerical value as well. To check if a given device is at least at a given hardware level,
8657 the following code snippet can be used:
8659 // Returns true if the device supports the required hardware level, or better.
8660 boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
8661 int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
8662 if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
8663 return requiredLevel == deviceLevel;
8665 // deviceLevel is not LEGACY, can use numerical sort
8666 return requiredLevel <= deviceLevel;
8669 At a high level, the levels are:
8671 * `LEGACY` devices operate in a backwards-compatibility mode for older
8672 Android devices, and have very limited capabilities.
8673 * `LIMITED` devices represent the
8674 baseline feature set, and may also include additional capabilities that are
8676 * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and
8677 post-processing settings, and image capture at a high rate.
8678 * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along
8679 with additional output stream configurations.
8681 See the individual level enums for full descriptions of the supported capabilities. The
8682 android.request.availableCapabilities entry describes the device's capabilities at a
8683 finer-grain level, if needed. In addition, many controls have their available settings or
8684 ranges defined in individual entries from {@link
8685 android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}.
8687 Some features are not part of any particular hardware level or capability and must be
8688 queried separately. These include:
8690 * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
8691 * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
8692 * Face detection (android.statistics.info.availableFaceDetectModes)
8693 * Optical or electrical image stabilization
8694 (android.lens.info.availableOpticalStabilization,
8695 android.control.availableVideoStabilizationModes)
8699 A camera HALv3 device can implement one of three possible operational modes: LIMITED,
8702 FULL support or better is expected from new higher-end devices. Limited
8703 mode has hardware requirements roughly in line with those for a camera HAL device v1
8704 implementation, and is expected from older or inexpensive devices. Each level is a strict
8705 superset of the previous level, and they share the same essential operational flow.
8707 For full details refer to "S3. Operational Modes" in camera3.h
8709 Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in
8710 the `android.hardware.camera2` user-facing API only on legacy HALv1 devices, and is
8711 implemented by the camera framework code.
8716 <section name="blackLevel">
8718 <entry name="lock" type="byte" visibility="public" enum="true"
8719 typedef="boolean" hwlevel="full">
8724 <description> Whether black-level compensation is locked
8725 to its current values, or is free to vary.</description>
8726 <details>When set to `true` (ON), the values used for black-level
8727 compensation will not change until the lock is set to
8730 Since changes to certain capture parameters (such as
8731 exposure time) may require resetting of black level
8732 compensation, the camera device must report whether setting
8733 the black level lock was successful in the output result
8736 For example, if a sequence of requests is as follows:
8738 * Request 1: Exposure = 10ms, Black level lock = OFF
8739 * Request 2: Exposure = 10ms, Black level lock = ON
8740 * Request 3: Exposure = 10ms, Black level lock = ON
8741 * Request 4: Exposure = 20ms, Black level lock = ON
8742 * Request 5: Exposure = 20ms, Black level lock = ON
8743 * Request 6: Exposure = 20ms, Black level lock = ON
8745 And the exposure change in Request 4 requires the camera
8746 device to reset the black level offsets, then the output
8747 result metadata is expected to be:
8749 * Result 1: Exposure = 10ms, Black level lock = OFF
8750 * Result 2: Exposure = 10ms, Black level lock = ON
8751 * Result 3: Exposure = 10ms, Black level lock = ON
8752 * Result 4: Exposure = 20ms, Black level lock = OFF
8753 * Result 5: Exposure = 20ms, Black level lock = ON
8754 * Result 6: Exposure = 20ms, Black level lock = ON
8756 This indicates to the application that on frame 4, black
8757 levels were reset due to exposure value changes, and pixel
8758 values may not be consistent across captures.
8760 The camera device will maintain the lock to the extent
8761 possible, only overriding the lock to OFF when changes to
8762 other request parameters require a black level recalculation
8766 If for some reason black level locking is no longer possible
8767 (for example, the analog gain has changed, which forces
8768 black level offsets to be recalculated), then the HAL must
8769 override this request (and it must report 'OFF' when this
8770 does happen) until the next capture for which locking is
8771 possible again.</hal_details>
8776 <clone entry="android.blackLevel.lock"
8779 Whether the black level offset was locked for this frame. Should be
8780 ON if android.blackLevel.lock was ON in the capture request, unless
8781 a change in other capture settings forced the camera device to
8782 perform a black level reset.
8787 <section name="sync">
8789 <entry name="frameNumber" type="int64" visibility="ndk_public"
8790 enum="true" hwlevel="legacy">
8792 <value id="-1">CONVERGING
8794 The current result is not yet fully synchronized to any request.
8796 Synchronization is in progress, and reading metadata from this
8797 result may include a mix of data that have taken effect since the
8798 last synchronization time.
8800 In some future result, within android.sync.maxLatency frames,
8801 this value will update to the actual frame number that
8802 the result is guaranteed to be synchronized to (as long as the
8803 request settings remain constant).
8806 <value id="-2">UNKNOWN
8808 The current result's synchronization status is unknown.
8810 The result may have already converged, or it may be in
8811 progress. Reading from this result may include some mix
8812 of settings from past requests.
8814 After a settings change, the new settings will eventually all
8815 take effect for the output buffers and results. However, this
8816 value will not change when that happens. Altering settings
8817 rapidly may provide outcomes using mixes of settings from recent
8820 This value is intended primarily for backwards compatibility with
8821 the older camera implementations (for android.hardware.Camera).
8825 <description>The frame number corresponding to the last request
8826 with which the output result (metadata + buffers) has been fully
8827 synchronized.</description>
8828 <range>Either a non-negative value corresponding to a
8829 `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
8832 When a request is submitted to the camera device, there is usually a
8833 delay of several frames before the controls get applied. A camera
8834 device may either choose to account for this delay by implementing a
8835 pipeline and carefully submit well-timed atomic control updates, or
8836 it may start streaming control changes that span over several frame
8839 In the latter case, whenever a request's settings change relative to
8840 the previous submitted request, the full set of changes may take
8841 multiple frame durations to fully take effect. Some settings may
8842 take effect sooner (in less frame durations) than others.
8844 While a set of control changes are being propagated, this value
8847 Once it is fully known that a set of control changes have been
8848 finished propagating, and the resulting updated control settings
8849 have been read back by the camera device, this value will be set
8850 to a non-negative frame number (corresponding to the request to
8851 which the results have synchronized to).
8853 Older camera device implementations may not have a way to detect
8854 when all camera controls have been applied, and will always set this
8857 FULL capability devices will always have this value set to the
8858 frame number of the request corresponding to this result.
8862 * Whenever a request differs from the last request, any future
8863 results not yet returned may have this value set to CONVERGING (this
8864 could include any in-progress captures not yet returned by the camera
8865 device, for more details see pipeline considerations below).
8866 * Submitting a series of multiple requests that differ from the
8867 previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
8868 moves the new synchronization frame to the last non-repeating
8869 request (using the smallest frame number from the contiguous list of
8870 repeating requests).
8871 * Submitting the same request repeatedly will not change this value
8872 to CONVERGING, if it was already a non-negative value.
8873 * When this value changes to non-negative, that means that all of the
8874 metadata controls from the request have been applied, all of the
8875 metadata controls from the camera device have been read to the
8876 updated values (into the result), and all of the graphics buffers
8877 corresponding to this result are also synchronized to the request.
8879 _Pipeline considerations_:
8881 Submitting a request with updated controls relative to the previously
8882 submitted requests may also invalidate the synchronization state
8883 of all the results corresponding to currently in-flight requests.
8885 In other words, results for this current request and up to
8886 android.request.pipelineMaxDepth prior requests may have their
8887 android.sync.frameNumber change to CONVERGING.
8890 Using UNKNOWN here is illegal unless android.sync.maxLatency
8893 FULL capability devices should simply set this value to the
8894 `frame_number` of the request this result corresponds to.
8900 <entry name="maxLatency" type="int32" visibility="public" enum="true"
8903 <value id="0">PER_FRAME_CONTROL
8905 Every frame has the requests immediately applied.
8907 Changing controls over multiple requests one after another will
8908 produce results that have those controls applied atomically
8911 All FULL capability devices will have this as their maxLatency.
8914 <value id="-1">UNKNOWN
8916 Each new frame has some subset (potentially the entire set)
8917 of the past requests applied to the camera settings.
8919 By submitting a series of identical requests, the camera device
8920 will eventually have the camera settings applied, but it is
8921 unknown when that exact point will be.
8923 All LEGACY capability devices will have this as their maxLatency.
8928 The maximum number of frames that can occur after a request
8929 (different than the previous) has been submitted, and before the
8930 result's state becomes synchronized.
8932 <units>Frame counts</units>
8933 <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
8935 This defines the maximum distance (in number of metadata results),
8936 between the frame number of the request that has new controls to apply
8937 and the frame number of the result that has all the controls applied.
8939 In other words this acts as an upper boundary for how many frames
8940 must occur before the camera device knows for a fact that the new
8941 submitted camera settings have been applied in outgoing frames.
8944 For example if maxLatency was 2,
8946 initial request = X (repeating)
8952 where requestN has frameNumber N, and the first of the repeating
8953 initial requests has frameNumber F (and F < 1).
8955 initial result = X' + { android.sync.frameNumber == F }
8956 result1 = X' + { android.sync.frameNumber == F }
8957 result2 = X' + { android.sync.frameNumber == CONVERGING }
8958 result3 = X' + { android.sync.frameNumber == CONVERGING }
8959 result4 = X' + { android.sync.frameNumber == 2 }
8961 where resultN has frameNumber N.
8963 Since `result4` has a `frameNumber == 4` and
8964 `android.sync.frameNumber == 2`, the distance is clearly
8967 Use `frame_count` from camera3_request_t instead of
8968 android.request.frameCount or
8969 `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`.
8971 LIMITED devices are strongly encouraged to use a non-negative
8972 value. If UNKNOWN is used here then app developers do not have a way
8973 to know when sensor settings have been applied.
8979 <section name="reprocess">
8981 <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited">
8983 The amount of exposure time increase factor applied to the original output
8984 frame by the application processing before sending for reprocessing.
8986 <units>Relative exposure time increase factor.</units>
8987 <range> &gt;= 1.0</range>
8989 This is optional, and will be supported if the camera device supports YUV_REPROCESSING
8990 capability (android.request.availableCapabilities contains YUV_REPROCESSING).
8992 For some YUV reprocessing use cases, the application may choose to filter the original
8993 output frames to effectively reduce the noise to the same level as a frame that was
8994 captured with longer exposure time. To be more specific, assuming the original captured
8995 images were captured with a sensitivity of S and an exposure time of T, the model in
8996 the camera device is that the amount of noise in the image would be approximately what
8997 would be expected if the original capture parameters had been a sensitivity of
8998 S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
8999 than S and T respectively. If the captured images were processed by the application
9000 before being sent for reprocessing, then the application may have used image processing
9001 algorithms and/or multi-frame image fusion to reduce the noise in the
9002 application-processed images (input images). By using the effectiveExposureFactor
9003 control, the application can communicate to the camera device the actual noise level
9004 improvement in the application-processed image. With this information, the camera
9005 device can select appropriate noise reduction and edge enhancement parameters to avoid
9006 excessive noise reduction (android.noiseReduction.mode) and insufficient edge
9007 enhancement (android.edge.mode) being applied to the reprocessed frames.
9009 For example, for multi-frame image fusion use case, the application may fuse
9010 multiple output frames together to a final frame for reprocessing. When N images are
9011 fused into 1 image for reprocessing, the exposure time increase factor could be up to
9012 square root of N (based on a simple photon shot noise model). The camera device will
9013 adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
9014 produce the best quality images.
9016 This is a relative factor, 1.0 indicates the application hasn't processed the input
9017 buffer in a way that affects its effective exposure time.
9019 This control is only effective for YUV reprocessing capture requests. For noise
9020 reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
9021 Similarly, for edge enhancement reprocessing, it is only effective when
9022 `android.edge.mode != OFF`.
9028 <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
9032 <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited">
9034 The maximal camera capture pipeline stall (in unit of frame count) introduced by a
9035 reprocess capture request.
9037 <units>Number of frames.</units>
9038 <range> &lt;= 4</range>
9040 The key describes the maximal interference that one reprocess (input) request
9041 can introduce to the camera simultaneous streaming of regular (output) capture
9042 requests, including repeating requests.
9044 When a reprocessing capture request is submitted while a camera output repeating request
9045 (e.g. preview) is being served by the camera device, it may preempt the camera capture
9046 pipeline for at least one frame duration so that the camera device is unable to process
9047 the following capture request in time for the next sensor start of exposure boundary.
9048 When this happens, the application may observe a capture time gap (longer than one frame
9049 duration) between adjacent capture output frames, which usually exhibits as preview
9050 glitch if the repeating request output targets include a preview surface. This key gives
9051 the worst-case number of frame stalls introduced by one reprocess request with any kind of
9052 format/size combination.
9054 If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
9055 ongoing camera repeating request outputs, as if this reprocess request is never issued.
9057 This key is supported if the camera device supports PRIVATE or YUV reprocessing (
9058 i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
9065 <section name="depth">
9067 <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
9068 <description>Maximum number of points that a depth point cloud may contain.
9071 If a camera device supports outputting depth range data in the form of a depth point
9072 cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
9073 number of points an output buffer may contain.
9075 Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
9076 If output in the depth point cloud format is not supported, this entry will
9081 <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public"
9082 enum="true" container="array" typedef="streamConfiguration" hwlevel="limited">
9088 <value>OUTPUT</value>
9089 <value>INPUT</value>
9091 <description>The available depth dataspace stream
9092 configurations that this camera device supports
9093 (i.e. format, width, height, output/input stream).
9096 These are output stream configurations for use with
9097 dataSpace HAL_DATASPACE_DEPTH. The configurations are
9098 listed as `(format, width, height, input?)` tuples.
9100 Only devices that support depth output for at least
9101 the HAL_PIXEL_FORMAT_Y16 dense depth map may include
9104 A device that also supports the HAL_PIXEL_FORMAT_BLOB
9105 sparse depth point cloud must report a single entry for
9106 the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
9107 android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
9108 the entries for HAL_PIXEL_FORMAT_Y16.
9112 <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public"
9113 container="array" typedef="streamConfigurationDuration" hwlevel="limited">
9118 <description>This lists the minimum frame duration for each
9119 format/size combination for depth output formats.
9121 <units>(format, width, height, ns) x n</units>
9123 This should correspond to the frame duration when only that
9124 stream is active, with all processing (typically in android.*.mode)
9125 set to either OFF or FAST.
9127 When multiple streams are used in a request, the minimum frame
9128 duration will be max(individual stream min durations).
9130 The minimum frame duration of a stream (of a particular format, size)
9131 is the same regardless of whether the stream is input or output.
9133 See android.sensor.frameDuration and
9134 android.scaler.availableStallDurations for more details about
9135 calculating the max frame rate.
9139 <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public"
9140 container="array" typedef="streamConfigurationDuration" hwlevel="limited">
9145 <description>This lists the maximum stall duration for each
9146 output format/size combination for depth streams.
9148 <units>(format, width, height, ns) x n</units>
9150 A stall duration is how much extra time would get added
9151 to the normal minimum frame duration for a repeating request
9152 that has streams with non-zero stall.
9154 This functions similarly to
9155 android.scaler.availableStallDurations for depth
9158 All depth output stream formats may have a nonzero stall
9163 <entry name="depthIsExclusive" type="byte" visibility="public"
9164 enum="true" typedef="boolean" hwlevel="limited">
9166 <value>FALSE</value>
9169 <description>Indicates whether a capture request may target both a
9170 DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
9171 YUV_420_888, JPEG, or RAW) simultaneously.
9174 If TRUE, including both depth and color outputs in a single
9175 capture request is not supported. An application must interleave color
9176 and depth requests. If FALSE, a single request can target both types
9179 Typically, this restriction exists on camera devices that
9180 need to emit a specific pattern or wavelength of light to
9181 measure depth values, which causes the color image to be
9182 corrupted during depth measurement.